Sample Code for Wireless Screen Mirroring Between Android Devices

Preface

Android provides MediaProjection for screen capture: through MediaProjection the current screen can be obtained as a video stream. That stream has to be compressed by encoding before it can be transmitted, and MediaCodec handles both the encoding and the decoding. Pushing and receiving the stream can be done over a Socket or WebSocket: the server pushes the encoded video stream, the client receives and decodes it, and renders the result onto a SurfaceView to show the server's screen.

Implementing the Mirroring Server

The server's main entry point is responsible for requesting screen capture and starting the capture service. Its implementation is as follows:

package com.example.screenprojection;
import android.content.Intent;
import android.media.projection.MediaProjectionManager;
import android.os.Build;
import android.os.Bundle;
import android.view.View;
import androidx.annotation.Nullable;
import androidx.appcompat.app.AppCompatActivity;
public class MainActivity extends AppCompatActivity {
  private static final int PROJECTION_REQUEST_CODE = 1;
  private MediaProjectionManager mediaProjectionManager;
  @Override
  protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_main);
    init();
  }
  private void init() {
    mediaProjectionManager = (MediaProjectionManager) getSystemService(MEDIA_PROJECTION_SERVICE);
  }
  public void onClick(View view) {
    if (view.getId() == R.id.btn_start) {
      startProjection();
    }
  }
  // Request to start screen capture
  private void startProjection() {
    Intent intent = mediaProjectionManager.createScreenCaptureIntent();
    startActivityForResult(intent, PROJECTION_REQUEST_CODE);
  }
  @Override
  protected void onActivityResult(int requestCode, int resultCode, @Nullable Intent data) {
    super.onActivityResult(requestCode, resultCode, data);
    if (resultCode != RESULT_OK) {
      return;
    }
    if (requestCode == PROJECTION_REQUEST_CODE) {
      Intent service = new Intent(this, ScreenService.class);
      service.putExtra("code", resultCode);
      service.putExtra("data", data);
      if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
        startForegroundService(service);
      } else {
        startService(service);
      }
    }
  }
  @Override
  protected void onDestroy() {
    super.onDestroy();
  }
}
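
On recent AndroidX releases startActivityForResult() is deprecated. As a sketch of an alternative (not part of the original project; the launcher field name below is illustrative), the same permission flow can be expressed with the Activity Result API inside MainActivity, using ContextCompat.startForegroundService to avoid the manual version check:

// Additional imports assumed for this sketch:
// import androidx.activity.result.ActivityResultLauncher;
// import androidx.activity.result.contract.ActivityResultContracts;
// import androidx.core.content.ContextCompat;
private final ActivityResultLauncher<Intent> mProjectionLauncher =
    registerForActivityResult(
        new ActivityResultContracts.StartActivityForResult(),
        result -> {
          if (result.getResultCode() == RESULT_OK && result.getData() != null) {
            Intent service = new Intent(this, ScreenService.class);
            service.putExtra("code", result.getResultCode());
            service.putExtra("data", result.getData());
            ContextCompat.startForegroundService(this, service);
          }
        });

// startProjection() then becomes:
private void startProjection() {
  mProjectionLauncher.launch(mediaProjectionManager.createScreenCaptureIntent());
}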

Since Android 8.0, screen capture requires a foreground service with a notification. Once the capture service starts, it also starts the WebSocket server. The foreground service code is as follows:

package com.example.screenprojection;
import android.app.Notification;
import android.app.NotificationChannel;
import android.app.NotificationManager;
import android.app.PendingIntent;
import android.app.Service;
import android.content.Intent;
import android.graphics.BitmapFactory;
import android.media.projection.MediaProjection;
import android.media.projection.MediaProjectionManager;
import android.os.Build;
import android.os.IBinder;
public class ScreenService extends Service {
  private MediaProjectionManager mMediaProjectionManager;
  private SocketManager mSocketManager;
  @Override
  public void onCreate() {
    super.onCreate();
    mMediaProjectionManager = (MediaProjectionManager) getSystemService(MEDIA_PROJECTION_SERVICE);
    createNotificationChannel();
  }
  @Override
  public int onStartCommand(Intent intent, int flags, int startId) {
    int resultCode = intent.getIntExtra("code", -1);
    Intent resultData = intent.getParcelableExtra("data");
    startProject(resultCode, resultData);
    return super.onStartCommand(intent, flags, startId);
  }
  // Encode and push the stream once screen capture starts
  private void startProject(int resultCode, Intent data) {
    MediaProjection mediaProjection = mMediaProjectionManager.getMediaProjection(resultCode, data);
    if (mediaProjection == null) {
      return;
    }
    // Initialize the server side
    mSocketManager = new SocketManager();
    mSocketManager.start(mediaProjection);
  }
  private void createNotificationChannel() {
    Notification.Builder builder = new Notification.Builder(this.getApplicationContext());
    Intent nfIntent = new Intent(this, MainActivity.class);
    builder
        .setContentIntent(
            // FLAG_IMMUTABLE is required when targeting Android 12 (API 31) and above
            PendingIntent.getActivity(this, 0, nfIntent, PendingIntent.FLAG_IMMUTABLE))
        .setLargeIcon(
            BitmapFactory.decodeResource(
                this.getResources(), R.mipmap.ic_launcher)) // large icon shown in the notification shade
        .setSmallIcon(R.mipmap.ic_launcher) // small icon shown in the status bar
        .setContentText("is running......") // notification content text
        .setWhen(System.currentTimeMillis()); // timestamp of the notification
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
      builder.setChannelId("notification_id");
      // Notification channel required for foreground services on Android 8.0+
      NotificationManager notificationManager =
          (NotificationManager) getSystemService(NOTIFICATION_SERVICE);
      NotificationChannel channel =
          new NotificationChannel(
              "notification_id", "notification_name", NotificationManager.IMPORTANCE_LOW);
      notificationManager.createNotificationChannel(channel);
    }
    Notification notification = builder.build();
    notification.defaults = Notification.DEFAULT_SOUND; // use the default notification sound
    startForeground(110, notification);
  }
  @Override
  public IBinder onBind(Intent intent) {
    return null;
  }
  @Override
  public void onDestroy() {
    if (mSocketManager != null) {
      mSocketManager.close();
    }
    super.onDestroy();
  }
}
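
If the user stops the capture from the system UI (for example via the status-bar tile), the MediaProjection ends without the service being told through any of the paths above. A sketch of registering a MediaProjection.Callback for this case, which could be placed in startProject() right after the null check (android.os.Handler and android.os.Looper imports assumed):

mediaProjection.registerCallback(
    new MediaProjection.Callback() {
      @Override
      public void onStop() {
        // The projection was stopped externally; shut down the stream and the service.
        if (mSocketManager != null) {
          mSocketManager.close();
        }
        stopSelf();
      }
    },
    new Handler(Looper.getMainLooper()));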

SocketManager is the control class for the WebSocketServer; it also binds the screen-capture MediaProjection to the video encoder ScreenEncoder.

package com.example.screenprojection;
import android.media.projection.MediaProjection;
import java.net.InetSocketAddress;
public class SocketManager {
  private static final String TAG = SocketManager.class.getSimpleName();
  private static final int SOCKET_PORT = 50000;
  private final ScreenSocketServer mScreenSocketServer;
  private ScreenEncoder mScreenEncoder;
  public SocketManager() {
    mScreenSocketServer = new ScreenSocketServer(new InetSocketAddress(SOCKET_PORT));
  }
  public void start(MediaProjection mediaProjection) {
    mScreenSocketServer.start();
    mScreenEncoder = new ScreenEncoder(this, mediaProjection);
    mScreenEncoder.startEncode();
  }
  public void close() {
    try {
      mScreenSocketServer.stop();
      mScreenSocketServer.close();
    } catch (InterruptedException e) {
      e.printStackTrace();
    }
    if (mScreenEncoder != null) {
      mScreenEncoder.stopEncode();
    }
  }
  public void sendData(byte[] bytes) {
    mScreenSocketServer.sendData(bytes);
  }
}

The code for the streaming server, ScreenSocketServer, is as follows:

package com.example.screenprojection;
import android.util.Log;
import org.java_websocket.WebSocket;
import org.java_websocket.handshake.ClientHandshake;
import org.java_websocket.server.WebSocketServer;
import java.net.InetSocketAddress;
public class ScreenSocketServer extends WebSocketServer {
  private final String TAG = ScreenSocketServer.class.getSimpleName();
  private WebSocket mWebSocket;
  public ScreenSocketServer(InetSocketAddress inetSocketAddress) {
    super(inetSocketAddress);
  }
  @Override
  public void onOpen(WebSocket conn, ClientHandshake handshake) {
    Log.d(TAG, "onOpen");
    mWebSocket = conn;
  }
  @Override
  public void onClose(WebSocket conn, int code, String reason, boolean remote) {
    Log.d(TAG, "onClose:" + reason);
  }
  @Override
  public void onMessage(WebSocket conn, String message) {}
  @Override
  public void onError(WebSocket conn, Exception ex) {
    Log.d(TAG, "onError:" + ex.toString());
  }
  @Override
  public void onStart() {
    Log.d(TAG, "onStart");
  }
  public void sendData(byte[] bytes) {
    if (mWebSocket != null && mWebSocket.isOpen()) {
      // Send the data over the WebSocket
      Log.d(TAG, "sendData:");
      mWebSocket.send(bytes);
    }
  }
  public void close() {
    if (mWebSocket != null) {
      mWebSocket.close();
    }
  }
}

When the WebSocketServer has started successfully, the onStart callback is invoked.

  @Override
  public void onStart() {
    Log.d(TAG, "onStart");
  }

When a client connects, the onOpen callback fires, and the WebSocket object it receives can be used to send data to that client.

  @Override
  public void onOpen(WebSocket conn, ClientHandshake handshake) {
    Log.d(TAG, "onOpen");
    mWebSocket = conn;
  }
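
Note that the server above keeps only the most recently connected client in mWebSocket. If several viewers need to receive the stream at the same time, the library's broadcast() helper could be used instead of the cached connection; a sketch under that assumption:

  // Sketch: send each encoded frame to every connected client.
  // WebSocketServer#broadcast(byte[]) iterates over getConnections() internally.
  public void sendData(byte[] bytes) {
    if (!getConnections().isEmpty()) {
      broadcast(bytes);
    }
  }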

The video encoder ScreenEncoder uses H.265 (H.265/HEVC) encoding. Note that the maximum encode resolution supported differs between devices.

package com.example.screenprojection;
import android.hardware.display.DisplayManager;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.media.projection.MediaProjection;
import android.view.Surface;
import java.io.IOException;
import java.nio.ByteBuffer;
/**
 * Uses H.265 encoding. The H.265/HEVC encoding architecture is broadly similar to H.264/AVC.
 * H.265 is also known as HEVC (High Efficiency Video Coding).
 */
public class ScreenEncoder extends Thread {
  // The maximum supported encode resolution differs between devices
  private static final int VIDEO_WIDTH = 2160;
  private static final int VIDEO_HEIGHT = 3840;
  private static final int SCREEN_FRAME_RATE = 20;
  private static final int SCREEN_FRAME_INTERVAL = 1;
  private static final long SOCKET_TIME_OUT = 10000;
  // HEVC NAL unit type for an IDR (key) frame
  private static final int TYPE_FRAME_INTERVAL = 19;
  // HEVC NAL unit type for a VPS
  private static final int TYPE_FRAME_VPS = 32;
  private final MediaProjection mMediaProjection;
  private final SocketManager mSocketManager;
  private MediaCodec mMediaCodec;
  private boolean mPlaying = true;
  // Cached VPS/SPS/PPS configuration data
  private byte[] vps_pps_sps;
  public ScreenEncoder(SocketManager socketManager, MediaProjection mediaProjection) {
    mSocketManager = socketManager;
    mMediaProjection = mediaProjection;
  }
  public void startEncode() {
    MediaFormat mediaFormat =
        MediaFormat.createVideoFormat(MediaFormat.MIMETYPE_VIDEO_HEVC, VIDEO_WIDTH, VIDEO_HEIGHT);
    mediaFormat.setInteger(
        MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
    // Bit rate (bits per second)
    mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, VIDEO_WIDTH * VIDEO_HEIGHT);
    // Frame rate
    mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, SCREEN_FRAME_RATE);
    // Key-frame (I-frame) interval in seconds
    mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, SCREEN_FRAME_INTERVAL);
    try {
      mMediaCodec = MediaCodec.createEncoderByType(MediaFormat.MIMETYPE_VIDEO_HEVC);
      mMediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
      Surface surface = mMediaCodec.createInputSurface();
      mMediaProjection.createVirtualDisplay(
          "screen",
          VIDEO_WIDTH,
          VIDEO_HEIGHT,
          1,
          DisplayManager.VIRTUAL_DISPLAY_FLAG_PUBLIC,
          surface,
          null,
          null);
    } catch (IOException e) {
      e.printStackTrace();
    }
    start();
  }
  @Override
  public void run() {
    mMediaCodec.start();
    MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
    while (mPlaying) {
      int outPutBufferId = mMediaCodec.dequeueOutputBuffer(bufferInfo, SOCKET_TIME_OUT);
      if (outPutBufferId >= 0) {
        ByteBuffer byteBuffer = mMediaCodec.getOutputBuffer(outPutBufferId);
        encodeData(byteBuffer, bufferInfo);
        mMediaCodec.releaseOutputBuffer(outPutBufferId, false);
      }
    }
  }
  private void encodeData(ByteBuffer byteBuffer, MediaCodec.BufferInfo bufferInfo) {
    // Skip the Annex-B start code, which is either 00 00 00 01 or 00 00 01
    int offSet = 4;
    if (byteBuffer.get(2) == 0x01) {
      offSet = 3;
    }
    // The HEVC NAL unit type sits in bits 1-6 of the first byte after the start code
    int type = (byteBuffer.get(offSet) & 0x7E) >> 1;
    if (type == TYPE_FRAME_VPS) {
      // Cache the VPS/SPS/PPS configuration buffer
      vps_pps_sps = new byte[bufferInfo.size];
      byteBuffer.get(vps_pps_sps);
    } else if (type == TYPE_FRAME_INTERVAL) {
      // Prepend the cached VPS/SPS/PPS to every key frame so the client can start decoding mid-stream
      final byte[] bytes = new byte[bufferInfo.size];
      byteBuffer.get(bytes);
      byte[] newBytes = new byte[vps_pps_sps.length + bytes.length];
      System.arraycopy(vps_pps_sps, 0, newBytes, 0, vps_pps_sps.length);
      System.arraycopy(bytes, 0, newBytes, vps_pps_sps.length, bytes.length);
      mSocketManager.sendData(newBytes);
    } else {
      byte[] bytes = new byte[bufferInfo.size];
      byteBuffer.get(bytes);
      mSocketManager.sendData(bytes);
    }
  }
  }
  public void stopEncode() {
    mPlaying = false;
    if (mMediaCodec != null) {
      mMediaCodec.release();
    }
    if (mMediaProjection != null) {
      mMediaProjection.stop();
    }
  }
}
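
Because the maximum supported encode size varies by device (see the note above), configure() can fail with an IllegalArgumentException when 2160x3840 is not supported. Below is a sketch of querying the HEVC encoder's capabilities at runtime with MediaCodecList; the class and method names are illustrative, and since some devices only report landscape ranges, checking the swapped orientation as a fallback may also be necessary.

import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.media.MediaFormat;
import android.util.Log;

public final class CodecCapabilityChecker {
  // Returns true if any HEVC encoder on the device accepts the given size.
  public static boolean isHevcEncodeSizeSupported(int width, int height) {
    MediaCodecList codecList = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
    for (MediaCodecInfo info : codecList.getCodecInfos()) {
      if (!info.isEncoder()) {
        continue;
      }
      for (String type : info.getSupportedTypes()) {
        if (!MediaFormat.MIMETYPE_VIDEO_HEVC.equalsIgnoreCase(type)) {
          continue;
        }
        MediaCodecInfo.VideoCapabilities caps =
            info.getCapabilitiesForType(type).getVideoCapabilities();
        Log.d("CodecCapabilityChecker",
            info.getName() + " supports " + caps.getSupportedWidths()
                + " x " + caps.getSupportedHeights());
        if (caps.isSizeSupported(width, height)) {
          return true;
        }
      }
    }
    return false;
  }
}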

Notes:
1. The WebSocket library dependency is required.

implementation "org.java-websocket:Java-WebSocket:1.5.3"

2. Declare the network and foreground-service permissions in AndroidManifest.xml.

<uses-permission android:name="android.permission.INTERNET"/>
<uses-permission android:name="android.permission.FOREGROUND_SERVICE" />

3. Configure the foregroundServiceType attribute on the screen-capture foreground service.

<service
    android:name=".ScreenService"
    android:enabled="true"
    android:exported="true"
    android:foregroundServiceType="mediaProjection" />
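
On Android 10 and above the projection may only be obtained while a foreground service of type mediaProjection is running, and Android 14 additionally requires the FOREGROUND_SERVICE_MEDIA_PROJECTION permission in the manifest. A sketch (not part of the original code, which relies on the manifest attribute alone) of passing the type explicitly when the service is promoted to the foreground in createNotificationChannel():

// Requires: import android.content.pm.ServiceInfo;
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
  startForeground(110, notification, ServiceInfo.FOREGROUND_SERVICE_TYPE_MEDIA_PROJECTION);
} else {
  startForeground(110, notification);
}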

Implementing the Mirroring Client

The client entry point creates the SurfaceView and initializes SocketClientManager, the WebSocket client manager.

package com.example.screenplayer;

import android.os.Bundle;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.SurfaceView;

import androidx.annotation.NonNull;
import androidx.appcompat.app.AppCompatActivity;

public class MainActivity extends AppCompatActivity {

  private SocketClientManager mSocketClientManager;
  @Override
  protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_main);
    SurfaceView surfaceView = findViewById(R.id.sv_screen);
    surfaceView
        .getHolder()
        .addCallback(
            new SurfaceHolder.Callback() {
              @Override
              public void surfaceCreated(@NonNull SurfaceHolder holder) {
                // Connect to the server
                initSocketManager(holder.getSurface());
              }

              @Override
              public void surfaceChanged(
                  @NonNull SurfaceHolder holder, int format, int width, int height) {}

              @Override
              public void surfaceDestroyed(@NonNull SurfaceHolder holder) {}
            });
  }

  private void initSocketManager(Surface surface) {
    mSocketClientManager = new SocketClientManager();
    mSocketClientManager.start(surface);
  }

  @Override
  protected void onDestroy() {
    if (mSocketClientManager != null) {
      mSocketClientManager.stop();
    }
    super.onDestroy();
  }
}
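
The activity above starts decoding when the surface is created but leaves surfaceDestroyed() empty, so the decoder can keep rendering into a surface that no longer exists when the activity goes to the background. A sketch of stopping the manager in that callback:

@Override
public void surfaceDestroyed(@NonNull SurfaceHolder holder) {
  // Sketch: tear the connection and decoder down when the output surface goes away.
  if (mSocketClientManager != null) {
    mSocketClientManager.stop();
    mSocketClientManager = null;
  }
}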

SocketClientManager binds the Surface to the decoder, creates the WebSocket client, and receives the video stream pushed by the server. The URI must be changed to the server's IP address and port.

package com.example.screenplayer;
import android.view.Surface;
import java.net.URI;
import java.net.URISyntaxException;
public class SocketClientManager implements ScreenSocketClient.SocketCallback {
  private static final int SOCKET_PORT = 50000;
  private ScreenDecoder mScreenDecoder;
  private ScreenSocketClient mSocketClient;
  public void start(Surface surface) {
    mScreenDecoder = new ScreenDecoder();
    mScreenDecoder.startDecode(surface);
    try {
      // Change this to the server's IP address and port
      URI uri = new URI("ws://192.168.1.3:" + SOCKET_PORT);
      mSocketClient = new ScreenSocketClient(this, uri);
      mSocketClient.connect();
    } catch (URISyntaxException e) {
      e.printStackTrace();
    }
  }
  public void stop() {
    if (mSocketClient != null) {
      mSocketClient.close();
    }
    if (mScreenDecoder != null) {
      mScreenDecoder.stopDecode();
    }
  }
  @Override
  public void onReceiveData(byte[] data) {
    if (mScreenDecoder != null) {
      mScreenDecoder.decodeData(data);
    }
  }
}
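
Note that connect() returns immediately and performs the WebSocket handshake asynchronously, so start() cannot tell whether the connection actually succeeded. If that matters, connectBlocking() can be used instead; a sketch (it should run off the main thread, because start() here is called from surfaceCreated on the UI thread):

// Sketch: wait for the handshake result instead of returning immediately.
new Thread(() -> {
  try {
    boolean connected = mSocketClient.connectBlocking();
    android.util.Log.d("SocketClientManager", "connected = " + connected);
  } catch (InterruptedException e) {
    Thread.currentThread().interrupt();
  }
}).start();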

The ScreenSocketClient code is as follows:

package com.example.screenplayer;
import android.util.Log;
import org.java_websocket.client.WebSocketClient;
import org.java_websocket.handshake.ServerHandshake;
import java.net.URI;
import java.nio.ByteBuffer;
public class ScreenSocketClient extends WebSocketClient {
  private static final String TAG = ScreenSocketClient.class.getSimpleName();
  private final SocketCallback mSocketCallback;
  public ScreenSocketClient(SocketCallback socketCallback, URI serverUri) {
    super(serverUri);
    mSocketCallback = socketCallback;
  }
  @Override
  public void onOpen(ServerHandshake serverHandshake) {
    Log.d(TAG, "onOpen");
  }
  @Override
  public void onMessage(String message) {}
  @Override
  public void onMessage(ByteBuffer bytes) {
    byte[] buf = new byte[bytes.remaining()];
    bytes.get(buf);
    if (mSocketCallback != null) {
      mSocketCallback.onReceiveData(buf);
    }
  }
  @Override
  public void onClose(int code, String reason, boolean remote) {
    Log.d(TAG, "onClose =" + reason);
  }
  @Override
  public void onError(Exception ex) {
    Log.d(TAG, "onError =" + ex.toString());
  }
  public interface SocketCallback {
    void onReceiveData(byte[] data);
  }
}

The maximum decode resolution supported also differs between devices, and the resolution must be set correctly to avoid errors. The mirroring decoder ScreenDecoder is as follows:

package com.example.screenplayer;
import android.media.MediaCodec;
import android.media.MediaFormat;
import android.view.Surface;
import java.io.IOException;
import java.nio.ByteBuffer;
public class ScreenDecoder {
  private static final int VIDEO_WIDTH = 2160;
  private static final int VIDEO_HEIGHT = 3840;
  private static final long DECODE_TIME_OUT = 10000;
  private static final int SCREEN_FRAME_RATE = 20;
  private static final int SCREEN_FRAME_INTERVAL = 1;
  private MediaCodec mMediaCodec;
  public ScreenDecoder() {}
  public void startDecode(Surface surface) {
    try {
      // Configure the MediaCodec
      mMediaCodec = MediaCodec.createDecoderByType(MediaFormat.MIMETYPE_VIDEO_HEVC);
      MediaFormat mediaFormat =
          MediaFormat.createVideoFormat(MediaFormat.MIMETYPE_VIDEO_HEVC, VIDEO_WIDTH, VIDEO_HEIGHT);
      mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, VIDEO_WIDTH * VIDEO_HEIGHT);
      mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, SCREEN_FRAME_RATE);
      mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, SCREEN_FRAME_INTERVAL);
      mMediaCodec.configure(mediaFormat, surface, null, 0);
      mMediaCodec.start();
    } catch (IOException e) {
      e.printStackTrace();
    }
  }
  public void decodeData(byte[] data) {
    int index = mMediaCodec.dequeueInputBuffer(DECODE_TIME_OUT);
    if (index >= 0) {
      ByteBuffer inputBuffer = mMediaCodec.getInputBuffer(index);
      inputBuffer.clear();
      inputBuffer.put(data, 0, data.length);
      // The presentation timestamp is expected in microseconds
      mMediaCodec.queueInputBuffer(index, 0, data.length, System.nanoTime() / 1000, 0);
    }
    MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
    int outputBufferIndex = mMediaCodec.dequeueOutputBuffer(bufferInfo, DECODE_TIME_OUT);
    // A valid output buffer index is >= 0; render each decoded frame to the Surface
    while (outputBufferIndex >= 0) {
      mMediaCodec.releaseOutputBuffer(outputBufferIndex, true);
      outputBufferIndex = mMediaCodec.dequeueOutputBuffer(bufferInfo, 0);
    }
  }
  public void stopDecode() {
    if (mMediaCodec != null) {
      mMediaCodec.release();
    }
  }
}
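
The decoder can only start cleanly on a key frame, and the server marks key frames by prepending the cached VPS/SPS/PPS (such a buffer begins with a VPS NAL unit, type 32). A sketch of dropping data until that first key frame arrives, mirroring the NAL-type parsing used in ScreenEncoder; the field and helper names are illustrative, and decodeData() would simply return early while shouldDecode(data) is false:

  // Sketch: ignore incoming buffers until the first key frame has been seen.
  private boolean mReceivedKeyFrame = false;

  private boolean shouldDecode(byte[] data) {
    if (mReceivedKeyFrame) {
      return true;
    }
    // The Annex-B start code is either 00 00 01 or 00 00 00 01
    int offset = (data[2] == 0x01) ? 3 : 4;
    // The HEVC NAL unit type sits in bits 1-6 of the first byte after the start code
    int nalType = (data[offset] & 0x7E) >> 1;
    if (nalType == 32) { // VPS => this buffer carries the configuration plus a key frame
      mReceivedKeyFrame = true;
    }
    return mReceivedKeyFrame;
  }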

Notes:
1. The WebSocket library dependency is required.

implementation "org.java-websocket:Java-WebSocket:1.5.3"

2. Declare the network permission in AndroidManifest.xml.

<uses-permission android:name="android.permission.INTERNET"/>

Result

The server is on the left and the client on the right; mirroring the server's screen to the client looks as follows:

Errors Encountered

1. WebSocket reports a Permission denied error:

onerror =java.net.SocketException: socket failed: EACCES (Permission denied)

Cause: the network (INTERNET) permission is missing.

<uses-permission android:name="android.permission.INTERNET"/>

2. WebSocket reports a Host unreachable error:

onerror =java.net.NoRouteToHostException: Host unreachable

Cause: the two devices are not on the same LAN, or the server has not started properly.

3. WebSocket reports a failed to connect error:

failed to connect to /192.168.1.3 (port 50000) from /:: (port 38262)

Cause: the two devices are not on the same LAN, or the server has not started properly.

When the client connects successfully, the onOpen callback is invoked:

  @Override
  public void onOpen(ServerHandshake handshakedata) {
    Log.d(TAG, "SocketClient onOpen");
  }

4. An IllegalArgumentException is thrown when configuring the MediaCodec:

Caused by: java.lang.IllegalArgumentException
at android.media.MediaCodec.native_configure(Native Method)
at android.media.MediaCodec.configure(MediaCodec.java:2023)
at android.media.MediaCodec.configure(MediaCodec.java:1951)
at com.example.screenprojection.ScreenEncoder.startEncode(ScreenEncoder.java:56)

Cause: the maximum encode/decode resolution supported differs between devices; the codec limits can be inspected with the adb command adb shell cat /system/etc/media_codecs.xml. Leaving out configuration values that the encoder requires will also cause an IllegalStateException to be thrown.

       <MediaCodec name="OMX.hisi.video.decoder.hevc.secure" type="video/hevc" >
            <Quirk name="needs-flush-on-all-ports" />
            <Limit name="size" min="128x128" max="4096x2304" />
            <Limit name="alignment" value="2x2" />
            <Limit name="block-size" value="16x16" />
            <Limit name="block-count" range="64-36896" />
            <Limit name="blocks-per-second" range="99-1106880" />
            <Limit name="bitrate" range="1-52428800" />
            <Feature name="adaptive-playback" />
            <Feature name="secure-playback" required="true" />
            <Quirk name="requires-allocate-on-input-ports" />
            <Quirk name="requires-allocate-on-output-ports" />
            <Limit name="concurrent-instances" max="1" />
        </MediaCodec>

        <!--config suggestion for hevc -->
        <!--VT:1280x720@1Mbps@30fps,640x360@450kpbps@30fps,640x360@250kbps@30fps,640x360@170kbps@15fps-->
        <!--DestkTop Share: 1280x720@500kbps@3-7fps,720x450@200kbps@3-7fps-->
        <!--Wifi Display:1920x1080@8Mbps@30fps 4Kx2k@27M@30fps-->
        <!--Recoding:1920x1080@12Mbps@30fps,4k2k@30Mbps@30fps,1920x1080@40M@120fps,1280X720@40M@120fps-->
        <MediaCodec name="OMX.hisi.video.encoder.hevc" type="video/hevc" >
            <Limit name="size" min="176x144" max="3840x2160" />
            <Limit name="alignment" value="4x4" />
            <Limit name="block-size" value="64x64" />
            <Limit name="blocks-per-second" range="99-244800" />
            <Limit name="bitrate" range="200000-70000000" />
            <Quirk name="requires-allocate-on-output-ports" />
            <Limit name="concurrent-instances" max="3" />
        </MediaCodec>

Source code download: the wireless screen-mirroring source is available on Baidu Netdisk:

Link: https://pan.baidu.com/s/1YQ6UHpUuLOMjp7Wf9DmvSw  Extraction code: crri

This concludes this article on implementing wireless screen mirroring between Android devices.
