简介
简单实现两个android设备之间的投屏功能。设备间通信是通过局域网,需要连接同一个wifi。
录屏用到系统的MediaProjection,MediaProjectionManager,而编解码用的是MediaCodec,所以设备需要有DSP芯片,大部分手机应该都有。两台设备间通信使用websocket,录屏端作为服务器进行推流,显示端就是客户端,收到码流进行解码并显示。
先看看最终效果:
实现
首先需要先导入Java-WebSocket库,WebSocket是一种在单个TCP连接上进行全双工通信的协议,允许服务端主动向客户端推送数据。
implementation "org.java-websocket:Java-WebSocket:1.4.0"
整个工程有两个module,app是作为服务端。playscreen是客户端。需要分别运行在两台机器上面。
服务端 app module
MainActivity 就一个button布局就不贴出来了。
import androidx.annotation.Nullable;
import androidx.appcompat.app.AppCompatActivity;
import android.content.Intent;
import android.media.projection.MediaProjection;
import android.media.projection.MediaProjectionManager;
import android.os.Bundle;
import android.view.View;
import com.example.castscreen.socket.SocketService;
public class MainActivity extends AppCompatActivity {

    /** Request code for the runtime storage-permission dialog. */
    private int permissionRequestCode = 100;
    /** Request code for the system screen-capture consent dialog. */
    private int captureRequestCode = 1;
    private MediaProjectionManager mediaProjectionManager;
    private SocketService socketService;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        init();
    }

    /** Looks up the MediaProjectionManager system service. */
    private void init() {
        mediaProjectionManager =
                (MediaProjectionManager) getSystemService(MEDIA_PROJECTION_SERVICE);
    }

    /** Click handler wired from the layout XML; the start button begins casting. */
    public void onClick(View view) {
        if (view.getId() == R.id.btn_start) {
            startCast();
        }
    }

    /** Requests storage permission and launches the screen-capture consent dialog. */
    private void startCast() {
        PermissionUtil.checkPermission(this, PermissionUtil.storagePermissions, permissionRequestCode);
        startActivityForResult(mediaProjectionManager.createScreenCaptureIntent(), captureRequestCode);
    }

    @Override
    protected void onActivityResult(int requestCode, int resultCode, @Nullable Intent data) {
        super.onActivityResult(requestCode, resultCode, data);
        boolean captureGranted = resultCode == RESULT_OK && requestCode == captureRequestCode;
        if (captureGranted) {
            startCast(resultCode, data);
        }
    }

    /** Builds the MediaProjection from the consent result, then starts server + encoder. */
    private void startCast(int resultCode, Intent data) {
        // getMediaProjection() takes the RESULT code, not the request code — easy trap.
        MediaProjection projection = mediaProjectionManager.getMediaProjection(resultCode, data);
        if (projection != null) {
            socketService = new SocketService();
            // Hand the projection to the socket service, which drives the encoder.
            socketService.start(projection);
        }
    }

    @Override
    protected void onDestroy() {
        super.onDestroy();
        if (socketService != null) {
            socketService.colse();
        }
    }
}
SocketService 作用是启动SocketServer并设置端口号 。启动CodecH265 进行编码,CodecH265 编码完一帧再通过SocketServer将数据发送出去。
import android.media.projection.MediaProjection;
import com.example.castscreen.encode.CodecH265;
import java.io.IOException;
import java.net.InetSocketAddress;
public class SocketService {
    private static final String TAG = "SocketService";
    // Port the WebSocket server listens on; a high port avoids collisions with well-known services.
    private int port = 11006;
    private CodecH265 codecH265;
    private SocketServer webSocketServer;

    public SocketService() {
        webSocketServer = new SocketServer(new InetSocketAddress(port));
    }

    /**
     * Starts the WebSocket server (this device now acts as the stream source)
     * and begins encoding the captured screen.
     *
     * @param mediaProjection projection token obtained from the capture consent dialog
     */
    public void start(MediaProjection mediaProjection) {
        webSocketServer.start();
        codecH265 = new CodecH265(this, mediaProjection);
        codecH265.startEncode();
    }

    /** Shuts down the WebSocket server and stops the encoder. */
    public void close() {
        try {
            webSocketServer.stop();
            webSocketServer.close();
        } catch (IOException e) {
            e.printStackTrace();
        } catch (InterruptedException e) {
            // Restore the interrupt flag instead of swallowing it.
            Thread.currentThread().interrupt();
            e.printStackTrace();
        }
        // Guard: close() may be called even if start() never ran (encoder still null).
        if (codecH265 != null) {
            codecH265.stopEncode();
        }
    }

    /**
     * Misspelled alias kept so existing callers keep compiling.
     *
     * @deprecated use {@link #close()} instead
     */
    @Deprecated
    public void colse() {
        close();
    }

    /** Forwards one encoded frame to the connected client. */
    public void sendData(byte[] bytes) {
        webSocketServer.sendData(bytes);
    }
}
SocketServer 继承自WebSocketServer,调用它的start方法就启动服务端了。
import android.util.Log;
import org.java_websocket.WebSocket;
import org.java_websocket.handshake.ClientHandshake;
import org.java_websocket.server.WebSocketServer;
import java.net.InetSocketAddress;
public class SocketServer extends WebSocketServer {
    private final String TAG = "SocketServer";
    // Most recent client connection; this demo streams to a single viewer at a time.
    private WebSocket webSocket;

    public SocketServer(InetSocketAddress inetSocketAddress) {
        super(inetSocketAddress);
    }

    @Override
    public void onOpen(WebSocket conn, ClientHandshake handshake) {
        Log.d(TAG, "SocketServer onOpen");
        this.webSocket = conn;
    }

    @Override
    public void onClose(WebSocket conn, int code, String reason, boolean remote) {
        // Drop the stale reference so sendData() stops targeting a dead connection.
        if (conn == this.webSocket) {
            this.webSocket = null;
        }
    }

    @Override
    public void onMessage(WebSocket conn, String message) {
    }

    @Override
    public void onError(WebSocket conn, Exception ex) {
        Log.e(TAG, "SocketServer onError: " + ex);
    }

    @Override
    public void onStart() {
    }

    /** Sends one encoded frame to the connected client, if any. */
    public void sendData(byte[] bytes) {
        if (webSocket != null && webSocket.isOpen()) {
            webSocket.send(bytes);
        }
    }

    /** Closes the client connection. Safe to call when no client ever connected. */
    public void close() {
        // Guard: the original NPE'd here if close() ran before any client connected.
        if (webSocket != null) {
            webSocket.close();
        }
    }
}
CodecH265是实现编码的类,这里用的是video/hevc,也就是H265编码。需要注意的是因为H265编码只有第一帧才有vps,sps,pps,其他帧不带这些信息,所以需要我们在发送时为每个I帧添加上vps等信息,否则如果客户端不是从头开始接收的数据,就没办法进行解码操作。而且H265用的是哥伦布编码,如果对reEncode方法有疑问可以先了解一下哥伦布编码。另外因为编码是比较耗时的操作,肯定要放到子线程中去做。
import android.hardware.display.DisplayManager;
import android.hardware.display.VirtualDisplay;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.media.projection.MediaProjection;
import android.util.Log;
import android.view.Surface;
import com.example.castscreen.socket.SocketService;
import java.io.IOException;
import java.nio.ByteBuffer;
public class CodecH265 extends Thread {
    private static final String TAG = "gsy";
    // Capture size is hard-coded for simplicity.
    private int width = 720;
    private int height = 1280;
    // H.265 / HEVC mime type.
    private final String enCodeType = "video/hevc";
    private MediaCodec mediaCodec;
    private MediaProjection mediaProjection;
    // Kept so the display can be released when encoding stops (original leaked it).
    private VirtualDisplay virtualDisplay;
    private SocketService socketService;
    // volatile: written by stopEncode() on the caller's thread, read by run().
    private volatile boolean play = true;
    // dequeueOutputBuffer timeout, in microseconds.
    private long timeOut = 10000;
    // Cached VPS/SPS/PPS config bytes. HEVC emits them only once (codec-config
    // output), so we prepend them to every I-frame for clients joining mid-stream.
    private byte[] vps_pps_sps;
    // HEVC NAL unit type 19: IDR (I) frame.
    private final int NAL_I = 19;
    // HEVC NAL unit type 32: VPS (the config output also carries SPS/PPS).
    private final int NAL_VPS = 32;

    public CodecH265(SocketService socketService, MediaProjection mediaProjection) {
        this.socketService = socketService;
        this.mediaProjection = mediaProjection;
    }

    /**
     * Configures the HEVC encoder, routes the screen capture into its input
     * surface, and starts the encoding thread.
     */
    public void startEncode() {
        MediaFormat mediaFormat = MediaFormat.createVideoFormat(MediaFormat.MIMETYPE_VIDEO_HEVC, width, height);
        // Input arrives via a Surface, not YUV byte buffers.
        mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
        // Bit rate in bits/second.
        mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, width * height);
        mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 20);
        // Request an I-frame every second.
        mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
        try {
            mediaCodec = MediaCodec.createEncoderByType(enCodeType);
            mediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
            // Destination surface the screen content is rendered into.
            Surface surface = mediaCodec.createInputSurface();
            // Mirror the screen into the encoder's input surface.
            virtualDisplay = mediaProjection.createVirtualDisplay("screen", width, height, 1,
                    DisplayManager.VIRTUAL_DISPLAY_FLAG_PUBLIC, surface, null, null);
        } catch (IOException e) {
            Log.d(TAG, "initEncode IOException");
            e.printStackTrace();
            // Fix: don't start the thread with a half-initialized codec (NPE in run()).
            return;
        }
        // Start the encoding thread.
        this.start();
    }

    @Override
    public void run() {
        if (mediaCodec == null) {
            return;
        }
        mediaCodec.start();
        MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
        try {
            // Drain encoder output continuously until stopEncode() flips the flag.
            while (play) {
                int outPutBufferId = mediaCodec.dequeueOutputBuffer(bufferInfo, timeOut);
                if (outPutBufferId >= 0) {
                    ByteBuffer byteBuffer = mediaCodec.getOutputBuffer(outPutBufferId);
                    // Prepend config data where needed, then push to the socket.
                    reEncode(byteBuffer, bufferInfo);
                    mediaCodec.releaseOutputBuffer(outPutBufferId, false);
                }
            }
        } finally {
            // Fix: release codec/display/projection once the loop exits (original leaked them).
            release();
        }
    }

    /** Prepends cached VPS/SPS/PPS to I-frames and forwards each NAL unit to the socket. */
    private void reEncode(ByteBuffer byteBuffer, MediaCodec.BufferInfo bufferInfo) {
        // Skip the Annex-B start code: 00 00 00 01 (4 bytes) or 00 00 01 (3 bytes).
        int offSet = 4;
        if (byteBuffer.get(2) == 0x01) {
            offSet = 3;
        }
        // HEVC NAL unit type sits in bits 1..6 of the first header byte.
        int type = (byteBuffer.get(offSet) & 0x7E) >> 1;
        if (type == NAL_VPS) {
            // Cache the codec-config NAL (VPS/SPS/PPS) for later I-frames.
            vps_pps_sps = new byte[bufferInfo.size];
            byteBuffer.get(vps_pps_sps);
        } else if (type == NAL_I && vps_pps_sps != null) {
            // Fix: null-guard — an I-frame before any cached config no longer NPEs.
            final byte[] bytes = new byte[bufferInfo.size];
            byteBuffer.get(bytes);
            byte[] newBytes = new byte[vps_pps_sps.length + bytes.length];
            System.arraycopy(vps_pps_sps, 0, newBytes, 0, vps_pps_sps.length);
            System.arraycopy(bytes, 0, newBytes, vps_pps_sps.length, bytes.length);
            // Re-send the config in front of every I-frame so late joiners can decode.
            socketService.sendData(newBytes);
        } else {
            // P/B frames (or an I-frame seen before any config): send as-is.
            byte[] bytes = new byte[bufferInfo.size];
            byteBuffer.get(bytes);
            socketService.sendData(bytes);
        }
    }

    /** Frees the codec, virtual display and projection; runs on the encoder thread. */
    private void release() {
        try {
            mediaCodec.stop();
        } catch (IllegalStateException ignored) {
            // Codec may already be in an error state; release() below still applies.
        }
        mediaCodec.release();
        if (virtualDisplay != null) {
            virtualDisplay.release();
        }
        mediaProjection.stop();
    }

    /** Signals the encoding loop to exit; cleanup happens on the encoder thread. */
    public void stopEncode() {
        play = false;
    }
}
客户端 playscreen module
客户端的页面只有一个SurfaceView,用来显示,布局代码就不贴了。MainActivity 实现了SocketServer.SocketCallback接口,当SocketServer收到数据后,通过回调将收到的码流传给MainActivity 进行解码渲染到SurfaceView。
import androidx.annotation.NonNull;
import androidx.appcompat.app.AppCompatActivity;
import android.media.MediaCodec;
import android.media.MediaFormat;
import android.os.Bundle;
import android.util.Log;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import java.io.IOException;
import java.nio.ByteBuffer;
public class MainActivity extends AppCompatActivity implements SocketServer.SocketCallback{
    private static final String TAG = "gsy";
    private Surface surface;
    private SurfaceView surfaceView;
    private MediaCodec mediaCodec;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        init();
    }

    /** Waits for the SurfaceView to be ready, then sets up the decoder and connects. */
    private void init() {
        surfaceView = findViewById(R.id.sfv_play);
        surfaceView.getHolder().addCallback(new SurfaceHolder.Callback() {
            @Override
            public void surfaceCreated(@NonNull SurfaceHolder holder) {
                surface = holder.getSurface();
                // Fix: configure the decoder BEFORE connecting, so no frame can
                // arrive while mediaCodec is still null (original connected first).
                initDecoder(surface);
                initSocket();
            }

            @Override
            public void surfaceChanged(@NonNull SurfaceHolder holder, int format, int width, int height) {
            }

            @Override
            public void surfaceDestroyed(@NonNull SurfaceHolder holder) {
            }
        });
    }

    /** Creates and starts an HEVC decoder that renders into the given surface. */
    private void initDecoder(Surface surface) {
        try {
            mediaCodec = MediaCodec.createDecoderByType(MediaFormat.MIMETYPE_VIDEO_HEVC);
            // Format mirrors the server's encoder settings (720x1280 @ 20 fps).
            MediaFormat mediaFormat = MediaFormat.createVideoFormat(MediaFormat.MIMETYPE_VIDEO_HEVC,720,1280);
            mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE,720*1280);
            mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE,20);
            mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL,1);
            mediaCodec.configure(mediaFormat,surface,null,0);
            mediaCodec.start();
        } catch (IOException e) {
            Log.d(TAG,"initDecoder IOException ");
            e.printStackTrace();
        }
    }

    /** Connects to the casting server as a WebSocket client. */
    private void initSocket() {
        Log.d(TAG,"initSocket");
        SocketServer socketServer = new SocketServer();
        socketServer.setSocketCallback(this);
        socketServer.start();
    }

    /**
     * Receives one encoded access unit from the socket, feeds it to the decoder,
     * and releases any ready output frames to the surface. Runs on the WebSocket thread.
     */
    @Override
    public void callBack(byte[] data) {
        Log.d(TAG,"mainActivity callBack");
        // Guard: decoder may not be configured yet, or configure() may have failed.
        if (mediaCodec == null) {
            return;
        }
        // Index of a free input buffer, or -1 on timeout.
        int index = mediaCodec.dequeueInputBuffer(10000);
        if (index >= 0){
            ByteBuffer inputBuffer = mediaCodec.getInputBuffer(index);
            inputBuffer.clear();
            inputBuffer.put(data,0,data.length);
            // NOTE(review): presentationTimeUs expects microseconds; milliseconds are
            // kept from the original — frames still render because each one is
            // released to the surface as soon as it is decoded.
            mediaCodec.queueInputBuffer(index,0,data.length,System.currentTimeMillis(),0);
        }
        MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
        int outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo,10000);
        // Fix: index 0 is a valid output buffer — the original's "> 0" never
        // released buffer 0 and eventually starved the decoder.
        while (outputBufferIndex >= 0){
            // true = render the frame to the configured surface.
            mediaCodec.releaseOutputBuffer(outputBufferIndex,true);
            outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo,0);
        }
    }
}
SocketServer 就是客户端了,作用就是连接服务器,获取视频码流进行回调。注意ip地址不要填错,要填服务端的ip地址,可以在设置中查看,比如:
import android.util.Log;
import org.java_websocket.client.WebSocketClient;
import org.java_websocket.handshake.ServerHandshake;
import java.net.URI;
import java.net.URISyntaxException;
import java.nio.ByteBuffer;
public class SocketServer {
    private final String TAG = "gsy";
    private SocketClient socketClient;
    private SocketCallback socketCallback;

    /** Registers the listener invoked for each received binary frame. */
    public void setSocketCallback(SocketCallback socketCallback){
        this.socketCallback = socketCallback;
    }

    /** Connects to the casting server. The address must be the SERVER device's LAN ip. */
    public void start(){
        try {
            URI uri = new URI("ws://192.168.1.103:11006");
            socketClient = new SocketClient(uri);
            socketClient.connect();
        } catch (URISyntaxException e) {
            Log.e(TAG,"error:"+e.toString());
            e.printStackTrace();
        }
    }

    /** WebSocket client that forwards binary messages (encoded video) to the callback. */
    private class SocketClient extends WebSocketClient{
        public SocketClient(URI serverUri) {
            super(serverUri);
            Log.d(TAG,"new SocketClient");
        }

        @Override
        public void onOpen(ServerHandshake handshakedata) {
            Log.d(TAG,"SocketClient onOpen");
        }

        @Override
        public void onMessage(String message) {
            // Text frames are not used by this protocol.
            Log.d(TAG,"onMessage");
        }

        @Override
        public void onMessage(ByteBuffer bytes) {
            Log.d(TAG,"onMessage");
            // Fix: guard against frames arriving before a callback is registered.
            if (socketCallback == null) {
                return;
            }
            // Copy the payload out of the (reused) ByteBuffer and hand it over.
            byte[] buf = new byte[bytes.remaining()];
            bytes.get(buf);
            socketCallback.callBack(buf);
        }

        @Override
        public void onClose(int code, String reason, boolean remote) {
            Log.d(TAG,"onClose ="+reason);
        }

        @Override
        public void onError(Exception ex) {
            Log.d(TAG,"onerror ="+ex.toString());
        }
    }

    /** Listener for received encoded-video frames. */
    public interface SocketCallback{
        void callBack(byte[] data);
    }
}
总共代码大概就这么多,推荐一个在线测试websocket的网站,http://www.websocket-test.com/ chrome浏览器打不开,可以使用IE浏览器,首先要保证服务端可以访问。今天太冷了,就不再多说了,下面是源码,有兴趣的同学可以玩一下。
源码:demo源码