LAN screen sharing and data transfer between two Android devices

This article describes a way to stream screen video between Android devices over UDP, covering the screen-capture permission, service creation, and pushing and receiving the data. Screen content is obtained through MediaProjectionManager, encoded by VideoEncoderUtil and sent over UDP, while the receiving side receives and decodes the video. The whole flow involves Android services, notifications, packet handling, and video encoding/decoding.


To stream the screen we need a sender and a receiver; the data channel in my implementation is bidirectional. To keep the video smooth, the video is sent over UDP. The data channel also uses UDP, which means packets can be lost, so you can switch it to TCP if you need reliability (a rough sketch of a TCP data channel follows below).
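If you do want the data channel (not the video stream) to be lossless, it can be swapped for TCP as mentioned above. Below is a minimal sketch of what that could look like; the TcpDataChannel class, its DataListener interface and the one-connection-per-message approach are my own assumptions for illustration, not part of the original project.

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.PrintWriter;
import java.net.ServerSocket;
import java.net.Socket;

public class TcpDataChannel {

    /** Hypothetical callback; not part of the original code. */
    public interface DataListener {
        void onMessage(String message);
    }

    // Receiver side: accept connections and hand each text line to the listener.
    public static void listen(final int port, final DataListener listener) {
        new Thread(new Runnable() {
            @Override
            public void run() {
                try (ServerSocket server = new ServerSocket(port)) {
                    while (true) {
                        try (Socket client = server.accept();
                             BufferedReader in = new BufferedReader(
                                     new InputStreamReader(client.getInputStream()))) {
                            String line = in.readLine(); // e.g. "ip!192.168.1.23"
                            if (line != null) {
                                listener.onMessage(line);
                            }
                        }
                    }
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        }).start();
    }

    // Sender side: open a short-lived connection per message; TCP handles retransmission.
    public static void send(final String ip, final int port, final String message) {
        new Thread(new Runnable() {
            @Override
            public void run() {
                try (Socket socket = new Socket(ip, port);
                     PrintWriter out = new PrintWriter(socket.getOutputStream(), true)) {
                    out.println(message);
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        }).start();
    }
}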
1. The sender
On newer Android versions, starting screen capture needs more than just permissions:

<uses-permission android:name="android.permission.RECORD_AUDIO" />
<uses-permission android:name="android.permission.FOREGROUND_SERVICE" />

We also need to start a foreground service, post a notification telling the user that recording is in progress, and trigger the system dialog so the user can approve the capture.

<service
    android:name="com.jzi.aviator.screen.AudioCaptureService"
    android:enabled="true"
    android:exported="true"
    android:foregroundServiceType="mediaProjection"></service>

Write the service, AudioCaptureService.java:

public class AudioCaptureService extends Service {

    private static final String TAG = "AudioCaptureService";
    private VideoEncoderUtil videoEncoder;


    public AudioCaptureService() {
    }

    @Override
    public IBinder onBind(Intent intent) {
        // TODO: Return the communication channel to the service.
        throw new UnsupportedOperationException("Not yet implemented");
    }

    @Override
    public void onCreate() {
        super.onCreate();
    }

    @Override
    public int onStartCommand(Intent intent, int flags, int startId) {
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
            createNotificationChannel(); // post the "screen recording in progress" notification
            MediaProjectionManager mediaProjectionManager = (MediaProjectionManager) getSystemService(MEDIA_PROJECTION_SERVICE);
            MediaProjection mediaProjection = mediaProjectionManager.getMediaProjection(intent.getIntExtra("resultCode", -1),intent.getParcelableExtra("resultData"));// must be called after the notification is shown
            videoEncoder = new VideoEncoderUtil(mediaProjection, MainActivity.ip);
            videoEncoder.stop();
            videoEncoder.start();
        }
        return START_STICKY;
    }
    private void createNotificationChannel() {
        Notification.Builder builder = new Notification.Builder(this.getApplicationContext()); // get a Notification builder
        Intent nfIntent = new Intent(this, MainActivity.class); // activity opened on tap; extras can be attached here

        builder.setContentIntent(PendingIntent.getActivity(this, 0, nfIntent, 0)) // set the PendingIntent
                .setLargeIcon(BitmapFactory.decodeResource(this.getResources(), R.mipmap.ic_launcher)) // large icon shown in the notification shade
                //.setContentTitle("SMI InstantView") // title shown in the notification shade
                .setSmallIcon(R.mipmap.ic_launcher) // small status-bar icon
                .setContentText("is running......") // notification text
                .setWhen(System.currentTimeMillis()); // timestamp of the notification

        /* Adaptation for Android 8.0 and above */
        // regular notifications
        if(Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
            builder.setChannelId("notification_id");
        }
        // foreground-service notifications
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
            NotificationManager notificationManager = (NotificationManager)getSystemService(NOTIFICATION_SERVICE);
            NotificationChannel channel = new NotificationChannel("notification_id", "notification_name", NotificationManager.IMPORTANCE_LOW);
            notificationManager.createNotificationChannel(channel);
        }

        Notification notification = builder.build(); // the finished Notification
        notification.defaults = Notification.DEFAULT_SOUND; // use the default sound
        startForeground(110, notification);

    }
    @Override
    public void onDestroy(){
        super.onDestroy();
        videoEncoder.stop();
//        Toast.makeText(this,"停止捕获屏幕",Toast.LENGTH_SHORT).show();
    }
}
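The post never shows how the capture is stopped. Stopping the service is enough, since onDestroy() above releases the encoder; a minimal example, assuming it is called from the Activity:

// Example only: stopping the service ends the capture; onDestroy() calls videoEncoder.stop().
stopService(new Intent(this, AudioCaptureService.class));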

ArrayUtil.java, a byte-array concatenation helper:

public class ArrayUtil {

    /**
     * Concatenate two byte arrays.
     * @param first
     * @param second
     * @return the combined array
     */
    public static byte[] concat(byte[] first, byte[] second) {
        byte[] result = Arrays.copyOf(first, first.length + second.length);
        System.arraycopy(second, 0, result, first.length, second.length);
        return result;
    }
}
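To show where this is used: the sender (VideoEncoderUtil below) prefixes every encoded frame with its length as a 4-byte big-endian int via TyteUtil (shown further down), and the receiver reads that prefix back before feeding the decoder. A small illustrative sketch with made-up payload bytes:

import java.util.Arrays;

public class FramingExample {
    public static void main(String[] args) {
        byte[] frame = new byte[]{0x00, 0x00, 0x00, 0x01, 0x65};    // fake H.264 bytes, just for illustration
        byte[] lengthByte = TyteUtil.intToByteArray(frame.length);  // {0, 0, 0, 5}
        byte[] packet = ArrayUtil.concat(lengthByte, frame);        // wire format: [4-byte length][frame]

        // The receiver does the reverse: read the first 4 bytes to learn the frame length.
        byte[] prefix = Arrays.copyOfRange(packet, 0, 4);
        int frameLength = TyteUtil.byteArrayToInt(prefix);
        System.out.println("frame length = " + frameLength + ", frame = "
                + Arrays.toString(Arrays.copyOfRange(packet, 4, 4 + frameLength)));
    }
}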

getUdpDataUtil.java, a helper for receiving UDP data:

public class getUdpDataUtil {
    private static final int FRAME_MAX_DATANUMBER = 40964;
    private byte[] frameDataBytes = new byte[FRAME_MAX_DATANUMBER];
    private DatagramSocket mDatagramSocketdata;
    private boolean getData = true;
    /**
     * getUdpDataUtil
     */
    private static getUdpDataUtil INSTANCE;

    /**
     * getUdpDataUtil
     */
    private getUdpDataUtil() {

    }
    /**
     * getUdpDataUtil, singleton
     */
    public static getUdpDataUtil getInstance() {
        if (INSTANCE == null) {
            synchronized (getUdpDataUtil.class) {
                if (INSTANCE == null) {
                    INSTANCE = new getUdpDataUtil();
                }
            }
        }
        return INSTANCE;
    }


    public void initData(UdpDataCallBack callBack) {
        try {
            mDatagramSocketdata = new DatagramSocket(6666); // the receiver sends its data (e.g. its IP) to this port
            final DatagramPacket dp = new DatagramPacket(frameDataBytes, frameDataBytes.length);
            new Thread(new Runnable() {
                @Override
                public void run() {
                    while (true){
                        try {
                            mDatagramSocketdata.receive(dp);
                        } catch (IOException e) {
                            Log.e("LLLL", "IOException: "+e.toString());
                            e.printStackTrace();
                        }
                        byte[] data = dp.getData();
                        // messages are plain strings of the form "key!value", e.g. "ip!192.168.1.23"
                        String[] s = new String(data, 0, dp.getLength()).split("!");
                        callBack.udpData(s[0], s[1]);
                    }

                }
            }).start();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}

TyteUtil.java, an int/byte-array conversion helper:

public class TyteUtil {

    // conversion between a byte array and an int (big-endian)
    public static int byteArrayToInt(byte[] b) {
        return   b[3] & 0xFF |
                (b[2] & 0xFF) << 8 |
                (b[1] & 0xFF) << 16 |
                (b[0] & 0xFF) << 24;
    }

    public static byte[] intToByteArray(int a) {
        return new byte[] {
                (byte) ((a >> 24) & 0xFF),
                (byte) ((a >> 16) & 0xFF),
                (byte) ((a >> 8) & 0xFF),
                (byte) (a & 0xFF)
        };
    }
}

UdpDataCallBack.java, the UDP data callback interface:

public interface UdpDataCallBack {
    void udpData(String s1,String s2);
}

VideoEncoderUtil.java, the encoding and push helper:

public class VideoEncoderUtil {
    private static Encoder encoder;
    private MediaProjection mediaProjection;
    private VirtualDisplay virtualDisplay;
    private long timeStamp = 0;
    private int secondFrame = 5000; // 5 s interval for resending SPS/PPS (intended as a key-frame / GOP interval)
    private String ip;
    private byte[] sps;
    private byte[] pps;

    public VideoEncoderUtil(MediaProjection m, String ip) {
        this.mediaProjection = m;
        this.ip = ip;
    }

    @RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
    private void onSurfaceBind(Surface surface, int mWidth, int mHeight) {
        virtualDisplay = mediaProjection.createVirtualDisplay("-display",
                mWidth, mHeight, 1, DisplayManager.VIRTUAL_DISPLAY_FLAG_PUBLIC,
                surface, null, null); // route the screen content into the encoder's input surface

    }

    private void onSurfaceDestroyed(Surface surface) {
        virtualDisplay.release();
        surface.release();
    }

    public void start() {
        if (encoder == null) {
            encoder = new Encoder();
        }
        new Thread(encoder).start();
    }

    public void stop() {
        if (encoder != null) {
            encoder.release();
            encoder = null;
        }
    }

    public static void upData(String s) {
        Message message = new Message();
        message.obj = s;
        message.what = 2;
        try {
            encoder.threadHandler.sendMessage(message);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }


    private class Encoder implements Runnable {
        private Handler threadHandler;
        String MIME_TYPE = "video/avc"; // H.264
        int VIDEO_FRAME_PER_SECOND = 15; // fps
        int VIDEO_I_FRAME_INTERVAL = 5;
        private int mWidth = 1280; // on large screens a low resolution looks blocky
        private int mHeight = 720;
        //        private int VIDEO_BITRATE = 2 * 1024 * 1024; // 2 Mbps
        private int VIDEO_BITRATE = 500 * 1024;
        /**
         * handler of the worker thread
         */

        private DatagramSocket mDatagramSocket;
        private MediaCodec mCodec;
        private Surface mSurface;
        private Bundle params = new Bundle();

        Encoder() {
            try {
                if (null == mDatagramSocket) {
                    mDatagramSocket = new DatagramSocket(null);
                    mDatagramSocket.setReuseAddress(true);
                    mDatagramSocket.bind(new InetSocketAddress(2333));
                }
            } catch (SocketException e) {
                e.printStackTrace();
            }
            params.putInt(MediaCodec.PARAMETER_KEY_REQUEST_SYNC_FRAME, 0); // prepared so the encoder can be asked to produce a sync (key) frame immediately
            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {

            } else {
                threadHandler = new Handler() {

                    @Override
                    public void handleMessage(Message msg) {
                        super.handleMessage(msg);
                        switch (msg.what) {
                            case 1:
                                byte[] dataFrame = (byte[]) msg.obj;
                                int frameLength = dataFrame.length;
                                byte[] lengthByte = TyteUtil.intToByteArray(frameLength);
                                byte[] concat = ArrayUtil.concat(lengthByte, dataFrame);
                                try {
                                    DatagramPacket dp = new DatagramPacket(concat, concat.length, InetAddress.getByName(ip), 2333);
                                    mDatagramSocket.send(dp);
                                } catch (IOException e) {
                                    e.printStackTrace();
                                }
                                break;
                            case 2:
                                try {
                                    byte[] clientMsgBytes = msg.obj.toString().getBytes();
                                    DatagramPacket clientPacket = new DatagramPacket(clientMsgBytes,
                                            clientMsgBytes.length,
                                            InetAddress.getByName(ip), 2666);
                                    mDatagramSocket.send(clientPacket);
                                } catch (IOException e) {
                                    e.printStackTrace();
                                }
                                break;
                        }

                    }

                };
            }

            prepare();
        }

        @Override
        public void run() {
            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
                Looper.prepare();
                threadHandler = new Handler() {
                    @Override
                    public void handleMessage(Message msg) {
                        super.handleMessage(msg);
                        switch (msg.what) {
                            case 1:
                                byte[] dataFrame = (byte[]) msg.obj;
                                int frameLength = dataFrame.length;
                                byte[] lengthByte = TyteUtil.intToByteArray(frameLength);
                                byte[] concat = ArrayUtil.concat(lengthByte, dataFrame);
                                try {
                                    DatagramPacket dp = new DatagramPacket(concat, concat.length, InetAddress.getByName(ip), 2333);
                                    mDatagramSocket.send(dp);
                                } catch (IOException e) {
                                    e.printStackTrace();
                                }
                                break;
                            case 2:
                                try {
                                    byte[] clientMsgBytes = msg.obj.toString().getBytes();
                                    DatagramPacket clientPacket = new DatagramPacket(clientMsgBytes,
                                            clientMsgBytes.length,
                                            InetAddress.getByName(ip), 2666);
                                    mDatagramSocket.send(clientPacket);
                                } catch (IOException e) {
                                    e.printStackTrace();
                                }
                                break;
                        }

                    }

                };
                Looper.loop();
            } else {

            }

        }


        void sendData(byte[] data) {
            Message message = new Message();
            message.obj = data;
            message.what = 1;
            threadHandler.sendMessage(message);
        }

        private void release() {
            onSurfaceDestroyed(mSurface);
            if (mCodec != null) {
                mCodec.stop();
                mCodec.release();
                mCodec = null;
            }
        }

        @RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
        private boolean prepare() {
            MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, mWidth, mHeight);
            // COLOR_FormatSurface means the input comes from a Surface (GraphicBuffer metadata)
            format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                    MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
            format.setInteger(MediaFormat.KEY_BIT_RATE, VIDEO_BITRATE); // required for encoders, optional for decoders
            format.setInteger(MediaFormat.KEY_FRAME_RATE, VIDEO_FRAME_PER_SECOND);
            format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, VIDEO_I_FRAME_INTERVAL); // key-frame interval; ignored on many devices, where everything after the second key frame is a P-frame. There are other ways to force a GOP.

            try {
                mCodec = MediaCodec.createEncoderByType(MIME_TYPE);
            } catch (IOException e) {
                e.printStackTrace();
                return false;
            }
            mCodec.setCallback(new MediaCodec.Callback() {
                @Override
                public void onInputBufferAvailable(@NonNull MediaCodec codec, int index) {
                }

                @Override
                public void onOutputBufferAvailable(@NonNull MediaCodec codec, int index, @NonNull MediaCodec.BufferInfo info) {
                    if (index > -1) {
                        ByteBuffer outputBuffer = codec.getOutputBuffer(index);
                        byte[] data = new byte[info.size];
                        assert outputBuffer != null;
                        outputBuffer.get(data);
                        // The first output buffer is normally the codec-config buffer that carries
                        // SPS and PPS together, so both fields below end up holding that buffer.
                        if (sps == null) {
                            sps = data;
                        }
                        if (pps == null && sps != null) {
                            pps = data;
                        }
                        sendData(data);
                        codec.releaseOutputBuffer(index, false);
                    }
                    if (System.currentTimeMillis() - timeStamp >= secondFrame) { // every 5 s, resend SPS/PPS so a late-joining decoder can sync
                        timeStamp = System.currentTimeMillis(); // reset the timer, otherwise this would fire on every frame after the first 5 s
                        sendData(sps);
                        sendData(pps);

                    }
                }

                @Override
                public void onError(@NonNull MediaCodec codec, @NonNull MediaCodec.CodecException e) {
                    codec.reset();
                }

                @Override
                public void onOutputFormatChanged(@NonNull MediaCodec codec, @NonNull MediaFormat format) {

                }
            });
            mCodec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
            // create the input surface that the virtual display will render into
            mSurface = mCodec.createInputSurface();
            mCodec.start();
            onSurfaceBind(mSurface, mWidth, mHeight);
            return true;
        }

    }
//    Rough bitrate math for an uncompressed video (no audio), assuming 20 fps at 640*480
//    and 2 bytes per pixel:
//    one frame: 640*480*2 = 614400 bytes,
//    20 frames: 614400*20 = 12288000 bytes per second,
//    i.e. 12288000*8 = 98304000 bits = 98304 kbit/s.
//    In other words, once resolution and frame rate are fixed, the "raw" bitrate is fixed too; H.264/AVC
//    compression just scales it down proportionally (at 1% compression it would still be roughly a fixed 983 kbit/s).


}
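The reverse data channel (sender to receiver, UDP port 2666) goes through the static upData method above, but the post never shows a call site. A minimal hedged example; the payload string is made up, and the receiver's getUdpDataUtil simply hands the raw string to its onData callback:

// Example only: push an arbitrary text message from the sender back to the receiver.
// Call this after start(), once the encoder thread (and its Handler) is up.
VideoEncoderUtil.upData("hello from the sender");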

That is the sender side.
When the two devices pair up, the receiver already knows the sender's IP; once connected, it sends its own IP back. In the code below, as soon as we receive that IP we kick off screen capture and start pushing the stream (the onActivityResult step that actually starts the service is sketched right after this block).

getUdpDataUtil.getInstance().initData(new UdpDataCallBack() {
            @Override
            public void udpData(String s1, String s2) {
                try {
                    if (s1.equals("ip")) {
                        ip = s2;
                        projectionManager = (MediaProjectionManager) getSystemService(MEDIA_PROJECTION_SERVICE);
                        Intent captureIntent = projectionManager.createScreenCaptureIntent();
                        startActivityForResult(captureIntent, ACTIVITY_RESULT_CODE);
                    } else{
                       
                    }
                } catch (Exception e) {
                    L.e("getUdpDataUtil", e.getMessage());
                }
            }

        });
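The post does not show how the user's consent reaches the service, which reads "resultCode" and "resultData" from its start intent. A minimal sketch of that step, assuming it lives in the same Activity that calls startActivityForResult above:

    @Override
    protected void onActivityResult(int requestCode, int resultCode, Intent data) {
        super.onActivityResult(requestCode, resultCode, data);
        if (requestCode == ACTIVITY_RESULT_CODE && resultCode == RESULT_OK && data != null) {
            Intent serviceIntent = new Intent(this, AudioCaptureService.class);
            serviceIntent.putExtra("resultCode", resultCode);   // read back in onStartCommand()
            serviceIntent.putExtra("resultData", data);
            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
                startForegroundService(serviceIntent); // required on O+ so the service can call startForeground()
            } else {
                startService(serviceIntent);
            }
        }
    }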

Next comes the receiver's code.
The layout XML needs a SurfaceView to render the video:

  <SurfaceView
            android:id="@+id/screen_share_re_surface_view"
            android:layout_width="match_parent"
            android:layout_height="match_parent" />

The receiver needs a push helper for its data channel,
VideoEncoderUtil.java:

public class VideoEncoderUtil {
    private static Encoder encoder;
    private String ip;

    public VideoEncoderUtil(String ip) {
        this.ip = ip;
    }


    public void start() {
        if (encoder == null) {
            encoder = new Encoder();
        }
        new Thread(encoder).start();
    }

    public void stop() {
        if (encoder != null) {
            encoder = null;
        }
    }

    public static void prepare(String s) {
        Message message = new Message();
        message.obj = s;
        if(null!=encoder.threadHandler) {
            encoder.threadHandler.sendMessage(message);
        }
    }

    private class Encoder implements Runnable {
        /**
         * handler of the worker thread
         */
        private Handler threadHandler;
        private DatagramSocket mDatagramSocket;

        Encoder() {
            try {
                if(mDatagramSocket == null){
                    mDatagramSocket = new DatagramSocket(null);
                    mDatagramSocket.setReuseAddress(true);
                    mDatagramSocket.bind(new InetSocketAddress(6666));
                }
            } catch (SocketException e) {
                e.printStackTrace();
            }
            threadHandler = new Handler() {

                @Override
                public void handleMessage(Message msg) {
                    super.handleMessage(msg);
                    byte[] bys =msg.obj.toString().getBytes();
                    try {
                        DatagramPacket dp = new DatagramPacket(bys, bys.length, InetAddress.getByName(ip), 6666);
                        mDatagramSocket.send(dp);
                    } catch (IOException e) {
                        e.printStackTrace();
                    }
                }

            };
        }

        @Override
        public void run() {
        
        }
        
    }
}

And a receive helper,
getUdpDataUtil.java:

public class getUdpDataUtil {
    private static final int FRAME_MAX_DATANUMBER = 40964;
    private byte[] frameDataBytes = new byte[FRAME_MAX_DATANUMBER];
    private DatagramSocket mDatagramSocketdata;
    /**
     * getUdpDataUtil
     */
    private static getUdpDataUtil INSTANCE;

    /**
     * getUdpDataUtil
     */
    private getUdpDataUtil() {

    }
    /**
     * getUdpDataUtil, singleton
     */
    public static getUdpDataUtil getInstance() {
        if (INSTANCE == null) {
            synchronized (getUdpDataUtil.class) {
                if (INSTANCE == null) {
                    INSTANCE = new getUdpDataUtil();
                }
            }
        }
        return INSTANCE;
    }


    public void initData(UdpDataCallBack<String> callBack) {
        try {
            mDatagramSocketdata = new DatagramSocket(2666);
            final DatagramPacket dp = new DatagramPacket(frameDataBytes, frameDataBytes.length);
            new Thread(new Runnable() {
                @Override
                public void run() {
                    while (true){
                        try {
                            mDatagramSocketdata.receive(dp);
                        } catch (IOException e) {
                            Log.e("LLLL", "IOException: "+e.toString());
                            e.printStackTrace();
                        }
                        byte[] data = dp.getData();
                        callBack.onData(new String(data,0,dp.getLength()));
                    }

                }
            }).start();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}

The UDP data callback interface,
UdpDataCallBack.java:

public interface UdpDataCallBack<X> {
    void onData(X s);
}

A type-conversion helper,
TyteUtil.java:

public class TyteUtil {
    /**
     * Convert a byte array to an int (big-endian).
     *
     * @param b the source byte array
     * @return the resulting int value
     */
    public static int byteArrayToInt(byte[] b) {
        return   b[3] & 0xFF |
                (b[2] & 0xFF) << 8 |
                (b[1] & 0xFF) << 16 |
                (b[0] & 0xFF) << 24;
    }

    public static byte[] intToByteArray(int a) {
        return new byte[] {
                (byte) ((a >> 24) & 0xFF),
                (byte) ((a >> 16) & 0xFF),
                (byte) ((a >> 8) & 0xFF),
                (byte) (a & 0xFF)
        };
    }
}

IPUtils.java, local IP lookup:

public class IPUtils {

    /**
     * Get the device's IPv4 address.
     *
     * @param context
     * @return the local IPv4 address, or null if there is no network connection
     */
    public static String getIpAddress(Context context) {
        // get the WiFi service
        WifiManager wifiManager = (WifiManager) context.getSystemService(Context.WIFI_SERVICE);
        // check whether WiFi is enabled
        if (wifiManager.isWifiEnabled()) {
            // WiFi is on
            WifiInfo wifiInfo = wifiManager.getConnectionInfo();
            int ipAddress = wifiInfo.getIpAddress();
            String ip = intToIp(ipAddress);
            return ip;
        } else {
            // WiFi is off; fall back to scanning the network interfaces
            return getIpAddress();
        }
    }

    private static String intToIp(int ipAddress) {
        return (ipAddress & 0xFF) + "." +
                ((ipAddress >> 8) & 0xFF) + "." +
                ((ipAddress >> 16) & 0xFF) + "." +
                (ipAddress >> 24 & 0xFF);
    }

    /**
     * Get the device's IPv4 address by walking the network interfaces.
     *
     * @return the local IPv4 address, or null if there is no network connection
     */
    private static String getIpAddress() {
        try {
            NetworkInterface networkInterface;
            InetAddress inetAddress;
            for (Enumeration<NetworkInterface> en = NetworkInterface.getNetworkInterfaces(); en.hasMoreElements(); ) {
                networkInterface = en.nextElement();
                for (Enumeration<InetAddress> enumIpAddr = networkInterface.getInetAddresses(); enumIpAddr.hasMoreElements(); ) {
                    inetAddress = enumIpAddr.nextElement();
                    if (!inetAddress.isLoopbackAddress() && !inetAddress.isLinkLocalAddress()) {
                        return inetAddress.getHostAddress();
                    }
                }
            }
            return null;
        } catch (SocketException ex) {
            ex.printStackTrace();
            return null;
        }
    }
}

On the receiver, enter the sender's IP first; once connected, send the local IP back:

  videoEncoderUtil = new VideoEncoderUtil("sender ip"); // the sender's IP entered by the user
  videoEncoderUtil.start();
  VideoEncoderUtil.prepare("ip!" + IPUtils.getIpAddress(application)); // hand our own IP to the sender

  // receiving data from the sender
  getUdpDataUtil.getInstance().initData(new UdpDataCallBack<String>(){
            @Override
            public void onData(String s) {
                
            }
        });

Next is the video decoding. I didn't write this part very well, so feel free to optimize it yourself.

surface_view = (SurfaceView) findViewById(R.id.screen_share_re_surface_view);
        mSurfaceHolder = surface_view.getHolder();
        mSurfaceHolder.setFormat(PixelFormat.RGBX_8888); // helps against image corruption
        //    android:theme="@android:style/Theme.Translucent.NoTitleBar.Fullscreen" // a translucent fullscreen theme on the activity can also help with corruption, but in my tests the effect was limited
        mSurfaceHolder.addCallback(this);
    private Boolean yahaha = true; // whether the decoder may be recreated the next time onFrame() throws
    private int yhaxi = 0;         // failure counter before another recreate is allowed
    private static final int FRAME_MAX_NUMBER = 40964; // 4-byte length prefix + up to 40960 frame bytes
    private byte[] frameBytes = new byte[FRAME_MAX_NUMBER];
    private void initUdp() {
        try {
            mDatagramSocket = new DatagramSocket(2333);
            final DatagramPacket dp = new DatagramPacket(frameBytes, frameBytes.length);
            new Thread(new Runnable() {
                @Override
                public void run() {
                    while (getFrameData){
                        try {
                            mDatagramSocket.receive(dp);
                        } catch (Exception e) {
                            e.printStackTrace();
                        }
                        byte[] data = dp.getData();
//                        Log.e(TAG, data.length+ " run: "+ Arrays.toString(data));
                        byte[] lengthByte = new byte[4];
                        System.arraycopy(data, 0, lengthByte, 0, 4);
                        int frameLenth = TyteUtil.byteArrayToInt(lengthByte);
                        Log.e(TAG, frameLenth + " received length");
                        if (frameLenth == 0) continue;
                        frameLenth = 40960 < frameLenth ? 40960 : frameLenth; // cap at 40960: anything bigger would not fit in the receive buffer anyway
                        byte[] frame = new byte[frameLenth];
                        System.arraycopy(data, 4, frame, 0, frameLenth);
                        Log.e(TAG, frameLenth + " bytes fed to the decoder: " + Arrays.toString(frame));
                        onFrame(frame);
                    }
                    Log.e(TAG, "....: done receiving");
                    mDatagramSocket.close();
                }
            }).start();
        } catch (Exception e) {
            Log.e(TAG, "Exception:"+e.toString());
            e.printStackTrace();
        }
    }
    
    public void onFrame(byte[] buf) {
        try {
            if (buf == null||null==mediaCodec)
                return;
            int length = buf.length;
            ByteBuffer[] inputBuffers = mediaCodec.getInputBuffers();
            // dequeueInputBuffer returns the index of an input buffer that can be filled, or -1 if none is
            // available. timeoutUs == 0 returns immediately; timeoutUs < 0 waits indefinitely; timeoutUs > 0
            // waits at most that many microseconds.
//        int inputBufferIndex = mediaCodec.dequeueInputBuffer(1);// with a short timeout, corruption is obvious when the source scrolls fast, e.g. while it plays a video
            int inputBufferIndex = mediaCodec.dequeueInputBuffer(-1);// -1 waits indefinitely; this can hang and even starve the network thread without any visible error (while debugging it was usually because SPS/PPS never reached the decoder - getting the stream parameters into the decoder is essential)
            if (inputBufferIndex >= 0) { // an input buffer is available
                ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
                inputBuffer.clear();
                inputBuffer.put(buf, 0, length); // write the frame into the input buffer - the key step
//            int value = buf[4] & 0x0f;// NAL type: 5 = IDR frame, 7 = SPS, 8 = PPS
//            if (value == 7)// if you cannot guarantee the first frame written is SPS/PPS, wait for them like this and flag them as codec config
//                mediaCodec.queueInputBuffer(inputBufferIndex, 0, length, mCount * 30, MediaCodec.BUFFER_FLAG_CODEC_CONFIG);// refresh SPS/PPS
//            else
                mediaCodec.queueInputBuffer(inputBufferIndex, 0, length, mCount * 30, 0); // enqueue the buffer
                mCount++; // used as presentationTimeUs in queueInputBuffer: the presentation timestamp of this buffer in microseconds
            }

            MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
            int outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, 0); // index of an output buffer, if any
//        Log.e(TAG, "outputBufferIndex" + outputBufferIndex);
            while (outputBufferIndex >= 0) {
                mediaCodec.releaseOutputBuffer(outputBufferIndex, true); // render to the surface and release
                outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, 0); // fetch again; -1 means no more output and the loop ends
            }
        }catch (Exception e){
            Log.d(TAG, "decode error: " + e);
            // on a decode failure, recreate the surface/decoder, but at most once per burst of about 20 failures
            if(yahaha){
                surfaceDestroyed(mSurfaceHolder);
                surfaceCreated(mSurfaceHolder);
            }else{
                yhaxi++;
                if(yhaxi>20){
                    yhaxi=0;
                    yahaha=true;
                }
            }

        }

    }

    @Override
    public void surfaceCreated(SurfaceHolder holder) {
        Log.e(TAG, "surfaceCreated");
        getFrameData = true;
        creatMediaCodec(holder);
    }

    @Override
    public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {

    }

    @Override
    public void surfaceDestroyed(SurfaceHolder holder) {
        getFrameData = false;
        yahaha=false;
        Log.e(TAG, "surfaceDestroyed");
        if(mediaCodec != null) {
            try {
                mediaCodec.stop();
            }catch (Exception e){

            }
            mediaCodec = null;
        }
    }

    void creatMediaCodec(SurfaceHolder holder) {
        try {
            // create a decoder from the MIME type
            mediaCodec = MediaCodec.createDecoderByType("video/avc");
        } catch (IOException e) {
            Log.e(TAG, "failed to create a decoder for video/avc: " + e.toString());
            e.printStackTrace();
        }
        // configure the decoder
        final MediaFormat mediaformat = MediaFormat.createVideoFormat("video/avc", width, height);
        // You can try hard-coding SPS and PPS here; on some devices the decoder works fine that way.
//        byte[] header_sps = {0, 0, 0, 1, 103, 66, 0, 42, (byte) 149, (byte) 168, 30, 0, (byte) 137, (byte) 249, 102, (byte) 224, 32, 32, 32, 64};
//        byte[] header_pps = {0, 0, 0, 1, 104, (byte) 206, 60, (byte) 128, 0, 0, 0, 1, 6, (byte) 229, 1, (byte) 151, (byte) 128};
//        mediaformat.setByteBuffer("csd-0", ByteBuffer.wrap(header_sps));
//        mediaformat.setByteBuffer("csd-1", ByteBuffer.wrap(header_pps));
        mediaformat.setInteger(MediaFormat.KEY_FRAME_RATE, framerate);
        // optionally pin the decoded frame format
//            mediaformat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Flexible);// the decoder outputs frames in this format; YUV420Flexible is supported by almost every decoder

        // configure() parameters:
        //   format  - for a decoder, the format of the input data; for an encoder, the format of the output data
        //   surface - a surface to render the decoded output to
        //   crypto  - a crypto object if the media data is encrypted
        //   flags   - pass CONFIGURE_FLAG_ENCODE when configuring an encoder
        mediaCodec.configure(mediaformat, holder.getSurface(), null, 0);
        mediaCodec.start();
        Log.e(TAG, "decoder created");
        initUdp();
    }

    @Override
    protected void onStop() {
        super.onStop();
    }

    @Override
    protected void onDestroy() {
        Log.e(TAG, "onDestroy");
        mSurfaceHolder.getSurface().release();
        mSurfaceHolder = null;
        surface_view = null;
        mDatagramSocket.close();
        super.onDestroy();
        System.exit(0);
    }

Update: here is the source code.
The linked address is the source.
The source also pulls in DJI's MSDK, because this was originally built so that remote controller B could control aircraft A; you can remove that part yourself.
