Android Audio & Video: Video Calls Between Devices (WebSocket)

Learning goals:

Implement a video call between Android devices on the same LAN using WebSocket (devices running Android 6.0 or above are recommended).

Learning content:

Key points:

  • The phone's camera sensor is actually mounted sideways, so we have to rotate the frames ourselves. You might ask: why are the photos in the gallery upright? That is because the Android system rotates them for you when you take a picture.
  • The data captured by the camera is NV21; we convert it to NV12 before encoding it for network transmission.
  • When transmitting the video stream over the network, make sure to prepend the SPS and PPS in front of every I-frame.

Since we want to implement a call, we first need to implement local video capture and storage. The key idea is to encode the camera frames with MediaCodec and write them to a local file. To verify that the encoding is correct, play the file with ffplay, e.g. ffplay -f h264 codec.h264 (codec.h264 is the file the code below writes), and check that the video plays back. The key code is as follows:
Code link: project link
What the code does: press Push to start pushing, which writes the data captured by the camera to a local file; press Take to save the current frame as a picture.
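
For reference, here is a minimal sketch of how a hosting Activity might wire those buttons to the view below. The layout IDs, button names, and the assumption that CameraListener declares only onCreate() are mine, not taken from the project, and runtime CAMERA/storage permissions are assumed to already be granted.

// Hypothetical Activity wiring (sketch only; IDs and names are assumed)
public class MainActivity extends AppCompatActivity {
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        CustomSurfaceView surfaceView = findViewById(R.id.surface_view);
        // Open the back camera once the underlying Surface has been created
        surfaceView.setListener(new CameraListener() {
            @Override
            public void onCreate() {
                surfaceView.startPreview(MainActivity.this, CustomSurfaceView.BACK);
            }
        });
        // "Push" starts encoding camera frames into the local H.264 file
        findViewById(R.id.btn_push).setOnClickListener(v -> surfaceView.startPush());
        // "Take" saves the current frame as a JPEG
        findViewById(R.id.btn_take).setOnClickListener(v -> surfaceView.takeCapture());
    }
}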

public class CustomSurfaceView extends SurfaceView implements SurfaceHolder.Callback, Camera.PreviewCallback, SensorEventListener {

    private String filePath = Environment.getExternalStorageDirectory() + "/codec.h264";
    private String filePath1 = Environment.getExternalStorageDirectory() + "/codec.txt";

    public static final int STATUS_NONE = 0;
    public static final int STATUS_STATIC = 1;
    public static final int STATUS_MOVE = 2;
    private int STATUE = STATUS_NONE;
    private int DELAY_DURATION = 500;

    private int mX;
    private int mY;
    private int mZ;
    private long lastStaticStamp;

    boolean isFocusing = false;
    boolean canFocusIn = false;
    boolean canFocus = false;

    private Calendar calendar;


    private SensorManager mSensorManager;
    private Sensor mAccelerometer;
    public static final int FRONT = Camera.CameraInfo.CAMERA_FACING_FRONT;
    public static final int BACK = Camera.CameraInfo.CAMERA_FACING_BACK;
    private static boolean captureFlag = false;
    private static boolean pushFlag = false;
    private boolean mAutoFocus = false;
    private Camera.AutoFocusCallback myAutoFocusCallback = (autoFocusSuccess, arg1) -> {
        mAutoFocus = true;
    };

    CameraListener listener;
    Camera camera;
    Camera.Size previewSize;
    byte[] buffer;
    byte[] nv21_rotated;
    byte[] nv12;

    MediaCodec mediaCodec;

    public CustomSurfaceView(Context context) {
        this(context, null);
    }

    public CustomSurfaceView(Context context, AttributeSet attrs) {
        this(context, attrs, 0);
    }

    public CustomSurfaceView(Context context, AttributeSet attrs, int defStyleAttr) {
        super(context, attrs, defStyleAttr);
        init();
    }

    private void init() {
        restParams();
        getHolder().addCallback(this);
        mSensorManager = (SensorManager) getContext().getSystemService(SENSOR_SERVICE);
        mAccelerometer = mSensorManager.getDefaultSensor(Sensor.TYPE_ACCELEROMETER);
        mSensorManager.registerListener(this, mAccelerometer, SensorManager.SENSOR_DELAY_NORMAL);
    }


    @Override
    public void surfaceCreated(@NonNull SurfaceHolder holder) {
        if (this.listener != null) {
            listener.onCreate();
        }
    }

    // Initialize the hardware encoder (MediaCodec)
    private void initMediaCodec() {
        try {
            mediaCodec = MediaCodec.createEncoderByType(MIMETYPE_VIDEO_AVC);
            // Swap width and height, because the input frames are rotated 90 degrees before encoding
            MediaFormat format = MediaFormat.createVideoFormat(MIMETYPE_VIDEO_AVC, previewSize.height, previewSize.width);
            // Color format of the input data (YUV420)
            format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Flexible);
            // Bit rate
            format.setInteger(MediaFormat.KEY_BIT_RATE, previewSize.height * previewSize.width);
            // Frame rate
            format.setInteger(MediaFormat.KEY_FRAME_RATE, 15);
            // I-frame interval (seconds)
            format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
            mediaCodec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
            mediaCodec.start();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    @Override
    public void surfaceChanged(@NonNull SurfaceHolder holder, int format, int width, int height) {

    }

    @Override
    public void surfaceDestroyed(@NonNull SurfaceHolder holder) {

    }

    public void takeCapture() {
        captureFlag = true;
    }

    @Override
    public void onPreviewFrame(byte[] data, Camera camera) {
        if (pushFlag) {
            rotate_90(data);
            if (captureFlag) {
                capture();
                captureFlag = false;
            }
            byte[] tem = nv21toNV12(nv21_rotated);
            MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
            int index = mediaCodec.dequeueInputBuffer(10_000);
            if (index >= 0) {
                ByteBuffer buffer = mediaCodec.getInputBuffer(index);
                buffer.clear();
                buffer.put(tem, 0, tem.length);
                mediaCodec.queueInputBuffer(index, 0, tem.length, 0, 0);
            }

            int outIndex = mediaCodec.dequeueOutputBuffer(info, 10_000);
            if (outIndex >= 0) {
                ByteBuffer outputBuffer = mediaCodec.getOutputBuffer(outIndex);
                writeBytes(outputBuffer,info);
                mediaCodec.releaseOutputBuffer(outIndex, false);
            }
        }
        camera.addCallbackBuffer(data);
    }

    // Start rendering the camera preview to this view
    public void startPreview(Activity activity, int CameraId) {
        releaseCamera();
        resetFile(filePath);
        camera = Camera.open(CameraId);
        camera.setDisplayOrientation(calculateCameraPreviewOrientation(activity, CameraId));
        Camera.Parameters parameters = camera.getParameters();
        previewSize = parameters.getPreviewSize();
        buffer = new byte[previewSize.width * previewSize.height * 3 / 2];
        nv21_rotated = new byte[previewSize.width * previewSize.height * 3 / 2];
        try {
            camera.setPreviewDisplay(getHolder());
            camera.addCallbackBuffer(buffer);
            camera.setPreviewCallbackWithBuffer(this);
            camera.startPreview();
            initMediaCodec();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    // Trigger camera auto-focus (only when the focus mode is auto or macro)
    private void setCameraFocus(Camera.AutoFocusCallback autoFocus) {
        String focusMode = camera.getParameters().getFocusMode();
        if (focusMode.equals(Camera.Parameters.FOCUS_MODE_AUTO) ||
                focusMode.equals(Camera.Parameters.FOCUS_MODE_MACRO)) {
            camera.autoFocus(autoFocus);
        }
    }

    // Calculate the preview rotation for the given camera
    private int calculateCameraPreviewOrientation(Activity activity, int cameraID) {
        Camera.CameraInfo info = new Camera.CameraInfo();
        Camera.getCameraInfo(cameraID, info);
        int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
        int degrees = 0;
        switch (rotation) {
            case Surface.ROTATION_0:
                degrees = 0;
                break;
            case Surface.ROTATION_90:
                degrees = 90;
                break;
            case Surface.ROTATION_180:
                degrees = 180;
                break;
            case Surface.ROTATION_270:
                degrees = 270;
                break;
        }
        int result;
        if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
            result = (info.orientation + degrees) % 360;
            result = (360 - result) % 360;
        } else {
            result = (info.orientation - degrees + 360) % 360;
        }
        return result;
    }

    public void setListener(CameraListener listener) {
        this.listener = listener;
    }

    @Override
    protected void onDetachedFromWindow() {
        super.onDetachedFromWindow();
        if (camera != null) {
            camera.stopPreview();
            camera.release();
            camera = null;
        }
        if (mSensorManager != null) {
            mSensorManager.unregisterListener(this);
        }
        stopPush();
    }

    private void releaseCamera() {
        if (camera != null) {
            camera.stopPreview();
            camera.release();
            camera = null;
        }
    }

    // Save the current frame as a JPEG
    public void capture() {
        String fileName = "OICQ.jpg";
        File sdRoot = Environment.getExternalStorageDirectory();
        File pictureFile = new File(sdRoot, fileName);
        if (pictureFile.exists()) pictureFile.delete();
        try {
            pictureFile.createNewFile();
            FileOutputStream filecon = new FileOutputStream(pictureFile);
            // Because the frame was rotated, the camera's original width is now the height, so the two are swapped
            // Wrap the rotated NV21 data in a YuvImage
            YuvImage image = new YuvImage(nv21_rotated, ImageFormat.NV21, previewSize.height, previewSize.width, null);
            // Compress to JPEG
            image.compressToJpeg(
                    new Rect(0, 0, image.getWidth(), image.getHeight()),
                    100, filecon);
            Toast.makeText(getContext(), "capture sucess!", Toast.LENGTH_SHORT).show();
        } catch (IOException e) {
            e.printStackTrace();
            Toast.makeText(getContext(), "capture failed!", Toast.LENGTH_SHORT).show();
        }
    }

    // Convert NV21 (Y plane + interleaved VU) to NV12 (Y plane + interleaved UV) by swapping the chroma bytes
    private byte[] nv21toNV12(byte[] nv21) {
        int size = nv21.length;
        nv12 = new byte[size];
        int len = size * 2 / 3;
        // Copy the Y plane unchanged
        System.arraycopy(nv21, 0, nv12, 0, len);
        int i = len;
        while (i < size - 1) {
            nv12[i] = nv21[i + 1];
            nv12[i + 1] = nv21[i];
            i += 2;
        }
        return nv12;
    }

    // Rotate the NV21 frame 90 degrees clockwise
    private void rotate_90(byte[] data) {
        int width = previewSize.width;
        int height = previewSize.height;
        int y_size = width * height;
        int buffer_size = y_size * 3 / 2;
        int i = 0;
        int startPos = (height - 1) * width;
        for (int x = 0; x < width; x++) {
            int offset = startPos;
            for (int y = height - 1; y >= 0; y--) {
                nv21_rotated[i] = data[offset + x];
                i++;
                offset -= width;
            }
        }
        i = buffer_size - 1;
        for (int x = width - 1; x > 0; x = x - 2) {
            int offset = y_size;
            for (int y = 0; y < height / 2; y++) {
                nv21_rotated[i] = data[offset + x];
                i--;
                nv21_rotated[i] = data[offset + (x - 1)];
                i--;
                offset += width;
            }
        }
    }

    @Override
    public void onSensorChanged(SensorEvent event) {
        if (event.sensor.getType() == Sensor.TYPE_ACCELEROMETER) {
            int x = (int) event.values[0];
            int y = (int) event.values[1];
            int z = (int) event.values[2];
            calendar = Calendar.getInstance();
            long stamp = calendar.getTimeInMillis();// 1393844912

            int second = calendar.get(Calendar.SECOND);// 53

            if (STATUE != STATUS_NONE) {
                int px = Math.abs(mX - x);
                int py = Math.abs(mY - y);
                int pz = Math.abs(mZ - z);
                double value = Math.sqrt(px * px + py * py + pz * pz);
                if (value > 1.4) {
                    // The device is moving
                    STATUE = STATUS_MOVE;
                } else {
                    // The device is static; if the previous state was MOVE, record when it became static
                    if (STATUE == STATUS_MOVE) {
                        lastStaticStamp = stamp;
                        canFocusIn = true;
                    }

                    if (canFocusIn) {
                        if (stamp - lastStaticStamp > DELAY_DURATION) {
                            // Static for a while after moving: it is safe to trigger auto-focus
                            if (!isFocusing) {
                                canFocusIn = false;
                                setCameraFocus(myAutoFocusCallback);
                            }
                        }
                    }

                    STATUE = STATUS_STATIC;
                }
            } else {
                lastStaticStamp = stamp;
                STATUE = STATUS_STATIC;
            }

            mX = x;
            mY = y;
            mZ = z;
        }
    }

    @Override
    public void onAccuracyChanged(Sensor sensor, int accuracy) {

    }

    private void restParams() {
        STATUE = STATUS_NONE;
        canFocusIn = false;
        mX = 0;
        mY = 0;
        mZ = 0;
    }

    // Start pushing (encoding) frames
    public void startPush() {
        pushFlag = true;
    }

    // Stop pushing (encoding) frames
    public void stopPush() {
        pushFlag = false;
    }

    public static final int NAL_I = 5;
    public static final int NAL_SPS = 7;
    public static final int NAL_PPS = 8;
    private byte[] vps_sps_pps_buf;

    // Write the encoded bitstream to a binary file (prepend SPS/PPS before every I-frame)
    public void writeBytes(ByteBuffer bb, MediaCodec.BufferInfo bufferInfo) {
        byte[] bytes;
        // byteBuffer.get(a) copies the next a.length bytes from the buffer into the array a.
        // System.arraycopy(src, srcPos, dest, destPos, length) copies length elements
        // from src starting at srcPos into dest starting at destPos.

        // Skip the start code (00 00 00 01 or 00 00 01)
        int offset = 4;
        if (bb.get(2) == 0x01) {
            offset = 3;
        }
        // NAL unit type of the current frame
        int type = bb.get(offset) & 0x1F;
        if (type == NAL_SPS) {
            // Cache the SPS/PPS so it can be prepended to every key frame
            vps_sps_pps_buf = new byte[bufferInfo.size];
            bb.get(vps_sps_pps_buf);
            return;
        } else if (type == NAL_I) {
            // Key frame: prepend the cached SPS/PPS
            byte[] tem = new byte[bufferInfo.size];
            bb.get(tem);
            bytes = new byte[vps_sps_pps_buf.length + bufferInfo.size];
            System.arraycopy(vps_sps_pps_buf, 0, bytes, 0, vps_sps_pps_buf.length);
            System.arraycopy(tem, 0, bytes, vps_sps_pps_buf.length, tem.length);
        } else {
            bytes = new byte[bufferInfo.size];
            bb.get(bytes);
        }
        FileOutputStream writer = null;
        File file = new File(filePath);
        try {
            // Open the file in append mode (the second constructor argument, true)
            writer = new FileOutputStream(file, true);
            // Write only the raw NAL bytes; extra separator bytes would corrupt the H.264 stream
            writer.write(bytes);
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            try {
                if (writer != null) {
                    writer.close();
                }
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }

    // Dump the bytes as hex text, for easier inspection
    public String writeContent(byte[] array) {
        File file = new File(filePath1);
        char[] HEX_CHAR_TABLE = {
                '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'A', 'B', 'C', 'D', 'E', 'F'
        };
        StringBuilder sb = new StringBuilder();
        for (byte b : array) {
            sb.append(HEX_CHAR_TABLE[(b & 0xf0) >> 4]);
            sb.append(HEX_CHAR_TABLE[b & 0x0f]);
        }
        Log.i("oicq", "writeContent: " + sb.toString());
        FileWriter writer = null;
        try {
            // Open the file in append mode (the second constructor argument, true)
            writer = new FileWriter(file, true);
            writer.write(sb.toString());
            writer.write("\n");
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            try {
                if (writer != null) {
                    writer.close();
                }
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
        return sb.toString();
    }

    // Reset (recreate) the output files
    private void resetFile(String filePath) {
        File file = new File(filePath);
        try {
            if (file.exists()) file.delete();
            file.createNewFile();
        } catch (Exception e) {
            e.printStackTrace();
        }
        File file1 = new File(filePath1);
        try {
            if (file1.exists()) file1.delete();
            file1.createNewFile();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}

The code above captures the camera frames, encodes them, and saves them locally as an H.264 file. Next we implement the video-call part.
Since the two devices are on the same LAN, they communicate with each other over WebSocket.
A Client
Below is the A-side socket implementation. All data exchange happens through two methods: onMessage() receives data from the peer, and sendData() sends data to it.

// Video-call client (A side)
// Sends data to, and receives data from, the other device
public class SocketLive {
    private static final String TAG = "OICQ";
    private final SocketCallback socketCallback;
    MyWebSocketClient myWebSocketClient;

    public SocketLive(SocketCallback socketCallback) {
        this.socketCallback = socketCallback;
    }

    public void start() {
        try {
            // LAN address and port of the B-side device, which runs the WebSocket server
            URI url = new URI("ws://192.168.0.8:40004");
            myWebSocketClient = new MyWebSocketClient(url);
            myWebSocketClient.connect();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    public void sendData(byte[] bytes) {
        if (myWebSocketClient != null && (myWebSocketClient.isOpen())) {
            myWebSocketClient.send(bytes);
        }
    }


    private class MyWebSocketClient extends WebSocketClient {

        public MyWebSocketClient(URI serverURI) {
            super(serverURI);
        }

        @Override
        public void onOpen(ServerHandshake serverHandshake) {
            Log.i(TAG, "打开 socket  onOpen: ");
        }

        @Override
        public void onMessage(String s) {
        }

        @Override
        public void onMessage(ByteBuffer bytes) {
            Log.i(TAG, "消息长度  : " + bytes.remaining());
            byte[] buf = new byte[bytes.remaining()];
            bytes.get(buf);
            if (socketCallback != null) {
                socketCallback.callBack(buf);
            }
        }

        @Override
        public void onClose(int i, String s, boolean b) {
            Log.i(TAG, "onClose: ");
        }

        @Override
        public void onError(Exception e) {
            Log.i(TAG, "onError: ");
        }
    }

    public interface SocketCallback {
        void callBack(byte[] data);
    }
}
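
As a rough illustration of the sending side (the socketLive field and the sendFrame helper below are assumptions, not the project's actual code), the same byte arrays that writeBytes() assembles, a start code plus SPS/PPS plus I-frame, or a single non-key frame, can be handed straight to sendData():

// Hypothetical hookup inside CustomSurfaceView (sketch only)
private SocketLive socketLive;  // assumed field, created and started by the hosting Activity

public void setSocketLive(SocketLive socketLive) {
    this.socketLive = socketLive;
}

// Called with the same packet that writeBytes() writes to the local file
private void sendFrame(byte[] bytes) {
    if (socketLive != null) {
        // Every packet begins with the Annex B start code, and key frames carry SPS/PPS
        // in front, so the receiver can start decoding from any I-frame it receives.
        socketLive.sendData(bytes);
    }
}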

B Client
Below is the B-side socket implementation. The B side acts as the WebSocket server, listening on port 40004, which the A side connects to.

// Video-call peer (B side): runs the WebSocket server and exchanges data with the A side
public class SocketLive {
    private static final String TAG = "OICQ";
    private WebSocket webSocket;
    private SocketCallback socketCallback;

    public SocketLive(SocketCallback socketCallback) {
        this.socketCallback = socketCallback;
    }

    public void start() {
        webSocketServer.start();
    }

    public void close() {
        try {
            webSocket.close();
            webSocketServer.stop();
        } catch (IOException e) {
            e.printStackTrace();
        } catch (InterruptedException e) {
            e.printStackTrace();
        }

    }

    private WebSocketServer webSocketServer = new WebSocketServer(new InetSocketAddress(40004)) {
        @Override
        public void onOpen(WebSocket webSocket, ClientHandshake clientHandshake) {
            SocketLive.this.webSocket = webSocket;
        }

        @Override
        public void onClose(WebSocket webSocket, int i, String s, boolean b) {
            Log.i(TAG, "onClose: 关闭 socket ");
        }

        @Override
        public void onMessage(WebSocket webSocket, String s) {

        }

        @Override
        public void onMessage(WebSocket conn, ByteBuffer bytes) {
            Log.i(TAG, "消息长度  : " + bytes.remaining());
            byte[] buf = new byte[bytes.remaining()];
            bytes.get(buf);
            socketCallback.callBack(buf);
        }

        @Override
        public void onError(WebSocket webSocket, Exception e) {
            Log.i(TAG, "onError:  " + e.toString());
        }

        @Override
        public void onStart() {

        }
    };

    public void sendData(byte[] bytes) {
        if (webSocket != null && webSocket.isOpen()) {
            webSocket.send(bytes);
        }
    }

    public interface SocketCallback {
        void callBack(byte[] data);
    }
}

All that remains is to decode and render the received data with MediaCodec; that code is not pasted in detail here. The whole project is available at the code link:
ChatOnlineApplication.zip
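
As a rough sketch of that remaining step, the receiving side can feed each packet from the socket callback straight into a MediaCodec decoder that renders onto a Surface. The class name, the timeouts, and the assumption that every packet is a complete Annex B frame are mine, not the project's; the width/height passed in should match the rotated encoder dimensions (previewSize.height x previewSize.width).

// Hypothetical decoder sketch; it implements SocketCallback so received packets feed the decoder
public class H264Decoder implements SocketLive.SocketCallback {

    private final MediaCodec decoder;

    public H264Decoder(Surface surface, int width, int height) throws IOException {
        decoder = MediaCodec.createDecoderByType(MediaFormat.MIMETYPE_VIDEO_AVC);
        MediaFormat format = MediaFormat.createVideoFormat(MediaFormat.MIMETYPE_VIDEO_AVC, width, height);
        // Render decoded frames directly onto the supplied Surface
        decoder.configure(format, surface, null, 0);
        decoder.start();
    }

    @Override
    public void callBack(byte[] data) {
        // Feed one received Annex B packet into the decoder
        int inIndex = decoder.dequeueInputBuffer(10_000);
        if (inIndex >= 0) {
            ByteBuffer inputBuffer = decoder.getInputBuffer(inIndex);
            inputBuffer.clear();
            inputBuffer.put(data, 0, data.length);
            // Presentation time is not tracked here, matching the encoder side
            decoder.queueInputBuffer(inIndex, 0, data.length, 0, 0);
        }
        // Release any decoded frames to the Surface (render = true)
        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        int outIndex = decoder.dequeueOutputBuffer(info, 10_000);
        while (outIndex >= 0) {
            decoder.releaseOutputBuffer(outIndex, true);
            outIndex = decoder.dequeueOutputBuffer(info, 0);
        }
    }
}

Since the decoder implements SocketCallback, either end can simply pass it in when constructing SocketLive, for example new SocketLive(decoder).start().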
