Using RTP to transmit Camera data on Android

Recently my company needed a one-to-one video transmission project, with the requirement of using the Camera2 API and transmitting the data in H.264 format. This article focuses on how it is implemented; for the underlying theory I attach links to other articles.

Source code repository

The main logic on the sending side is as follows:

1. Acquire Camera data
2. Convert the acquired Camera data to YUV420SP format
3. Hardware-encode it to H.264 with MediaCodec
4. Send the Camera data over a Socket using the RTP protocol

1. Acquire Camera data

For background on how this works, see: Getting preview data with ImageReader
Below is my implementation; I'll paste the code directly.

@RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
public class Camera2Helper {
    private static final String TAG = "Camera2Helper";
    private Context mContext;
    private ImageReader mImageReader;
    private HandlerThread mBackgroundThread;
    private Handler mBackgroundHandler;
    private CameraDevice mCameraDevice;
    private CaptureRequest.Builder mPreviewRequestBuilder;
    private CameraCaptureSession mCaptureSession;
    private ImageDataListener mImageDataListener;
    private String mCameraId = "0";
    private Semaphore mCameraOpenCloseLock = new Semaphore(1);

    private final CameraDevice.StateCallback mStateCallback = new CameraDevice.StateCallback(){
        @Override
        public void onOpened(@NonNull CameraDevice camera) {
            Log.i(TAG, "onOpened");
            mCameraOpenCloseLock.release();
            mCameraDevice = camera;
            createCameraPreviewSession();
        }

        @Override
        public void onDisconnected(@NonNull CameraDevice camera) {
            Log.i(TAG, "onDisconnected");
            mCameraOpenCloseLock.release();
            camera.close();
            mCameraDevice = null;
        }

        @Override
        public void onError(@NonNull CameraDevice camera, int error) {
            Log.e(TAG, "onError openDevice error:" + error);
            mCameraOpenCloseLock.release();
            camera.close();
            mCameraDevice = null;
        }
    };

    public Camera2Helper(Context context) {
        this.mContext = context;
    }

    @SuppressLint("MissingPermission")
    @RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
    public void startCamera(int width, int height) {
        Log.i(TAG, "start Camera.");
        startBackgroundThread();
        setUpCameraOutputs(width, height);
        CameraManager cameraManager = (CameraManager) mContext.getSystemService(Context.CAMERA_SERVICE);
        try {
            cameraManager.openCamera(mCameraId, mStateCallback, mBackgroundHandler);
        } catch (CameraAccessException e) {
            Log.e(TAG, "startCamera error: " + e.getMessage());
        }
    }

    public void playCamera() {
        Log.i(TAG, "pauseCamera");
        try {
            mCaptureSession.setRepeatingRequest(mPreviewRequestBuilder.build(), null, mBackgroundHandler);
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }
    }

    public void pauseCamera() {
        Log.i(TAG, "pauseCamera");
        try {
            mCaptureSession.stopRepeating();
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }
    }

    public void closeCamera() {
        Log.i(TAG, "closeCamera");
        try {
            mCameraOpenCloseLock.acquire();
            if (mCaptureSession != null) {
                mCaptureSession.close();
                mCaptureSession = null;
            }

            if (mCameraDevice != null) {
                mCameraDevice.close();
                mCameraDevice = null;
            }

            if (mImageReader != null) {
                mImageReader.close();
                mImageReader = null;
            }
        } catch (InterruptedException e) {
            throw new RuntimeException("Interrupted while trying to lock camera closing.", e);
        } finally {
            mCameraOpenCloseLock.release();
        }
    }

    /**
     * Creates a new {@link CameraCaptureSession} for camera preview.
     */
    private void createCameraPreviewSession() {
        try {
            Surface imageSurface = mImageReader.getSurface();
            // We set up a CaptureRequest.Builder with the output Surface.
            mPreviewRequestBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
            mPreviewRequestBuilder.addTarget(imageSurface);

            mCameraDevice.createCaptureSession(Arrays.asList(imageSurface), new CameraCaptureSession.StateCallback() {
                @Override
                public void onConfigured(@NonNull CameraCaptureSession session) {
                    Log.i(TAG, "onConfigured");
                    // The camera is already closed
                    if (null == mCameraDevice) {
                        return;
                    }
                    mCaptureSession = session;
                    mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
                    try {
                        mCaptureSession.setRepeatingRequest(mPreviewRequestBuilder.build(), new CameraCaptureSession.CaptureCallback() {
                            @Override
                            public void onCaptureStarted(@NonNull CameraCaptureSession session, @NonNull CaptureRequest request, long timestamp, long frameNumber) {
                                super.onCaptureStarted(session, request, timestamp, frameNumber);
                            }
                        }, mBackgroundHandler);
                    } catch (CameraAccessException e) {
                        e.printStackTrace();
                    }
                }

                @Override
                public void onConfigureFailed(@NonNull CameraCaptureSession session) {
                    Log.i(TAG, "onConfigureFailed");
                }
            }, null);
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }
    }

    private void setUpCameraOutputs(int width, int height) {
        Log.i(TAG, "setUpCameraOutputs start");
        mImageReader = ImageReader.newInstance(width, height, ImageFormat.YUV_420_888, /*maxImages*/2);
        mImageReader.setOnImageAvailableListener(new RTPOnImageAvailableListener(), mBackgroundHandler);
    }

    /**
     * Starts a background thread and its {@link Handler}.
     */
    private void startBackgroundThread() {
        mBackgroundThread = new HandlerThread("CameraBackground");
        mBackgroundThread.start();
        mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
    }

    private class RTPOnImageAvailableListener implements ImageReader.OnImageAvailableListener{

        @Override
        public void onImageAvailable(ImageReader reader) {
            Log.i(TAG, "onImageAvailable");
            Image readImage = reader.acquireNextImage();
            if (readImage == null) {
                return;
            }
            byte[] data = ImageUtil.getBytesFromImageAsType(readImage, 1);
            // If the frame needs rotating, ImageUtil.rotateYUVDegree90(data, width, height) can be applied here.
            readImage.close();
            if (data != null && mImageDataListener != null) {
                mImageDataListener.OnImageDataListener(data);
            }
        }
    }

    public void setImageDataListener(ImageDataListener listener) {
        this.mImageDataListener = listener;
    }

    public interface ImageDataListener {
        void OnImageDataListener(byte[] data);
    }
}
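
For reference, a minimal sketch of how this helper might be driven from an Activity. The listener callback is where the data for steps 2 and 3 arrives; the Activity context and the 1280x720 resolution are placeholder values, not taken from the demo.

// Hypothetical wiring inside an Activity; the resolution is an arbitrary example.
Camera2Helper cameraHelper = new Camera2Helper(this);
cameraHelper.setImageDataListener(new Camera2Helper.ImageDataListener() {
    @Override
    public void OnImageDataListener(byte[] data) {
        // 'data' is a YUV420SP frame (see step 2); hand it to the encoder from step 3.
    }
});
cameraHelper.startCamera(1280, 720);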

2. Convert the Camera data to YUV420SP format

For background, see: An overview of Android's Image class (with YUV_420_888)
Code:

public static byte[] getBytesFromImageAsType(Image image, int type) {
        try {
            //Get the source planes; for YUV formats, planes.length == 3
            final Image.Plane[] planes = image.getPlanes();

            //Effective data width: in general image width <= rowStride, which is also why bytes.length <= buffer.capacity()
            //So we only take the width part of each row
            int width = image.getWidth();
            int height = image.getHeight();
            Log.i(TAG, "image width = " + image.getWidth() + "; image height = " + image.getHeight());

            //Buffer for the final YUV data; it needs 1.5x the pixel count because YUV420 stores 12 bits per pixel
            byte[] yuvBytes = new byte[width * height * ImageFormat.getBitsPerPixel(ImageFormat.YUV_420_888) / 8];
            //The position to which the target array is filled
            int dstIndex = 0;

            //Temporary storage of uv data
            byte[] uBytes = new byte[width * height / 4];
            byte[] vBytes = new byte[width * height / 4];
            int uIndex = 0;
            int vIndex = 0;

            int pixelsStride, rowStride;
            for (int i = 0; i < planes.length; i++) {
                pixelsStride = planes[i].getPixelStride();
                rowStride = planes[i].getRowStride();

                ByteBuffer buffer = planes[i].getBuffer();

                //Index into the source data. Y is contiguous byte by byte; U/V may be interleaved depending on pixelStride.
                byte[] bytes = new byte[buffer.capacity()];
                buffer.get(bytes);

                int srcIndex = 0;
                if (i == 0) {
                    //Copy the valid width of each Y row directly into the destination buffer
                    for (int j = 0; j < height; j++) {
                        System.arraycopy(bytes, srcIndex, yuvBytes, dstIndex, width);
                        srcIndex += rowStride;
                        dstIndex += width;
                    }
                } else if (i == 1) {
                    //Take corresponding data according to pixelsStride
                    for (int j = 0; j < height / 2; j++) {
                        for (int k = 0; k < width / 2; k++) {
                            uBytes[uIndex++] = bytes[srcIndex];
                            srcIndex += pixelsStride;
                        }
                        if (pixelsStride == 2) {
                            srcIndex += rowStride - width;
                        } else if (pixelsStride == 1) {
                            srcIndex += rowStride - width / 2;
                        }
                    }
                } else if (i == 2) {
                    //Take corresponding data according to pixelsStride
                    for (int j = 0; j < height / 2; j++) {
                        for (int k = 0; k < width / 2; k++) {
                            vBytes[vIndex++] = bytes[srcIndex];
                            srcIndex += pixelsStride;
                        }
                        if (pixelsStride == 2) {
                            srcIndex += rowStride - width;
                        } else if (pixelsStride == 1) {
                            srcIndex += rowStride - width / 2;
                        }
                    }
                }
            }
            //Fill based on required result type
            switch (type) {
                case YUV420P:
                    System.arraycopy(uBytes, 0, yuvBytes, dstIndex, uBytes.length);
                    System.arraycopy(vBytes, 0, yuvBytes, dstIndex + uBytes.length, vBytes.length);
                    break;
                case YUV420SP:
                    for (int i = 0; i < vBytes.length; i++) {
                        yuvBytes[dstIndex++] = uBytes[i];
                        yuvBytes[dstIndex++] = vBytes[i];
                    }
                    break;
                case NV21:
                    for (int i = 0; i < vBytes.length; i++) {
                        yuvBytes[dstIndex++] = vBytes[i];
                        yuvBytes[dstIndex++] = uBytes[i];
                    }
                    break;
            }
            return yuvBytes;
        } catch (final Exception e) {
            if (image != null) {
                image.close();
            }
            Log.e(TAG, e.toString());
        }
        return null;
    }
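
The converter above refers to three constants (YUV420P, YUV420SP, NV21) that are defined elsewhere in ImageUtil and are not shown in this post. A plausible definition, consistent with the getBytesFromImageAsType(readImage, 1) call in step 1 (so 1 = YUV420SP, which is what the encoder in step 3 expects), would be:

// Assumed values; the actual constants in the project may differ.
public static final int YUV420P  = 0;   // planar:      Y... U... V...
public static final int YUV420SP = 1;   // semi-planar: Y... UVUV... (NV12, matches COLOR_FormatYUV420SemiPlanar)
public static final int NV21     = 2;   // semi-planar: Y... VUVU...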

3. Hardware-encode to H.264 with MediaCodec

For background, see: Android MediaCodec usage notes
Code:

public class AvcEncoder {
	private static final String TAG = "AvcEncoder";
	private static final String MIME_TYPE = "video/avc";
	private MediaCodec mMediaCodec;
	private int mWidth;
	private int mHeight;
	private byte[] mInfo = null;

	@SuppressLint("NewApi")
	public AvcEncoder(int width, int height, int framerate, int bitrate) {
		mWidth  = width;
		mHeight = height;
		Log.i(TAG, "AvcEncoder:" + mWidth + "+" + mHeight);
		try {
			mMediaCodec = MediaCodec.createEncoderByType(MIME_TYPE);
			MediaFormat mediaFormat = MediaFormat.createVideoFormat(MIME_TYPE, width, height);
			mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, bitrate);
			mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, framerate);
			mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar);
			mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);

			mMediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
			mMediaCodec.start();
		} catch (IOException e) {
			e.printStackTrace();
		}
	}

	@SuppressLint("NewApi")
	public int offerEncoder(byte[] input, byte[] output) {
		Log.i(TAG, "offerEncoder:"+input.length+"+"+output.length);
		int pos = 0;
	    try {
	        ByteBuffer[] inputBuffers = mMediaCodec.getInputBuffers();
	        ByteBuffer[] outputBuffers = mMediaCodec.getOutputBuffers();
	        int inputBufferIndex = mMediaCodec.dequeueInputBuffer(-1);
	        if (inputBufferIndex >= 0) {
	            ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
	            inputBuffer.clear();
	            inputBuffer.put(input);
	            mMediaCodec.queueInputBuffer(inputBufferIndex, 0, input.length, 0, 0);
	        }

	        MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
	        int outputBufferIndex = mMediaCodec.dequeueOutputBuffer(bufferInfo,0);
	        while (outputBufferIndex >= 0) {
	            ByteBuffer outputBuffer = outputBuffers[outputBufferIndex];
	            byte[] outData = new byte[bufferInfo.size];
	            outputBuffer.get(outData);

	            if(mInfo != null){
	            	System.arraycopy(outData, 0,  output, pos, outData.length);
	 	            pos += outData.length;
	            }else{		//The first output buffer holds only SPS/PPS; save it so it can be prepended to key frames later
					ByteBuffer spsPpsBuffer = ByteBuffer.wrap(outData);
					if (spsPpsBuffer.getInt() == 0x00000001) {
						mInfo = new byte[outData.length];
						System.arraycopy(outData, 0, mInfo, 0, outData.length);
					}else {
						return -1;
					}
	            }
	            if(output[4] == 0x65) {		//key frame: the encoder emits only 00 00 00 01 65 without SPS/PPS, so prepend the saved config data
	                System.arraycopy(mInfo, 0,  output, 0, mInfo.length);
	                System.arraycopy(outData, 0,  output, mInfo.length, outData.length);
	                pos = mInfo.length + outData.length;	//account for the prepended SPS/PPS, otherwise the key frame is truncated
		        }
	            mMediaCodec.releaseOutputBuffer(outputBufferIndex, false);
	            outputBufferIndex = mMediaCodec.dequeueOutputBuffer(bufferInfo, 0);
	        }
	    } catch (Throwable t) {
	        t.printStackTrace();
	    }
		Log.i(TAG, "offerEncoder+pos:" + pos);
	    return pos;
	}

	@SuppressLint("NewApi")
	public void close() {
		try {
			mMediaCodec.stop();
			mMediaCodec.release();
		} catch (Exception e){
			e.printStackTrace();
		}
	}
}
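
A quick sketch of how a caller might drive the encoder: allocate a reusable output buffer large enough for one encoded frame, feed each YUV420SP frame from step 2 into offerEncoder(), and forward the first 'length' bytes. The class name, resolution, frame rate, bitrate and buffer size below are illustrative, not values from the original project.

// Hypothetical glue code between step 2 and step 4.
public class VideoEncodeSender {
    private final AvcEncoder mEncoder = new AvcEncoder(1280, 720, 25, 2 * 1024 * 1024);
    private final byte[] mH264Buffer = new byte[1280 * 720 * 3 / 2];   // generous upper bound for one frame

    // Called for every YUV420SP frame delivered by Camera2Helper.
    public void onYuvFrame(byte[] yuv420sp) {
        int length = mEncoder.offerEncoder(yuv420sp, mH264Buffer);
        if (length > 0) {
            // mH264Buffer[0..length) now holds one H.264 frame
            // (with SPS/PPS prepended on key frames); pass it to the RTP layer in step 4.
        }
    }
}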

4. Send the Camera data over a Socket using RTP

For background on the RTP protocol, see: RTP basics
Code:

   /**
        RTP packet header (bit layout per 32-bit word):
        bits   0-1      2  3  4-7  8  9-15  16-31
         0-31  Version  P  X  CC   M  PT    Sequence Number
        32-63  Timestamp
        64-95  SSRC identifier
     */
	public void addPacket(byte[] prefixData, byte[] data, int offset, int size, long timeUs) throws IOException{
		ByteBuffer buffer = ByteBuffer.allocate(500000);
		buffer.put((byte)(2 << 6));
		buffer.put((byte)(payloadType));
		buffer.putShort(sequenceNumber++);
		buffer.putInt((int)(timeUs));
		buffer.putInt(12345678);
		buffer.putInt(size);

        if (prefixData != null) {
            buffer.put(prefixData);
        }
		buffer.put(data, offset, size);
		sendPacket(buffer, buffer.position());
	}
	
	protected void sendPacket(ByteBuffer buffer, int size) throws IOException{
		socket.sendPacket(buffer.array(), 0, size);
		buffer.clear();
	}

Notes:
1. This demo does not request permissions at runtime; you have to grant the camera permission manually in Settings (a runtime-request sketch follows this list).
2. The IP address is hard-coded; in practice you need to obtain the peer's IP dynamically.
3. Looking around online, I found that many RTP implementations split one frame into several packets before sending, but this demo packs a whole frame into a single packet; presumably the underlying protocol fragments and reassembles it automatically.
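
If you prefer to request the permission at runtime instead of enabling it manually, the standard pattern (not part of this demo) is roughly:

// Runtime permission request, called from an Activity before startCamera().
// Needs android.Manifest, android.content.pm.PackageManager and the
// ContextCompat/ActivityCompat helpers from androidx.core (or support-v4).
if (ContextCompat.checkSelfPermission(this, Manifest.permission.CAMERA)
        != PackageManager.PERMISSION_GRANTED) {
    ActivityCompat.requestPermissions(this,
            new String[]{Manifest.permission.CAMERA}, /*requestCode*/ 1);
}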

The code above has one problem: when a single frame is too large, the socket cannot send it and reports a "message too long" error. The only real fix is to split the frame into multiple packets. The fragmentation code is shown below:


import android.util.Log;
import com.byd.rtpserverdemo.CalculateUtil;
import java.io.IOException;
import java.nio.ByteBuffer;

public class RtpStream {
	private static final String TAG = "RtpStream";
	private static final int BUFFER_SIZE = 1500;
	private static final int MTU = 1400;
	private static final int SSRC = 1;
	private byte[] mNal = new byte[2];
	private int mTimeStamp = 0;
	private short mSequenceNumber;
	private RtpSocket mSocket;
	private int mPayloadType;
	private int mSampleRate;
	private int mFrameRate;

	public RtpStream(int pt, int sampleRate, RtpSocket socket, int frameRate) {
		this.mPayloadType = pt;
		this.mSampleRate = sampleRate;
		this.mSocket = socket;
		this.mFrameRate = frameRate;
	}

	public void addPacket(byte[] data, int offset, int size, long timeUs) throws IOException{
		addPacket(null, data, offset, size, timeUs);
	}

    /**
        RTP packet header (bit layout per 32-bit word):
        bits   0-1      2  3  4-7  8  9-15  16-31
         0-31  Version  P  X  CC   M  PT    Sequence Number
        32-63  Timestamp
        64-95  SSRC identifier
     */
	public void addPacket(byte[] prefixData, byte[] data, int offset, int size, long timeUs) throws IOException{
		Log.i(TAG, "size: " + size);
		ByteBuffer buffer = ByteBuffer.allocate(BUFFER_SIZE);
		mTimeStamp += mSampleRate / mFrameRate;
		if (size < MTU) {	//If the frame is smaller than the MTU (1400 bytes), send it as a single packet; otherwise fragment it
			buffer.put((byte) ((2 << 6) + (2 << 3)));
			buffer.put((byte) ((1 << 7) + (mPayloadType)));
			buffer.putShort(mSequenceNumber++);		//sequence number
			buffer.putInt(mTimeStamp);		//timestamp
			buffer.putInt(SSRC);	//SSRC
			buffer.putInt(size);	//payload size; RTP is extensible, so the size is sent along and the receiver does not have to compute it when de-packetizing

			buffer.put(data, offset, size);
			sendPacket(buffer, buffer.position());
		} else {
			int packages = size / MTU;
			int remainingSize = size % MTU;
			if (remainingSize == 0) {
				packages = packages - 1;
			}

			for (int i = 0; i <= packages; i++) {
				CalculateUtil.memset(mNal, 0, 2);
				mNal[0] = (byte) (mNal[0] | ((byte) (data[4] & 0x80)) << 7);				//forbidden_zero_bit, always 0
				mNal[0] = (byte) (mNal[0] | ((byte) ((data[4] & 0x60) >> 5)) << 5);		//NRI, indicates the importance of this packet
				mNal[0] = (byte) (mNal[0] | (byte) (28));									//TYPE = 28, marking this as an FU-A fragmentation unit

				if (i == 0) {
					//FU header, one byte: S, E, R, TYPE
					mNal[1] = (byte) (mNal[1] & 0xBF);										//E=0; E marks the last fragment (1 if last)
					mNal[1] = (byte) (mNal[1] & 0xDF);										//R=0, reserved bit, must be 0
					mNal[1] = (byte) (mNal[1] | 0x80);										//S=1; S marks the first fragment (1 if first)

					//Check whether this is a key frame
                    if (data[4] == 97) {
                        mNal[1] = (byte) (mNal[1] | ((byte) (1 & 0x1f)));					//TYPE, i.e. the TYPE from the original NALU header
                    } else {
                        mNal[1] = (byte) (mNal[1] | ((byte) (5 & 0x1f)));
                    }
                    buffer.put((byte) ((2 << 6) + (2 << 3)));
					buffer.put((byte) (mPayloadType));
					buffer.putShort(mSequenceNumber++);
					buffer.putInt(mTimeStamp);
					buffer.putInt(SSRC);
                    buffer.putInt(MTU);
					buffer.put(mNal);

					buffer.put(data, 0, MTU);
					sendPacket(buffer, buffer.position());
				} else if (i == packages) {
					mNal[1] = (byte) (mNal[1] & 0xDF); //R=0, reserved bit must be 0
					mNal[1] = (byte) (mNal[1] & 0x7F); //S=0, not the first fragment
					mNal[1] = (byte) (mNal[1] | 0x40); //E=1, the last fragment

                    //Check whether this is a key frame
                    if (data[4] == 97) {
                        mNal[1] = (byte) (mNal[1] | ((byte) (1 & 0x1f)));					//TYPE, i.e. the TYPE from the original NALU header
                    } else {
                        mNal[1] = (byte) (mNal[1] | ((byte) (5 & 0x1f)));
                    }

                    buffer.put((byte) ((2 << 6) + (2 << 3)));
                    buffer.put((byte) ((1 << 7) + (mPayloadType)));
                    buffer.putShort(mSequenceNumber++);
                    buffer.putInt(mTimeStamp);
                    buffer.putInt(SSRC);

					if (remainingSize == 0) {
						buffer.putInt(MTU);
						buffer.put(mNal);
						buffer.put(data, i * MTU, MTU);
					} else {
						buffer.putInt(remainingSize);
						buffer.put(mNal);
						buffer.put(data, i * MTU, remainingSize);
					}

					sendPacket(buffer, buffer.position());
				} else {
					mNal[1] = (byte) (mNal[1] & 0xDF); //R=0, reserved bit must be 0
					mNal[1] = (byte) (mNal[1] & 0x7F); //S=0, not the first fragment
					mNal[1] = (byte) (mNal[1] & 0xBF); //E=0, not the last fragment

                    //Check whether this is a key frame
                    if (data[4] == 97) {
                        mNal[1] = (byte) (mNal[1] | ((byte) 1));					//TYPE, i.e. the TYPE from the original NALU header
                    } else if (data[26] == 101) {   //the frame is preceded by 22 bytes of SPS/PPS plus a 4-byte start code, so data[26] is the byte that identifies the frame type
                        Log.v(TAG, "key frame");
                        mNal[1] = (byte) (mNal[1] | ((byte) 5));
                    }

                    buffer.put((byte) ((2 << 6) + (2 << 3)));
                    buffer.put((byte) (mPayloadType));
                    buffer.putShort(mSequenceNumber++);
                    buffer.putInt(mTimeStamp);
                    buffer.putInt(SSRC);
                    buffer.putInt(MTU);
					buffer.put(mNal);

					buffer.put(data, i * MTU, MTU);
					sendPacket(buffer, buffer.position());
				}
			}
		}
	}

	protected void sendPacket(ByteBuffer buffer, int size) throws IOException{
		mSocket.sendPacket(buffer.array(), 0, size);
		buffer.clear();
	}

	protected void sendPacket(byte[] buffer, int size) throws IOException {
		mSocket.sendPacket(buffer, 0, size);
	}
}
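
Finally, a sketch of how the fragmenting stream might be created and fed from the encoder. Payload type 96 (a dynamic type) and a 90 kHz clock are the usual choices for H.264 over RTP; the host, port and frame rate are placeholder values, and RtpSocket here refers to the assumed UDP wrapper sketched earlier.

// Hypothetical sender setup.
RtpSocket socket = new RtpSocket("192.168.1.100", 5004);
RtpStream rtpStream = new RtpStream(96 /* dynamic payload type */, 90000 /* H.264 clock rate */, socket, 25 /* fps */);

// For each encoded frame of 'length' bytes produced in step 3:
// (the timeUs argument is ignored by this RtpStream, which advances its own
// timestamp by mSampleRate / mFrameRate on every call, so 0 is passed here)
rtpStream.addPacket(h264Buffer, 0, length, 0);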

Correspondingly, the receiving end has to de-packetize the stream. I post that code in the following article: Android接收RTP传输的Camera数据 (Receiving RTP-transmitted Camera data on Android).

Reference blog: android硬编码h264数据,并使用rtp推送数据流,实现一个简单的直播-MediaCodec(一) (hardware-encoding H.264 on Android and pushing the stream over RTP to build a simple live broadcast, MediaCodec part 1)
My project repository: GitHub
