The previous post covered the sending side, so this one looks at the receiving end.
The receiver's job is to take in the data, decode it, and render the preview on a TextureView.
This part is fairly simple, so I'll just paste the code.
It is mainly based on this blog post: android硬编码h264数据,并使用rtp推送数据流,实现一个简单的直播-MediaCodec(二)
public class ClientTextureView extends TextureView implements TextureView.SurfaceTextureListener{
private static final String MIME_TYPE = "video/avc";
private static final String TAG = "ClientTextureView" ;
private static final int PORT = 5004;
private DatagramSocket mSocket;
private MediaCodec mDecode;
private byte[] mRtpData = new byte[80000];
private byte[] mH264Data = new byte[80000];
public ClientTextureView(Context context, AttributeSet attrs) {
super(context, attrs);
setSurfaceTextureListener(this);
try {
mSocket = new DatagramSocket(PORT);
mSocket.setReuseAddress(true);
mSocket.setBroadcast(true);
} catch (SocketException e) {
e.printStackTrace();
}
}
@RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN)
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
Log.i(TAG, "SurfaceTexture width " + width + "; height" + height);
new PreviewThread(new Surface(surface), width, height);
}
@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
if (mSocket != null){
mSocket.close();
mSocket = null;
}
return false;
}
@Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) { }
@Override
public void onSurfaceTextureUpdated(SurfaceTexture surface) { }
private class PreviewThread extends Thread {
DatagramPacket datagramPacket = null;
@RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN)
public PreviewThread(Surface surface, int width , int height){
Log.i(TAG, "PreviewThread surface width " + width + "; height" + height);
try {
mDecode = MediaCodec.createDecoderByType(MIME_TYPE);
final MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, width, height);
format.setInteger(MediaFormat.KEY_BIT_RATE, 40000);
format.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
//SPS and PPS for the decoder (csd-0 / csd-1); these must match the stream produced by the sender's encoder.
byte[] header_sps = {0, 0, 0, 1, 103, 66, 0, 41, -115, -115, 64, 80, 30, -48, 15, 8, -124, 83, -128};
byte[] header_pps = {0, 0, 0, 1, 104, -54, 67, -56};
format.setByteBuffer("csd-0", ByteBuffer.wrap(header_sps));
format.setByteBuffer("csd-1", ByteBuffer.wrap(header_pps));
mDecode.configure(format, surface, null, 0);
mDecode.start();
} catch (IOException e) {
e.printStackTrace();
}
start();
}
@RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN)
@Override
public void run() {
byte[] data = new byte[80000];
int h264Length = 0;
while (true){
if (mSocket != null){
try {
datagramPacket = new DatagramPacket(data, data.length);
mSocket.receive(datagramPacket);
} catch (IOException e) {
e.printStackTrace();
}
}
mRtpData = datagramPacket.getData();
if (mRtpData != null ){
if (mRtpData[0] == -128 && mRtpData[1] == 96){ //RTP header: version 2 (0x80), payload type 96
//Bytes 12-15 carry the length of the H.264 payload (big-endian).
int l1 = (mRtpData[12] << 24) & 0xff000000;
int l2 = (mRtpData[13] << 16) & 0x00ff0000;
int l3 = (mRtpData[14] << 8) & 0x0000ff00;
int l4 = mRtpData[15] & 0x000000FF;
h264Length = l1 + l2 + l3 + l4;
Log.i(TAG, "run: h264Length="+h264Length);
System.arraycopy(mRtpData,16, mH264Data,0,h264Length);
Log.i(TAG, "run:h264Data[0]="+mH264Data[0]+","+mH264Data[1]+","+mH264Data[2]+","+mH264Data[3]
+","+mH264Data[4]+","+mH264Data[5]+","+mH264Data[6]+","+mH264Data[7]
+","+mH264Data[8]+","+mH264Data[9]+","+mH264Data[10]
+","+mH264Data[11]+","+mH264Data[12]+","+mH264Data[13]
+","+mH264Data[14]+","+mH264Data[15]+","+mH264Data[16]
+","+mH264Data[17]+","+mH264Data[18]+","+mH264Data[19]
+","+mH264Data[20]+","+mH264Data[21]+","+mH264Data[22]);
offerDecoder(mH264Data, h264Length); //feed only the valid bytes of this NAL unit to the decoder
}
}
}
}
}
@RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN)
private void offerDecoder(byte[] input, int length) {
Log.d(TAG, "offerDecoder: ");
try {
ByteBuffer[] inputBuffers = mDecode.getInputBuffers();
int inputBufferIndex = mDecode.dequeueInputBuffer(0);
if (inputBufferIndex >= 0) {
ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
inputBuffer.clear();
try{
inputBuffer.put(input, 0, length);
}catch (Exception e){
e.printStackTrace();
}
mDecode.queueInputBuffer(inputBufferIndex, 0, length, 0, 0);
}
MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
int outputBufferIndex = mDecode.dequeueOutputBuffer(bufferInfo, 0);
while (outputBufferIndex >= 0) {
//If a valid surface was specified when configuring the codec,
//passing true renders this output buffer to the surface.
mDecode.releaseOutputBuffer(outputBufferIndex, true);
outputBufferIndex = mDecode.dequeueOutputBuffer(bufferInfo, 0);
}
} catch (Throwable t) {
t.printStackTrace();
}
}
}
MainActivity
public class MainActivity extends AppCompatActivity {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT);
}
}
AndroidManifest
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.byd.rtpclientdemo">
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/>
<uses-permission android:name="android.permission.INTERNET"/>
<uses-permission android:name="android.permission.RECORD_VIDEO"/>
<uses-permission android:name="android.permission.RECORD_AUDIO"/>
<uses-permission android:name="android.permission.CAMERA" />
<uses-feature android:name="android.hardware.camera" />
<uses-feature android:name="android.hardware.camera.autofocus" />
<uses-permission android:name="android.permission.ACCESS_NETWORK_STATE" />
<uses-feature android:name="android.hardware.wifi" android:required="true" />
<application
android:allowBackup="true"
android:icon="@mipmap/ic_launcher"
android:label="@string/app_name"
android:roundIcon="@mipmap/ic_launcher_round"
android:supportsRtl="true"
android:theme="@style/AppTheme">
<activity android:name=".MainActivity">
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
</application>
</manifest>
The code in this post is close to the referenced one, so I won't upload it to GitHub; please refer to the post above. If you would like it uploaded, feel free to leave a comment.
The previous post mentioned that frames are split into packets before sending, so the receiver needs the matching unpacking logic (the CalculateUtil helper it relies on is sketched after the listing). The code is as follows:
package com.byd.rtpclientdemo;
import android.content.Context;
import android.graphics.SurfaceTexture;
import android.media.MediaCodec;
import android.media.MediaFormat;
import android.os.Build;
import android.util.AttributeSet;
import android.util.Log;
import android.view.Surface;
import android.view.TextureView;
import androidx.annotation.RequiresApi;
import java.io.IOException;
import java.net.DatagramPacket;
import java.net.DatagramSocket;
import java.net.SocketException;
import java.nio.ByteBuffer;
public class ClientTextureView extends TextureView implements TextureView.SurfaceTextureListener {
private static final String MIME_TYPE = "video/avc";
private static final String TAG = "ClientTextureView";
private byte[] mH264Data = new byte[200000];
private static final int PORT = 5004;
private int mHeight = 1080;
private int mWidth = 1920;
private DatagramSocket mSocket;
private MediaCodec mDecode;
public ClientTextureView(Context context, AttributeSet attrs) {
super(context, attrs);
setSurfaceTextureListener(this);
try {
mSocket = new DatagramSocket(PORT);
mSocket.setReuseAddress(true);
mSocket.setBroadcast(true);
} catch (SocketException e) {
e.printStackTrace();
}
}
@RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN)
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
new PreviewThread(new Surface(surface));
}
@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
if (mSocket != null) {
mSocket.close();
mSocket = null;
}
return false;
}
@Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
}
@Override
public void onSurfaceTextureUpdated(SurfaceTexture surface) {
}
private class PreviewThread extends Thread {
DatagramPacket datagramPacket = null;
@RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN)
public PreviewThread(Surface surface) {
try {
mDecode = MediaCodec.createDecoderByType(MIME_TYPE);
final MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, mWidth, mHeight);
format.setInteger(MediaFormat.KEY_FRAME_RATE, 15);
format.setInteger(MediaFormat.KEY_BIT_RATE, 1900000);
format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
byte[] header_sps = {0, 0, 0, 1, 103, 66, 0, 41, -115, -115, 64, 80, 30, -48, 15, 8, -124, 83, -128};
byte[] header_pps = {0, 0, 0, 1, 104, -54, 67, -56};
format.setByteBuffer("csd-0", ByteBuffer.wrap(header_sps));
format.setByteBuffer("csd-1", ByteBuffer.wrap(header_pps));
mDecode.configure(format, surface, null, 0);
mDecode.start();
} catch (IOException e) {
e.printStackTrace();
}
start();
}
@RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN)
@Override
public void run() {
boolean isFirst = true;
int pre_seq_num = 0;
int destPos = 0;
int h264Length;
//The loop below undoes the protocol, matching the sender's packet fragmentation.
while (true) {
if (mSocket != null) {
try {
byte[] data = new byte[1500];
datagramPacket = new DatagramPacket(data, data.length);
mSocket.receive(datagramPacket);
} catch (IOException e) {
e.printStackTrace();
}
}
//Data received in a single packet
byte[] rtpData = datagramPacket.getData();
if (rtpData != null) {
//Extract the size of the payload carried in this packet (header bytes 12-15, big-endian)
int l3 = (rtpData[12] << 24) & 0xff000000;
int l4 = (rtpData[13] << 16) & 0x00ff0000;
int l5 = (rtpData[14] << 8) & 0x0000ff00;
int l6 = rtpData[15] & 0x000000FF;
h264Length = l3 + l4 + l5 + l6;
Log.i(TAG, "run: h264Length = " + h264Length);
//Extract the 16-bit sequence number (header bytes 2-3)
byte[] snm = new byte[2];
System.arraycopy(rtpData, 2, snm, 0, 2);
int seq_num = CalculateUtil.byte2short(snm);
Log.i(TAG, "seq_num = " + seq_num);
//Extract the 32-bit timestamp (header bytes 4-7)
int timeStamp1 = (rtpData[4] << 24) & 0xff000000;
int timeStamp2 = (rtpData[5] << 16) & 0x00ff0000;
int timeStamp3 = (rtpData[6] << 8) & 0x0000ff00;
int timeStamp4 = rtpData[7] & 0x000000FF;
int timeStamp = timeStamp1 + timeStamp2 + timeStamp3 + timeStamp4;
Log.i(TAG, "timeStamp = " + timeStamp);
//Debug check I used earlier to estimate the packet-loss rate from sequence-number gaps.
if (isFirst) {
pre_seq_num = seq_num;
isFirst = false;
} else {
if (seq_num - pre_seq_num > 1) {
Log.i(TAG, "Packet loss" + (seq_num - pre_seq_num));
} else if (seq_num - pre_seq_num < 1) {
Log.i(TAG, "Out of order packets" + (seq_num - pre_seq_num));
}
pre_seq_num = seq_num;
}
//The low 5 bits of byte 16 (the NAL/FU indicator) tell whether this frame was fragmented.
byte indicatorType = (byte) (CalculateUtil.byteToInt(rtpData[16]) & 0x1f);
Log.i(TAG, "indicatorType = " + indicatorType);
//Fragmented (FU indicator type 28)
if (indicatorType == 28) {
byte s = (byte) (rtpData[17] & 0x80); //S (start) bit of the FU header
byte e = (byte) (rtpData[17] & 0x40); //E (end) bit of the FU header
Log.i(TAG, "s = " + s + "; e = " + e);
if (s == -128) { //first packet of a frame: payload starts at byte 18
System.arraycopy(rtpData, 18, mH264Data, destPos, h264Length);
destPos += h264Length;
} else if (e == 64) { //last packet of a frame: append, then feed the assembled frame to the decoder
System.arraycopy(rtpData, 18, mH264Data, destPos, h264Length);
destPos = 0;
offerDecoder(mH264Data, mH264Data.length);
CalculateUtil.memset(mH264Data, 0, mH264Data.length);
} else { //a middle packet of the frame
System.arraycopy(rtpData, 18, mH264Data, destPos, h264Length);
destPos += h264Length;
}
} else { //not fragmented: the packet carries a complete NAL unit starting at byte 16
System.arraycopy(rtpData, 16, mH264Data, 0, h264Length);
offerDecoder(mH264Data, mH264Data.length);
CalculateUtil.memset(mH264Data, 0, mH264Data.length);
}
}
}
}
}
@RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN)
private void offerDecoder(byte[] input, int length) {
Log.d(TAG, "offerDecoder");
try {
ByteBuffer[] inputBuffers = mDecode.getInputBuffers();
int inputBufferIndex = mDecode.dequeueInputBuffer(0);
if (inputBufferIndex >= 0) {
ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
inputBuffer.clear();
try {
inputBuffer.put(input, 0, length);
} catch (Exception e) {
e.printStackTrace();
}
mDecode.queueInputBuffer(inputBufferIndex, 0, length, 0, 0);
}
MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
int outputBufferIndex = mDecode.dequeueOutputBuffer(bufferInfo, 0);
while (outputBufferIndex >= 0) {
//If a valid surface was specified when configuring the codec,
//passing true renders this output buffer to the surface.
mDecode.releaseOutputBuffer(outputBufferIndex, true);
outputBufferIndex = mDecode.dequeueOutputBuffer(bufferInfo, 0);
}
} catch (Throwable t) {
t.printStackTrace();
}
}
}
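The code above relies on a small helper class, CalculateUtil, which isn't shown in the post. Below is a minimal sketch of it, inferred purely from how it is called above (byte2short, byteToInt, memset); the original implementation may differ in the details.
package com.byd.rtpclientdemo;
//Minimal helper sketch, reconstructed from its call sites in ClientTextureView.
public class CalculateUtil {
    //Combine two bytes (big-endian) into an unsigned 16-bit value;
    //used to read the RTP sequence number from header bytes 2-3.
    public static int byte2short(byte[] b) {
        return ((b[0] & 0xFF) << 8) | (b[1] & 0xFF);
    }
    //Convert a byte to its unsigned int value (0-255).
    public static int byteToInt(byte b) {
        return b & 0xFF;
    }
    //Fill the first 'length' bytes of the buffer with 'value', like C's memset;
    //used to clear the frame buffer after a frame has been handed to the decoder.
    public static void memset(byte[] buffer, int value, int length) {
        for (int i = 0; i < length; i++) {
            buffer[i] = (byte) value;
        }
    }
}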