Screen Casting
1. Can screen casting skip encoding and send raw YUV directly? No: the raw frames are far too large to transmit in real time (a rough bandwidth calculation follows this list).
2. Casting from one device to another moves a large amount of data, so the frames must be compressed before transmission.
3. How screen casting works: in the architecture, the left side is the push (sender) stream and the right side is the pull (player) stream. Screen casting is built on screen recording and is organized into a casting layer, an encoding layer, and a transport layer.
H.264 push stream and pull stream.
Screen casting can use one of three transport methods.
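To see why raw YUV cannot simply be sent, here is a rough calculation using the 720x1280, 20 fps configuration from the code in this section. This is illustrative arithmetic only; the class name is made up for the example.

public class BandwidthEstimate {
    public static void main(String[] args) {
        int width = 720, height = 1280, fps = 20;
        // YUV420 stores 1.5 bytes per pixel (full-resolution Y plane, 2x2-subsampled U and V)
        long rawBytesPerFrame = width * height * 3 / 2;      // 1,382,400 bytes per frame
        long rawBytesPerSecond = rawBytesPerFrame * fps;     // 27,648,000 bytes ~= 27.6 MB/s uncompressed
        long encodedBitsPerSecond = width * height;          // the KEY_BIT_RATE used below ~= 0.92 Mbit/s
        System.out.println("Raw YUV:      " + rawBytesPerSecond + " bytes/s");
        System.out.println("H.264 target: " + encodedBitsPerSecond + " bits/s");
    }
}

Uncompressed video at this resolution needs on the order of 200+ Mbit/s, while the encoder below targets under 1 Mbit/s, which is why encoding is mandatory.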
Push stream (sender)
package com.maniu.h265maniupush;
import static android.media.MediaFormat.KEY_BIT_RATE;
import static android.media.MediaFormat.KEY_FRAME_RATE;
import static android.media.MediaFormat.KEY_I_FRAME_INTERVAL;
import android.hardware.display.DisplayManager;
import android.hardware.display.VirtualDisplay;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.media.projection.MediaProjection;
import android.os.Environment;
import android.util.Log;
import android.view.Surface;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Arrays;
public class CodecLiveH264 extends Thread {
    // Screen-recording source
    private MediaProjection mediaProjection;
    private MediaCodec mediaCodec;
    private int width = 720;
    private int height = 1280;
    // Cached SPS/PPS; the encoder emits them only once, so they are prepended to every I-frame
    private byte[] sps_pps_buf;
    VirtualDisplay virtualDisplay;
    public static final int NAL_I = 5;     // NAL unit type 5: IDR (I) slice
    public static final int NAL_SPS = 7;   // NAL unit type 7: SPS
    private SocketLive socketLive;

    public CodecLiveH264(SocketLive socketLive, MediaProjection mediaProjection) {
        this.mediaProjection = mediaProjection;
        this.socketLive = socketLive;
    }
    public void startLive() {
        try {
            MediaFormat format = MediaFormat.createVideoFormat("video/avc", width, height);
            format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                    MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
            format.setInteger(KEY_BIT_RATE, width * height);
            format.setInteger(KEY_FRAME_RATE, 20);
            format.setInteger(KEY_I_FRAME_INTERVAL, 1);
            mediaCodec = MediaCodec.createEncoderByType("video/avc");
            mediaCodec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
            Surface surface = mediaCodec.createInputSurface();
            // Create the virtual display: captured screen frames are drawn onto the
            // encoder's input surface, so they feed straight into the encoder
            virtualDisplay = mediaProjection.createVirtualDisplay(
                    "-display",
                    width, height, 1,
                    DisplayManager.VIRTUAL_DISPLAY_FLAG_PUBLIC, surface,
                    null, null);
        } catch (IOException e) {
            e.printStackTrace();
        }
        start();
    }
    @Override
    public void run() {
        mediaCodec.start();
        MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
        while (true) {
            try {
                int outputBufferId = mediaCodec.dequeueOutputBuffer(bufferInfo, 10000);
                if (outputBufferId >= 0) {
                    ByteBuffer byteBuffer = mediaCodec.getOutputBuffer(outputBufferId);
                    dealFrame(byteBuffer, bufferInfo);
                    mediaCodec.releaseOutputBuffer(outputBufferId, false);
                }
            } catch (Exception e) {
                e.printStackTrace();
                break;
            }
        }
    }
    private void dealFrame(ByteBuffer bb, MediaCodec.BufferInfo bufferInfo) {
        // NAL header byte (e.g. 0x67): 1 forbidden bit, 2 bits nal_ref_idc (importance),
        // low 5 bits nal_unit_type (frame type)
        int offset = 4;           // 4-byte start code 00 00 00 01
        if (bb.get(2) == 0x01) {
            offset = 3;           // 3-byte start code 00 00 01
        }
        int type = (bb.get(offset) & 0x1F);   // mask the low 5 bits to get the NAL unit type
        // SPS/PPS are emitted only once by the encoder, so they must be cached
        if (type == NAL_SPS) {    // type 7 = SPS; the first output buffer takes this branch
            sps_pps_buf = new byte[bufferInfo.size];
            bb.get(sps_pps_buf);  // do not send yet; output SPS/PPS again before each I-frame
        } else if (NAL_I == type) {   // type 5 = I-frame; the second output buffer takes this branch
            final byte[] bytes = new byte[bufferInfo.size];
            bb.get(bytes);        // (about 45,459 bytes for one I-frame in the author's test run)
            byte[] newBuf = new byte[sps_pps_buf.length + bytes.length];   // SPS/PPS length + I-frame length
            System.arraycopy(sps_pps_buf, 0, newBuf, 0, sps_pps_buf.length);
            System.arraycopy(bytes, 0, newBuf, sps_pps_buf.length, bytes.length);
            // newBuf now holds SPS + PPS + I-frame; sending over the network has to go through a socket
            socketLive.sendData(newBuf);
        } else {                  // P-frames (and B-frames, if any) are sent as-is
            final byte[] bytes = new byte[bufferInfo.size];
            bb.get(bytes);
            this.socketLive.sendData(bytes);
            Log.v("david", "video data " + Arrays.toString(bytes));
        }
    }
}
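The SocketLive class used above is not shown in this section; all the encoder needs from it is a sendData(byte[]) method that pushes each encoded frame to the receiver. Below is a minimal sketch, assuming a plain TCP connection with length-prefixed frames (tutorial implementations often use a WebSocket instead); the host, port, and framing are illustrative assumptions, not the original implementation.

package com.maniu.h265maniupush;

import java.io.DataOutputStream;
import java.io.IOException;
import java.net.Socket;

// Hypothetical sender: each sendData() call writes a 4-byte length prefix followed by
// the frame bytes, so the receiver can hand exactly one frame per callback to the decoder.
public class SocketLive {
    private Socket socket;
    private DataOutputStream out;

    // Must be called off the main thread on Android (network on the main thread throws)
    public void start(String host, int port) {
        try {
            socket = new Socket(host, port);
            out = new DataOutputStream(socket.getOutputStream());
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    public void sendData(byte[] data) {
        try {
            out.writeInt(data.length);   // length prefix
            out.write(data);             // one encoded H.264 frame (SPS+PPS+I, or P)
            out.flush();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    public void close() {
        try {
            if (socket != null) socket.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}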
Pull stream (player)
package com.maniu.h265maniutoupin;
import android.media.MediaCodec;
import android.media.MediaFormat;
import android.util.Log;
import android.view.Surface;
import java.io.IOException;
import java.nio.ByteBuffer;
public class H264Player implements SocketLive.SocketCallback {
    private static final String TAG = "H264Player";
    private MediaCodec mediaCodec;

    public H264Player(Surface surface) {
        try {
            mediaCodec = MediaCodec.createDecoderByType(MediaFormat.MIMETYPE_VIDEO_AVC);
            final MediaFormat format = MediaFormat.createVideoFormat(
                    MediaFormat.MIMETYPE_VIDEO_AVC, 720, 1280);
            format.setInteger(MediaFormat.KEY_BIT_RATE, 720 * 1280);
            format.setInteger(MediaFormat.KEY_FRAME_RATE, 20);
            format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
            // Decoded frames are rendered directly onto the supplied Surface
            mediaCodec.configure(format, surface, null, 0);
            mediaCodec.start();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
    // Called whenever a complete frame arrives from the network
    @Override
    public void callBack(byte[] data) {
        Log.i(TAG, "length before decoding: " + data.length);
        // Unlike reading from a file, there is no need to scan for start-code delimiters:
        // each received data buffer is already exactly one frame.
        int index = mediaCodec.dequeueInputBuffer(100000);   // index of a free input buffer
        if (index >= 0) {
            // Get the free input buffer and copy the frame into it
            ByteBuffer inputBuffer = mediaCodec.getInputBuffer(index);
            inputBuffer.clear();
            inputBuffer.put(data, 0, data.length);
            // Queue the buffer; the hardware (DSP) decoder does the actual decoding
            mediaCodec.queueInputBuffer(index, 0, data.length, System.currentTimeMillis(), 0);
        }
        MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
        int outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, 100000);
        Log.i(TAG, "length after decoding: " + bufferInfo.size);
        // Use a while loop rather than an if: one compressed input (say 100 KB) may produce
        // several output buffers, and a decoded frame can be larger than the input.
        while (outputBufferIndex >= 0) {
            // render == true: the decoded frame is drawn to the Surface on release
            mediaCodec.releaseOutputBuffer(outputBufferIndex, true);
            outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, 0);
        }
    }
}
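On the player side, H264Player implements SocketLive.SocketCallback, and that receiving class is also not shown in this section. A matching minimal sketch, assuming the same length-prefixed TCP framing as the sender sketch above; the class layout and port handling are assumptions made only to keep the example self-contained.

package com.maniu.h265maniutoupin;

import java.io.DataInputStream;
import java.io.IOException;
import java.net.ServerSocket;
import java.net.Socket;

// Hypothetical receiver: reads one length-prefixed frame at a time and hands it to the
// callback (H264Player.callBack), which feeds it to the decoder.
public class SocketLive {

    public interface SocketCallback {
        void callBack(byte[] data);
    }

    private final SocketCallback callback;

    public SocketLive(SocketCallback callback) {
        this.callback = callback;
    }

    public void start(int port) {
        new Thread(() -> {
            try (ServerSocket server = new ServerSocket(port);
                 Socket socket = server.accept();
                 DataInputStream in = new DataInputStream(socket.getInputStream())) {
                while (true) {
                    int length = in.readInt();      // 4-byte length prefix written by the sender
                    byte[] frame = new byte[length];
                    in.readFully(frame);            // exactly one encoded H.264 frame
                    callback.callBack(frame);       // push the frame into the decoder
                }
            } catch (IOException e) {
                e.printStackTrace();
            }
        }).start();
    }
}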
The lengths logged before decoding vary from frame to frame; an I-frame is much larger than the others.
After decoding, every frame has the same length.
During transmission, I, P, and B frames all have different sizes, but after decoding the distinction disappears and every frame is the same size. The decoded size depends only on the width, height, and pixel format: size = width x height x 1.5 (the YUV420 formula, evaluated in the helper below).
For this example: 720 x 1280 x 1.5 = 1,382,400 bytes.
Y values range from 0 to 255, so each is stored in one byte.
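The 1.5 factor comes from the YUV420 layout: a full-resolution Y plane plus U and V planes that are each subsampled 2x2. A small helper showing the breakdown (the method name is made up for illustration):

    public static int yuv420FrameSize(int width, int height) {
        int ySize = width * height;              // one byte per pixel for Y (values 0-255)
        int uSize = (width / 2) * (height / 2);  // U plane, subsampled 2x2
        int vSize = (width / 2) * (height / 2);  // V plane, subsampled 2x2
        return ySize + uSize + vSize;            // 720 x 1280 -> 1,382,400 bytes
    }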