因为对H265不熟,在写屏幕共享时踩了很多坑,无端浪费巨量时间,在此记录一下。
注意事项:
- 5.0后,可以采用异步方式得到编码结果;
- 解码器必须接收到两个csd才会真正开始解码,所以发送数据时必须在关键帧写入这两个数据;
- KEY_I_FRAME_INTERVAL 的值如果为整数1,是没有关键帧的(我也不知道原因),这也是一个坑;
- MediaCodec 编解码,视频的宽高不能超过硬件能支持的最大值,这个最大值可以通过 MediaCodecInfo.CodecCapabilities.getVideoCapabilities() 查询到,不需要去找配置文件。
编码器
/**
 * Initializes the H.265 (HEVC) hardware encoder in asynchronous callback mode.
 * Frames are supplied through an input Surface; encoded packets are pushed to
 * {@code mOnScreenShotListener.onShotFinish(byte[])}.
 *
 * NOTE(review): width/height must not exceed the hardware limit — query it via
 * MediaCodecInfo.CodecCapabilities.getVideoCapabilities() rather than guessing.
 */
public void initEncoder() {
    try {
        MediaFormat format = MediaFormat.createVideoFormat(MediaFormat.MIMETYPE_VIDEO_HEVC, mImageWidth, mImageHeight);
        // Input comes from a Surface, not from YUV input buffers.
        format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
        format.setInteger(KEY_BIT_RATE, mImageWidth * mImageHeight);
        format.setInteger(KEY_FRAME_RATE, 20);
        // Key frame every 0.9 s. A fractional value (setFloat requires API 25+)
        // works around the observed "no key frames when the value is exactly 1".
        format.setFloat(KEY_I_FRAME_INTERVAL, 0.9F);
        encoder = MediaCodec.createEncoderByType(MediaFormat.MIMETYPE_VIDEO_HEVC);
        encoder.setCallback(new MediaCodec.Callback() {
            @Override
            public void onInputBufferAvailable(@NonNull MediaCodec codec, int index) {
                // Nothing to do: frames arrive via the input Surface.
            }

            @Override
            public void onOutputBufferAvailable(@NonNull MediaCodec codec, int index, @NonNull MediaCodec.BufferInfo info) {
                // FIX: flags is a bit mask — test the bit with & instead of ==.
                // The original '==' comparison missed key frames carrying any
                // additional flag bits, so csd data was never re-sent for them.
                if ((info.flags & MediaCodec.BUFFER_FLAG_KEY_FRAME) != 0) {
                    MediaFormat outputFormat = codec.getOutputFormat();
                    // For HEVC, csd-0 carries VPS+SPS+PPS together; csd-1 is
                    // normally absent (splitting SPS/PPS is an H.264 convention).
                    ByteBuffer csd0 = outputFormat.getByteBuffer("csd-0");
                    ByteBuffer csd1 = outputFormat.getByteBuffer("csd-1");
                    // Re-send codec config ahead of every key frame so a decoder
                    // joining mid-stream can initialize.
                    if (csd0 != null) {
                        byte[] buffer = new byte[csd0.remaining()];
                        csd0.get(buffer);
                        mOnScreenShotListener.onShotFinish(buffer);
                    }
                    if (csd1 != null) {
                        byte[] buffer = new byte[csd1.remaining()];
                        csd1.get(buffer);
                        mOnScreenShotListener.onShotFinish(buffer);
                    }
                }
                ByteBuffer outputBuffer = codec.getOutputBuffer(index);
                if (outputBuffer != null && info.size > 0) {
                    byte[] buffer = new byte[outputBuffer.remaining()];
                    outputBuffer.get(buffer);
                    if (mOnScreenShotListener != null) {
                        // Ship the encoded packet to the transport layer.
                        mOnScreenShotListener.onShotFinish(buffer);
                    }
                }
                // FIX: always return the buffer to the codec. The original only
                // released inside the size>0 branch, leaking buffers for
                // zero-size/config output and eventually stalling the encoder.
                codec.releaseOutputBuffer(index, false);
            }

            @Override
            public void onError(@NonNull MediaCodec codec, @NonNull MediaCodec.CodecException e) {
                // NOTE(review): codec errors are silently swallowed here —
                // at minimum they should be logged and surfaced to the caller.
            }

            @Override
            public void onOutputFormatChanged(@NonNull MediaCodec codec, @NonNull MediaFormat format) {}
        });
        encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    } catch (IOException e) {
        e.printStackTrace();
    }
}
解码器
// This class extends Thread; start() wires up the websocket before launching run().
@Override
public void start() {
    // Start the decoder first so write() can safely feed it as soon as
    // the websocket begins delivering bytes.
    decoder.start();
    running = true;
    socketClient = new MWebSocketClient(uri, new MWebSocketClient.CallBack() {
        @Override
        public void onClientStatus(boolean isConnected) {
            if (isConnected){
                // On connect, ask the sender (targetId) to start streaming.
                Action action = new Action("receive ask",targetId);
                socketClient.send(new Gson().toJson(action));
            }
        }
        @Override
        public void onByteReceived(byte[] buff) {
            // Each websocket binary message is one encoded packet; feed it
            // straight into the decoder's input queue.
            write(buff);
        }
    });
    socketClient.setConnectionLostTimeout(5000);
    socketClient.connect();
    // Launch the render loop in run() on this Thread.
    super.start();
}
@Override
public void run() {
    // Render loop: drain decoded frames onto the Surface until stopped.
    // NOTE(review): when no output is ready this loop busy-polls with a
    // 100 µs timeout — consider a longer timeout to reduce CPU use.
    while (running) {
        MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
        // Negative return values (TRY_AGAIN_LATER / format-changed) simply
        // skip the inner loop and poll again.
        int outputBufferIndex = decoder.dequeueOutputBuffer(bufferInfo, 100);
        while (outputBufferIndex >= 0) {
            // render=true: the frame is drawn onto the configured Surface.
            decoder.releaseOutputBuffer(outputBufferIndex, true);
            // Keep draining with zero timeout until no frame is pending.
            outputBufferIndex = decoder.dequeueOutputBuffer(bufferInfo, 0);
        }
    }
}
//这个Surface是自己布局文件中定义的
private void initDecoder(Surface surface) {
try {
final MediaFormat format = MediaFormat.createVideoFormat(MediaFormat.MIMETYPE_VIDEO_HEVC, RuntimeEnv.DEFAULT_WIDTH, RuntimeEnv.DEFAULT_HEIGHT);
format.setInteger(MediaFormat.KEY_BIT_RATE, RuntimeEnv.DEFAULT_WIDTH * RuntimeEnv.DEFAULT_HEIGHT);
format.setInteger(MediaFormat.KEY_FRAME_RATE, 20);
decoder = MediaCodec.createDecoderByType(MediaFormat.MIMETYPE_VIDEO_HEVC);
decoder.configure(format,
surface,
null, 0);
} catch (IOException e) {
e.printStackTrace();
}
}
/**
 * Feeds one received packet into the decoder's next free input buffer.
 * If no input buffer frees up within the timeout the packet is dropped —
 * acceptable for live screen sharing, where stale frames are not worth queuing.
 *
 * @param data one complete encoded access unit (or csd config) from the sender
 */
private void write(byte[] data) {
    if (data == null || data.length == 0) {
        return; // nothing to decode
    }
    // Timeout is in microseconds (100 ms). A negative index means the codec
    // has no free input buffer yet; the caller will deliver more data later.
    int index = decoder.dequeueInputBuffer(100000);
    if (index >= 0) {
        ByteBuffer inputBuffer = decoder.getInputBuffer(index);
        if (inputBuffer == null) {
            return; // defensive: should not happen for a valid index
        }
        inputBuffer.clear();
        inputBuffer.put(data, 0, data.length);
        // FIX: presentationTimeUs is in MICROseconds. The original passed
        // System.currentTimeMillis() (milliseconds), producing bogus timestamps.
        decoder.queueInputBuffer(index, 0, data.length, System.nanoTime() / 1000, 0);
    }
}