初始化MediaCodec:
解码器类型(mime)，例如 "video/avc"；视频宽度(width)；视频高度(height)；最大数据输入大小(max_input_size)；csd-0 头信息；csd-1 头信息。
// Configure the decoder format: mime type (e.g. "video/avc"), frame size,
// maximum input buffer size, and the codec-specific data (csd-0 / csd-1,
// taken from avCodecContext->extradata on the native side).
mediaFormat = MediaFormat.createVideoFormat(mime, width, height);
mediaFormat.setInteger(MediaFormat.KEY_WIDTH, width);
mediaFormat.setInteger(MediaFormat.KEY_HEIGHT, height);
// KEY_MAX_INPUT_SIZE is an Integer key — the original used setLong(),
// which stores the wrong value type for this key.
mediaFormat.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, width * height);
// Original used fullwidth quotes (“csd-0”) which do not compile in Java.
mediaFormat.setByteBuffer("csd-0", ByteBuffer.wrap(csd0)); // avCodecContext->extradata
mediaFormat.setByteBuffer("csd-1", ByteBuffer.wrap(csd1)); // avCodecContext->extradata
MediaCodec开始解码:
// Feed one compressed packet into the decoder; dequeueInputBuffer(10)
// waits at most 10 microseconds for a free input buffer.
int inputBufferIndex = mediaCodec.dequeueInputBuffer(10);
if(inputBufferIndex >= 0)
{
ByteBuffer byteBuffer = mediaCodec.getInputBuffers()[inputBufferIndex];
byteBuffer.clear();
byteBuffer.put(bytes);
// NOTE(review): when no input buffer is free (index < 0) this packet is
// silently dropped — confirm the caller tolerates dropped packets.
mediaCodec.queueInputBuffer(inputBufferIndex, 0, size, pts, 0);
}
// Drain every decoded frame that is ready; releaseOutputBuffer(..., true)
// renders the frame onto the surface passed to configure().
int index = mediaCodec.dequeueOutputBuffer(info, 10);
while (index >= 0) {
mediaCodec.releaseOutputBuffer(index, true);
index = mediaCodec.dequeueOutputBuffer(info, 10);
}
size: avPacket->size，bytes: avPacket->data
在 WlPlayer 中编写供 c++ 回调的方法，此方法初始化 MediaFormat 和 MediaCodec。
//初始化
private MediaFormat mediaFormat;
private MediaCodec mediaCodec;
// Fixed typo: the original declared `surfacel`, but initMediaCodec()
// below reads `surface`, which would otherwise be undeclared.
private Surface surface;
/**
 * Called back from native code to set up hardware decoding.
 * Builds a MediaFormat from the stream parameters and the codec-specific
 * data (avCodecContext->extradata), then configures and starts a
 * MediaCodec bound to the render surface.
 *
 * @param codecName FFmpeg codec name, mapped to an Android mime type
 * @param width     video width in pixels
 * @param height    video height in pixels
 * @param csd_0     codec-specific data block 0 (e.g. SPS for H.264)
 * @param csd_1     codec-specific data block 1 (e.g. PPS for H.264)
 */
public void initMediaCodec(String codecName, int width, int height, byte[] csd_0, byte[] csd_1) {
    if (surface != null) {
        // Switch the GL renderer into MediaCodec (surface) mode.
        wlGLSurfaceView.getWlRender().setRenderType(WlRender.RENDER_MEDIACODEC);
        String mime = WlVideoSupportUtil.findVideoCodecName(codecName);
        mediaFormat = MediaFormat.createVideoFormat(mime, width, height);
        // KEY_MAX_INPUT_SIZE is an Integer key; the original setLong() call
        // stored the wrong value type for this key.
        mediaFormat.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, width * height);
        mediaFormat.setByteBuffer("csd-0", ByteBuffer.wrap(csd_0)); // avCodecContext->extradata
        mediaFormat.setByteBuffer("csd-1", ByteBuffer.wrap(csd_1)); // avCodecContext->extradata
        MyLog.d(mediaFormat.toString());
        try {
            mediaCodec = MediaCodec.createDecoderByType(mime);
            info = new MediaCodec.BufferInfo();
            mediaCodec.configure(mediaFormat, surface, null, 0);
            mediaCodec.start();
        } catch (IOException e) {
            e.printStackTrace();
        }
    } else {
        // No surface to render into — report instead of crashing later.
        if (wlOnErrorListener != null) {
            wlOnErrorListener.onError(2001, "surface is null");
        }
    }
}
添加c++调用函数
在 WlCallJava.h 中声明 jmethodID 和 声明函数头
// Notifies the Java layer to create and start MediaCodec for hardware
// decoding; csd_0/csd_1 carry the codec-specific data (extradata).
void onCallInitMediaCodec(char *mime, int width, int height, int csd0_size, int csd1_size,
uint8_t *csd_0, uint8_t *csd_1);
在 WlCallJava.cpp 实现
void WlCallJava::onCallInitMediaCodec(char *mime, int width, int height, int csd0_size, int csd1_size,
uint8_t *csd_0, uint8_t *csd_1) {
JNIEnv *jniEnv;
if (javaVM->AttachCurrentThread(&jniEnv, 0) != JNI_OK) {
if (LOG_DEBUG) {
LOGE("call onCallInitMediaCodec error");
}
return;
}
//uint8_t * 转 jbyteArray byte[]
jbyteArray csd0 = jniEnv->NewByteArray(csd0_size);
//填充
jniEnv->SetByteArrayRegion(csd0, 0, csd0_size, reinterpret_cast<const jbyte *>(csd_0));
jbyteArray csd1 = jniEnv->NewByteArray(csd1_size);
jniEnv->SetByteArrayRegion(csd1, 0, csd1_size, reinterpret_cast<const jbyte *>(csd_1));
//char* 转 jstring
jstring type = jniEnv->NewStringUTF(mime);
jniEnv->CallVoidMethod(jobj, jmid_initmediacodec, type, width, height, csd0, csd1);
//资源回收
jniEnv->DeleteLocalRef(csd0);
jniEnv->DeleteLocalRef(csd1);
jniEnv->DeleteLocalRef(type);
javaVM->DetachCurrentThread();
}
在 WlFFmpeg.cpp void WlFFmpeg::start() 中
end:
// Debug switch: uncomment the next line to force the software-decode path
// (the original Chinese comment "强行硬解码" has the meaning inverted).
// supportMediaCodec = false;
if (supportMediaCodec) {
// Mark this stream for the MediaCodec (hardware) decode path.
video->codectype = CODEC_MEDIACODEC;
// NOTE(review): the same extradata buffer and size are passed for both
// csd-0 and csd-1 — for H.264 the AVC extradata blob contains SPS and
// PPS together, so the Java side wraps the identical blob twice; confirm
// MediaCodec accepts this for the streams being played.
video->wlCallJava->onCallInitMediaCodec(
codecName,
video->avCodecContext->width,
video->avCodecContext->height,
video->avCodecContext->extradata_size,
video->avCodecContext->extradata_size,
video->avCodecContext->extradata,
video->avCodecContext->extradata);
}
在 WlPlayer 中 新建c++调用解码函数
// Fixed typo: the original declared `surfacel`, but decodeAvpacket()
// below reads `surface`, which would otherwise be undeclared.
private Surface surface;
private MediaCodec.BufferInfo info;
/**
 * Feeds one bitstream-filtered packet into MediaCodec and renders every
 * decoded frame that is ready onto the configured surface.
 *
 * @param dataSize number of valid bytes in {@code data} (avPacket->size)
 * @param data     annex-b packet bytes (avPacket->data)
 */
public void decodeAvpacket(int dataSize, byte[] data) {
    // Ignore the call unless the codec is ready and the packet is non-empty.
    boolean ready = surface != null && dataSize > 0 && data != null && mediaCodec != null;
    if (!ready) {
        return;
    }
    // Queue the packet if an input buffer is free within 10 microseconds.
    int inIndex = mediaCodec.dequeueInputBuffer(10);
    if (inIndex >= 0) {
        ByteBuffer input = mediaCodec.getInputBuffers()[inIndex];
        input.clear();
        input.put(data);
        // pts 0: A/V sync is handled on the native side before this call.
        mediaCodec.queueInputBuffer(inIndex, 0, dataSize, 0, 0);
    }
    // Drain and render all currently-available output frames.
    for (int outIndex = mediaCodec.dequeueOutputBuffer(info, 10);
         outIndex >= 0;
         outIndex = mediaCodec.dequeueOutputBuffer(info, 10)) {
        mediaCodec.releaseOutputBuffer(outIndex, true);
    }
}
为上述函数 decodeAvpacket 创建c++回调
WlCallJava.h
jmethodID jmid_decodeavpacket;
void onCallDecodeAVPacket(int datasize, uint8_t *data);
WlCallJava.cpp
jmid_decodeavpacket = env->GetMethodID(jlz, "decodeAvpacket", "(I[B)V");
// Hands one compressed packet to the Java layer (WlPlayer.decodeAvpacket).
// Called once per packet from the native decode thread, so the thread is
// attached to the VM for the duration of the call.
void WlCallJava::onCallDecodeAVPacket(int datasize, uint8_t *data) {
    JNIEnv *jniEnv;
    if (javaVM->AttachCurrentThread(&jniEnv, 0) != JNI_OK) {
        if (LOG_DEBUG) {
            LOGE("call onCallDecodeAVPacket error");
        }
        return;
    }
    // Copy the packet payload into a Java byte[].
    jbyteArray jdata = jniEnv->NewByteArray(datasize);
    jniEnv->SetByteArrayRegion(jdata, 0, datasize, reinterpret_cast<const jbyte *>(data));
    jniEnv->CallVoidMethod(jobj, jmid_decodeavpacket, datasize, jdata);
    // BUGFIX: clear any pending Java exception before detaching, otherwise
    // the VM aborts with "native thread exiting with pending exception".
    if (jniEnv->ExceptionCheck()) {
        jniEnv->ExceptionDescribe();
        jniEnv->ExceptionClear();
    }
    // Release the local ref before detaching.
    jniEnv->DeleteLocalRef(jdata);
    javaVM->DetachCurrentThread();
}
修改一下音视频同步的两个方法
WlVideo.h
double getFrameDiffTime(AVFrame *avFrame, AVPacket *avPacket);
WlVideo.cpp
// Returns audio_clock - video_clock in seconds for the given frame or
// packet; a positive value means the video is lagging behind the audio.
double WlVideo::getFrameDiffTime(AVFrame *avFrame, AVPacket *avPacket) {
    // Prefer the decoded frame's best-effort timestamp; fall back to the
    // packet pts (the MediaCodec path has no decoded AVFrame on this side).
    double timestamp = 0;
    if (avFrame != NULL) {
        timestamp = av_frame_get_best_effort_timestamp(avFrame);
    } else if (avPacket != NULL) {
        timestamp = avPacket->pts;
    }
    if (timestamp == AV_NOPTS_VALUE) {
        timestamp = 0; // unknown timestamp — treat as stream start
    }
    // Convert from stream time_base units to seconds.
    timestamp *= av_q2d(time_base);
    // Advance the video clock only on real (non-zero) timestamps.
    if (timestamp > 0) {
        clock = timestamp;
    }
    return audio->clock - clock;
}
在 WlVideo.cpp 调用 void *playVideo(void *data)
// Pull every annex-b packet produced by the bitstream filter and hand it
// to the Java MediaCodec decoder.
while (av_bsf_receive_packet(video->abs_ctx, avPacket) == 0) {
    LOGE("开始硬解码");
    // A/V sync: delay so this packet is presented in step with the audio.
    double diff = video->getFrameDiffTime(NULL, avPacket);
    av_usleep(video->getDelayTime(diff) * 1000 * 1000);
    // Hand the packet to the Java layer (WlPlayer.decodeAvpacket).
    video->wlCallJava->onCallDecodeAVPacket(avPacket->size, avPacket->data);
    // BUGFIX: the original called av_packet_free(&avPacket) (plus a
    // redundant av_free) here, which frees the packet and NULLs the
    // pointer that the loop condition passes to av_bsf_receive_packet()
    // on the next iteration. Only drop the payload; the code that
    // allocated avPacket remains responsible for freeing it after the loop.
    av_packet_unref(avPacket);
    continue;
}