解码流程:
根据FFmpeg中视频解码器的名称找到对应手机硬解码器,如果存在则可以硬解码,走硬解码流程;不存在就只能走软解码流程。
硬解码:
使用MediaCodec直接解码AVpacket,此时需要对AVPacket进行格式过滤,然后MediaCodec解码后的数据用OpenGL ES渲染出来。
检测视频是否能被硬解码:
FFmpeg视频解码器名称获取 "h264" 对应解码器 "video/avc"
((const AVCodec*)(video->avCodecContext->codec))->name;
遍历当前手机解码器MediaCodecList查找是否存在 "video/avc"
写一个java类方法
package com.ywl5320.myplayer.util;
import android.media.MediaCodecList;
import java.util.HashMap;
import java.util.Map;
public class WlVideoSupportUtil {

    // Maps FFmpeg decoder names (e.g. "h264") to Android MediaCodec MIME types.
    private static Map<String, String> codeMap = new HashMap<>();

    static {
        codeMap.put("h264", "video/avc");
    }

    /**
     * Looks up the Android MediaCodec MIME type for an FFmpeg codec name.
     *
     * @param ffcodecName FFmpeg decoder name, e.g. "h264"
     * @return the matching MIME type, e.g. "video/avc", or "" if unknown
     */
    public static String findVideoCodecName(String ffcodecName) {
        String mimeType = codeMap.get(ffcodecName);
        return mimeType != null ? mimeType : "";
    }

    /**
     * Checks whether this device has a MediaCodec decoder for the given codec.
     * Requires API level 16+ (MediaCodecList).
     *
     * @param ffcodecName FFmpeg decoder name, e.g. "h264"
     * @return true if a matching hardware/software MediaCodec decoder exists
     */
    public static boolean isSupportCodec(String ffcodecName) {
        // Hoist the map lookup out of the loops — it is loop-invariant,
        // and an unknown codec ("" mapping) can never match anything.
        String mimeType = findVideoCodecName(ffcodecName);
        if (mimeType.isEmpty()) {
            return false;
        }
        int count = MediaCodecList.getCodecCount();
        for (int i = 0; i < count; i++) {
            for (String type : MediaCodecList.getCodecInfoAt(i).getSupportedTypes()) {
                if (type.equals(mimeType)) {
                    return true;
                }
            }
        }
        return false;
    }
}
在Player类里面写一个c++调用java的方法调用检测是否支持硬解码
/**
 * Called from native code to ask whether this device offers a
 * MediaCodec decoder for the given FFmpeg codec name.
 *
 * @param ffcodecName FFmpeg decoder name, e.g. "h264"
 * @return true if hardware decoding is available for this codec
 */
public boolean onCallSupportMediaCodec(String ffcodecName) {
    boolean hardwareSupported = WlVideoSupportUtil.isSupportCodec(ffcodecName);
    return hardwareSupported;
}
在c++ WlCallJava.h 中写 jmethodID
jmethodID jmid_supportvideo;
构造器初始化
jmid_supportvideo = env->GetMethodID(jlz, "onCallSupportMediaCodec", "(Ljava/lang/String;)Z");
写方法声明
bool onCallIsSupportVideo(const char * ffcodecName);
实现
*需要把 char* 转成jstring
/**
 * Asks the Java layer whether the device has a MediaCodec decoder for the
 * given FFmpeg codec name.
 *
 * Attaches the calling native thread to the JVM, invokes
 * onCallSupportMediaCodec(String), then detaches again.
 *
 * @param ffcodecName FFmpeg decoder name, e.g. "h264"
 * @return true if hardware decoding is supported; false on attach failure
 */
bool WlCallJava::onCallIsSupportVideo(const char *ffcodecName) {
    bool support = false;
    JNIEnv *jniEnv;
    if (javaVM->AttachCurrentThread(&jniEnv, 0) != JNI_OK) {
        if (LOG_DEBUG) {
            // Bug fix: the original message named the wrong method
            // ("call onCallComplete worng") — copy-paste error plus typo.
            LOGE("call onCallIsSupportVideo wrong");
        }
        return support;
    }
    // Convert char* to jstring for the Java call.
    jstring type = jniEnv->NewStringUTF(ffcodecName);
    support = jniEnv->CallBooleanMethod(jobj, jmid_supportvideo, type);
    // Release the local reference before detaching the thread.
    jniEnv->DeleteLocalRef(type);
    javaVM->DetachCurrentThread();
    return support;
}
在 WlFFmpeg.h 新建全局变量
//是否支持硬解码
bool supportMediaCodec = false;
在 WlVideo.h 新建变量和常量
#define CODEC_YUV 0
#define CODEC_MEDIACODEC 1
//解码类型
int codectype = CODEC_YUV;
在 WlFFmpeg.cpp void WlFFmpeg::start() 方法中在判断 video 不为空的情况下先判断视频是否能被硬解码
supportMediaCodec = false;
// Give the video object a reference to the audio side (used for A/V sync).
video->audio = audio;
// FFmpeg decoder name for the opened stream, e.g. "h264".
const char *codecName = ((const AVCodec *) video->avCodecContext->codec)->name;
// Ask the Java layer whether a matching hardware decoder exists.
// (Fixed: the original wrote `if (supportMediaCodec = ...)` — assignment
// inside the condition — and then re-tested the same flag in a second `if`.)
supportMediaCodec = callJava->onCallIsSupportVideo(codecName);
if (supportMediaCodec) {
    LOGE("当前设备支持硬解码");
    video->codectype = CODEC_MEDIACODEC;
}
在 WlVideo.cpp 渲染判断硬解码软解码标志位
if (video->codectype == CODEC_MEDIACODEC) {
LOGE("硬解码视频");
av_packet_free(&avPacket);
av_free(avPacket);
avPacket = NULL;
} else if (video->codectype == CODEC_YUV){
...
软解码的全部代码
// Soft-decode path: send one AVPacket to the FFmpeg decoder, convert the
// resulting frame to YUV420P if needed, and hand it to the Java layer for
// OpenGL ES rendering.
// NOTE(review): this excerpt sits inside a decode loop whose head is not
// visible here; each `continue` jumps back to fetch the next packet.
pthread_mutex_lock(&video->codecMutex);
if (avcodec_send_packet(video->avCodecContext, avPacket) != 0) {
// Decoder refused the packet: drop it and try the next one.
// NOTE(review): av_packet_free() already frees the packet and NULLs the
// pointer, so the following av_free()/NULL assignment look like redundant
// no-ops — confirm before removing (same pattern repeats below).
av_packet_free(&avPacket);
av_free(avPacket);
avPacket = NULL;
pthread_mutex_unlock(&video->codecMutex);
continue;
}
AVFrame *avFrame = av_frame_alloc();
// NOTE(review): exactly one avcodec_receive_frame() per packet — if the
// decoder buffers internally this may skip frames; verify for the codecs used.
if (avcodec_receive_frame(video->avCodecContext, avFrame) != 0) {
// No decoded frame available: release frame and packet, continue the loop.
av_frame_free(&avFrame);
av_free(avFrame);
avFrame = NULL;
av_packet_free(&avPacket);
av_free(avPacket);
avPacket = NULL;
pthread_mutex_unlock(&video->codecMutex);
continue;
}
// LOGD("child thread decoded one avFrame successfully");
if (avFrame->format == AV_PIX_FMT_YUV420P) {
// Frame is already YUV420P: render it directly.
double diff = video->getFrameDiffTime(avFrame);
LOGE("diff is %f", diff);
// Sleep to keep video paced against the audio clock (seconds -> microseconds).
av_usleep(video->getDelayTime(diff) * 1000 * 1000);
video->wlCallJava->onCallRenderYUV(
avFrame->linesize[0],
video->avCodecContext->height,
avFrame->data[0],
avFrame->data[1],
avFrame->data[2]);
} else {
// Any other pixel format: convert to YUV420P via swscale before rendering.
AVFrame *pFrameYUV420 = av_frame_alloc();
// Required buffer size for:
// 1 target pixel format
// 2 video width
// 3 video height
int num = av_image_get_buffer_size(
AV_PIX_FMT_YUV420P,
video->avCodecContext->width,
video->avCodecContext->height,
1);
uint8_t *buffer = static_cast<uint8_t *>(av_malloc(num * sizeof(uint8_t)));
// Point pFrameYUV420's data/linesize arrays into the buffer.
av_image_fill_arrays(
pFrameYUV420->data,
pFrameYUV420->linesize,
buffer,
AV_PIX_FMT_YUV420P,
video->avCodecContext->width,
video->avCodecContext->height,
1);
// Conversion context: source format -> YUV420P, same dimensions.
SwsContext *sws_cxt = sws_getContext(
video->avCodecContext->width,
video->avCodecContext->height,
video->avCodecContext->pix_fmt,
video->avCodecContext->width,
video->avCodecContext->height,
AV_PIX_FMT_YUV420P,
SWS_BICUBIC, NULL, NULL, NULL);
if (!sws_cxt) {
// Could not build the conversion context: release and skip this frame.
// NOTE(review): avFrame and avPacket are NOT released on this path —
// looks like a leak; confirm against the enclosing loop.
av_frame_free(&pFrameYUV420);
av_free(pFrameYUV420);
av_free(buffer);
pthread_mutex_unlock(&video->codecMutex);
continue;
}
// Convert the decoded frame into the YUV420P buffer.
sws_scale(
sws_cxt,
avFrame->data,
avFrame->linesize,
0,
avFrame->height,
pFrameYUV420->data,
pFrameYUV420->linesize);
// Render the converted frame.
double diff = video->getFrameDiffTime(avFrame);
LOGE("diff is %f", diff);
av_usleep(video->getDelayTime(diff) * 1000 * 1000);
// NOTE(review): the width passed is avFrame->linesize[0] (source stride)
// while the plane data comes from pFrameYUV420 — confirm the strides match,
// otherwise pFrameYUV420->linesize[0] would be the correct value.
video->wlCallJava->onCallRenderYUV(
avFrame->linesize[0],
video->avCodecContext->height,
pFrameYUV420->data[0],
pFrameYUV420->data[1],
pFrameYUV420->data[2]);
// Release conversion resources.
av_frame_free(&pFrameYUV420);
av_free(pFrameYUV420);
av_free(buffer);
sws_freeContext(sws_cxt);
}
av_frame_free(&avFrame);
av_free(avFrame);
avFrame = NULL;
av_packet_free(&avPacket);
av_free(avPacket);
avPacket = NULL;
pthread_mutex_unlock(&video->codecMutex);