android ffmpeg mediacodec 硬解码
FFmpeg 3.1 以后加入了 MediaCodec 硬解码支持。
用法其实很简单,首先编译一个带硬解码的ffmpeg 库文件。
#!/bin/bash
# Build a MediaCodec-enabled FFmpeg for Android (armeabi-v7a) and link the
# resulting static libraries into a single libffmpeg.so.
# Adjust PLATFORM and TOOLCHAIN to match your local NDK installation.
PLATFORM=/Users/lake/test/android-ndk-r14b/platforms/android-19/arch-arm/
TOOLCHAIN=/Users/lake/test/android-ndk-r14b/toolchains/arm-linux-androideabi-4.9/prebuilt/darwin-x86_64
PREFIX=./android
function build_one
{
./configure \
--prefix=$PREFIX \
--target-os=android \
--cross-prefix=$TOOLCHAIN/bin/arm-linux-androideabi- \
--arch=arm \
--sysroot=$PLATFORM \
--extra-cflags="-I$PLATFORM/usr/include" \
--cc=$TOOLCHAIN/bin/arm-linux-androideabi-gcc \
--nm=$TOOLCHAIN/bin/arm-linux-androideabi-nm \
--disable-shared \
--disable-ffmpeg \
--disable-ffplay \
--disable-ffprobe \
--disable-ffserver \
--disable-doc \
--disable-symver \
--enable-small \
--enable-gpl \
--enable-asm \
--enable-jni \
--enable-mediacodec \
--enable-decoder=h264_mediacodec \
--enable-hwaccel=h264_mediacodec \
--enable-decoder=hevc_mediacodec \
--enable-decoder=mpeg4_mediacodec \
--enable-decoder=vp8_mediacodec \
--enable-decoder=vp9_mediacodec \
--enable-nonfree \
--enable-version3 \
--extra-cflags="-Os -fpic $ADDI_CFLAGS" \
--extra-ldflags="$ADDI_LDFLAGS" \
$ADDITIONAL_CONFIGURE_FLAG
make clean
# FIX: the original ran "make j8", which tries to build a target literally
# named "j8"; the parallel-jobs flag is "-j8".
make -j8
make install
# Link all static libraries into one shared libffmpeg.so for easy packaging.
$TOOLCHAIN/bin/arm-linux-androideabi-ld \
-rpath-link=$PLATFORM/usr/lib \
-L$PLATFORM/usr/lib \
-L$PREFIX/lib \
-soname libffmpeg.so -shared -nostdlib -Bsymbolic --whole-archive --no-undefined -o \
$PREFIX/libffmpeg.so \
libavcodec/libavcodec.a \
libavfilter/libavfilter.a \
libswresample/libswresample.a \
libavformat/libavformat.a \
libavutil/libavutil.a \
libswscale/libswscale.a \
libavdevice/libavdevice.a \
libpostproc/libpostproc.a \
-lc -lm -lz -ldl -llog --dynamic-linker=/system/bin/linker \
$TOOLCHAIN/lib/gcc/arm-linux-androideabi/4.9.x/libgcc.a
}
# arm v7vfp
CPU=arm
# NOTE(review): OPTIMIZE_CFLAGS is defined but never passed to configure
# (only ADDI_CFLAGS is used via --extra-cflags); wire it in or drop it.
# Also note "-march=arm" is not a valid gcc arch value — confirm the
# intended value (e.g. armv7-a) before enabling it.
OPTIMIZE_CFLAGS="-mfloat-abi=softfp -mfpu=vfp -marm -march=$CPU "
ADDI_CFLAGS="-marm"
build_one
PLATFORM和TOOLCHAIN请用自己电脑的路径
温馨提示:请不要使用 Android Studio 自带的 ndk-bundle,因为它不是完整的 NDK,编译会出错。
这样编译出来的是单独的一个 libffmpeg.so 文件,体积较大,大约 50~60MB。
然后硬解码的代码
#include <jni.h>
#include <stdio.h>
#include <string.h>
#include <unistd.h>
extern "C" {
#include <libavformat/avformat.h>
#include <libavutil/imgutils.h>
#include <libswscale/swscale.h>
#include <libavcodec/jni.h>
}
#ifdef ANDROID
/* On Android, pull in the logcat and native-window APIs used by the player
 * below, and route log output through logcat. */
#include <android/log.h>
#include <android/native_window.h>
#include <android/native_window_jni.h>
/* LOGE/LOGI tag messages with distinctive strings so they are easy to
 * filter in logcat. */
#define LOGE(format, ...) __android_log_print(ANDROID_LOG_ERROR, "(>_<)", format, ##__VA_ARGS__)
#define LOGI(format, ...) __android_log_print(ANDROID_LOG_INFO, "(^_^)", format, ##__VA_ARGS__)
#else
/* Off-device builds fall back to stdout so the file still compiles. */
#define LOGE(format, ...) printf("(>_<) " format "\n", ##__VA_ARGS__)
#define LOGI(format, ...) printf("(^_^) " format "\n", ##__VA_ARGS__)
#endif
extern "C"
JNIEXPORT jint JNICALL Java_com_lake_ndktest_FFmpeg_play
(JNIEnv *env, jobject obj, jstring input_jstr, jobject surface) {
    LOGI("play");
    // Decodes the video stream of the file named by input_jstr — via the
    // MediaCodec hardware decoders where available, software otherwise —
    // and renders each frame to the given Surface.
    // Returns 0 on success, -1 on any error.
    //
    // All resources are declared up front so the single cleanup block at
    // "done:" can release whatever was acquired; the original code leaked
    // every resource on each early-error return.
    jint rc = -1;
    AVFormatContext *pFormatCtx = NULL;
    AVCodecParameters *pCodecPar = NULL;
    AVCodec *pCodec = NULL;
    AVCodecContext *pCodecCtx = NULL;
    AVFrame *pFrame = NULL;          // holds decoded frames
    AVFrame *pFrameRGBA = NULL;      // holds the RGBA conversion for rendering
    uint8_t *buffer = NULL;
    struct SwsContext *sws_ctx = NULL;
    ANativeWindow *nativeWindow = NULL;
    ANativeWindow_Buffer windowBuffer;
    AVPacket packet;
    int videoStream = -1;
    int videoWidth = 0, videoHeight = 0;
    int i, ret, numBytes;

    // Video file path passed in from the Java layer. Must be released with
    // ReleaseStringUTFChars (the original never did).
    const char *file_name = env->GetStringUTFChars(input_jstr, NULL);
    if (file_name == NULL) {
        return -1; // JVM out of memory
    }
    LOGI("file_name:%s\n", file_name);
    av_register_all(); // still required in ffmpeg 3.x (deprecated in 4.0+)
    pFormatCtx = avformat_alloc_context();
    // Open video file. On failure avformat_open_input frees the context and
    // NULLs the pointer, so the cleanup below stays safe.
    if (avformat_open_input(&pFormatCtx, file_name, NULL, NULL) != 0) {
        LOGE("Couldn't open file:%s\n", file_name);
        goto done; // Couldn't open file
    }
    // Retrieve stream information
    if (avformat_find_stream_info(pFormatCtx, NULL) < 0) {
        LOGE("Couldn't find stream information.");
        goto done;
    }
    // Find the first video stream; the container may also hold audio or
    // other streams, which this player ignores.
    for (i = 0; i < (int) pFormatCtx->nb_streams; i++) {
        if (pFormatCtx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
            videoStream = i;
            break;
        }
    }
    if (videoStream == -1) {
        LOGE("Didn't find a video stream or audio steam.");
        goto done; // Didn't find a video stream
    }
    LOGI("找到视频流");
    pCodecPar = pFormatCtx->streams[videoStream]->codecpar;
    // Pick a decoder: prefer the MediaCodec hardware decoders enabled at
    // configure time, fall back to ffmpeg's software decoder otherwise.
    switch (pCodecPar->codec_id) {
        case AV_CODEC_ID_H264:
            pCodec = avcodec_find_decoder_by_name("h264_mediacodec");
            break;
        case AV_CODEC_ID_MPEG4:
            pCodec = avcodec_find_decoder_by_name("mpeg4_mediacodec");
            break;
        case AV_CODEC_ID_HEVC:
            pCodec = avcodec_find_decoder_by_name("hevc_mediacodec");
            break;
        default:
            pCodec = avcodec_find_decoder(pCodecPar->codec_id);
            break;
    }
    if (pCodec == NULL) {
        LOGE("Couldn't find Codec.\n");
        goto done;
    }
    LOGI("获取解码器");
    pCodecCtx = avcodec_alloc_context3(pCodec);
    if (pCodecCtx == NULL) {
        LOGE("Couldn't allocate codec context.\n");
        goto done;
    }
    // avcodec_parameters_to_context() returns >= 0 on success; the original
    // tested "!= 0", which can reject a successful call.
    if (avcodec_parameters_to_context(pCodecCtx, pCodecPar) < 0) {
        LOGE("Couldn't copy codec context");
        goto done; // Error copying codec context
    }
    // Guard against division by zero when the frame rate is unknown.
    if (pFormatCtx->streams[videoStream]->r_frame_rate.den != 0) {
        LOGI("视频流帧率:%d fps\n", pFormatCtx->streams[videoStream]->r_frame_rate.num /
                                 pFormatCtx->streams[videoStream]->r_frame_rate.den);
    }
    {
        // duration is in AV_TIME_BASE (microsecond) units. Divide BEFORE
        // narrowing to int — the original cast the 64-bit value first, which
        // overflows for videos longer than ~35 minutes.
        int iTotalSeconds = (int) (pFormatCtx->duration / 1000000);
        int iHour = iTotalSeconds / 3600;
        int iMinute = iTotalSeconds % 3600 / 60;
        int iSecond = iTotalSeconds % 60;
        LOGI("持续时间:%02d:%02d:%02d\n", iHour, iMinute, iSecond);
    }
    LOGI("视频时长:%lld微秒\n", pFormatCtx->streams[videoStream]->duration);
    LOGI("持续时间:%lld微秒\n", pFormatCtx->duration);
    LOGI("获取解码器SUCESS");
    if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
        LOGE("Could not open codec.");
        goto done; // Could not open codec
    }
    LOGI("获取native window");
    // Get the native window backing the Java Surface.
    nativeWindow = ANativeWindow_fromSurface(env, surface);
    if (nativeWindow == NULL) {
        LOGE("Couldn't get native window from surface.\n");
        goto done;
    }
    LOGI("获取视频宽高");
    videoWidth = pCodecCtx->width;
    videoHeight = pCodecCtx->height;
    LOGI("设置native window的buffer大小,可自动拉伸");
    // Size the window buffers; the compositor scales them to the view.
    ANativeWindow_setBuffersGeometry(nativeWindow, videoWidth, videoHeight,
                                     WINDOW_FORMAT_RGBA_8888);
    LOGI("Allocate video frame");
    pFrame = av_frame_alloc();
    LOGI("用于渲染");
    pFrameRGBA = av_frame_alloc();
    if (pFrameRGBA == NULL || pFrame == NULL) {
        LOGE("Could not allocate video frame.");
        goto done;
    }
    LOGI("Determine required buffer size and allocate buffer");
    numBytes = av_image_get_buffer_size(AV_PIX_FMT_RGBA, pCodecCtx->width,
                                        pCodecCtx->height, 1);
    if (numBytes < 0) {
        LOGE("Could not get image buffer size.\n");
        goto done;
    }
    buffer = (uint8_t *) av_malloc(numBytes * sizeof(uint8_t));
    if (buffer == NULL) {
        LOGE("Could not allocate frame buffer.\n");
        goto done;
    }
    av_image_fill_arrays(pFrameRGBA->data, pFrameRGBA->linesize, buffer,
                         AV_PIX_FMT_RGBA, pCodecCtx->width, pCodecCtx->height, 1);
    LOGI("由于解码出来的帧格式不是RGBA的,在渲染之前需要进行格式转换");
    // Decoded frames are not RGBA; convert to the window's pixel format.
    sws_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height,
                             pCodecCtx->pix_fmt,
                             pCodecCtx->width, pCodecCtx->height,
                             AV_PIX_FMT_RGBA,
                             SWS_BICUBIC, NULL, NULL, NULL);
    if (sws_ctx == NULL) {
        LOGE("Cannot initialize the conversion context!\n");
        goto done;
    }
    LOGI("格式转换成功");
    LOGE("开始播放");
    while (av_read_frame(pFormatCtx, &packet) >= 0) {
        // Only feed packets belonging to the chosen video stream.
        if (packet.stream_index == videoStream) {
            // Packets without a pts carry AV_NOPTS_VALUE — don't log garbage.
            if (packet.pts != AV_NOPTS_VALUE) {
                // Presentation time of this packet, in seconds.
                float timestamp = packet.pts *
                        av_q2d(pFormatCtx->streams[videoStream]->time_base);
                LOGI("timestamp=%f", timestamp);
            }
            ret = avcodec_send_packet(pCodecCtx, &packet);
            if (ret < 0) {
                av_packet_unref(&packet); // original leaked the packet here
                break;
            }
            // Drain every frame the decoder has ready (one packet may yield
            // zero or more frames) and blit each one to the window.
            while (avcodec_receive_frame(pCodecCtx, pFrame) == 0) {
                if (ANativeWindow_lock(nativeWindow, &windowBuffer, 0) < 0) {
                    continue; // window temporarily unavailable; drop frame
                }
                sws_scale(sws_ctx, (uint8_t const *const *) pFrame->data,
                          pFrame->linesize, 0, pCodecCtx->height,
                          pFrameRGBA->data, pFrameRGBA->linesize);
                uint8_t *dst = (uint8_t *) windowBuffer.bits;
                int dstStride = windowBuffer.stride * 4; // stride is in pixels
                uint8_t *src = pFrameRGBA->data[0];
                int srcStride = pFrameRGBA->linesize[0];
                // The window stride and frame stride usually differ, so copy
                // row by row — and only as many bytes per row (and rows) as
                // BOTH buffers hold. The original copied srcStride bytes into
                // every row, overflowing the window buffer whenever
                // dstStride < srcStride.
                int rowBytes = srcStride < dstStride ? srcStride : dstStride;
                int rows = videoHeight < windowBuffer.height ? videoHeight
                                                             : windowBuffer.height;
                for (int h = 0; h < rows; h++) {
                    memcpy(dst + h * dstStride, src + h * srcStride, rowBytes);
                }
                ANativeWindow_unlockAndPost(nativeWindow);
            }
        }
        av_packet_unref(&packet);
    }
    LOGE("播放完成");
    rc = 0;

done:
    // Centralized cleanup: each pointer is either NULL or owns a live
    // resource, and the av_*_free helpers all tolerate NULL.
    sws_freeContext(sws_ctx);         // original never freed the SwsContext
    av_free(buffer);
    av_frame_free(&pFrameRGBA);       // av_free() on an AVFrame leaks its internals
    av_frame_free(&pFrame);
    avcodec_free_context(&pCodecCtx); // closes the codec AND frees the context
    avformat_close_input(&pFormatCtx);
    if (nativeWindow != NULL) {
        ANativeWindow_release(nativeWindow); // original leaked the window ref
    }
    env->ReleaseStringUTFChars(input_jstr, file_name); // original leaked this
    return rc;
}
jint JNI_OnLoad(JavaVM* vm, void* reserved)// called automatically by the JVM when this JNI library is loaded
{
    LOGI("ffmpeg JNI_OnLoad");
    // Hand the JavaVM to ffmpeg: the *_mediacodec decoders need it to call
    // back into the Android MediaCodec Java APIs. The original ignored the
    // return value, silently disabling hardware decoding on failure.
    if (av_jni_set_java_vm(vm, reserved) < 0) {
        LOGE("av_jni_set_java_vm failed");
    }
    return JNI_VERSION_1_6;
}
软解和硬解区别太大了,尤其1080p的视频,软解很慢,硬解速度非常快,质的提升啊。所以理论上手机支持硬解的视频优先硬解,不支持再调用ffmpeg的软解。
demo例子 https://github.com/lakehubo/NDKtest/tree/ffmpeg-meidacodec