上一篇文章我们讲过了ffmpeg的编译,这一次我们讲一下在使用ffmpeg的时候遇到的问题
1.打开AndroidStudio创建一个C++项目,选择C++11
注意:新版本的AS把CMakeLists文件放在了cpp目录下,我们需要把这个文件放到src同级别目录下,同时修改一下app下的build.gradle 引用CMakeLists的配置
build.gradle配置
apply plugin: 'com.android.application'
android {
compileSdkVersion 29
buildToolsVersion "29.0.3"
defaultConfig {
applicationId "com.xxxx.ffmepg"
minSdkVersion 21
targetSdkVersion 29
versionCode 1
versionName "1.0"
testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
externalNativeBuild {
cmake {
cppFlags "-std=c++11 -frtti -fexceptions"
abiFilters 'x86_64'//x86_64 only, because the Mac emulator's CPU architecture is x86_64
}
}
}
buildTypes {
release {
minifyEnabled false
proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
}
}
externalNativeBuild {
cmake {
path "CMakeLists.txt"//changed from the default src/main/cpp/CMakeLists.txt (the file was moved next to src/)
version "3.6.0"//pin a lower CMake version than the default to avoid garbled log output
}
}
sourceSets{
main{
jniLibs.srcDirs = ["src/main/jniLibs"]
}
}
}
将上一篇ffmpeg编译产出的x86_64目录(.so文件)和include目录(头文件)分别放到jniLibs和cpp目录下
配置CMakeLists.txt
# For more information about using CMake with Android Studio, read the
# documentation: https://d.android.com/studio/projects/add-native-code.html

# Sets the minimum version of CMake required to build the native library.
cmake_minimum_required(VERSION 3.4.1)

# Directory that holds the prebuilt FFmpeg .so files for the ABI being built.
# (This is why CMakeLists.txt lives next to src/: the relative path stays simple.)
set(lib_src_DIR ${CMAKE_SOURCE_DIR}/src/main/jniLibs/${ANDROID_ABI})

# Declare each prebuilt FFmpeg shared library as an IMPORTED target.
#   avcodec-57    - encoding/decoding (the core library)
#   avfilter-6    - filter/effect processing
#   avformat-57   - container (mux/demux) handling
#   avutil-55     - utilities most other libs depend on
#   swresample-2  - audio sample format conversion
#   swscale-4     - video pixel format conversion/scaling
# A foreach loop replaces six copy-pasted add_library/set_target_properties pairs;
# the target names are unchanged, so consumers are unaffected.
foreach(ffmpeg_lib
        avcodec-57
        avfilter-6
        avformat-57
        avutil-55
        swresample-2
        swscale-4)
    add_library(${ffmpeg_lib} SHARED IMPORTED)
    set_target_properties(${ffmpeg_lib} PROPERTIES
        IMPORTED_LOCATION ${lib_src_DIR}/lib${ffmpeg_lib}.so)
endforeach()

# Allow calls to APIs FFmpeg marks as deprecated without failing the build.
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=gnu++11 -Wno-deprecated-declarations")

# The JNI bridge library built from our own source.
add_library(native-lib
            SHARED
            ${CMAKE_SOURCE_DIR}/src/main/cpp/native-lib.cpp)

# FFmpeg headers — scoped to the one target that needs them instead of
# the directory-wide include_directories().
target_include_directories(native-lib PRIVATE
    ${CMAKE_SOURCE_DIR}/src/main/cpp/include)

# Locate the NDK log library; CMake already searches the NDK sysroot.
find_library(log-lib log)

# Link the JNI bridge against the FFmpeg imported targets and the NDK log lib.
target_link_libraries(native-lib PRIVATE
    avcodec-57
    avfilter-6
    avformat-57
    avutil-55
    swresample-2
    swscale-4
    ${log-lib})
native-lib.cpp视频解码代码,代码部分的实现都是针对前一篇的流程所编写
#include <jni.h>
#include <string>
#include <android/log.h>
extern "C"{
//Core encode/decode library
#include "libavcodec/avcodec.h"
//Container (mux/demux) handling library
#include "libavformat/avformat.h"
//Utility library (image buffer helpers)
#include "libavutil/imgutils.h"
//Video pixel-format conversion/scaling library
#include "libswscale/swscale.h"
//Forward declarations, kept inside extern "C" so the symbols get C linkage
//and the JVM can resolve them by name.
//Environment/configuration smoke test
JNIEXPORT void JNICALL Java_com_crobot_ffmepg_FFmpegUtils_cppFFmpegConfig(JNIEnv *env, jobject jobj);
//Video decoding: input media file -> raw YUV420P file
JNIEXPORT void JNICALL Java_com_crobot_ffmepg_FFmpegUtils_cppFFmpegDecode(JNIEnv *env, jobject jobj, jstring jInFilePath, jstring jOutFilePath);
}
//Function implementations
//1. NDK A/V codec: FFmpeg configuration test, backed by a Java native method.
//Logs the build-time configuration string of the FFmpeg libraries we linked,
//which confirms the .so files are loadable and callable.
JNIEXPORT void JNICALL Java_com_crobot_ffmepg_FFmpegUtils_cppFFmpegConfig(JNIEnv *env, jobject jobj) {
    const char *build_config = avcodec_configuration();
    __android_log_print(ANDROID_LOG_INFO, "main", "输出信息:%s", build_config);
}
//2. NDK A/V codec: FFmpeg video decoding -> raw pixel data (YUV420P).
//Decodes the video stream of the input file and appends each frame,
//converted to YUV420P, to the output file.
//jInFilePath:  absolute path of the input media file
//jOutFilePath: absolute path of the output .yuv file
JNIEXPORT void JNICALL Java_com_crobot_ffmepg_FFmpegUtils_cppFFmpegDecode(JNIEnv *env,
                                                                          jobject jobj,
                                                                          jstring jInFilePath,
                                                                          jstring jOutFilePath){
    //Step 1: register all muxers/demuxers/codecs (required in FFmpeg 3.x before other calls).
    av_register_all();
    //Step 2: open the container file.
    //The format context stores the global information about the media
    //(similar to a Context in Android).
    AVFormatContext *avformat_context = avformat_alloc_context();
    const char *cInFilePath = env->GetStringUTFChars(jInFilePath, NULL);
    int avformat_open_input_result = avformat_open_input(&avformat_context, cInFilePath, NULL, NULL);
    if (avformat_open_input_result != 0){
        //FIX: error_info was an uninitialized char* — av_strerror wrote through a
        //wild pointer (undefined behavior). Use a real stack buffer.
        char error_info[1024] = {0};
        av_strerror(avformat_open_input_result, error_info, sizeof(error_info));
        __android_log_print(ANDROID_LOG_INFO, "main", "错误信息:%s", error_info);
        //FIX: release resources on this early-return path instead of leaking them.
        env->ReleaseStringUTFChars(jInFilePath, cInFilePath);
        avformat_free_context(avformat_context);
        return;
    }
    //Step 3: read stream information (video stream, audio stream, subtitles, ...).
    int avformat_find_stream_info_result = avformat_find_stream_info(avformat_context, NULL);
    if (avformat_find_stream_info_result < 0){
        char error_info[1024] = {0};
        av_strerror(avformat_find_stream_info_result, error_info, sizeof(error_info));
        __android_log_print(ANDROID_LOG_INFO, "main", "打开错误信息:%s", error_info);
        env->ReleaseStringUTFChars(jInFilePath, cInFilePath);
        avformat_close_input(&avformat_context);
        return;
    }
    //Step 4: find the video decoder.
    //4.1 locate the index of the video stream.
    int av_stream_index = -1;
    for (int i = 0; i < avformat_context->nb_streams; ++i) {
        if (avformat_context->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO){
            av_stream_index = i;
            break;
        }
    }
    //FIX: the original never checked the index; a file without a video stream
    //made it dereference streams[-1].
    if (av_stream_index == -1){
        __android_log_print(ANDROID_LOG_INFO, "main", "找不到视频流");
        env->ReleaseStringUTFChars(jInFilePath, cInFilePath);
        avformat_close_input(&avformat_context);
        return;
    }
    //4.2 decoder context (stream->codec is deprecated in newer FFmpeg, but valid
    //for the 3.x libraries this project links).
    AVCodecContext *avcodec_context = avformat_context->streams[av_stream_index]->codec;
    //4.3 find the decoder matching the stream's codec id.
    AVCodec *avcodec = avcodec_find_decoder(avcodec_context->codec_id);
    if (avcodec == NULL){
        __android_log_print(ANDROID_LOG_INFO, "main", "找不到视频解码器");
        env->ReleaseStringUTFChars(jInFilePath, cInFilePath);
        avformat_close_input(&avformat_context);
        return;
    }
    __android_log_print(ANDROID_LOG_INFO, "main", "解码器名称:%s", avcodec->name);
    //Step 5: open the decoder.
    int avcodec_open2_result = avcodec_open2(avcodec_context, avcodec, NULL);
    if (avcodec_open2_result != 0){
        char error_info[1024] = {0};
        av_strerror(avcodec_open2_result, error_info, sizeof(error_info));
        __android_log_print(ANDROID_LOG_INFO, "main", "打开解码器失败:%s", error_info);
        env->ReleaseStringUTFChars(jInFilePath, cInFilePath);
        avformat_close_input(&avformat_context);
        return;
    }
    //Step 6: read compressed packets one by one and decode them.
    //FIX: av_packet_alloc() returns a properly initialized packet; the original
    //raw av_malloc left the AVPacket fields uninitialized, which is undefined
    //behavior once av_packet_free/unref touches them.
    AVPacket *packet = av_packet_alloc();
    //Frame that receives decoder output in whatever pixel format the stream uses
    //(YUV420P, YUV422P, ...).
    AVFrame *avframe_in = av_frame_alloc();
    //Scaler context: convert from the stream's pixel format to YUV420P at the
    //same width/height, using bicubic interpolation.
    SwsContext *swsContext = sws_getContext(avcodec_context->width,
                                            avcodec_context->height,
                                            avcodec_context->pix_fmt,
                                            avcodec_context->width,
                                            avcodec_context->height,
                                            AV_PIX_FMT_YUV420P,
                                            SWS_BICUBIC,
                                            NULL,
                                            NULL,
                                            NULL);
    //Destination frame backed by a manually allocated YUV420P buffer.
    AVFrame *avframe_out_yuv420p = av_frame_alloc();
    //Buffer size for one YUV420P frame at this resolution (1 = byte alignment).
    int buffer_size = av_image_get_buffer_size(AV_PIX_FMT_YUV420P,
                                               avcodec_context->width,
                                               avcodec_context->height,
                                               1);
    uint8_t *out_buffer = (uint8_t *) av_malloc(buffer_size);
    //Point avframe_out_yuv420p's data/linesize arrays into out_buffer.
    av_image_fill_arrays(avframe_out_yuv420p->data,
                         avframe_out_yuv420p->linesize,
                         out_buffer,
                         AV_PIX_FMT_YUV420P,
                         avcodec_context->width,
                         avcodec_context->height,
                         1);
    //Open the output .yuv file for writing.
    const char *cOutFilePath = env->GetStringUTFChars(jOutFilePath, NULL);
    FILE *out_file_yuv = fopen(cOutFilePath, "wb");
    if (out_file_yuv == NULL){
        __android_log_print(ANDROID_LOG_INFO, "main", "文件打开失败...");
        //FIX: release everything allocated so far instead of leaking on this path.
        av_free(out_buffer);
        av_frame_free(&avframe_out_yuv420p);
        av_frame_free(&avframe_in);
        sws_freeContext(swsContext);
        av_packet_free(&packet);
        env->ReleaseStringUTFChars(jOutFilePath, cOutFilePath);
        env->ReleaseStringUTFChars(jInFilePath, cInFilePath);
        avcodec_close(avcodec_context);
        avformat_close_input(&avformat_context);
        return;
    }
    int result, y_size = 0, u_size = 0, v_size = 0, current_frame_index = 0;
    while (av_read_frame(avformat_context, packet) >= 0){
        //Only decode packets that belong to the video stream.
        if (packet->stream_index == av_stream_index){
            //Decode in two steps: send one compressed packet, then drain every
            //frame it produced.
            avcodec_send_packet(avcodec_context, packet);
            //FIX: drain in a loop — one packet may yield several frames; the
            //original single avcodec_receive_frame call could drop frames.
            while ((result = avcodec_receive_frame(avcodec_context, avframe_in)) == 0){
                //Convert the decoded frame to YUV420P.
                sws_scale(swsContext,
                          (const uint8_t *const *) avframe_in->data,
                          avframe_in->linesize,
                          0,
                          avcodec_context->height,
                          avframe_out_yuv420p->data,
                          avframe_out_yuv420p->linesize);
                //YUV420P layout: one Y per pixel; one U and one V per 4 pixels,
                //so the U and V planes are each a quarter of the Y plane.
                y_size = avcodec_context->width * avcodec_context->height;
                u_size = y_size / 4;
                v_size = y_size / 4;
                //Write the three planes: Y, then U, then V.
                fwrite(avframe_out_yuv420p->data[0], 1, y_size, out_file_yuv);
                fwrite(avframe_out_yuv420p->data[1], 1, u_size, out_file_yuv);
                fwrite(avframe_out_yuv420p->data[2], 1, v_size, out_file_yuv);
                ++current_frame_index;
                __android_log_print(ANDROID_LOG_INFO, "main", "当前解析第%d帧", current_frame_index);
            }
        }
        //FIX: unreference the packet on every iteration; the original leaked
        //each packet's payload buffer.
        av_packet_unref(packet);
    }
    //Final step: release everything, roughly in reverse order of allocation.
    av_packet_free(&packet);
    fclose(out_file_yuv);
    av_frame_free(&avframe_in);
    av_frame_free(&avframe_out_yuv420p);
    av_free(out_buffer);          //FIX: av_malloc memory must be freed with av_free, not free().
    sws_freeContext(swsContext);  //FIX: the original leaked the SwsContext.
    avcodec_close(avcodec_context);
    //FIX: avformat_close_input both closes the input and frees the context;
    //avformat_free_context alone left the demuxer's input open.
    avformat_close_input(&avformat_context);
    //FIX: release the UTF-8 copies obtained with GetStringUTFChars.
    env->ReleaseStringUTFChars(jOutFilePath, cOutFilePath);
    env->ReleaseStringUTFChars(jInFilePath, cInFilePath);
}
调用Native部分代码
public class FFmpegUtils {
//1. NDK A/V codec: FFmpeg configuration test (logs the FFmpeg build configuration)
public native static void cppFFmpegConfig();
//2. NDK A/V codec: FFmpeg video decoding -> raw pixel data (YUV420P)
//inFilePath:  absolute path of the input media file
//outFilePath: absolute path of the output .yuv file
public native static void cppFFmpegDecode(String inFilePath, String outFilePath);
static {
System.loadLibrary("native-lib");
}
}