- 前言
本篇只做环境配置以及具体代码实现,不做源码讲解。
- 运行环境
开发软件:Android Studio 3.5.3;运行系统Mac;编译方式cmake
- 新建一个module,修改build.gradle配置,具体配置如下所示
// Module-level Gradle build script for the JNI/FFmpeg demo app.
apply plugin: 'com.android.application'
apply plugin: 'kotlin-android'
apply plugin: 'kotlin-android-extensions'
android {
compileSdkVersion 28
defaultConfig {
applicationId "com.lee.jniffmpeg"
minSdkVersion 15
targetSdkVersion 28
versionCode 1
versionName "1.0"
testInstrumentationRunner "android.support.test.runner.AndroidJUnitRunner"
externalNativeBuild {
cmake {
// Enable RTTI and C++ exceptions for the native sources.
cppFlags "-frtti -fexceptions"
}
}
// NDK options used when compiling the native .so library
ndk {
moduleName "ffmpeg" // name of the generated .so library
abiFilters 'x86', 'x86_64', 'armeabi-v7a', 'arm64-v8a' // build the .so for these four ABI architectures
}
}
buildTypes {
release {
minifyEnabled false
proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
}
}
// Point Gradle at the CMake script that builds the native code.
externalNativeBuild {
cmake {
path file('src/main/cpp/CMakeLists.txt')
}
}
}
dependencies {
implementation fileTree(dir: 'libs', include: ['*.jar'])
implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk7:$kotlin_version"
implementation 'com.android.support:appcompat-v7:28.0.0'
implementation 'com.android.support.constraint:constraint-layout:1.1.3'
testImplementation 'junit:junit:4.12'
androidTestImplementation 'com.android.support.test:runner:1.0.2'
androidTestImplementation 'com.android.support.test.espresso:espresso-core:3.0.2'
}
- 在main文件下创建cpp文件夹,添加CMakeLists.txt,添加native-lib.cpp,然后点击右键,
Link C++ Project With Gradle
CMakeLists.txt的内容如下所示:
cmake_minimum_required(VERSION 3.4.1)

# FFmpeg / libyuv headers shipped with the project.
include_directories(../cpp/include)

# Collect every C and C++ source file under this directory.
set(SOURCES)
file(GLOB_RECURSE SOURCES ${CMAKE_SOURCE_DIR}/*.cpp ${CMAKE_SOURCE_DIR}/*.c)

# Locate the Android system log library.
find_library(log-lib log)

set(distribution_DIR ../../../../libs)

# Each prebuilt shared library under src/main/jniLibs/<ABI>/ is registered
# as an IMPORTED target named after its file (lib<name>.so).
set(PREBUILT_LIBS
        yuv
        avcodec-56
        avdevice-56
        avfilter-5
        avformat-56
        avutil-54
        postproc-53
        swresample-1
        swscale-3)
foreach(prebuilt ${PREBUILT_LIBS})
    add_library(${prebuilt} SHARED IMPORTED)
    set_target_properties(${prebuilt}
            PROPERTIES IMPORTED_LOCATION
            ${CMAKE_SOURCE_DIR}/../jniLibs/${ANDROID_ABI}/lib${prebuilt}.so)
endforeach()

# set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=gnu++11")

# Build libffmpeg.so (the JNI bridge) from all collected sources.
add_library(ffmpeg SHARED ${SOURCES})

# Link the JNI bridge against the prebuilt libraries, the Android
# native-window library and the system log library.
target_link_libraries(ffmpeg
        yuv
        avutil-54
        swresample-1
        avcodec-56
        avformat-56
        swscale-3
        postproc-53
        avfilter-5
        avdevice-56
        -landroid # needed for ANativeWindow
        ${log-lib})
native-lib.cpp的内容如下所示:
#include <jni.h>
#include <string>
#include <cstdlib>
#include <android/log.h>

using namespace std;

// FFmpeg is a C library; wrap its headers in extern "C" so the C++
// compiler does not apply name mangling to their declarations.
extern "C"
{
// codec (encode/decode) API
#include "libavcodec/avcodec.h"
// container (de)muxing API
#include "libavformat/avformat.h"
// pixel-format conversion / scaling API
#include "libswscale/swscale.h"
#include "libavutil/avutil.h"
#include "libavutil/frame.h"
}

// Logcat helpers. BUGFIX: the original macros ended with ';', which produced
// double semicolons at call sites and broke use inside if/else without braces.
// The caller's own ';' now terminates the statement.
#define LOGI(FORMAT, ...) __android_log_print(ANDROID_LOG_INFO,"jason",FORMAT,##__VA_ARGS__)
#define LOGE(FORMAT, ...) __android_log_print(ANDROID_LOG_ERROR,"jason",FORMAT,##__VA_ARGS__)
extern "C" JNIEXPORT jstring JNICALL
Java_com_lee_jniffmpeg_MainActivity_stringFromJNI(
        JNIEnv *env,
        jobject /* this */) {
    // Build the greeting locally, then return a Java-owned UTF-8 copy.
    const string greeting = "Hello from C++";
    return env->NewStringUTF(greeting.c_str());
}
- 编译libyuv库和ffmpeg库(具体编译方式以后有时间再写)
编译成功后如下所示
- 具体实现代码
(1)新建DjPlayer文件,加载静态的so文件
package com.lee.jniffmpeg;
import android.view.Surface;
/**
 * Thin Java wrapper around the native FFmpeg-based player.
 * The native library is loaded once when this class is first used.
 */
public class DjPlayer {
/**
 * Decodes the video file at {@code input} and renders its frames onto
 * {@code surface}. Implemented natively in player.cpp.
 */
public native void render(String input,Surface surface);
static {
// Library name must match add_library(ffmpeg ...) in CMakeLists.txt.
System.loadLibrary("ffmpeg");
}
}
(2)新建player.cpp文件,用来进行jni的相关实现
#include <jni.h>
#include <string>
#include <cstdlib>
#include <android/log.h>
#include <unistd.h>
#include <android/native_window_jni.h>
#include <android/native_window.h>
#include "libyuv/libyuv.h"

using namespace std;

// FFmpeg is a C library; wrap its headers in extern "C" so the C++
// compiler does not apply name mangling to their declarations.
extern "C"
{
// codec (encode/decode) API
#include "libavcodec/avcodec.h"
// container (de)muxing API
#include "libavformat/avformat.h"
// pixel-format conversion / scaling API
#include "libswscale/swscale.h"
}

// Logcat helpers. BUGFIX: the original macros ended with ';', which produced
// double semicolons at call sites and broke use inside if/else without braces.
// The caller's own ';' now terminates the statement.
#define LOGI(FORMAT, ...) __android_log_print(ANDROID_LOG_INFO,"jason",FORMAT,##__VA_ARGS__)
#define LOGE(FORMAT, ...) __android_log_print(ANDROID_LOG_ERROR,"jason",FORMAT,##__VA_ARGS__)
extern "C"
JNIEXPORT void JNICALL
Java_com_lee_jniffmpeg_DjPlayer_render(JNIEnv *env, jobject thiz, jstring input_jstr,
                                       jobject surface) {
    const char *input_cstr = env->GetStringUTFChars(input_jstr, NULL);
    // 1. Register all demuxers/decoders (required once, FFmpeg 2.x API).
    av_register_all();
    // Container-level (demuxing) context.
    AVFormatContext *pFormatCtx = avformat_alloc_context();
    // 2. Open the input video file.
    if (avformat_open_input(&pFormatCtx, input_cstr, NULL, NULL) != 0) {
        LOGE("%s", "打开输入视频文件失败");
        // BUGFIX: the original leaked pFormatCtx and the UTF chars here.
        avformat_free_context(pFormatCtx);
        env->ReleaseStringUTFChars(input_jstr, input_cstr);
        return;
    }
    // 3. Read stream information.
    if (avformat_find_stream_info(pFormatCtx, NULL) < 0) {
        LOGE("%s", "获取视频信息失败");
        avformat_close_input(&pFormatCtx);
        env->ReleaseStringUTFChars(input_jstr, input_cstr);
        return;
    }
    // Locate the video stream's index inside pFormatCtx->streams.
    int video_stream_idx = -1;
    for (int i = 0; i < pFormatCtx->nb_streams; i++) {
        if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) {
            video_stream_idx = i;
            break;
        }
    }
    // BUGFIX: the original indexed streams[-1] when the file had no video stream.
    if (video_stream_idx == -1) {
        LOGE("%s", "找不到视频流");
        avformat_close_input(&pFormatCtx);
        env->ReleaseStringUTFChars(input_jstr, input_cstr);
        return;
    }
    // 4. Find a decoder for the video stream.
    AVCodecContext *pCodeCtx = pFormatCtx->streams[video_stream_idx]->codec;
    AVCodec *pCodec = avcodec_find_decoder(pCodeCtx->codec_id);
    if (pCodec == NULL) {
        LOGE("%s", "无法解码");
        avformat_close_input(&pFormatCtx);
        env->ReleaseStringUTFChars(input_jstr, input_cstr);
        return;
    }
    // 5. Open the decoder.
    if (avcodec_open2(pCodeCtx, pCodec, NULL) < 0) {
        LOGE("%s", "解码器无法打开");
        avformat_close_input(&pFormatCtx);
        env->ReleaseStringUTFChars(input_jstr, input_cstr);
        return;
    }
    // Compressed packet read from the container.
    AVPacket *packet = (AVPacket *) av_malloc(sizeof(AVPacket));
    // Decoded frames: YUV from the decoder, RGBA view onto the window buffer.
    AVFrame *yuv_frame = av_frame_alloc();
    AVFrame *rgb_frame = av_frame_alloc();
    // Native window wrapping the Java Surface for direct drawing.
    ANativeWindow *nativeWindow = ANativeWindow_fromSurface(env, surface);
    ANativeWindow_Buffer outBuffer;
    // Buffer geometry (width/height/pixel format) is constant for the whole
    // stream — set it once instead of once per frame as the original did.
    ANativeWindow_setBuffersGeometry(nativeWindow, pCodeCtx->width, pCodeCtx->height,
                                     WINDOW_FORMAT_RGBA_8888);
    int len, got_frame, framecount = 0;
    // 6. Read compressed packets one at a time.
    while (av_read_frame(pFormatCtx, packet) >= 0) {
        // Decode AVPacket -> AVFrame; got_frame is non-zero when a complete
        // picture was produced (zero means no frame could be decompressed).
        len = avcodec_decode_video2(pCodeCtx, yuv_frame, &got_frame, packet);
        if (got_frame) {
            LOGI("解码%d帧", framecount++);
            ANativeWindow_lock(nativeWindow, &outBuffer, NULL);
            // Point rgb_frame's data/linesize at the window buffer so the
            // conversion below writes straight into outBuffer.bits.
            avpicture_fill((AVPicture *) rgb_frame, static_cast<const uint8_t *>(outBuffer.bits),
                           PIX_FMT_RGBA, pCodeCtx->width, pCodeCtx->height);
            // YUV420P -> 32-bit RGBA. NOTE(review): the U (data[1]) and
            // V (data[2]) planes are deliberately swapped here, presumably to
            // match the RGBA_8888 byte order — confirm against libyuv docs.
            libyuv::I420ToARGB(yuv_frame->data[0], yuv_frame->linesize[0],
                               yuv_frame->data[2], yuv_frame->linesize[2],
                               yuv_frame->data[1], yuv_frame->linesize[1],
                               rgb_frame->data[0], rgb_frame->linesize[0],
                               pCodeCtx->width, pCodeCtx->height);
            ANativeWindow_unlockAndPost(nativeWindow);
            // ~16 ms pacing (~60 fps); a real player would sync to PTS.
            usleep(1000 * 16);
        }
        av_free_packet(packet);
    }
    // BUGFIX: release everything the original leaked on the success path.
    ANativeWindow_release(nativeWindow);
    av_frame_free(&yuv_frame);
    av_frame_free(&rgb_frame);
    av_free(packet);
    avcodec_close(pCodeCtx);
    avformat_close_input(&pFormatCtx);
    env->ReleaseStringUTFChars(input_jstr, input_cstr);
}
- 编写相关的kotlin代码,进行调用