这篇文章是跟着徐福记的文章学习的,真的特别感谢互联网上这么多无私奉献的人。向他们致敬!
首先下载了徐福记的sourcecode到本地,下载地址:https://github.com/xufuji456/FFmpegAndroid
1.第一步就是把相关so库拷贝到libs文件夹下面去:
2.然后,将include文件夹以及相关C文件拷贝到Cpp目录下:
3.配置app的build.gradle相关信息:
defaultConfig {
...
// Arguments passed to CMake when it compiles the native code.
externalNativeBuild {
cmake {
cppFlags ""
}
}
// Restrict the build to these CPU ABIs; each needs a matching
// libs/<ABI>/ directory with the prebuilt .so files.
ndk {
abiFilters "armeabi-v7a", "arm64-v8a"
}
}
// Tells Gradle where the CMake build script for the native library lives.
externalNativeBuild {
cmake {
path "src/main/cpp/CMakeLists.txt"
version "3.10.2"
}
}
sourceSets {
main {
// Package the prebuilt shared libraries found under libs/<ABI>/ into the APK.
jniLibs.srcDirs = ['libs']
// Empty to disable the legacy ndk-build source scan (CMake is used instead).
jni.srcDirs = []
}
}
上面设置了支持的CPU平台、CMakeLists文件路径,以及libs文件夹路径(jniLibs.srcDirs 告诉 Gradle 预编译的 so 库所在的目录,打包 APK 时会从这里取 so 文件,所以这一项是需要设置的)。
然后进行CMake文件设置:
# For more information about using CMake with Android Studio, read the
# documentation: https://d.android.com/studio/projects/add-native-code.html
# Sets the minimum version of CMake required to build the native library.
# Sets the minimum version of CMake required to build the native library.
cmake_minimum_required(VERSION 3.4.1)

# Builds the JNI bridge and player sources into one shared library.
# Gradle automatically packages shared libraries with your APK.
add_library( # Sets the name of the library.
        media-handle
        # Sets the library as a shared library.
        SHARED
        # Provides a relative path to your source file(s).
        src/main/cpp/ffmpeg/cmdutils.c
        src/main/cpp/ffmpeg/ffmpeg.c
        src/main/cpp/ffmpeg/ffmpeg_filter.c
        src/main/cpp/ffmpeg/ffmpeg_opt.c
        src/main/cpp/ffmpeg_cmd.c
        src/main/cpp/ffmpeg/ffprobe.c
        src/main/cpp/ffmpeg/ffmpeg_hw.c
        src/main/cpp/audio_player.c
        src/main/cpp/openSL_audio_player.c
        src/main/cpp/video_player.c
        src/main/cpp/ffmpeg_pusher.cpp
        src/main/cpp/AVpacket_queue.c
        src/main/cpp/media_player.c
        src/main/cpp/video_filter.c
        src/main/cpp/audio_lame.c
        src/main/cpp/fast_start.c
        src/main/cpp/ffprobe_cmd.c
        )

# Prebuilt shared libraries: declare each one as an IMPORTED target and
# point IMPORTED_LOCATION at the .so of the ABI currently being built.
add_library( ffmpeg
        SHARED
        IMPORTED )
set_target_properties( ffmpeg
        PROPERTIES IMPORTED_LOCATION
        ../../../../libs/${CMAKE_ANDROID_ARCH_ABI}/libffmpeg.so )
add_library( mp3lame
        SHARED
        IMPORTED )
set_target_properties( mp3lame
        PROPERTIES IMPORTED_LOCATION
        ../../../../libs/${CMAKE_ANDROID_ARCH_ABI}/libmp3lame.so )

set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=gnu++11")

include_directories(src/main/cpp)
include_directories(src/main/cpp/include)
# The FFmpeg config headers differ per ABI, so pick the matching directory.
if(${CMAKE_ANDROID_ARCH_ABI} MATCHES "armeabi-v7a")
    include_directories(src/main/cpp/include/armeabi-v7a)
    message("This is armeabi-v7a")
elseif(${CMAKE_ANDROID_ARCH_ABI} MATCHES "arm64-v8a")
    include_directories(src/main/cpp/include/arm64-v8a)
    message("This is arm64-v8a")
endif()

# Locate the NDK log library BEFORE it is referenced below.
# (The original listing called target_link_libraries with ${log-lib}
# before find_library had run, so the variable expanded to nothing,
# and then repeated the whole target_link_libraries call a second time.)
find_library( # Sets the name of the path variable.
        log-lib
        # Specifies the name of the NDK library that
        # you want CMake to locate.
        log)

# Specifies libraries CMake should link to your target library —
# a single call is enough.
target_link_libraries( # Specifies the target library.
        media-handle
        mp3lame
        ffmpeg
        -landroid #native_window
        -ljnigraphics #bitmap
        -lOpenSLES #openSLES
        # Links the target library to the log library
        # included in the NDK.
        ${log-lib} )
注意这里的设置:如果library是由源代码文件直接编译的,则直接在add_library中列出源文件即可,比如:
# Collect the C/C++ sources that make up the native bridge library.
set(MEDIA_HANDLE_SRC
        src/main/cpp/ffmpeg/cmdutils.c
        src/main/cpp/ffmpeg/ffmpeg.c
        src/main/cpp/ffmpeg/ffmpeg_filter.c
        src/main/cpp/ffmpeg/ffmpeg_opt.c
        src/main/cpp/ffmpeg_cmd.c
        src/main/cpp/ffmpeg/ffprobe.c
        src/main/cpp/ffmpeg/ffmpeg_hw.c
        src/main/cpp/audio_player.c
        src/main/cpp/openSL_audio_player.c
        src/main/cpp/video_player.c
        src/main/cpp/ffmpeg_pusher.cpp
        src/main/cpp/AVpacket_queue.c
        src/main/cpp/media_player.c
        src/main/cpp/video_filter.c
        src/main/cpp/audio_lame.c
        src/main/cpp/fast_start.c
        src/main/cpp/ffprobe_cmd.c)

# Build them into one shared library that Gradle packages with the APK.
add_library(media-handle SHARED ${MEDIA_HANDLE_SRC})
而预编译的so库,则在add_library中声明为IMPORTED,然后接着用set_target_properties设置IMPORTED_LOCATION指向so文件的实际路径。例如:
# Declare the prebuilt FFmpeg shared library as an IMPORTED target
# (no sources are compiled for it).
add_library( ffmpeg
SHARED
IMPORTED )
# Point the imported target at the .so for the ABI currently being built.
set_target_properties( ffmpeg
PROPERTIES IMPORTED_LOCATION
../../../../libs/${CMAKE_ANDROID_ARCH_ABI}/libffmpeg.so )
接着我们看音频的相关操作。
1.转码:
/**
 * Build the FFmpeg command line for transcoding an audio file;
 * the output format is inferred by FFmpeg from the extension of outputPath.
 *
 * @param inputPath  path of the source audio file
 * @param outputPath path of the output file (its extension selects the format)
 * @return the command as a String array, ready to pass to FFmpegCmd.execute
 */
public static String[] transformAudio(String inputPath, String outputPath) {
    String transformAudioCmd = "ffmpeg -i %s %s";
    transformAudioCmd = String.format(transformAudioCmd, inputPath, outputPath);
    // NOTE: splitting on spaces means paths containing spaces will be broken
    // into separate arguments.
    return transformAudioCmd.split(" ");
}
这个方法中,inputPath和outputPath两个参数分别是源文件和输出目标文件的路径。
这个方法的主要目的就是把FFmpeg的音频转码命令转换成一个字符串数组。
所有的FFMPEG方法,都是得到一个命令,然后去执行这个命令。我们来看这个命令的执行:
/**
 * Execute the command of FFmpeg.
 *
 * Hands the command array to FFmpegCmd.execute and forwards the
 * begin/progress/end callbacks to the UI via mHandler messages.
 *
 * @param commandLine the FFmpeg command split into a String array; ignored when null
 */
public void executeFFmpegCmd(final String[] commandLine) {
if (commandLine == null) {
return;
}
FFmpegCmd.execute(commandLine, new OnHandleListener() {
@Override
public void onBegin() {
Log.i(TAG, "handle onBegin...");
// Notify the UI thread that execution has started.
mHandler.obtainMessage(MSG_BEGIN).sendToTarget();
}
@Override
public void onProgress(int progress, int duration) {
// arg1 = progress, arg2 = duration, both forwarded unchanged.
mHandler.obtainMessage(MSG_PROGRESS, progress, duration).sendToTarget();
}
@Override
public void onEnd(int resultCode, String resultMsg) {
Log.i(TAG, "handle onEnd...");
// isContinue presumably flags a queued multi-step job — TODO confirm
// against the code that sets it.
if (isContinue) {
mHandler.obtainMessage(MSG_CONTINUE).sendToTarget();
} else {
mHandler.obtainMessage(MSG_FINISH).sendToTarget();
}
}
});
}
而这个方法中,就是把命令数组传递给FFmpegCmd类的execute方法。然后用一个回调函数进行监听,FFmpeg这个类是一个JNI调用类。也就是用来链接JAVA和C的桥梁类。来看下这个方法:
/**
 * Execute FFmpeg command on a single-thread pool.
 *
 * @param commands         the String array of command
 * @param onHandleListener the callback for executing command; also stored in
 *                         mProgressListener (presumably read by the native
 *                         progress callback — confirm against the JNI layer)
 */
public static void execute(final String[] commands, final OnHandleListener onHandleListener) {
mProgressListener = onHandleListener;
ThreadPoolUtil.executeSingleThreadPool(new Runnable() {
@Override
public void run() {
if (onHandleListener != null) {
onHandleListener.onBegin();
}
//call JNI interface to execute FFmpeg cmd (blocks until it finishes)
int result = handle(commands);
if (onHandleListener != null) {
onHandleListener.onEnd(result, null);
}
// Clear the static reference once execution is done.
mProgressListener = null;
}
});
}
这里调用了native方法handle(commands)来进行处理:
private native static int handle(String[] commands);
而native文件是我们在Cmake文件里配置的。我们看下:
add_library( # Sets the name of the library.
media-handle
# Sets the library as a shared library.
SHARED
# Provides a relative path to your source file(s).
src/main/cpp/ffmpeg/cmdutils.c
src/main/cpp/ffmpeg/ffmpeg.c
src/main/cpp/ffmpeg/ffmpeg_filter.c
src/main/cpp/ffmpeg/ffmpeg_opt.c
# note this file: it implements the JNI bridge (the handle function)
src/main/cpp/ffmpeg_cmd.c
src/main/cpp/ffmpeg/ffprobe.c
src/main/cpp/ffmpeg/ffmpeg_hw.c
src/main/cpp/audio_player.c
src/main/cpp/openSL_audio_player.c
src/main/cpp/video_player.c
src/main/cpp/ffmpeg_pusher.cpp
src/main/cpp/AVpacket_queue.c
src/main/cpp/media_player.c
src/main/cpp/video_filter.c
src/main/cpp/audio_lame.c
src/main/cpp/fast_start.c
src/main/cpp/ffprobe_cmd.c
)
而在这个ffmpeg_cmd这个文件中,我们看到一个宏定义文件:
#include "ffmpeg_jni_define.h"
在这个宏定义文件ffmpeg_jni_define中,定义了相关联的java类,返回类型和方法名通过参数定义:
// Expands to the JNI-mangled signature for a native method of the Java class
// com.frank.ffmpeg.FFmpegCmd: the function name becomes
// Java_com_frank_ffmpeg_FFmpegCmd_<FUNC_NAME>, with env/thiz plus any extra
// parameters supplied via __VA_ARGS__.
#define FFMPEG_FUNC(RETURN_TYPE, FUNC_NAME, ...) \
JNIEXPORT RETURN_TYPE JNICALL Java_com_frank_ffmpeg_FFmpegCmd_ ## FUNC_NAME \
(JNIEnv *env, jobject thiz, ##__VA_ARGS__)\
这里就对应了Java层的com.frank.ffmpeg.FFmpegCmd这个类。
接着,我们继续在ffmpeg_cmd.c这个文件中,我们看到这个方法:
/*
 * JNI bridge for FFmpegCmd.handle(String[]): copies the Java string array
 * into a C argv-style array, runs the FFmpeg command, frees the copies and
 * returns run()'s result code.
 */
FFMPEG_FUNC(jint, handle, jobjectArray commands) {
    init(env);
    // set the level of log
    av_log_set_level(AV_LOG_INFO);
    // set the callback of log, and redirect to print android log
    av_log_set_callback(log_callback);
    int argc = (*env)->GetArrayLength(env, commands);
    char **argv = (char **) malloc(argc * sizeof(char *));
    int i;
    int result;
    for (i = 0; i < argc; i++) {
        jstring jstr = (jstring) (*env)->GetObjectArrayElement(env, commands, i);
        // GetStringUTFChars returns a const buffer; do not cast the const away.
        const char *temp = (*env)->GetStringUTFChars(env, jstr, 0);
        // Allocate exactly what is needed: the original fixed 1024-byte
        // buffer overflowed on any argument of 1024 bytes or more
        // (e.g. a long file path or filter graph).
        size_t len = strlen(temp) + 1;
        argv[i] = (char *) malloc(len);
        memcpy(argv[i], temp, len);
        (*env)->ReleaseStringUTFChars(env, jstr, temp);
        // Drop the local reference eagerly so long command arrays cannot
        // exhaust the JNI local reference table.
        (*env)->DeleteLocalRef(env, jstr);
    }
    //execute ffmpeg cmd
    result = run(argc, argv);
    //release memory
    for (i = 0; i < argc; i++) {
        free(argv[i]);
    }
    free(argv);
    return result;
}
这里的handle这个名字就和前面Java层的native方法handle对应上了。这个函数主要就是把传过来的Java字符串数组转换成C的二级指针(char**)形式,然后调用FFmpeg源码的run方法得到结果。而这个结果通过FFmpegCmd传回给回调函数,到FFmpegHandler.java这里;FFmpegHandler再通过构造函数里传入的handler把消息发回Activity。
2.音频的剪切
/**
 * Build the FFmpeg command line for cutting a segment out of an audio file.
 *
 * @param srcFile    path of the source audio file
 * @param startTime  start position of the cut, in seconds
 * @param duration   length of the cut, in seconds
 * @param targetFile path of the output file
 * @return the command as a String array, ready to pass to FFmpegCmd.execute
 */
@SuppressLint("DefaultLocale")
public static String[] cutAudio(String srcFile, int startTime, int duration, String targetFile){
    String cutAudioCmd = "ffmpeg -i %s -ss %d -t %d %s";
    cutAudioCmd = String.format(cutAudioCmd, srcFile, startTime, duration, targetFile);
    // Split on spaces into the argument array (paths must not contain spaces).
    return cutAudioCmd.split(" ");
}
这里都与上面类似了。就拼接命令字符串,然后转换成字符串数组。唯一不同的便是命令不同而已。
3.音频的合并
/**
 * Build the FFmpeg command line for concatenating two audio files
 * using the concat protocol, copying the audio stream without re-encoding.
 *
 * @param srcFile    path of the first audio file
 * @param appendFile path of the audio file appended after srcFile
 * @param targetFile path of the output file
 * @return the command as a String array, ready to pass to FFmpegCmd.execute
 */
public static String[] concatAudio(String srcFile, String appendFile, String targetFile){
    String concatAudioCmd = "ffmpeg -i concat:%s|%s -acodec copy %s";
    concatAudioCmd = String.format(concatAudioCmd, srcFile, appendFile, targetFile);
    // Split on spaces into the argument array (paths must not contain spaces).
    return concatAudioCmd.split(" ");
}
这里只需注意concat协议的命令拼接格式(concat:文件1|文件2)即可。
4.音频的混合
/**
 * Build the FFmpeg command line for mixing two audio files into one,
 * using the amix filter; the output length follows the first input.
 *
 * @param srcFile    path of the first audio file
 * @param mixFile    path of the audio file to mix in
 * @param targetFile path of the output file
 * @return the command as a String array, ready to pass to FFmpegCmd.execute
 */
public static String[] mixAudio(String srcFile, String mixFile, String targetFile){
    String mixAudioCmd = "ffmpeg -i %s -i %s -filter_complex amix=inputs=2:duration=first -strict -2 %s";
    mixAudioCmd = String.format(mixAudioCmd, srcFile, mixFile, targetFile);
    // Split on spaces into the argument array (paths must not contain spaces).
    return mixAudioCmd.split(" ");
}
这里的命令,比较复杂。对于几个常用的基本命令要知道,放在下一篇文章中说。