1、初始化 SurfaceView
package com.dongnaoedu.ffmplayer.view;
import android.content.Context;
import android.graphics.PixelFormat;
import android.util.AttributeSet;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
/**
 * Drawing "canvas" for video playback: a {@link SurfaceView} whose
 * surface pixel format is forced to RGBA_8888 so native code can
 * render decoded frames into it via ANativeWindow.
 *
 * @author Jason
 * QQ: 1476949583
 * @date 2016年9月19日
 * @version 1.0
 */
public class VideoView extends SurfaceView {

    public VideoView(Context context) {
        super(context);
        configureSurface();
    }

    public VideoView(Context context, AttributeSet attrs) {
        super(context, attrs);
        configureSurface();
    }

    public VideoView(Context context, AttributeSet attrs, int defStyle) {
        super(context, attrs, defStyle);
        configureSurface();
    }

    /** Set the surface's buffer pixel format to RGBA_8888 (matches the native renderer). */
    private void configureSurface() {
        getHolder().setFormat(PixelFormat.RGBA_8888);
    }
}
2、定义Native方法
3、生成头文件
4、c文件中实现该native方法
/**
 * Decode the video stream of the given media file with FFmpeg and render
 * each frame into the Android Surface (RGBA_8888) through ANativeWindow.
 *
 * env        JNI environment
 * jcl        calling Java object (unused)
 * input_jstr UTF-8 path of the input media file
 * surface    android.view.Surface to draw into
 */
JNIEXPORT void JNICALL Java_com_dongnaoedu_ffmplayer_VideoUtils_render
(JNIEnv *env, jobject jcl, jstring input_jstr, jobject surface){
    const char* input_cstr = (*env)->GetStringUTFChars(env,input_jstr,NULL);
    LOGI("sound input_cstr= %s",input_cstr);
    //Register all muxers/demuxers/codecs (required on pre-4.0 FFmpeg before any other call)
    av_register_all();
    //Container (demuxer) context
    AVFormatContext *pFormatCtx = avformat_alloc_context();
    //Open the input media file
    if(avformat_open_input(&pFormatCtx,input_cstr,NULL,NULL) != 0){
        LOGI("%s","无法打开音频文件");
        //BUGFIX: the JNI string was leaked on every early-return path
        (*env)->ReleaseStringUTFChars(env,input_jstr,input_cstr);
        return;
    }
    //Read stream information from the container
    if(avformat_find_stream_info(pFormatCtx,NULL) < 0){
        LOGI("%s","无法获取输入文件信息");
        avformat_close_input(&pFormatCtx);
        (*env)->ReleaseStringUTFChars(env,input_jstr,input_cstr);
        return;
    }
    //Locate the first video stream
    int i = 0;
    int video_stream_idx = -1;
    for(; i < pFormatCtx->nb_streams; i++){
        if(pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO){
            video_stream_idx = i;
            LOGI("sound input_cstr video_stream_idx= %d",video_stream_idx);
            break;
        }
    }
    //BUGFIX: without this guard, a file with no video stream leaves the
    //index at -1 and the streams[] access below reads out of bounds
    if(video_stream_idx == -1){
        LOGI("%s","找不到视频流");
        avformat_close_input(&pFormatCtx);
        (*env)->ReleaseStringUTFChars(env,input_jstr,input_cstr);
        return;
    }
    //Find and open the decoder for that stream
    AVCodecContext *codecCtx = pFormatCtx->streams[video_stream_idx]->codec;
    AVCodec *codec = avcodec_find_decoder(codecCtx->codec_id);
    if(codec == NULL){
        LOGI("%s","无法获取解码器");
        avformat_close_input(&pFormatCtx);
        (*env)->ReleaseStringUTFChars(env,input_jstr,input_cstr);
        return;
    }
    if(avcodec_open2(codecCtx,codec,NULL) < 0){
        LOGI("%s","无法打开解码器");
        avformat_close_input(&pFormatCtx);
        (*env)->ReleaseStringUTFChars(env,input_jstr,input_cstr);
        return;
    }
    //Compressed packet and decoded-frame buffers
    AVPacket *packet = (AVPacket *)av_malloc(sizeof(AVPacket));
    AVFrame *yuv_frame = av_frame_alloc();
    AVFrame *rgb_frame = av_frame_alloc();
    //Native drawing target
    ANativeWindow* nativeWindow = ANativeWindow_fromSurface(env,surface);
    ANativeWindow_Buffer outBuffer;
    //Buffer geometry (width/height/pixel format) is invariant across frames,
    //so configure it once instead of on every iteration
    ANativeWindow_setBuffersGeometry(nativeWindow,codecCtx->width,codecCtx->height,WINDOW_FORMAT_RGBA_8888);
    int len, got_frame, framecount = 0;
    //Read compressed packets until EOF
    while(av_read_frame(pFormatCtx,packet) >= 0){
        //Decode AVPacket -> AVFrame; got_frame is nonzero when a full frame is ready
        len = avcodec_decode_video2(codecCtx,yuv_frame,&got_frame,packet);
        if(got_frame){
            LOGI("解码%d帧",framecount++);
            ANativeWindow_lock(nativeWindow,&outBuffer,NULL);
            //Point rgb_frame's data planes at the window buffer so the
            //color conversion writes straight into outBuffer.bits
            avpicture_fill((AVPicture *)rgb_frame, outBuffer.bits, PIX_FMT_RGBA, codecCtx->width, codecCtx->height);
            //YUV420P -> 32-bit RGBA. NOTE(review): the U/V planes are passed
            //swapped (data[2] before data[1]); together with I420ToARGB this
            //appears intended to produce RGBA byte order for the window —
            //confirm colors render correctly on a real device.
            I420ToARGB(yuv_frame->data[0],yuv_frame->linesize[0],
                yuv_frame->data[2],yuv_frame->linesize[2],
                yuv_frame->data[1],yuv_frame->linesize[1],
                rgb_frame->data[0], rgb_frame->linesize[0],
                codecCtx->width,codecCtx->height);
            ANativeWindow_unlockAndPost(nativeWindow);
            //Crude ~60fps pacing; no real A/V synchronization
            usleep(1000 * 16);
        }
        //Release the packet's payload (the struct is reused next iteration)
        av_free_packet(packet);
    }
    ANativeWindow_release(nativeWindow);
    av_frame_free(&yuv_frame);
    av_frame_free(&rgb_frame);          //BUGFIX: rgb_frame was never freed
    av_free(packet);                    //BUGFIX: the packet struct itself was leaked
    avcodec_close(codecCtx);            //BUGFIX: was avcodec_close(codec) — wrong argument type (AVCodec* vs AVCodecContext*)
    avformat_close_input(&pFormatCtx);  //BUGFIX: correct teardown for an opened input; also frees the context
    (*env)->ReleaseStringUTFChars(env,input_jstr,input_cstr);
}
5、CMakeLists.txt中指定编译所需要的头文件
6、CMakeLists.txt中指定编译所需要的.so文件
7、此外还需要jnigraphics.so文件
cmake_minimum_required(VERSION 3.4.1)

# NDK-provided logging library (__android_log_print)
find_library(
        log-lib
        log )

# BUGFIX: jnigraphics-lib was referenced in target_link_libraries below but
# never defined, so ${jnigraphics-lib} expanded to nothing and the library
# was silently not linked. Locate the NDK jnigraphics library explicitly.
find_library(
        jnigraphics-lib
        jnigraphics )

# FFmpeg and libyuv headers bundled with the project
include_directories(
        src/main/jni/include/ffmpeg)
include_directories(
        src/main/jni/include/libyuv)
include_directories(
        src/main/jni/include/libyuv/libyuv)

#set(distribution_DIR ../../../../libs)
set(distribution_DIR ../../../../src/main/jni)

# Prebuilt FFmpeg shared libraries (armeabi-v7a), declared as IMPORTED targets
add_library(libavutil
        SHARED
        IMPORTED)
set_target_properties(libavutil
        PROPERTIES IMPORTED_LOCATION
        ${distribution_DIR}/armeabi-v7a/libavutil-54.so)
add_library(libswresample
        SHARED
        IMPORTED)
set_target_properties(libswresample
        PROPERTIES IMPORTED_LOCATION
        ${distribution_DIR}/armeabi-v7a/libswresample-1.so)
add_library(libavcodec
        SHARED
        IMPORTED)
set_target_properties(libavcodec
        PROPERTIES IMPORTED_LOCATION
        ${distribution_DIR}/armeabi-v7a/libavcodec-56.so)
add_library(libavformat
        SHARED
        IMPORTED)
set_target_properties(libavformat
        PROPERTIES IMPORTED_LOCATION
        ${distribution_DIR}/armeabi-v7a/libavformat-56.so)
add_library(libswscale
        SHARED
        IMPORTED)
set_target_properties(libswscale
        PROPERTIES IMPORTED_LOCATION
        ${distribution_DIR}/armeabi-v7a/libswscale-3.so)
add_library(libpostproc
        SHARED
        IMPORTED)
set_target_properties(libpostproc
        PROPERTIES IMPORTED_LOCATION
        ${distribution_DIR}/armeabi-v7a/libpostproc-53.so)
add_library(libavfilter
        SHARED
        IMPORTED)
set_target_properties(libavfilter
        PROPERTIES IMPORTED_LOCATION
        ${distribution_DIR}/armeabi-v7a/libavfilter-5.so)
add_library(libavdevice
        SHARED
        IMPORTED)
set_target_properties(libavdevice
        PROPERTIES IMPORTED_LOCATION
        ${distribution_DIR}/armeabi-v7a/libavdevice-56.so)

# Prebuilt libyuv (YUV -> RGBA color conversion)
add_library(libyuv
        SHARED
        IMPORTED)
set_target_properties(libyuv
        PROPERTIES IMPORTED_LOCATION
        ${distribution_DIR}/armeabi-v7a/libyuv.so)

# Our native player library
add_library(
        ffmpeg_player
        SHARED
        src/main/jni/ffmpeg_decode.c
        src/main/jni/dn_player.c
        src/main/jni/queue.c
        src/main/jni/ffmpeg_audio_player.c)

target_link_libraries(
        ffmpeg_player
        libavutil
        libswresample
        libavcodec
        libavformat
        libswscale
        libpostproc
        libavfilter
        libavdevice
        libyuv
        android
        ${log-lib}
        ${jnigraphics-lib})
8、libyuv.so并不需要在代码中进行加载,只加载所需要的.so文件就可以
9、添加权限
10、代码中调用