Using the FFmpeg Libraries in Android Studio

In https://blog.csdn.net/qq_15255121/article/details/117414931?spm=1001.2014.3001.5501 we already built the FFmpeg shared libraries for Android.

Next, let's look at how to use them.

Create a native project by following the steps in https://blog.csdn.net/qq_15255121/article/details/115244956.

https://blog.csdn.net/qq_15255121/article/details/117415799?spm=1001.2014.3001.5501 covers how to develop the native side in C.

 

Step 1: Modify build.gradle in the app directory, adding an ndk block (inside android > defaultConfig) so that only the ABIs we built FFmpeg for are packaged:

ndk {
    // set the supported .so architectures; must match the ABIs we built FFmpeg for
    abiFilters "arm64-v8a"
}

 

Step 2: Modify CMakeLists.txt to import the libraries we need

cmake_minimum_required(VERSION 3.10.2)

find_library(log-lib log)
find_library(android-lib android)

add_library(native-lib
        SHARED
        native-lib.c)


set(JNI_LIBS_DIR /Users/yuanxuzhen/StudioProjects/study/ffmpeg/app/src/main/yuanLibs)

add_library(avutil
        SHARED
        IMPORTED )
set_target_properties(avutil
        PROPERTIES IMPORTED_LOCATION
        ${JNI_LIBS_DIR}/${ANDROID_ABI}/libavutil.so )

add_library(swresample
        SHARED
        IMPORTED )
set_target_properties(swresample
        PROPERTIES IMPORTED_LOCATION
        ${JNI_LIBS_DIR}/${ANDROID_ABI}/libswresample.so )

add_library(swscale
        SHARED
        IMPORTED )
set_target_properties(swscale
        PROPERTIES IMPORTED_LOCATION
        ${JNI_LIBS_DIR}/${ANDROID_ABI}/libswscale.so )

add_library(avcodec
        SHARED
        IMPORTED )
set_target_properties(avcodec
        PROPERTIES IMPORTED_LOCATION
        ${JNI_LIBS_DIR}/${ANDROID_ABI}/libavcodec.so )

add_library(avformat
        SHARED
        IMPORTED )
set_target_properties(avformat
        PROPERTIES IMPORTED_LOCATION
        ${JNI_LIBS_DIR}/${ANDROID_ABI}/libavformat.so )

add_library(avfilter
        SHARED
        IMPORTED )
set_target_properties(avfilter
        PROPERTIES IMPORTED_LOCATION
        ${JNI_LIBS_DIR}/${ANDROID_ABI}/libavfilter.so )

add_library(avdevice
        SHARED
        IMPORTED )
set_target_properties(avdevice
        PROPERTIES IMPORTED_LOCATION
        ${JNI_LIBS_DIR}/${ANDROID_ABI}/libavdevice.so )


include_directories(${JNI_LIBS_DIR}/includes)

include_directories(/Users/yuanxuzhen/Library/Android/sdk/ndk/21.1.6352462/toolchains/llvm/prebuilt/darwin-x86_64/sysroot/usr/include)



target_link_libraries(native-lib
        avutil swresample swscale avcodec avformat avfilter avdevice
        ${log-lib} ${android-lib} )

 

In this file, JNI_LIBS_DIR is the directory holding the .so files produced by the FFmpeg build. Note that this path, like the NDK sysroot include path above, is absolute and must be adjusted to your own machine:

set(JNI_LIBS_DIR /Users/yuanxuzhen/StudioProjects/study/ffmpeg/app/src/main/yuanLibs)

Be careful where you put the .so files: the directory must NOT be app/src/main/jniLibs,

otherwise the build fails with an error like

More than one file was found with OS independent path 'lib/x86_64/libopencv_java4.so'.

If you are using jniLibs and CMake IMPORTED targets, see https://developer.android.com/studio/preview/features#automatic_packaging_of_prebuilt_dependencies_used_by_cmake

 

 

Each FFmpeg library is imported with an add_library/set_target_properties pair, for example:

add_library(avutil
        SHARED
        IMPORTED )
set_target_properties(avutil
        PROPERTIES IMPORTED_LOCATION
        ${JNI_LIBS_DIR}/${ANDROID_ABI}/libavutil.so )

 

The headers are pulled in with:

include_directories(${JNI_LIBS_DIR}/includes)

 

Step 3: Android displays video through a SurfaceView, so on the Java side we create a new activity to show the video, as sketched below.
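A minimal sketch of such an activity (a sketch only: the class name PlayVideoActivity and the video path are placeholders; the package name is taken from the JNI symbols in step 5, and the real activity is in the repository linked at the end). The Surface is obtained through a SurfaceHolder.Callback, and the playVideo native method added in the next step blocks while decoding, so it must run on a worker thread:

package com.yuanxuzhen.ffmpeg;

import android.app.Activity;
import android.os.Bundle;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.SurfaceView;

public class PlayVideoActivity extends Activity implements SurfaceHolder.Callback {

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        SurfaceView surfaceView = new SurfaceView(this);
        setContentView(surfaceView);
        surfaceView.getHolder().addCallback(this);
    }

    @Override
    public void surfaceCreated(SurfaceHolder holder) {
        final Surface surface = holder.getSurface();
        // playVideo decodes in a loop and blocks, so never call it on the main thread
        new Thread(new Runnable() {
            @Override
            public void run() {
                // placeholder path; reading external storage also needs the usual permission
                YuanUtils.playVideo("/sdcard/test.mp4", surface);
            }
        }).start();
    }

    @Override
    public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
    }

    @Override
    public void surfaceDestroyed(SurfaceHolder holder) {
    }
}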

 

Step 4: Add a native method for playing video

public static native void playVideo(String videoPath, Surface surface);
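This declaration lives in the YuanUtils class; the package and class name follow from the JNI symbol Java_com_yuanxuzhen_ffmpeg_YuanUtils_playVideo in step 5. A minimal sketch, assuming the library keeps the native-lib target name from the CMakeLists.txt above:

package com.yuanxuzhen.ffmpeg;

import android.view.Surface;

public class YuanUtils {
    static {
        // must match the add_library(native-lib ...) target name in CMakeLists.txt
        System.loadLibrary("native-lib");
    }

    public static native void playVideo(String videoPath, Surface surface);
}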

 

Step 5: Implement the playVideo method in the native layer. (The base.h included below is presumably where the LOGI/LOGE logging macros, wrappers around __android_log_print, are defined.)

#include <jni.h>
#include <android/log.h>
#include <android/native_window.h>
#include <android/native_window_jni.h>
#include <unistd.h>
#include <stdio.h>
#include <string.h>   // for memcpy
#include "base.h"
#include <libavutil/log.h>
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libavfilter/avfilter.h>
#include <libswscale/swscale.h>
#include <libswresample/swresample.h>
#include <libavutil/opt.h>
#include <libavutil/imgutils.h>




JNIEXPORT jstring JNICALL
Java_com_yuanxuzhen_ffmpeg_YuanUtils_urlProtocolInfo(JNIEnv *env, jclass clazz) {
    // TODO: implement urlProtocolInfo()
    return NULL;  // stub return so the jstring-returning function is valid C
}

JNIEXPORT jstring JNICALL
Java_com_yuanxuzhen_ffmpeg_YuanUtils_avFormatInfo(JNIEnv *env, jclass clazz) {
    // TODO: implement avFormatInfo()
    return NULL;
}

JNIEXPORT jstring JNICALL
Java_com_yuanxuzhen_ffmpeg_YuanUtils_avCodecInfo(JNIEnv *env, jclass clazz) {
    // TODO: implement avCodecInfo()
    return NULL;
}

JNIEXPORT jstring JNICALL
Java_com_yuanxuzhen_ffmpeg_YuanUtils_avFilterInfo(JNIEnv *env, jclass clazz) {
    // TODO: implement avFilterInfo()
    return NULL;
}

JNIEXPORT void JNICALL
Java_com_yuanxuzhen_ffmpeg_YuanUtils_playVideo(JNIEnv *env, jclass clazz, jstring video_path,
                                               jobject surface) {
    const char *videoPath = (*env)->GetStringUTFChars(env, video_path, 0);
    if (videoPath == NULL) {
        LOGE("videoPath is null");
        return;
    }
    LOGE("PlayVideo: %s", videoPath);

    AVFormatContext *formatContext = avformat_alloc_context();

    // open video file
    LOGI("Open video file");
    if (avformat_open_input(&formatContext, videoPath, NULL, NULL) != 0) {
        LOGE("Cannot open video file: %s\n", videoPath);
        return;
    }

    // Retrieve stream information
    LOGI("Retrieve stream information");
    if (avformat_find_stream_info(formatContext, NULL) < 0) {
        LOGE("Cannot find stream information.");
        return;
    }

    // Find the first video stream
    LOGI("Find video stream");
    int video_stream_index = -1;
    for (unsigned int i = 0; i < formatContext->nb_streams; i++) {
        if (formatContext->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
            video_stream_index = i;
            break;  // take the first video stream
        }
    }

    if (video_stream_index == -1) {
        LOGE("No video stream found.");
        return; // no video stream found.
    }

    // Get a pointer to the codec context for the video stream
    LOGI("Get a pointer to the codec context for the video stream");
    AVCodecParameters *codecParameters = formatContext->streams[video_stream_index]->codecpar;

    // Find the decoder for the video stream
    LOGI("Find the decoder for the video stream");
    AVCodec *codec = avcodec_find_decoder(codecParameters->codec_id);
    if (codec == NULL) {
        LOGE("Codec not found.");
        return; // Codec not found
    }

    AVCodecContext *codecContext = avcodec_alloc_context3(codec);

    if (codecContext == NULL) {
        LOGE("CodecContext not found.");
        return; // CodecContext not found
    }

    // fill CodecContext according to CodecParameters
    if (avcodec_parameters_to_context(codecContext, codecParameters) < 0) {
        LOGE("Fill CodecContext failed.");
        return;
    }

    // init codex context
    LOGI("open Codec");
    if (avcodec_open2(codecContext, codec, NULL)) {
        LOGE("Init CodecContext failed.");
        return;
    }

    enum AVPixelFormat dstFormat = AV_PIX_FMT_RGBA;

    // Allocate av packet
    AVPacket *packet = av_packet_alloc();
    if (packet == NULL) {
        LOGE("Could not allocate av packet.");
        return;
    }

    // Allocate video frame
    LOGI("Allocate video frame");
    AVFrame *frame = av_frame_alloc();
    // Allocate render frame
    LOGI("Allocate render frame");
    AVFrame *renderFrame = av_frame_alloc();

    if (frame == NULL || renderFrame == NULL) {
        LOGE("Could not allocate video frame.");
        return;
    }

    // Determine required buffer size and allocate buffer
    LOGI("Determine required buffer size and allocate buffer");
    int size = av_image_get_buffer_size(dstFormat, codecContext->width, codecContext->height, 1);
    uint8_t *buffer = (uint8_t *) av_malloc(size * sizeof(uint8_t));
    av_image_fill_arrays(renderFrame->data, renderFrame->linesize, buffer, dstFormat,
                         codecContext->width, codecContext->height, 1);

    // init SwsContext
    LOGI("init SwsContext");
    struct SwsContext *swsContext = sws_getContext(codecContext->width,
                                                   codecContext->height,
                                                   codecContext->pix_fmt,
                                                   codecContext->width,
                                                   codecContext->height,
                                                   dstFormat,
                                                   SWS_BILINEAR,
                                                   NULL,
                                                   NULL,
                                                   NULL);
    if (swsContext == NULL) {
        LOGE("Init SwsContext failed.");
        return;
    }

    // native window
    LOGI("native window");
    ANativeWindow *nativeWindow = ANativeWindow_fromSurface(env, surface);
    ANativeWindow_Buffer windowBuffer;

    // get video width , height
    LOGI("get video width , height");
    int videoWidth = codecContext->width;
    int videoHeight = codecContext->height;
    LOGI("VideoSize: [%d,%d]", videoWidth, videoHeight);

    // Set the native window buffer size; the content is scaled automatically to fit the window
    LOGI("set native window");
    ANativeWindow_setBuffersGeometry(nativeWindow, videoWidth, videoHeight,
                                     WINDOW_FORMAT_RGBA_8888);


    LOGI("read frame");
    while (av_read_frame(formatContext, packet) == 0) {
        // Is this a packet from the video stream?
        if (packet->stream_index == video_stream_index) {

            // Send origin data to decoder
            int sendPacketState = avcodec_send_packet(codecContext, packet);
            if (sendPacketState == 0) {
                LOGE("向解码器-发送数据");

                int receiveFrameState = avcodec_receive_frame(codecContext, frame);
                if (receiveFrameState == 0) {
                    LOGE("从解码器-接收数据");
                    // lock native window buffer
                    ANativeWindow_lock(nativeWindow, &windowBuffer, NULL);

                    // convert the decoded frame to the RGBA render frame
                    sws_scale(swsContext, (uint8_t const *const *) frame->data,
                              frame->linesize, 0, codecContext->height,
                              renderFrame->data, renderFrame->linesize);

                    // compute the source and destination strides
                    uint8_t *dst = (uint8_t *) windowBuffer.bits;
                    uint8_t *src = (renderFrame->data[0]);
                    int dstStride = windowBuffer.stride * 4;
                    int srcStride = renderFrame->linesize[0];

                    // the window stride and the frame stride differ, so copy row by row
                    for (int i = 0; i < videoHeight; i++) {
                        memcpy(dst + i * dstStride, src + i * srcStride, srcStride);
                    }

                    ANativeWindow_unlockAndPost(nativeWindow);
                } else if (receiveFrameState == AVERROR(EAGAIN)) {
                    LOGE("Receiving frame from the decoder failed: AVERROR(EAGAIN)");
                } else if (receiveFrameState == AVERROR_EOF) {
                    LOGE("Receiving frame from the decoder failed: AVERROR_EOF");
                } else if (receiveFrameState == AVERROR(EINVAL)) {
                    LOGE("Receiving frame from the decoder failed: AVERROR(EINVAL)");
                } else {
                    LOGE("Receiving frame from the decoder failed: unknown error");
                }
            } else if (sendPacketState == AVERROR(EAGAIN)) {
                // the packet was refused; frames must be read out of the decoder first
                LOGE("Sending packet to the decoder failed: AVERROR(EAGAIN)");
            } else if (sendPacketState == AVERROR_EOF) {
                LOGE("Sending packet to the decoder failed: AVERROR_EOF");
            } else if (sendPacketState == AVERROR(EINVAL)) {
                // the decoder is not opened, is actually an encoder, or needs a flush
                LOGE("Sending packet to the decoder failed: AVERROR(EINVAL)");
            } else if (sendPacketState == AVERROR(ENOMEM)) {
                // the packet could not be queued, or the decoder hit an internal error
                LOGE("Sending packet to the decoder failed: AVERROR(ENOMEM)");
            } else {
                LOGE("Sending packet to the decoder failed: unknown error");
            }

        }
        av_packet_unref(packet);
    }


    // free resources
    LOGI("release memory");
    ANativeWindow_release(nativeWindow);
    sws_freeContext(swsContext);
    av_free(buffer);
    av_frame_free(&frame);
    av_frame_free(&renderFrame);
    av_packet_free(&packet);
    avcodec_close(codecContext);
    avcodec_free_context(&codecContext);
    avformat_close_input(&formatContext);
    avformat_free_context(formatContext);
    (*env)->ReleaseStringUTFChars(env, video_path, videoPath);

}

Full source code is available on Gitee:

https://gitee.com/creat151/ffmpeg-android.git
