Getting Started with FFmpeg in Android Studio

(1) Goal: call FFmpeg from Android Studio to play video files and RTSP streams.

(2) Preparation: download the FFmpeg libraries and the NDK (I used android-ndk-r20b and FFmpeg 3.0).

(3) 1. Configure the NDK path in your system environment variables (screenshot omitted).

    2. Configure the NDK path in Android Studio (screenshot omitted).

      ----------------------------

 

(4) The overall flow for calling FFmpeg is:
             
1. Build the FFmpeg libraries (I downloaded prebuilt ones from the internet).
2. Write the C code JNI-style, including the required FFmpeg headers.
3. Write the corresponding Android.mk, which specifies the C sources to compile and the shared libraries to link against.
4. Run ndk-build to generate the JNI library.
5. Create the Android Java app and loadLibrary the FFmpeg libraries plus the JNI library just built.
6. Declare the native functions as static methods; each of them is implemented in the JNI C code.

My order of operations differs a little from the list above, but none of the steps can be skipped; I only changed the order, which doesn't affect the final result.

1. Proceed as follows: create a new jni folder under main.

2. Add the prebuilt libraries and your own native-method wrapper (I created a class named MyNdk).

Note: play is the video-playback method; the other methods are optional and can be removed, along with their counterparts in test.c, keeping only play. A minimal sketch of the wrapper class follows.
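For reference, here is a minimal sketch of what that wrapper class might look like. The package name com.example.im.myapplication and the library names are assumptions inferred from the JNI function names in test.c and the Android.mk below; adjust both to your own project.

package com.example.im.myapplication;

import android.view.Surface;

public class MyNdk {

    static {
        // Load the FFmpeg libraries before our own JNI library,
        // dependencies first. The names must match the .so files
        // packaged by Android.mk (assumed here from the mk below).
        System.loadLibrary("avutil-55");
        System.loadLibrary("swresample-2");
        System.loadLibrary("avcodec-57");
        System.loadLibrary("avformat-57");
        System.loadLibrary("swscale-4");
        System.loadLibrary("avfilter-6");
        System.loadLibrary("MyApplication");
    }

    // Implemented in test.c as Java_com_example_im_myapplication_MyNdk_play
    public static native void play(String url, Surface surface);
}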

3. Generate the com_example_im_myapplication_MyNdk.h header file:

javah -encoding UTF-8 -classpath C:\Users\IM\AppData\Local\Android\Sdk\platforms\android-25\android.jar;. -jni com.example.im.ffmpegtest3.MainActivity   (substitute your own project's class)

4. Copy the include and lib folders from the prebuilt FFmpeg package directly into the jni folder.

5. Add Android.mk, Application.mk, and test.c.

a: Android.mk is as follows:

LOCAL_PATH := $(call my-dir)

# FFmpeg prebuilt libraries: each .so below is declared as a
# PREBUILT_SHARED_LIBRARY module so ndk-build can link against it
# and package it into the APK
include $(CLEAR_VARS)
LOCAL_MODULE := avcodec
LOCAL_SRC_FILES := libavcodec-57.so
include $(PREBUILT_SHARED_LIBRARY)

include $(CLEAR_VARS)
LOCAL_MODULE := avfilter
LOCAL_SRC_FILES := libavfilter-6.so
include $(PREBUILT_SHARED_LIBRARY)

include $(CLEAR_VARS)
LOCAL_MODULE := avformat
LOCAL_SRC_FILES := libavformat-57.so
include $(PREBUILT_SHARED_LIBRARY)

include $(CLEAR_VARS)
LOCAL_MODULE := avutil
LOCAL_SRC_FILES := libavutil-55.so
include $(PREBUILT_SHARED_LIBRARY)

include $(CLEAR_VARS)
LOCAL_MODULE := swresample
LOCAL_SRC_FILES := libswresample-2.so
include $(PREBUILT_SHARED_LIBRARY)

include $(CLEAR_VARS)
LOCAL_MODULE := swscale
LOCAL_SRC_FILES := libswscale-4.so
include $(PREBUILT_SHARED_LIBRARY)

# Program
include $(CLEAR_VARS)
LOCAL_MODULE := MyApplication   # must match the name passed to System.loadLibrary
LOCAL_SRC_FILES := test.c       # must match your C source file name

# Pick up the FFmpeg headers copied into jni/include in step 4
LOCAL_C_INCLUDES += $(LOCAL_PATH)/include
LOCAL_LDLIBS := -llog -lz -landroid
LOCAL_SHARED_LIBRARIES := avcodec avfilter avformat avutil swresample swscale
include $(BUILD_SHARED_LIBRARY)

b: Application.mk is as follows:

APP_ABI := armeabi-v7a         # must match the ABI of the prebuilt FFmpeg .so files
APP_MODULES := MyApplication   # must match LOCAL_MODULE in Android.mk

c: test.c is as follows. Watch the naming format of the JNI functions; it's the easiest thing to get wrong.

#include <stdio.h>
#include <string.h>
#include "com_example_im_myapplication_MyNdk.h"
#include "include/libavformat/avformat.h"
#include "include/libavcodec/avcodec.h"
#include "include/libavutil/avutil.h"
#include "include/libavutil/imgutils.h"
#include "include/libavfilter/avfilter.h"
#include "include/libswscale/swscale.h"

#include <android/native_window_jni.h>
#include <android/native_window.h>

#include <jni.h>
#include <android/log.h>

#define LOG_TAG "JNI"
#define LOGD(...) __android_log_print(ANDROID_LOG_DEBUG, LOG_TAG, __VA_ARGS__)
#define LOGI(...) __android_log_print(ANDROID_LOG_INFO, LOG_TAG, __VA_ARGS__)
#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR, LOG_TAG, __VA_ARGS__)



/**
 * com.example.im.myapplication.MyNdk.avformatinfo()
 * AVFormat Support Information
 */
JNIEXPORT jstring JNICALL Java_com_example_im_myapplication_MyNdk_avformatinfo(JNIEnv *env, jobject obj){

    char info[40000] = { 0 };

    av_register_all();

    AVInputFormat *if_temp = av_iformat_next(NULL);
    AVOutputFormat *of_temp = av_oformat_next(NULL);
    //Input
    while(if_temp!=NULL){
        sprintf(info + strlen(info), "[In ][%10s]\n", if_temp->name);
        if_temp=if_temp->next;
    }
    //Output
    while (of_temp != NULL){
        sprintf(info + strlen(info), "[Out][%10s]\n", of_temp->name);
        of_temp = of_temp->next;
    }
    //LOGE("%s", info);
    return (*env)->NewStringUTF(env, info);
}

/**
 * com.example.im.myapplication.MyNdk.avcodecinfo()
 * AVCodec Support Information
 */
JNIEXPORT jstring JNICALL Java_com_example_im_myapplication_MyNdk_avcodecinfo(JNIEnv *env, jobject obj)
{
    char info[40000] = { 0 };

    av_register_all();

    AVCodec *c_temp = av_codec_next(NULL);

    while(c_temp!=NULL){
        if (c_temp->decode!=NULL){
            strcat(info, "[Dec]");
        }
        else{
            strcat(info, "[Enc]");
        }
        switch (c_temp->type){
        case AVMEDIA_TYPE_VIDEO:
            strcat(info, "[Video]");
            break;
        case AVMEDIA_TYPE_AUDIO:
            strcat(info, "[Audio]");
            break;
        default:
            strcat(info, "[Other]");
            break;
        }
        sprintf(info + strlen(info), "[%10s]\n", c_temp->name);


        c_temp=c_temp->next;
    }
    //LOGE("%s", info);

    return (*env)->NewStringUTF(env, info);
}

/**
 * com.example.im.myapplication.MyNdk.avfilterinfo()
 * AVFilter Support Information
 */
JNIEXPORT jstring JNICALL Java_com_example_im_myapplication_MyNdk_avfilterinfo(JNIEnv *env, jobject obj)
{
    char info[40000] = { 0 };
    avfilter_register_all();
    AVFilter *f_temp = (AVFilter *)avfilter_next(NULL);
    while (f_temp != NULL){
        sprintf(info + strlen(info), "[%10s]\n", f_temp->name);
        f_temp = f_temp->next;
    }

    return (*env)->NewStringUTF(env, info);
}

/**
 * com.example.im.myapplication.MyNdk.configurationinfo()
 * Build Configuration Information
 */
JNIEXPORT jstring JNICALL Java_com_example_im_myapplication_MyNdk_configurationinfo(JNIEnv *env, jobject obj)
{
    char info[10000] = {0};
    av_register_all();

    sprintf(info, "%s\n", avcodec_configuration());

    //LOGE("%s", info);
    return (*env)->NewStringUTF(env, info);
}

// 2020-12-24 wpp

JNIEXPORT void JNICALL Java_com_example_im_myapplication_MyNdk_play(JNIEnv *env, jclass clazz, jstring url, jobject surface) {
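    /*
     * Overall flow of play(): open the input (file path or rtsp:// URL),
     * find the first video stream and its decoder, bind an ANativeWindow
     * to the Java Surface, then loop: read packet -> decode frame ->
     * sws_scale to RGBA -> copy rows into the window buffer and post.
     */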
    char file_name[500] = {0};
    // Read the input video file path or stream URL passed from Java
    const char *in_url = (*env)->GetStringUTFChars(env, url, NULL);
    sprintf(file_name, "%s", in_url);
    (*env)->ReleaseStringUTFChars(env, url, in_url);

    av_register_all();

    AVFormatContext *pFormatCtx = avformat_alloc_context();

    // For RTSP you can set transport options here, e.g.:
    // AVDictionary* options = NULL;
    // av_dict_set(&options, "rtsp_transport", "udp", 0);

    // Open video file or stream
    if (avformat_open_input(&pFormatCtx, file_name, NULL, NULL) != 0) {
        LOGE("Couldn't open file:%s\n", file_name);
        return; // Couldn't open file
    }

    // Retrieve stream information
    if (avformat_find_stream_info(pFormatCtx, NULL) < 0) {
        LOGE("Couldn't find stream information.");
        return;
    }
    // Find the first video stream
    int videoStream = -1, i;
    for (i = 0; i < pFormatCtx->nb_streams; i++) {
        if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO
                && videoStream < 0) {
            videoStream = i;
        }
    }
    if (videoStream == -1) {
        LOGE("Didn't find a video stream.");
        return; // Didn't find a video stream
    }

    // Get a pointer to the codec context for the video stream
    AVCodecContext *pCodecCtx = pFormatCtx->streams[videoStream]->codec;

    // Find the decoder for the video stream
    AVCodec *pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
    if (pCodec == NULL) {
        LOGE("Codec not found.");
        return; // Codec not found
    }
    if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
        LOGE("Could not open codec.");
        return; // Could not open codec
    }
    // Get the native window from the Java Surface
    ANativeWindow *nativeWindow = ANativeWindow_fromSurface(env, surface);
    // Get the video dimensions
    int videoWidth = pCodecCtx->width;
    int videoHeight = pCodecCtx->height;
    // Set the native window's buffer size; the content is scaled automatically
    ANativeWindow_setBuffersGeometry(nativeWindow, videoWidth, videoHeight, WINDOW_FORMAT_RGBA_8888);
    ANativeWindow_Buffer windowBuffer;
    // Allocate video frame
    AVFrame *pFrame = av_frame_alloc();
    // Frame that will hold the RGBA data for rendering
    AVFrame *pFrameRGBA = av_frame_alloc();
    if (pFrameRGBA == NULL || pFrame == NULL) {
        LOGE("Could not allocate video frame.");
        return;
    }

    // Determine required buffer size and allocate buffer
    int numBytes = av_image_get_buffer_size(AV_PIX_FMT_RGBA, pCodecCtx->width, pCodecCtx->height, 1);
    uint8_t *buffer = (uint8_t *)av_malloc(numBytes * sizeof(uint8_t));
    av_image_fill_arrays(pFrameRGBA->data, pFrameRGBA->linesize, buffer, AV_PIX_FMT_RGBA,
                         pCodecCtx->width, pCodecCtx->height, 1);
    // Decoded frames are not RGBA, so convert before rendering
    struct SwsContext *sws_ctx = sws_getContext(pCodecCtx->width,
                                                pCodecCtx->height,
                                                pCodecCtx->pix_fmt,
                                                pCodecCtx->width,
                                                pCodecCtx->height,
                                                AV_PIX_FMT_RGBA,
                                                SWS_BILINEAR,
                                                NULL,
                                                NULL,
                                                NULL);

    int frameFinished;
    AVPacket packet;
    while (av_read_frame(pFormatCtx, &packet) >= 0) {
        // Is this a packet from the video stream?
        if (packet.stream_index == videoStream) {
            // Decode video frame
            avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet);

            // One decode call does not necessarily yield a complete frame
            if (frameFinished) {
                // Lock the native window buffer
                ANativeWindow_lock(nativeWindow, &windowBuffer, 0);
                // Convert the decoded frame to RGBA
                sws_scale(sws_ctx, (uint8_t const * const *)pFrame->data,
                          pFrame->linesize, 0, pCodecCtx->height,
                          pFrameRGBA->data, pFrameRGBA->linesize);
                // The window stride (in pixels) differs from the frame
                // stride, so copy row by row; RGBA_8888 is 4 bytes per pixel
                uint8_t *dst = windowBuffer.bits;
                int dstStride = windowBuffer.stride * 4;
                uint8_t *src = (uint8_t *)(pFrameRGBA->data[0]);
                int srcStride = pFrameRGBA->linesize[0];
                int h;
                for (h = 0; h < videoHeight; h++) {
                    memcpy(dst + h * dstStride, src + h * srcStride, srcStride);
                }
                ANativeWindow_unlockAndPost(nativeWindow);
            }
        }
        av_packet_unref(&packet);
    }
    // Release resources
    ANativeWindow_release(nativeWindow);
    sws_freeContext(sws_ctx);
    av_free(buffer);
    av_frame_free(&pFrameRGBA);
    // Free the YUV frame
    av_frame_free(&pFrame);
    // Close the codec
    avcodec_close(pCodecCtx);
    // Close the video file
    avformat_close_input(&pFormatCtx);
}
Notes: ① don't misspell the include file names; ② get the JNI function naming format exactly right; ③ Java_com_example_im_myapplication_MyNdk_play is the playback method; the others can be removed.

 

6. Run ndk-build; first cd into the jni directory.

Sometimes it reports that the command doesn't exist; in that case set the PATH first, e.g. set path=D:\software\android-ndk-r20b\build, then run ndk-build again (adjust the path to your own NDK location).

7. Now all that's left is displaying the video in the UI; a minimal sketch of the Java side follows.
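Here is a minimal sketch of what the display side might look like. The activity name, the hardcoded path, and the inline layout are all illustrative; the only requirement from the native code is that play receives a playable path or rtsp:// URL and a valid Surface. Since decoding blocks the calling thread, play is started on a background thread.

package com.example.im.myapplication;

import android.app.Activity;
import android.os.Bundle;
import android.view.SurfaceHolder;
import android.view.SurfaceView;

public class MainActivity extends Activity {

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        SurfaceView surfaceView = new SurfaceView(this);
        setContentView(surfaceView);

        surfaceView.getHolder().addCallback(new SurfaceHolder.Callback() {
            @Override
            public void surfaceCreated(final SurfaceHolder holder) {
                // play() blocks while decoding, so run it off the UI thread
                new Thread(new Runnable() {
                    @Override
                    public void run() {
                        // Hypothetical path; use your own file or an rtsp:// URL
                        MyNdk.play("/sdcard/test.mp4", holder.getSurface());
                    }
                }).start();
            }

            @Override
            public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) { }

            @Override
            public void surfaceDestroyed(SurfaceHolder holder) { }
        });
    }
}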

8. It runs successfully (screenshot omitted).

It took a long time to figure this out, but I finally got it working. So happy! A round of applause for myself.

 
