4. Android工程中使用FFmpeg的so库 -- JNI头文件定义方法的实现

白话:
闲话不多说,紧接3文中,继续介绍。

4.1 JNI头文件定义方法的实现

1. 将3文中生成的com_rk_myapp_MainActivity.h文件拷贝到Android工程的jni目录下,在jni目录下创建com_rk_myapp_MainActivity.c文件,实现相关接口,具体实现细节可以参考代码中的注释:

#include <jni.h>

#include <stdlib.h>
#include <string.h>

#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libswscale/swscale.h>
#include <libavutil/imgutils.h>
#include <libavutil/log.h>

#include <android/log.h>
#include <android/bitmap.h>

#define LOG_TAG "FFMpeg_JNI"
// Use the named ANDROID_LOG_INFO priority instead of the magic number 4, and
// drop the trailing semicolon so LOGI can be used safely in un-braced
// if/else bodies (the old `...);;` expansion broke `if (x) LOGI(..); else`).
#define LOGI(...) __android_log_print(ANDROID_LOG_INFO, LOG_TAG, __VA_ARGS__)

// Global decoder state shared by the JNI entry points below.
// NOTE(review): nothing here is thread-safe; this assumes the Java side
// drives nativeInit/SetUri/Prepare/Start/Deinit from one thread -- confirm.
AVFormatContext *pFormatCtx;   // demuxer context for the opened file
int i, videoStream;            // loop counter / index of first video stream
AVCodecContext *pCodecCtxOrig; // codec context owned by pFormatCtx
AVCodecContext *pCodecCtx;     // private copy, opened for decoding
AVCodec *pCodec;               // decoder for the video stream
AVFrame *pFrame;               // decoded frame in the stream's native format
AVFrame *pFrameRGB;            // frame converted to RGBA for the Bitmap
int numBytes;                  // size in bytes of the RGBA buffer
uint8_t *buffer;               // backing storage for pFrameRGB
AVPacket packet;               // current demuxed packet
int frameFinished;             // set by the decoder when a frame is complete
struct SwsContext *sws_ctx;    // pixel-format converter (native -> RGBA)
char *uri;                     // UTF-8 path of the media file
jobject bitmapObj;             // global ref to the Bitmap handed to Java

/*
 * Log the entries of an AVDictionary through the Android logger.
 *
 * Mirrors FFmpeg's dump_metadata(): the whole dictionary is skipped when its
 * only entry is "language", individual "language" keys are skipped, and
 * values are split on control characters (\b \n \v \f \r) so each fragment
 * is logged on its own line.
 *
 * NOTE(review): the "andriod" spelling is kept because the caller below
 * uses this exact name.
 */
void dump_metadata_for_andriod(void *ctx, AVDictionary *m, const char *indent)
{
  // Nothing to print for an empty dict, or one whose sole entry is "language".
  if (!m || (av_dict_count(m) == 1 && av_dict_get(m, "language", NULL, 0)))
    return;

  AVDictionaryEntry *entry = NULL;

  LOGI("%sMetadata:\n", indent);
  while ((entry = av_dict_get(m, "", entry, AV_DICT_IGNORE_SUFFIX))) {
    if (!strcmp("language", entry->key))
      continue;  // language tags are not interesting here

    const char *cursor = entry->value;
    LOGI("%s %-16s: ", indent, entry->key);
    while (*cursor) {
      char chunk[256];
      // Length of the run up to the next control character.
      size_t run = strcspn(cursor, "\x8\xa\xb\xc\xd");
      av_strlcpy(chunk, cursor, FFMIN(sizeof(chunk), run + 1));
      av_log(ctx, AV_LOG_INFO, "%s", chunk);
      cursor += run;
      if (*cursor == 0xd) av_log(ctx, AV_LOG_INFO, " ");
      if (*cursor == 0xa) av_log(ctx, AV_LOG_INFO, "\n%s %-16s: ", indent, "");
      if (*cursor) cursor++;
    }
    LOGI("\n");
  }
}

/*
 * Log a summary of an AVFormatContext through the Android logger: container
 * format, metadata, duration, start time and overall bitrate.
 *
 * Android-side port of FFmpeg's av_dump_format(). Unlike the upstream
 * function it prints no per-stream details, so the per-stream "printed"
 * scratch array and the unused loop index were removed here -- the old copy
 * av_mallocz'd the array and leaked it on every call.
 */
void av_dump_format_for_android(AVFormatContext *ic, int index,
  const char *url, int is_output)
{
  LOGI("%s #%d, %s, %s '%s':\n",
       is_output ? "Output" : "Input",
       index,
       is_output ? ic->oformat->name : ic->iformat->name,
       is_output ? "to" : "from", url);
  dump_metadata_for_andriod(NULL, ic->metadata, " ");

  if (is_output)
    return; // output-side details are not needed by this app

  LOGI(" Duration: ");
  if (ic->duration != AV_NOPTS_VALUE) {
    int hours, mins, secs, us;
    // Round up by 5 ms (when that cannot overflow) before splitting,
    // matching av_dump_format().
    int64_t duration = ic->duration + (ic->duration <= INT64_MAX - 5000 ? 5000 : 0);
    secs = duration / AV_TIME_BASE;
    us = duration % AV_TIME_BASE;
    mins = secs / 60;
    secs %= 60;
    hours = mins / 60;
    mins %= 60;
    LOGI("%02d:%02d:%02d.%02d", hours, mins, secs,
         (100 * us) / AV_TIME_BASE);
  } else {
    LOGI("N/A");
  }
  if (ic->start_time != AV_NOPTS_VALUE) {
    int secs, us;
    LOGI(", start: ");
    secs = llabs(ic->start_time / AV_TIME_BASE);
    us = llabs(ic->start_time % AV_TIME_BASE);
    LOGI("%s%d.%06d",
         ic->start_time >= 0 ? "" : "-",
         secs,
         (int) av_rescale(us, 1000000, AV_TIME_BASE));
  }
  LOGI(", bitrate: ");
  if (ic->bit_rate) {
    LOGI("%"PRId64" kb/s", (int64_t)ic->bit_rate / 1000);
  } else {
    LOGI("N/A");
  }
  LOGI("\n");
}

/*
 * Class: com_rk_myapp_MainActivity
 * Method: nativeInit
 * Signature: ()V
 *
 * Resets every piece of global decoder state to a known-empty value and
 * registers FFmpeg's muxers/demuxers. Must be called before nativeSetUri()
 * and nativePrepare().
 */
JNIEXPORT void JNICALL Java_com_rk_myapp_MainActivity_nativeInit(JNIEnv *env, jobject obj)
{
  /* Demuxer / codec state. */
  pFormatCtx = NULL;
  pCodecCtxOrig = NULL;
  pCodecCtx = NULL;
  pCodec = NULL;

  /* Frame / conversion state. */
  pFrame = NULL;
  pFrameRGB = NULL;
  buffer = NULL;
  numBytes = 0;
  sws_ctx = NULL;

  /* Bookkeeping. */
  i = 0;
  videoStream = -1;
  frameFinished = 0;
  uri = NULL;
  /* bitmapObj is intentionally left untouched, as in the original code. */

  LOGI("nativeInit()");
  av_register_all();
}

/*
 * Class: com_rk_myapp_MainActivity
 * Method: nativeSetUri
 * Signature: (Ljava/lang/String;)V
 *
 * Stores the media file path used by the later nativePrepare() call.
 *
 * Fixes over the previous version:
 *  - GetStringUTFChars takes a jboolean* "isCopy" out-parameter; passing
 *    JNI_FALSE (integer 0) there only worked by accident. Pass NULL.
 *  - The returned UTF chars were never released, pinning JVM memory for the
 *    life of the process. The string is now copied with strdup() and the
 *    JNI buffer released immediately; any previously stored copy is freed.
 */
JNIEXPORT void JNICALL Java_com_rk_myapp_MainActivity_nativeSetUri(JNIEnv *env, jobject obj, jstring filePath)
{
  const char *utf = (*env)->GetStringUTFChars(env, filePath, NULL);
  if (utf == NULL)
  {
    LOGI("nativeSetUri(), GetStringUTFChars failed");
    return; // a pending OutOfMemoryError has been thrown on the Java side
  }

  free(uri);           // free(NULL) is a no-op on the first call
  uri = strdup(utf);
  (*env)->ReleaseStringUTFChars(env, filePath, utf);

  LOGI("nativeSetUri(), uri = %s", uri);
}

/*
 * Class: com_rk_myapp_MainActivity
 * Method: nativeGetFrameBitmap
 * Signature: ()Landroid/graphics/Bitmap;
 *
 * Returns the global Bitmap reference filled in by
 * createBitmap_for_android() during nativeStart(); may be an unset
 * reference if no frame has been produced yet.
 */
JNIEXPORT jobject JNICALL Java_com_rk_myapp_MainActivity_nativeGetFrameBitmap(JNIEnv *env, jobject obj)
{
  jobject result = bitmapObj;
  LOGI("nativeGetFrameBitmap()");
  return result;
}

/*
 * Class: com_rk_myapp_MainActivity
 * Method: nativePrepare
 * Signature: ()V
 *
 * Opens the file set by nativeSetUri(), locates the first video stream,
 * opens a decoder for it, and allocates the frames, pixel buffer and sws
 * context used by nativeStart() to produce an RGBA image. On any failure it
 * logs and returns; whatever was set up so far is released by nativeDeinit().
 *
 * Fixes over the previous version: results of avcodec_alloc_context3,
 * av_frame_alloc (pFrame), av_malloc and sws_getContext are now checked,
 * and videoStream is reset so a re-prepare after a failure starts clean.
 */
JNIEXPORT void JNICALL Java_com_rk_myapp_MainActivity_nativePrepare(JNIEnv *env, jobject obj)
{
  LOGI("nativePrepare()");

  // Open video file.
  if (avformat_open_input(&pFormatCtx, uri, NULL, NULL) != 0)
  {
    LOGI("nativePrepare(), Couldn't open file.");
    return;
  }

  // Retrieve stream information.
  if (avformat_find_stream_info(pFormatCtx, NULL) < 0)
  {
    LOGI("nativePrepare(), Couldn't find stream information.");
    return;
  }

  // Dump information about the file to the Android log.
  av_dump_format_for_android(pFormatCtx, 0, uri, 0);

  // Find the first video stream.
  videoStream = -1; // start clean even if a previous prepare ran
  for (i = 0; i < pFormatCtx->nb_streams; i++)
  {
    if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO)
    {
      videoStream = i;
      break;
    }
  }
  if (videoStream == -1)
  {
    LOGI("nativePrepare(), Didn't find a video stream.");
    return;
  }

  LOGI("nativePrepare(), find all stream num = %d", pFormatCtx->nb_streams);

  // Codec context for the video stream -- owned by pFormatCtx, never freed
  // directly.
  pCodecCtxOrig = pFormatCtx->streams[videoStream]->codec;

  // Find the decoder for the video stream.
  pCodec = avcodec_find_decoder(pCodecCtxOrig->codec_id);
  if (pCodec == NULL)
  {
    LOGI("nativePrepare(), Unsupported codec!");
    return;
  }

  // Decode through a private copy so the demuxer's context stays pristine.
  pCodecCtx = avcodec_alloc_context3(pCodec);
  if (pCodecCtx == NULL || avcodec_copy_context(pCodecCtx, pCodecCtxOrig) != 0)
  {
    LOGI("nativePrepare(), Couldn't copy codec context");
    return;
  }

  // Open codec.
  if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0)
  {
    LOGI("nativePrepare(), Could not open codec.");
    return;
  }

  // Allocate the decode frame and the RGBA destination frame.
  pFrame = av_frame_alloc();
  pFrameRGB = av_frame_alloc();
  if (pFrame == NULL || pFrameRGB == NULL) // pFrame was unchecked before
  {
    LOGI("nativePrepare(), Could not alloc for pFrameRGB.");
    return;
  }

  // Determine required buffer size and allocate the RGBA pixel buffer.
  numBytes = av_image_get_buffer_size(AV_PIX_FMT_RGBA, pCodecCtx->width, pCodecCtx->height, 1);
  buffer = (uint8_t *) av_malloc(numBytes * sizeof(uint8_t));
  if (buffer == NULL)
  {
    LOGI("nativePrepare(), Could not alloc frame buffer.");
    return;
  }

  // Point pFrameRGB's data/linesize at the buffer (replaces the deprecated
  // avpicture_fill()).
  av_image_fill_arrays(pFrameRGB->data, pFrameRGB->linesize, buffer, AV_PIX_FMT_RGBA, pCodecCtx->width, pCodecCtx->height, 1);

  // Initialize the SWS context for conversion from the native pixel format
  // to RGBA at the same resolution.
  sws_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height, AV_PIX_FMT_RGBA, SWS_BILINEAR, NULL, NULL, NULL);
  if (sws_ctx == NULL)
  {
    LOGI("nativePrepare(), Could not init sws context.");
  }
}

/*
 * Build an android.graphics.Bitmap (ARGB_8888) from the RGBA pixels in
 * pFrame and store a global reference to it in bitmapObj, for
 * nativeGetFrameBitmap() to hand back to Java.
 *
 * Path: Bitmap.Config.valueOf("ARGB_8888") -> Bitmap.createBitmap(w, h, cfg)
 * -> ByteBuffer.wrap(byte[]) -> bitmap.copyPixelsFromBuffer(buffer).
 *
 * Fixes over the previous version:
 *  - a Bitmap global reference from an earlier call is now released before
 *    being overwritten (it used to leak on every call);
 *  - local references created here are deleted before returning, which
 *    matters because this runs inside the nativeStart() decode loop.
 */
void createBitmap_for_android(JNIEnv *env, AVFrame *pFrame, int width, int height)
{
  LOGI("createBitmap_for_android(), bitmap width = %d, height = %d", width, height);

  // Resolve Bitmap.Config.ARGB_8888 via Config.valueOf("ARGB_8888").
  // NOTE: in C the JNIEnv must be passed explicitly: (*env)->Fn(env, ...).
  jclass bitmapConfig = (*env)->FindClass(env, "android/graphics/Bitmap$Config");
  jmethodID argbGetValueMethodID = (*env)->GetStaticMethodID(env, bitmapConfig, "valueOf", "(Ljava/lang/String;)Landroid/graphics/Bitmap$Config;");
  jstring argbName = (*env)->NewStringUTF(env, "ARGB_8888");
  jobject argbObj = (*env)->CallStaticObjectMethod(env, bitmapConfig, argbGetValueMethodID, argbName);

  LOGI("test 1");
  // Bitmap.createBitmap(int, int, Config) and Bitmap.copyPixelsFromBuffer(Buffer).
  jclass bitmapClass = (*env)->FindClass(env, "android/graphics/Bitmap");
  jmethodID createBitmapMethodID = (*env)->GetStaticMethodID(env, bitmapClass, "createBitmap", "(IILandroid/graphics/Bitmap$Config;)Landroid/graphics/Bitmap;");
  jmethodID copyByteMethodID = (*env)->GetMethodID(env, bitmapClass, "copyPixelsFromBuffer","(Ljava/nio/Buffer;)V");

  LOGI("test 2");
  // NOTE: the "env" argument must not be omitted -- the code compiles but
  // fails at runtime without it.
  jobject bitmapObjLocal = (*env)->CallStaticObjectMethod(env, bitmapClass, createBitmapMethodID, width, height, argbObj);

  LOGI("test 3");
  // Promote to a global reference so Java can keep the object after this
  // native frame returns ("use of deleted local reference" otherwise).
  // Drop the bitmap from any previous call first so it doesn't leak.
  if (bitmapObj != NULL)
  {
    (*env)->DeleteGlobalRef(env, bitmapObj);
  }
  bitmapObj = (*env)->NewGlobalRef(env, bitmapObjLocal);

  jclass byteBufferClass = (*env)->FindClass(env, "java/nio/ByteBuffer");
  jmethodID wrapBufferMethodID = (*env)->GetStaticMethodID(env, byteBufferClass, "wrap", "([B)Ljava/nio/ByteBuffer;");

  LOGI("test 4");
  // Copy the RGBA plane into a Java byte[] and wrap it in a ByteBuffer.
  jbyteArray array = (*env)->NewByteArray(env, width * height * 4);
  (*env)->SetByteArrayRegion(env, array, 0, width * height * 4, (jbyte *)(pFrame->data[0]));

  jobject byteBufferObj = (*env)->CallStaticObjectMethod(env, byteBufferClass, wrapBufferMethodID, array);

  LOGI("test 5");
  // copyPixelsFromBuffer returns void, hence CallVoidMethod.
  (*env)->CallVoidMethod(env, bitmapObj, copyByteMethodID, byteBufferObj);
  LOGI("test 6");

  // Release local references created in this call.
  (*env)->DeleteLocalRef(env, argbName);
  (*env)->DeleteLocalRef(env, argbObj);
  (*env)->DeleteLocalRef(env, bitmapObjLocal);
  (*env)->DeleteLocalRef(env, array);
  (*env)->DeleteLocalRef(env, byteBufferObj);
}

/*
 * Class: com_rk_myapp_MainActivity
 * Method: nativeStart
 * Signature: ()V
 *
 * Reads packets from the prepared file, decodes the selected video stream,
 * and when the TARGET_FRAME-th complete frame arrives converts it to RGBA
 * and publishes it as an Android Bitmap (createBitmap_for_android), then
 * stops reading.
 *
 * Fix over the previous version: the return value of avcodec_decode_video2
 * is now checked, so a decode error no longer risks treating stale frame
 * data as a finished frame.
 */
JNIEXPORT void JNICALL Java_com_rk_myapp_MainActivity_nativeStart(JNIEnv *env, jobject obj)
{
  // Grab the 10th decoded frame; skipping the first few avoids a black or
  // blank leading picture in many files.
  const int TARGET_FRAME = 10;

  LOGI("nativeStart()");
  i = 0; // global counter of complete frames seen so far
  while (av_read_frame(pFormatCtx, &packet) >= 0)
  {
    LOGI("test 7");
    // Only feed packets that belong to the selected video stream.
    if (packet.stream_index == videoStream)
    {
      // Decode the packet; on error skip it instead of trusting the frame.
      if (avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet) < 0)
      {
        frameFinished = 0;
      }

      // Did we get a complete video frame?
      if (frameFinished)
      {
        // Convert the image from its native pixel format to RGBA.
        sws_scale(sws_ctx, (uint8_t const * const *) pFrame->data, pFrame->linesize, 0, pCodecCtx->height, pFrameRGB->data, pFrameRGB->linesize);

        if (++i == TARGET_FRAME)
        {
          // Publish the frame as a Bitmap and stop reading.
          createBitmap_for_android(env, pFrameRGB, pCodecCtx->width, pCodecCtx->height);
          av_packet_unref(&packet);
          break;
        }
        else
        {
          frameFinished = 0;
        }
      }
    }

    // Free the packet allocated by av_read_frame
    // (av_packet_unref replaces the deprecated av_free_packet).
    LOGI("test 8");
    av_packet_unref(&packet);
    LOGI("test 9");
  }
}

/*
 * Class: com_rk_myapp_MainActivity
 * Method: nativeDeinit
 * Signature: ()V
 *
 * Releases everything allocated by nativePrepare(). All pointers are
 * NULL-checked and reset, so calling this after a partially failed prepare
 * -- or calling it twice -- is safe; the old version crashed on
 * avcodec_close(NULL) in those cases. Also fixes two leaks: sws_ctx was
 * never freed, and the context from avcodec_alloc_context3 was closed but
 * never freed.
 */
JNIEXPORT void JNICALL Java_com_rk_myapp_MainActivity_nativeDeinit(JNIEnv *env, jobject obj)
{
  LOGI("nativeDeinit()");

  // Free the RGBA pixel buffer and both frames (av_free/av_frame_free
  // tolerate NULL, and av_frame_free NULLs the pointer itself).
  av_free(buffer);
  buffer = NULL;
  av_frame_free(&pFrameRGB);
  av_frame_free(&pFrame);

  // Free the scaler context.
  if (sws_ctx != NULL)
  {
    sws_freeContext(sws_ctx);
    sws_ctx = NULL;
  }

  // Close and free our private codec context.
  if (pCodecCtx != NULL)
  {
    avcodec_close(pCodecCtx);
    avcodec_free_context(&pCodecCtx);
  }
  // pCodecCtxOrig is owned by pFormatCtx: close it but do not free it.
  if (pCodecCtxOrig != NULL)
  {
    avcodec_close(pCodecCtxOrig);
    pCodecCtxOrig = NULL;
  }

  // Close the video file (avformat_close_input handles a NULL context and
  // resets the pointer).
  avformat_close_input(&pFormatCtx);
}

2. jni目录下创建Android.mk和Application.mk文件

Android.mk:

# Root of this jni/ directory; all paths below are relative to it.
LOCAL_PATH := $(call my-dir)

# FFmpeg library
# Each block registers one prebuilt FFmpeg .so (from jni/lib) so the NDK
# copies it into the APK and links the JNI module against it.
include $(CLEAR_VARS)
LOCAL_MODULE := avcodec
LOCAL_SRC_FILES := lib/libavcodec-57.so
include $(PREBUILT_SHARED_LIBRARY)

include $(CLEAR_VARS)
LOCAL_MODULE := avfilter
LOCAL_SRC_FILES := lib/libavfilter-6.so
include $(PREBUILT_SHARED_LIBRARY)

include $(CLEAR_VARS)
LOCAL_MODULE := avformat
LOCAL_SRC_FILES := lib/libavformat-57.so
include $(PREBUILT_SHARED_LIBRARY)

include $(CLEAR_VARS)
LOCAL_MODULE := avutil
LOCAL_SRC_FILES := lib/libavutil-55.so
include $(PREBUILT_SHARED_LIBRARY)

include $(CLEAR_VARS)
LOCAL_MODULE := swresample
LOCAL_SRC_FILES := lib/libswresample-2.so
include $(PREBUILT_SHARED_LIBRARY)

include $(CLEAR_VARS)
LOCAL_MODULE := swscale
LOCAL_SRC_FILES := lib/libswscale-4.so
include $(PREBUILT_SHARED_LIBRARY)

# Program
# The JNI module itself: compiles the C source above, pulls headers from
# jni/include, links the Android logger and zlib, and depends on the
# prebuilt FFmpeg modules registered above.
include $(CLEAR_VARS)
LOCAL_MODULE := FFMpegJni
LOCAL_SRC_FILES := com_rk_myapp_MainActivity.c
LOCAL_C_INCLUDES += $(LOCAL_PATH)/include
LOCAL_LDLIBS := -llog -lz
LOCAL_SHARED_LIBRARIES := avcodec avfilter avformat avutil swresample swscale
include $(BUILD_SHARED_LIBRARY)

Application.mk

APP_ABI=armeabi


4.2 使用NDK编译native源码

1. 配置编译工具目录,在".bashrc"文件末尾添加如下两行,为PATH变量添加NDK路径

export ANDROID_NATIVE=~/Android/android-ndk-r14b
export PATH=$PATH:$ANDROID_NATIVE


2.  在Android工程目录下运行命令编译native源码

ndk-build


3. 在Android工程下libs/armeabi目录下生成相应so库文件



4. 在Android工程下运行命令,生成apk,所生成的apk在Android工程的bin目录下,我的apk为MyFirstApp-debug.apk

ant debug



评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值