Self-study FFmpeg: decoding video on Android (decode)

We previously built FFmpeg on Linux, and here we only use the decoder part of it.
On the Linux machine, run  zip -r android.zip android  to pack the generated android directory into an android.zip archive.

The android package contains the headers and the .so libraries built for the target CPU.

Create a new Android project and switch the native build from the original CMake to ndk-build.
build.gradle:
apply plugin: 'com.android.application'

android {
    compileSdkVersion 26
    defaultConfig {
        applicationId "com.boom.do_ffmpeg_player"
        minSdkVersion 15
        targetSdkVersion 24
        versionCode 1
        versionName "1.0"
    }
    buildTypes {
        debug {
            jniDebuggable true
        }
        release {
            minifyEnabled false
            proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
        }
    }
    externalNativeBuild {
        ndkBuild {
            path file("src/main/jni/Android.mk")
        }
    }
    sourceSets.main {
        jni.srcDirs = []
        jniLibs.srcDir 'src/main/libs'
    }
}

dependencies {
    implementation fileTree(dir: 'libs', include: ['*.jar'])
    implementation 'com.android.support:appcompat-v7:26.1.0'
}
Here we use the .so libraries we built ourselves. First, set up what the project needs.
1) Create a jni directory and copy in the include folder and the .so libraries from the lib folder of the android archive downloaded from the Linux machine, as laid out below.
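A layout along these lines is assumed under src/main (the exact .so file names depend on the FFmpeg version you built; the ones below match the libraries loaded in VideoUtils and referenced in Android.mk later on, and the source files are the ones created in the following steps):

src/main/jni/
    Android.mk
    Application.mk
    com_boom_doffmpegplayer_VideoUtils.h
    do_ffmpeg_player.c
    include/            (libavcodec, libavformat, libavutil, libswscale, ...)
    libavcodec-56.so
    libavformat-56.so
    libavutil-54.so
    libswresample-1.so
    libswscale-3.so
    libpostproc-53.so
    libavfilter-5.so
    libavdevice-56.so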

2) Create VideoUtils, which exposes the native decode method:
package com.boom.doffmpegplayer;

/**
 * Created by Boom on 2017/12/5.
 */

public class VideoUtils {

    public static native void decode(String input, String output);

    static{
        System.loadLibrary("avutil-54");
        System.loadLibrary("swresample-1");
        System.loadLibrary("avcodec-56");
        System.loadLibrary("avformat-56");
        System.loadLibrary("swscale-3");
        System.loadLibrary("postproc-53");
        System.loadLibrary("avfilter-5");
        System.loadLibrary("avdevice-56");
        System.loadLibrary("myffmpeg");
    }
}
3) Generate the JNI header com_boom_doffmpegplayer_VideoUtils.h with javah (a sample command follows the header):
/* DO NOT EDIT THIS FILE - it is machine generated */
#include "jni.h"
/* Header for class com_boom_doffmpegplayer_VideoUtils */

#ifndef _Included_com_boom_doffmpegplayer_VideoUtils
#define _Included_com_boom_doffmpegplayer_VideoUtils
#ifdef __cplusplus
extern "C" {
#endif
/*
 * Class:     com_boom_doffmpegplayer_VideoUtils
 * Method:    decode
 * Signature: (Ljava/lang/String;Ljava/lang/String;)V
 */
JNIEXPORT void JNICALL Java_com_boom_doffmpegplayer_VideoUtils_decode
  (JNIEnv *, jclass, jstring, jstring);

#ifdef __cplusplus
}
#endif
#endif
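The header above is produced by javah from the compiled VideoUtils class. A typical invocation from the app module directory looks like this (the classes path is an assumption and depends on your Gradle version and output layout):

javah -d src/main/jni -classpath build/intermediates/classes/debug com.boom.doffmpegplayer.VideoUtils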
4) Create do_ffmpeg_player.c and implement the function declared in the header:
#include "com_boom_doffmpegplayer_VideoUtils.h"
#include <android/log.h>

//for fopen/fwrite on the output file
#include <stdio.h>

//codec (decoding)
#include "include/libavcodec/avcodec.h"
//container format handling (demuxing)
#include "include/libavformat/avformat.h"
//pixel format conversion / scaling
#include "include/libswscale/swscale.h"

#define LOGI(FORMAT,...) __android_log_print(ANDROID_LOG_INFO,"jason",FORMAT,##__VA_ARGS__);
#define LOGE(FORMAT,...) __android_log_print(ANDROID_LOG_ERROR,"jason",FORMAT,##__VA_ARGS__);

JNIEXPORT void JNICALL Java_com_boom_doffmpegplayer_VideoUtils_decode
  (JNIEnv *env, jclass jcls, jstring input_jstr, jstring output_jstr){
    //input video file to decode, and the output path for the raw YUV data
    const char* input_cstr = (*env)->GetStringUTFChars(env, input_jstr, NULL);
    const char* output_cstr = (*env)->GetStringUTFChars(env, output_jstr, NULL);

    //1. register all components (demuxers, decoders, ...)
    av_register_all();

    //format context: the top-level struct holding information about the container format
    AVFormatContext *pFormatCtx = avformat_alloc_context();

    //2. open the input video file
    if (avformat_open_input(&pFormatCtx, input_cstr, NULL, NULL) != 0)
    {
        LOGE("%s", "could not open the input video file");
        return;
    }

    //3. read the stream information
    if (avformat_find_stream_info(pFormatCtx, NULL) < 0)
    {
        LOGE("%s", "could not read the stream information");
        return;
    }

    //find the index of the video stream:
    //iterate over all streams (audio, video, subtitles) and pick the video one
    int v_stream_idx = -1;
    int i = 0;
    //nb_streams = number of streams
    for (; i < pFormatCtx->nb_streams; i++)
    {
        //check the type of the stream
        if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO)
        {
            v_stream_idx = i;
            break;
        }
    }

    if (v_stream_idx == -1)
    {
        LOGE("%s", "no video stream found\n");
        return;
    }

    //the decoder can only be chosen once the codec id of the stream is known,
    //so get the codec context of the video stream first
    AVCodecContext *pCodecCtx = pFormatCtx->streams[v_stream_idx]->codec;
    //4. find the decoder matching the codec id in the codec context
    AVCodec *pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
    //(some players, e.g. Xunlei Kankan, download a decoder on demand when none is found)
    if (pCodec == NULL)
    {
        LOGE("%s", "decoder not found\n");
        return;
    }

    //5. open the decoder
    if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0)
    {
        LOGE("%s", "could not open the decoder\n");
        return;
    }

    //print some information about the video
    LOGI("container format: %s", pFormatCtx->iformat->name);
    LOGI("duration (s): %d", (int)(pFormatCtx->duration / AV_TIME_BASE));
    LOGI("width x height: %d x %d", pCodecCtx->width, pCodecCtx->height);
    LOGI("decoder name: %s", pCodec->name);

    //prepare for reading
    //AVPacket stores one chunk of compressed data (e.g. H.264) at a time
    AVPacket *packet = (AVPacket*)av_malloc(sizeof(AVPacket));

    //AVFrame stores the decoded pixel data (YUV)
    AVFrame *pFrame = av_frame_alloc();
    //frame for the converted YUV420P output
    AVFrame *pFrameYUV = av_frame_alloc();
    //an AVFrame only gets a real buffer once pixel format and picture size are known,
    //so allocate the output buffer explicitly
    uint8_t *out_buffer = (uint8_t *)av_malloc(avpicture_get_size(AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height));
    //wire the buffer into pFrameYUV's data/linesize fields
    avpicture_fill((AVPicture *)pFrameYUV, out_buffer, AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height);

    //scaler/converter context: source width/height/pixel format -> destination width/height/pixel format
    struct SwsContext *sws_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt,
        pCodecCtx->width, pCodecCtx->height, AV_PIX_FMT_YUV420P,
        SWS_BICUBIC, NULL, NULL, NULL);

    int got_picture, ret;

    FILE *fp_yuv = fopen(output_cstr, "wb+");

    int frame_count = 0;

    //6. read the compressed data one packet at a time
    while (av_read_frame(pFormatCtx, packet) >= 0)
    {
        //only handle packets belonging to the video stream (check the stream index)
        if (packet->stream_index == v_stream_idx)
        {
            //7. decode one packet of compressed video data into pixel data
            ret = avcodec_decode_video2(pCodecCtx, pFrame, &got_picture, packet);
            if (ret < 0)
            {
                LOGE("%s", "decoding error");
                return;
            }

            //got_picture is non-zero when a complete frame has been decoded
            if (got_picture)
            {
                //convert the decoded frame to YUV420P
                //args 2/6: input/output data planes
                //args 3/7: input/output line sizes (the conversion works line by line)
                //arg 4: first row of the input to process (start at 0)
                //arg 5: height of the input picture
                sws_scale(sws_ctx, pFrame->data, pFrame->linesize, 0, pCodecCtx->height,
                    pFrameYUV->data, pFrameYUV->linesize);

                //write the frame to the YUV file
                //data holds the decoded pixel data (for audio it would hold samples)
                //Y = luma, U/V = chroma; chroma is subsampled because the eye is more sensitive to luma
                //in YUV420P, U and V each have 1/4 as many samples as Y
                int y_size = pCodecCtx->width * pCodecCtx->height;
                fwrite(pFrameYUV->data[0], 1, y_size, fp_yuv);
                fwrite(pFrameYUV->data[1], 1, y_size / 4, fp_yuv);
                fwrite(pFrameYUV->data[2], 1, y_size / 4, fp_yuv);

                frame_count++;
                LOGI("decoded frame %d", frame_count);
            }
        }

        //release the packet buffer before reading the next one
        av_free_packet(packet);
    }

    fclose(fp_yuv);

    (*env)->ReleaseStringUTFChars(env, input_jstr, input_cstr);
    (*env)->ReleaseStringUTFChars(env, output_jstr, output_cstr);

    //free the scaler, frames, buffers and the packet struct
    sws_freeContext(sws_ctx);
    av_frame_free(&pFrameYUV);
    av_frame_free(&pFrame);
    av_free(out_buffer);
    av_free(packet);

    avcodec_close(pCodecCtx);

    //closes the input and frees the format context
    avformat_close_input(&pFormatCtx);

}
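Note that avcodec_decode_video2, avpicture_get_size and avpicture_fill belong to the FFmpeg 2.x libraries built here and are deprecated in later releases. For reference only, on FFmpeg 3.1+ the decode step would look roughly like this (a sketch, not used in this project):

    //send one compressed packet to the decoder
    ret = avcodec_send_packet(pCodecCtx, packet);
    while (ret >= 0) {
        //pull decoded frames out until the decoder asks for more input
        ret = avcodec_receive_frame(pCodecCtx, pFrame);
        if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF)
            break;
        //...sws_scale() and fwrite() the converted frame exactly as in the loop above...
    }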
5) Android.mk:
LOCAL_PATH := $(call my-dir)

#ffmpeg lib
include $(CLEAR_VARS)
LOCAL_MODULE := avcodec
LOCAL_SRC_FILES := libavcodec-56.so
include $(PREBUILT_SHARED_LIBRARY)

include $(CLEAR_VARS)
LOCAL_MODULE := avdevice
LOCAL_SRC_FILES := libavdevice-56.so
include $(PREBUILT_SHARED_LIBRARY)

include $(CLEAR_VARS)
LOCAL_MODULE := avfilter
LOCAL_SRC_FILES := libavfilter-5.so
include $(PREBUILT_SHARED_LIBRARY)

include $(CLEAR_VARS)
LOCAL_MODULE := avformat
LOCAL_SRC_FILES := libavformat-56.so
include $(PREBUILT_SHARED_LIBRARY)

include $(CLEAR_VARS)
LOCAL_MODULE := avutil
LOCAL_SRC_FILES := libavutil-54.so
include $(PREBUILT_SHARED_LIBRARY)


include $(CLEAR_VARS)
LOCAL_MODULE := postproc
LOCAL_SRC_FILES := libpostproc-53.so
include $(PREBUILT_SHARED_LIBRARY)

include $(CLEAR_VARS)
LOCAL_MODULE := swresample
LOCAL_SRC_FILES := libswresample-1.so
include $(PREBUILT_SHARED_LIBRARY)

include $(CLEAR_VARS)
LOCAL_MODULE := swscale
LOCAL_SRC_FILES := libswscale-3.so
include $(PREBUILT_SHARED_LIBRARY)

#myapp
include $(CLEAR_VARS)
LOCAL_MODULE := myffmpeg
LOCAL_SRC_FILES := do_ffmpeg_player.c
LOCAL_C_INCLUDES += $(LOCAL_PATH)/include
LOCAL_LDLIBS := -llog
LOCAL_SHARED_LIBRARIES := avcodec avdevice avfilter avformat avutil postproc swresample swscale
include $(BUILD_SHARED_LIBRARY)
6) Application.mk:
APP_ABI := all
APP_PLATFORM := android-8
APP_OPTIM := debug
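Because externalNativeBuild points at Android.mk, Gradle drives ndk-build during a normal build. The libraries can also be built by hand so that they land in src/main/libs (which is where jniLibs.srcDir points); assuming ndk-build is on the PATH and the module is called app:

cd app/src/main
ndk-build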
7) MainActivity for a quick test:
package com.boom.doffmpegplayer;

import android.app.Activity;
import android.os.Bundle;
import android.os.Environment;
import android.view.View;

import java.io.File;

public class MainActivity extends Activity {
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
    }
    public void AvDecode2Yuv(View btn){
        new Thread(new Runnable() {
            @Override
            public void run() {
                final String input = new File(Environment.getExternalStorageDirectory(),"cuc_ieschool.mp4").getAbsolutePath();
                final String output = new File(Environment.getExternalStorageDirectory(),"output.yuv").getAbsolutePath();
                VideoUtils.decode(input, output);
            }
        }).start();
    }
}
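The click handler above is wired up through android:onClick in the layout. A minimal activity_main.xml sketch (the button text is a placeholder, not taken from the original project):

<?xml version="1.0" encoding="utf-8"?>
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
    android:layout_width="match_parent"
    android:layout_height="match_parent"
    android:orientation="vertical">

    <Button
        android:layout_width="wrap_content"
        android:layout_height="wrap_content"
        android:onClick="AvDecode2Yuv"
        android:text="decode to yuv" />
</LinearLayout>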
8) Result: logcat prints the per-frame decoding progress ("decoded frame N") and output.yuv is written to the sdcard root.
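The raw output.yuv can be pulled to a desktop machine and inspected with ffplay. A raw YUV file has no header, so the pixel format and frame size must be passed explicitly; 512x288 below is only an example and has to match the actual source video:

ffplay -f rawvideo -pixel_format yuv420p -video_size 512x288 output.yuv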

9) Add the required permissions to AndroidManifest.xml:
    <uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE" />
    <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
    <uses-permission android:name="android.permission.MOUNT_UNMOUNT_FILESYSTEMS" />
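On Android 6.0+ (this project targets API 24), WRITE_EXTERNAL_STORAGE also has to be granted at runtime, otherwise fopen on the sdcard path fails. A minimal sketch using the support library, for example in onCreate (the request code 1 is arbitrary):

    // requires: import android.Manifest;
    //           import android.content.pm.PackageManager;
    //           import android.support.v4.app.ActivityCompat;
    //           import android.support.v4.content.ContextCompat;
    if (ContextCompat.checkSelfPermission(this, Manifest.permission.WRITE_EXTERNAL_STORAGE)
            != PackageManager.PERMISSION_GRANTED) {
        ActivityCompat.requestPermissions(this,
                new String[]{Manifest.permission.WRITE_EXTERNAL_STORAGE}, 1);
    }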

