Audio/Video Playback on Android with FFmpeg (Part 1)

This post is where we enter the deep end: compiling C/C++ libraries, configuring the NDK, writing native code, and building the app. It is exhausting work, full of pitfalls. The main topic here is decoding a video file with FFmpeg and rendering it on Android.

Compiling FFmpeg

Refer to "FFmpeg的Android平台移植—编译篇" (an article on porting FFmpeg to Android). The points to watch when compiling are the FFmpeg version, writing the build script, and trimming unused components from the libraries.

Writing the Android Code

A SurfaceView is used to play a local video file. The entry point is mPlay(), which starts playback on a worker thread:

player = new YoungPlayer();

public void mPlay(View btn){
        String video = sp_video.getSelectedItem().toString();
        final String input = new File(Environment.getExternalStorageDirectory(),video).getAbsolutePath();
        //The Surface is passed into the native function for drawing
        final Surface surface = videoView.getHolder().getSurface();
        new Thread(new Runnable() {
            public void run() {
                player.render(input,surface);
            }
        }).start();
    }

The custom VideoView that provides the Surface is defined as follows:

public class VideoView extends SurfaceView {

    public VideoView(Context context) {
        super(context);
        init();
    }

    public VideoView(Context context, AttributeSet attrs) {
        super(context, attrs);
        init();
    }

    public VideoView(Context context, AttributeSet attrs, int defStyle) {
        super(context, attrs, defStyle);
        init();
    }

    private void init(){
        //Initialization: set the pixel format the SurfaceView draws with
        SurfaceHolder holder = getHolder();
        holder.setFormat(PixelFormat.RGBA_8888);
    }
}

The native operations are exposed through a proxy class. Its static block loads the FFmpeg shared libraries in dependency order before loading our own wrapper library:

public class YoungPlayer {

    public native void render(String input,Surface surface);

    public native void sound(String input,String output);

    public native void play(String input,Surface surface);

    static{
        System.loadLibrary("avutil-54");
        System.loadLibrary("swresample-1");
        System.loadLibrary("avcodec-56");
        System.loadLibrary("avformat-56");
        System.loadLibrary("swscale-3");
        System.loadLibrary("postproc-53");
        System.loadLibrary("avfilter-5");
        System.loadLibrary("avdevice-56");
        System.loadLibrary("yuv");
        System.loadLibrary("myffmpeg");
    }
}

So all that remains is to implement the native methods declared in the proxy class YoungPlayer. The header file can be generated with the javah command (its invocation differs across JDK versions), or the header com_yang_ffmpegDemo_YoungPlayer.h can be written by hand following JNI naming conventions.
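For reference, a minimal hand-written sketch of that header is shown below. The function names are derived mechanically from the package and class name (here assumed to be com.yang.ffmpegDemo.YoungPlayer) and must match exactly, or the runtime will not find the native implementations:

/* com_yang_ffmpegDemo_YoungPlayer.h - hand-written JNI header (sketch) */
#ifndef _Included_com_yang_ffmpegDemo_YoungPlayer
#define _Included_com_yang_ffmpegDemo_YoungPlayer
#include <jni.h>
#ifdef __cplusplus
extern "C" {
#endif

/* matches: public native void render(String input, Surface surface); */
JNIEXPORT void JNICALL Java_com_yang_ffmpegDemo_YoungPlayer_render
  (JNIEnv *, jobject, jstring, jobject);

/* matches: public native void sound(String input, String output); */
JNIEXPORT void JNICALL Java_com_yang_ffmpegDemo_YoungPlayer_sound
  (JNIEnv *, jobject, jstring, jstring);

/* matches: public native void play(String input, Surface surface); */
JNIEXPORT void JNICALL Java_com_yang_ffmpegDemo_YoungPlayer_play
  (JNIEnv *, jobject, jstring, jobject);

#ifdef __cplusplus
}
#endif
#endif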

Writing the C/C++ Decode-and-Render Implementation

Write yang_video_player.c and decode the video file step by step (please ignore all the LOGs >_<). Decoding generally follows the flow in the figure:
(figure: the standard FFmpeg decode flow, from registering components and opening the input, through reading packets, to decoding frames)

#include "com_yang_ffmpegDemo_YoungPlayer.h"
#include <stdlib.h>
#include <stdio.h>
#include <unistd.h>
#include <android/log.h>
#include <android/native_window_jni.h>
#include <android/native_window.h>
#define LOGI(FORMAT,...) __android_log_print(ANDROID_LOG_INFO,"yang",FORMAT,##__VA_ARGS__);
#define LOGE(FORMAT,...) __android_log_print(ANDROID_LOG_ERROR,"yang",FORMAT,##__VA_ARGS__);
#include "libyuv.h"

//封装格式
#include "libavformat/avformat.h"
//解码
#include "libavcodec/avcodec.h"
//缩放
#include "libswscale/swscale.h"



JNIEXPORT void JNICALL Java_com_yang_ffmpegDemo_YoungPlayer_render
(JNIEnv *env, jobject jobj, jstring input_jstr, jobject surface){
    const char* input_cstr = (*env)->GetStringUTFChars(env,input_jstr,NULL);
    //1. Register all components (demuxers, codecs)
    av_register_all();

    //Container format context
    AVFormatContext *pFormatCtx = avformat_alloc_context();

    //2. Open the input video file
    if(avformat_open_input(&pFormatCtx,input_cstr,NULL,NULL) != 0){
        LOGE("%s","Failed to open the input video file");
        (*env)->ReleaseStringUTFChars(env,input_jstr,input_cstr);
        return;
    }
    //3. Retrieve stream information
    if(avformat_find_stream_info(pFormatCtx,NULL) < 0){
        LOGE("%s","Failed to retrieve stream information");
        (*env)->ReleaseStringUTFChars(env,input_jstr,input_cstr);
        return;
    }

    int64_t duration = pFormatCtx->duration;
    int bit_rate = pFormatCtx->bit_rate;
    LOGI("视频时长%llu:%llu",duration/1000000/60,duration/1000000%60);
    LOGI("比特率%dkps",bit_rate/1000);

    //To decode the video we need the index of the video AVStream in pFormatCtx->streams
    int video_stream_idx = -1;
    int i = 0;
    for(; i < pFormatCtx->nb_streams;i++){
        //Check the stream type to find the video stream
        if(pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO){
            video_stream_idx = i;
            break;
        }
    }
    if(video_stream_idx == -1){
        LOGE("%s","No video stream found");
        (*env)->ReleaseStringUTFChars(env,input_jstr,input_cstr);
        return;
    }
    //4. Find the decoder for the video stream
    AVCodecContext *pCodeCtx = pFormatCtx->streams[video_stream_idx]->codec;
    AVStream *avstream = pFormatCtx->streams[video_stream_idx];
    AVCodec *pCodec = avcodec_find_decoder(pCodeCtx->codec_id);
    if(pCodec == NULL){
        LOGE("%s","No suitable decoder found");
        (*env)->ReleaseStringUTFChars(env,input_jstr,input_cstr);
        return;
    }

    //5. Open the decoder
    if(avcodec_open2(pCodeCtx,pCodec,NULL) < 0){
        LOGE("%s","Failed to open the decoder");
        (*env)->ReleaseStringUTFChars(env,input_jstr,input_cstr);
        return;
    }

    //Encoded (compressed) data
    AVPacket *packet = (AVPacket *)av_malloc(sizeof(AVPacket));

    //Pixel data (decoded frames)
    AVFrame *yuv_frame = av_frame_alloc();
    AVFrame *rgb_frame = av_frame_alloc();

    //Native drawing
    //The window that backs the Surface
    ANativeWindow* nativeWindow = ANativeWindow_fromSurface(env,surface);
    //Buffer used while drawing
    ANativeWindow_Buffer outBuffer;
    AVRational framerate = pCodeCtx->framerate;
    AVRational time_base = pCodeCtx->time_base;

    //Sample aspect ratio
    AVRational sample_aspect_ratio = pCodeCtx->sample_aspect_ratio;

    //Dimensions of the player view
    int32_t width = ANativeWindow_getWidth(nativeWindow);
    int32_t height = ANativeWindow_getHeight(nativeWindow);

    //Dimensions we render at (here, the video's own size)
    int showWidth = pCodeCtx->width;
    int showHeight = pCodeCtx->height;

    LOGI("sample_aspect_ratio %d/%d",sample_aspect_ratio.num,sample_aspect_ratio.den);
    LOGI("showWidth %d",showWidth);
    LOGI("showHeight %d",showHeight);



    int len, got_frame, framecount = 0;
    //6. Read the compressed data (AVPacket) frame by frame
    while(av_read_frame(pFormatCtx,packet) >= 0){
        if(packet->stream_index == video_stream_idx){
            //Decode AVPacket -> AVFrame
            len = avcodec_decode_video2(pCodeCtx, yuv_frame, &got_frame, packet);

            //got_frame is zero if no frame could be decompressed,
            //non-zero when a complete frame was decoded
            if(got_frame){
                LOGI("packet大小%hhu",packet->data);
                LOGI("packet显示时间戳%llu",packet->pts);
                LOGI("width%d",yuv_frame->width);
                LOGI("height%d",yuv_frame->height);
                LOGI("控件width%d",width);
                LOGI("控件height%d",height);


                LOGI("packet显示时间%llu",packet->pts*1000*(avstream->time_base.num)/(avstream->time_base.den));


                LOGI("avstream->time_base.den%d",avstream->time_base.den);
                LOGI("avstream->time_base.num%d",avstream->time_base.num);

                LOGI("解码后原始数据类型%d",yuv_frame->format);
                LOGI("是否是关键帧%d",yuv_frame->key_frame);
                LOGI("宽高比%d",yuv_frame->sample_aspect_ratio.den);
                LOGI("编码帧序号%d",yuv_frame->coded_picture_number);
                LOGI("显示帧序号%d",yuv_frame->display_picture_number);



                LOGI("packet解码时间戳%llu",packet->dts/1000);


                LOGI("解码%d帧 开始",framecount);
                LOGI("帧数%d",framerate.num);
                LOGI("时间num%d",time_base.num);
                LOGI("时间den%d",time_base.den);
                //lock
                //Set the buffer geometry (width, height, pixel format)
                ANativeWindow_setBuffersGeometry(nativeWindow, showWidth, showHeight,WINDOW_FORMAT_RGBA_8888);
                ANativeWindow_lock(nativeWindow,&outBuffer,NULL);
                //Point rgb_frame at the window buffer: after avpicture_fill,
                //rgb_frame's buffer and outBuffer.bits are the same memory, so
                //the conversion below writes straight into the window
                //(this assumes outBuffer.stride == showWidth)
                avpicture_fill((AVPicture *)rgb_frame, outBuffer.bits, AV_PIX_FMT_RGBA, showWidth, showHeight);


                //YUV420P -> RGBA_8888
                //Note: the U and V planes are deliberately swapped, because libyuv's
                //"ARGB" output is B,G,R,A in memory; swapping the chroma planes
                //yields the R,G,B,A byte order the RGBA_8888 window expects
                I420ToARGB(yuv_frame->data[0],yuv_frame->linesize[0],
                        yuv_frame->data[2],yuv_frame->linesize[2],
                        yuv_frame->data[1],yuv_frame->linesize[1],
                        rgb_frame->data[0], rgb_frame->linesize[0],
                        showWidth,showHeight);

                //unlock and post the buffer to the screen
                ANativeWindow_unlockAndPost(nativeWindow);
                LOGI("decoding frame %d: end",framecount++);
                //  usleep(1000 * 10);
            }

        }
        av_free_packet(packet);
    }


    ANativeWindow_release(nativeWindow);
    av_frame_free(&yuv_frame);
    av_frame_free(&rgb_frame);
    av_free(packet);
    avcodec_close(pCodeCtx);
    avformat_close_input(&pFormatCtx);

    (*env)->ReleaseStringUTFChars(env,input_jstr,input_cstr);
}

The decoded yuv_frame must be converted to rgb_frame before it can be drawn on the native window. The conversion originally used libyuv's I420ToARGB, which turned out slower than the approach below, so it can be replaced with:

sws_scale(sws_ctx, (uint8_t const * const *) pFrame->data,
                        pFrame->linesize, 0, pCodecCtx->height,
                        pFrameRGBA->data, pFrameRGBA->linesize);
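Note that this snippet assumes a SwsContext and a destination frame that the article does not show being created. A minimal setup sketch using this article's pCodeCtx/showWidth/showHeight (sws_ctx and pFrameRGBA are names introduced here to match the snippet above) could look like:

//One-time setup before the decode loop (error checks omitted)
struct SwsContext *sws_ctx = sws_getContext(
        pCodeCtx->width, pCodeCtx->height, pCodeCtx->pix_fmt,  //source
        showWidth, showHeight, AV_PIX_FMT_RGBA,                //destination
        SWS_BILINEAR, NULL, NULL, NULL);
AVFrame *pFrameRGBA = av_frame_alloc();
//As in the article, avpicture_fill() can point pFrameRGBA directly at
//outBuffer.bits inside the lock/unlockAndPost pair, so sws_scale() writes
//straight into the window buffer.

//...run the decode loop, calling sws_scale() per decoded frame...

//Teardown after the loop
av_frame_free(&pFrameRGBA);
sws_freeContext(sws_ctx);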

The call below avoids copying rgb_frame's memory into outBuffer row by row with memcpy, since both use the same block of memory:

avpicture_fill((AVPicture *)rgb_frame, outBuffer.bits, AV_PIX_FMT_RGBA, showWidth, showHeight);
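For comparison, without sharing the memory you would convert into a separately allocated rgb_frame and then copy it into the window buffer row by row, roughly as sketched below (dst, src and h are local names introduced here). A correct copy must honor outBuffer.stride, which can be wider than the video:

//Row-by-row copy into the window buffer (requires <string.h>).
//outBuffer.stride is counted in pixels; RGBA_8888 is 4 bytes per pixel.
uint8_t *dst = (uint8_t *) outBuffer.bits;
uint8_t *src = rgb_frame->data[0];
int h;
for (h = 0; h < showHeight; h++) {
    memcpy(dst + h * outBuffer.stride * 4,
           src + h * rgb_frame->linesize[0],
           showWidth * 4);
}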

All that is left is to finish the .mk files, then build and release.

Demo download
