Rendering Video Natively with FFmpeg (ANativeWindow)


activity_main.xml

<?xml version="1.0" encoding="utf-8"?>
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
    xmlns:tools="http://schemas.android.com/tools"
    android:layout_width="match_parent"
    android:layout_height="match_parent"
    tools:context=".MainActivity">

    <com.ican.ffmpegdemo1.VideoView
        android:id="@+id/surface"
        android:layout_width="match_parent"
        android:layout_height="match_parent" />
    <LinearLayout
        android:layout_width="wrap_content"
        android:layout_height="wrap_content"
        android:orientation="horizontal">
        <Spinner
            android:id="@+id/sp_video"
            android:layout_width="wrap_content"
            android:layout_height="wrap_content"/>
        <Button
            android:layout_width="wrap_content"
            android:layout_height="wrap_content"
            android:text="Start"
            android:onClick="mPlay"/>
    </LinearLayout>

</RelativeLayout>

MainActivity

public class MainActivity extends AppCompatActivity {
    VideoView videoView;
    Spinner sp_video;
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        videoView = findViewById(R.id.surface);
        sp_video = findViewById(R.id.sp_video);
        String[] videoArray = getResources().getStringArray(R.array.video_list);
        ArrayAdapter<String> adapter = new ArrayAdapter<>(this, android.R.layout.simple_list_item_1, android.R.id.text1, videoArray);
        sp_video.setAdapter(adapter);
    }

    public void mPlay(View view) {
        String video = sp_video.getSelectedItem().toString();
        String input = new File(Environment.getExternalStorageDirectory(),video).getAbsolutePath();
        videoView.player(input);
    }

}
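mPlay() builds a path under Environment.getExternalStorageDirectory(), so on Android 6.0+ the app also needs the READ_EXTERNAL_STORAGE permission granted at runtime (in addition to the <uses-permission> entry in the manifest). The original post does not show that part; the snippet below is only a minimal sketch, assuming AndroidX (with the old support library the helpers live in android.support.v4) and a hypothetical request code REQUEST_STORAGE.

// Minimal sketch (not from the original post): ask for READ_EXTERNAL_STORAGE before playing.
// Add to MainActivity; imports needed:
//   android.Manifest, android.content.pm.PackageManager, android.os.Build,
//   androidx.core.app.ActivityCompat, androidx.core.content.ContextCompat
private static final int REQUEST_STORAGE = 1;   // hypothetical request code

private void ensureStoragePermission() {
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M
            && ContextCompat.checkSelfPermission(this, Manifest.permission.READ_EXTERNAL_STORAGE)
               != PackageManager.PERMISSION_GRANTED) {
        ActivityCompat.requestPermissions(this,
                new String[]{Manifest.permission.READ_EXTERNAL_STORAGE}, REQUEST_STORAGE);
    }
}

Calling ensureStoragePermission() from onCreate() is enough for a demo; a real app would also override onRequestPermissionsResult().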

VideoView

public class VideoView extends SurfaceView {
    static{
        System.loadLibrary("avcodec-56");
        System.loadLibrary("avdevice-56");
        System.loadLibrary("avfilter-5");
        System.loadLibrary("avformat-56");
        System.loadLibrary("avutil-54");
        System.loadLibrary("postproc-53");
        System.loadLibrary("swresample-1");
        System.loadLibrary("swscale-3");
        System.loadLibrary("native-lib");
    }
    public VideoView(Context context) {
        super(context);
        init();
    }

    public VideoView(Context context, AttributeSet attrs) {
        super(context, attrs);
        init();
    }

    public VideoView(Context context, AttributeSet attrs, int defStyleAttr) {
        super(context, attrs, defStyleAttr);
        init();
    }

    @RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
    public VideoView(Context context, AttributeSet attrs, int defStyleAttr, int defStyleRes) {
        super(context, attrs, defStyleAttr, defStyleRes);
        init();
    }

    private void init() {
        // Ask for an RGBA_8888 surface so it matches the RGBA pixels produced by the native code
        SurfaceHolder holder = getHolder();
        holder.setFormat(PixelFormat.RGBA_8888);
    }

    public void player(final String input){
        new Thread(new Runnable() {
            @Override
            public void run() {
                render(input,VideoView.this.getHolder().getSurface());
            }
        }).start();
    }

    public native void render(String input, Surface surface);

}
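player() hands the Surface to native code on a worker thread as soon as the button is clicked, but nothing guarantees the surface has actually been created at that point. Below is a minimal sketch (an assumption, not part of the original code) that tracks surface availability with a SurfaceHolder.Callback; the surfaceReady flag is hypothetical.

// Minimal sketch (assumption): extend init() so VideoView knows whether its surface exists.
private volatile boolean surfaceReady = false;   // hypothetical flag

private void init() {
    SurfaceHolder holder = getHolder();
    holder.setFormat(PixelFormat.RGBA_8888);
    holder.addCallback(new SurfaceHolder.Callback() {
        @Override
        public void surfaceCreated(SurfaceHolder holder) {
            surfaceReady = true;
        }

        @Override
        public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
            // nothing to do here; the native code re-reads the geometry every frame
        }

        @Override
        public void surfaceDestroyed(SurfaceHolder holder) {
            surfaceReady = false;
        }
    });
}

player() could then simply return (or wait) while surfaceReady is false instead of handing a not-yet-created surface to render().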

native-lib.cpp

#include <jni.h>
#include <string>
#include <android/log.h>
extern "C" {
// Codec (decoding/encoding)
#include "libavcodec/avcodec.h"
// Container format (demuxing) handling
#include "libavformat/avformat.h"
// Pixel format conversion / scaling
#include "libswscale/swscale.h"
#include <android/native_window_jni.h>
#include <unistd.h>
}
#define LOGI(FORMAT, ...) __android_log_print(ANDROID_LOG_INFO, "twy", FORMAT, ##__VA_ARGS__)
#define LOGE(FORMAT, ...) __android_log_print(ANDROID_LOG_ERROR, "twy", FORMAT, ##__VA_ARGS__)
extern "C" JNIEXPORT jstring JNICALL
Java_com_ican_ffmpegdemo1_MainActivity_stringFromJNI(
        JNIEnv *env,
        jobject /* this */) {
    std::string hello = "Hello from C++";
    av_register_all();   // calling an FFmpeg function here mainly verifies that the libraries link correctly
    return env->NewStringUTF(hello.c_str());
}
extern "C" JNIEXPORT void JNICALL
Java_com_ican_ffmpegdemo1_VideoView_render(JNIEnv *env, jobject instance, jstring input_,
                                           jobject surface) {
    const char *input = env->GetStringUTFChars(input_, NULL);
    av_register_all();

    AVFormatContext *pFormatCtx = avformat_alloc_context();
    // The fourth parameter can take an AVDictionary of options (used both as input and output)
    if (avformat_open_input(&pFormatCtx, input, NULL, NULL) != 0) {
        LOGE("%s", "Failed to open the input video file");
        return;
    }
    // Retrieve stream information
    if (avformat_find_stream_info(pFormatCtx, NULL) < 0) {
        LOGE("%s", "Failed to retrieve stream information");
        return;
    }


    // Find the index of the video stream
    int video_stream_idx = -1;
    for (int i = 0; i < (int) pFormatCtx->nb_streams; ++i) {
        if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) {
            LOGE("Found video stream, index %d", i);
            video_stream_idx = i;
            break;
        }
    }
    if (video_stream_idx == -1) {
        LOGE("%s", "No video stream found");
        return;
    }

    // Get the codec context of the video stream
    AVCodecContext *pCodecCtx = pFormatCtx->streams[video_stream_idx]->codec;
    LOGE("Got codec context %p", pCodecCtx);
    // Find the decoder (encrypted/DRM content cannot be decoded this way)
    AVCodec *pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
    LOGE("Got decoder %p", pCodec);
    if (pCodec == NULL) {
        LOGE("%s", "Decoder not found");
        return;
    }
    if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
        LOGE("%s", "Failed to open the decoder");
        return;
    }
    AVPacket *packet = (AVPacket *) av_malloc(sizeof(AVPacket));
    // Decoded frame (YUV pixel data)
    AVFrame *frame = av_frame_alloc();
    // Frame that will hold the RGBA conversion
    AVFrame *rgb_frame = av_frame_alloc();
    // Allocate the RGBA buffer; the size depends on pixel format and picture dimensions
    uint8_t *out_buffer = (uint8_t *) av_malloc(
            avpicture_get_size(AV_PIX_FMT_RGBA, pCodecCtx->width, pCodecCtx->height));
    LOGE("width %d, height %d", pCodecCtx->width, pCodecCtx->height);
    // Bind the buffer to rgb_frame with the RGBA pixel format
    int re = avpicture_fill((AVPicture *) rgb_frame, out_buffer, AV_PIX_FMT_RGBA,
                            pCodecCtx->width, pCodecCtx->height);
    LOGE("avpicture_fill returned %d", re);

    int length = 0;
    int got_frame;
    int frameCount = 0;
    // Converter from the decoder's pixel format (usually YUV420P) to RGBA at the same size
    SwsContext *swsContext = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt,
                                            pCodecCtx->width, pCodecCtx->height, AV_PIX_FMT_RGBA,
                                            SWS_BICUBIC, NULL, NULL, NULL);
    ANativeWindow *nativeWindow = ANativeWindow_fromSurface(env, surface);
    // Buffer description filled in by ANativeWindow_lock()
    ANativeWindow_Buffer outBuffer;
    while (av_read_frame(pFormatCtx, packet) >= 0) {
        if (packet->stream_index == video_stream_idx) {
            length = avcodec_decode_video2(pCodecCtx, frame, &got_frame, packet);
            LOGE("Decoded %d bytes", length);

            // got_frame != 0 means a complete frame is available
            if (got_frame) {
                // Configure the window buffer (size and RGBA format) before drawing
                ANativeWindow_setBuffersGeometry(nativeWindow, pCodecCtx->width, pCodecCtx->height,
                                                 WINDOW_FORMAT_RGBA_8888);
                ANativeWindow_lock(nativeWindow, &outBuffer, NULL);
                LOGI("Rendering frame %d", frameCount++);
                // Convert the decoded YUV frame to RGBA
                sws_scale(swsContext, (const uint8_t *const *) frame->data, frame->linesize, 0,
                          pCodecCtx->height, rgb_frame->data, rgb_frame->linesize);
                // rgb_frame now holds the RGBA pixels; copy them row by row into the window buffer
                uint8_t *dst = (uint8_t *) outBuffer.bits;
                // Window stride is in pixels, 4 bytes per RGBA pixel
                int destStride = outBuffer.stride * 4;
                // Source pixels and their stride in bytes
                uint8_t *src = (uint8_t *) rgb_frame->data[0];
                int srcStride = rgb_frame->linesize[0];
                for (int i = 0; i < pCodecCtx->height; ++i) {
                    memcpy(dst + i * destStride, src + i * srcStride, srcStride);
                }
                ANativeWindow_unlockAndPost(nativeWindow);
                // Crude pacing: ~16 ms per frame (about 60 fps), not real A/V sync
                usleep(1000 * 16);
            }
        }
        av_free_packet(packet);
    }
    // Release everything that was allocated
    ANativeWindow_release(nativeWindow);
    av_frame_free(&frame);
    av_frame_free(&rgb_frame);
    av_free(out_buffer);
    av_free(packet);
    sws_freeContext(swsContext);
    avcodec_close(pCodecCtx);
    avformat_close_input(&pFormatCtx);
    env->ReleaseStringUTFChars(input_, input);
}

 
