Building and Installing FFmpeg, and a Simple SDL Video Player

Installing FFmpeg

1. Getting the source

First, grab the latest FFmpeg source from the address below. If the git command is not available, install git first:

apt-get install git

git clone https://git.ffmpeg.org/ffmpeg.git

Once the source has been downloaded, enter the ffmpeg directory and run the following script as ./mk.sh:

#!/bin/bash

./configure \
        --prefix=/usr/local/ffmpeg \
        --enable-gpl   \
        --enable-nonfree  \
        --enable-libfdk-aac \
        --enable-libx264 \
        --enable-libx265 \
        --enable-filter=delogo \
        --enable-debug \
        --disable-optimizations \
        --enable-libspeex \
        --enable-shared \
        --enable-pthreads \
        --enable-version3 \
        --enable-hardcoded-tables \
        --cc=gcc \
        --host-cflags= \
        --host-ldflags=

#--enable-videotoolbox \
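If mk.sh was just created as a new file, it may need to be marked executable before it can be run:

chmod +x mk.sh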

Running it, configure aborts with an error because the nasm/yasm assembler is missing.

Install nasm

Run apt-get install nasm, then re-run ./mk.sh.

This time configure fails again, now because libfdk_aac cannot be found.

Install fdk-aac

1. Download the source: git clone https://git.code.sf.net/p/opencore-amr/fdk-aac opencore-amr-fdk-aac

2. When the download finishes, enter the opencore-amr-fdk-aac directory and run ./autogen.sh.

It fails with the following error:

./autogen.sh: 2: ./autogen.sh: autoreconf: not found

3. Install autoconf:

apt-get install autoconf

4. Re-run ./autogen.sh.

5. Install libtool with apt install libtool-bin and run ./autogen.sh once more; once a Makefile.in file has been generated in the directory, this step is done.

6. Run ./configure --prefix=/usr/local/fdk_aac to configure the build; this step generates the Makefile.

7. Run make; make install and wait for the installation to finish.

8. Append the following two lines to the end of ~/.bashrc, then run source ~/.bashrc:

export LD_LIBRARY_PATH="/usr/local/fdk_aac/lib:$LD_LIBRARY_PATH"
export PKG_CONFIG_PATH="/usr/local/fdk_aac/lib/pkgconfig:$PKG_CONFIG_PATH"

9. Re-run ./mk.sh; configure now fails because the speex library is missing.

Install the speex library

1. Download the source

Link: https://pan.baidu.com/s/1260DTRhQMrxcdpxcBor0iA

Extraction code: 195x

2. After downloading, extract the archive and enter the speex-1.2.0 directory.

3. Build and install:

./configure --prefix=/usr/local/speex

make ;make install

4. As in step 8 above, add the library path and pkg-config path to .bashrc, as sketched below.
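The two lines should look roughly like this, assuming the --prefix=/usr/local/speex used above (adjust the paths if your prefix differs), followed by source ~/.bashrc:

export LD_LIBRARY_PATH="/usr/local/speex/lib:$LD_LIBRARY_PATH"
export PKG_CONFIG_PATH="/usr/local/speex/lib/pkgconfig:$PKG_CONFIG_PATH"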

5. Re-run ./mk.sh; configure now fails because libx264 is missing.

Install the x264 library

1. Download the source:

git clone https://github.com/mirror/x264.git

2. After downloading, enter the directory, then build and install:

./configure --prefix=/usr/local/x264 --enable-shared --disable-asm

make;make install

3. After the installation completes, add the paths to .bashrc in the same way and source it; a sketch follows.
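Assuming the --prefix=/usr/local/x264 used above:

export LD_LIBRARY_PATH="/usr/local/x264/lib:$LD_LIBRARY_PATH"
export PKG_CONFIG_PATH="/usr/local/x264/lib/pkgconfig:$PKG_CONFIG_PATH"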

4. Re-run ./mk.sh; configure now fails because libx265 is missing.

Install x265

1. Download the source:

hg clone https://bitbucket.org/multicoreware/x265

hg, cmake, and ccmake need to be installed first:

apt install mercurial

apt-get install cmake

apt install cmake-curses-gui

2. After downloading, build and install.

Enter the x265/build/linux directory and run ./make-Makefiles.bash.

A ccmake configuration screen pops up; use the arrow keys to move to the install-prefix entry (the third row), change /usr/local to /usr/local/x265, and press Enter.

Press c to configure, then g to generate and exit.

make ;make install

3. After the installation completes, add the paths to .bashrc in the same way and source it; a sketch follows.
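Assuming the /usr/local/x265 prefix set in ccmake above:

export LD_LIBRARY_PATH="/usr/local/x265/lib:$LD_LIBRARY_PATH"
export PKG_CONFIG_PATH="/usr/local/x265/lib/pkgconfig:$PKG_CONFIG_PATH"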

At this point ./mk.sh completes the configuration successfully, but make will not build ffplay because the SDL library is still missing.

Install the SDL library

1. Before installing SDL, install some libraries that SDL depends on:

apt-get install libx11-dev

apt-get install xorg-dev

apt-get install libasound2-dev

2. Once those are installed, download SDL:

Link: https://pan.baidu.com/s/16jhkWuBVyH_j2SGqGHpdnQ

Extraction code: ssxf

3. After downloading, build and install:

./autogen.sh
./configure --prefix=/usr/local/SDL2

make ;make install

4. After the installation completes, add the paths to .bashrc in the same way and source it; a sketch follows.
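Assuming the --prefix=/usr/local/SDL2 used above:

export LD_LIBRARY_PATH="/usr/local/SDL2/lib:$LD_LIBRARY_PATH"
export PKG_CONFIG_PATH="/usr/local/SDL2/lib/pkgconfig:$PKG_CONFIG_PATH"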

5. Re-run ./mk.sh; ffplay now appears in the list of programs to be built.

6. Build FFmpeg:

make; make install

7. After the build finishes, add the following to .bashrc:

export LD_LIBRARY_PATH="/usr/local/ffmpeg/lib:$LD_LIBRARY_PATH"

export PATH="/usr/local/ffmpeg/bin:$PATH"

Test the installed FFmpeg:

ffmpeg -version
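To double-check that the external libraries were compiled in, the build configuration can also be inspected; the grep pattern below is just an example and the output depends on your build:

ffmpeg -buildconf | grep -E "fdk-aac|x264|x265|speex"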

Test ffplay. Download the test file:

Link: https://pan.baidu.com/s/1aO68S5yK2Uu9lvtdqT15eA

Extraction code: ls0a

ffplay sintel.h264

Note: this step must be run inside the virtual machine's own desktop session (not over a remote connection), otherwise it fails with: No available video device

Playback succeeds!


2. An example player built on FFmpeg

Source code

This program is based on Lei Xiaohua's code; the original article is linked below, with due credit to him.

最简单的基于FFMPEG+SDL的视频播放器 ver2 (采用SDL2.0)_雷霄骅的博客-CSDN博客_ffmpeg sdl

His code does not compile against the latest FFmpeg + SDL2, so it has been adapted slightly; the code is as follows:

/**
 * 最简单的基于FFmpeg的视频播放器 2
 * Simplest FFmpeg Player 2
 *
 * 雷霄骅 Lei Xiaohua
 * leixiaohua1020@126.com
 * 中国传媒大学/数字电视技术
 * Communication University of China / Digital TV Technology
 * http://blog.csdn.net/leixiaohua1020
 *
 * 第2版使用SDL2.0取代了第一版中的SDL1.2
 * Version 2 use SDL 2.0 instead of SDL 1.2 in version 1.
 *
 * 本程序实现了视频文件的解码和显示(支持HEVC,H.264,MPEG2等)。
 * 是最简单的FFmpeg视频解码方面的教程。
 * 通过学习本例子可以了解FFmpeg的解码流程。
 * This software is a simplest video player based on FFmpeg.
 * Suitable for beginner of FFmpeg.
 *
 */



#include <stdio.h>

#define __STDC_CONSTANT_MACROS

#ifdef _WIN32
//Windows
extern "C"
{
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libswscale/swscale.h"
#include "libavutil/imgutils.h"
#include "SDL2/SDL.h"
};
#else
//Linux...
#ifdef __cplusplus
extern "C"
{
#endif
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libswscale/swscale.h"
#include "SDL2/SDL.h"
#include "libavutil/imgutils.h"
#ifdef __cplusplus
};
#endif
#endif

//Output YUV420P data as a file 
#define OUTPUT_YUV420P 0
#define SHOW_PICTURE 1

int main(int argc, char* argv[])
{
    AVFormatContext *pFormatCtx;
    int             i, videoindex;
    AVCodecParameters  *pCodecCtx_s;
    AVCodecContext  *pCodecCtx = NULL;
    const AVCodec         *pCodec;
    AVFrame *pFrame,*pFrameYUV;
    unsigned char *out_buffer = NULL;
    AVPacket *packet = NULL;
    //int y_size;
    int ret;
    struct SwsContext *img_convert_ctx;

    //char filepath[]="bigbuckbunny_480x272.h265";
    char *filepath = argv[1];
    //SDL---------------------------
    int screen_w=0,screen_h=0;
    SDL_Window *screen; 
    SDL_Renderer* sdlRenderer;
    SDL_Texture* sdlTexture;
    SDL_Rect sdlRect;

    //FILE *fp_yuv;

    //av_register_all();
    avformat_network_init();
    pFormatCtx = avformat_alloc_context();

    if(avformat_open_input(&pFormatCtx,filepath,NULL,NULL)!=0)
    {
        printf("Couldn't open input stream.\n");
        return -1;
    }
    if(avformat_find_stream_info(pFormatCtx,NULL)<0)
    {
        printf("Couldn't find stream information.\n");
        return -1;
    }

    videoindex=-1;
    for(i=0; i<pFormatCtx->nb_streams; i++) 
    {
        if(pFormatCtx->streams[i]->codecpar->codec_type==AVMEDIA_TYPE_VIDEO)
        {
            videoindex=i;
            break;
        }
    }
    if(videoindex==-1)
    {
        printf("Didn't find a video stream.\n");
        return -1;
    }

    pCodecCtx_s=pFormatCtx->streams[videoindex]->codecpar;
    //pick the right decoder from the codec id
    pCodec=avcodec_find_decoder(pCodecCtx_s->codec_id);
    if(pCodec==NULL)
    {
        printf("Codec not found.\n");
        return -1;
    }
    // set up the decoder
    pCodecCtx = avcodec_alloc_context3(pCodec);
    //copy the parameters from the AVStream straight into the AVCodecContext
    avcodec_parameters_to_context(pCodecCtx, pFormatCtx->streams[videoindex]->codecpar);
    //open the codec, initializing the AVCodecContext
    if(avcodec_open2(pCodecCtx, pCodec,NULL)<0)
    {
        printf("Could not open codec.\n");
        return -1;
    }

    pFrame=av_frame_alloc();
    pFrameYUV=av_frame_alloc();
    out_buffer=(unsigned char *)av_malloc(av_image_get_buffer_size(AV_PIX_FMT_YUV420P,  pCodecCtx->width, pCodecCtx->height,1));
    if(out_buffer == NULL)
    {
        printf("av_malloc error\n");
        return -1;
    }
    //attach the allocated buffer to pFrameYUV's data/linesize arrays
    av_image_fill_arrays(pFrameYUV->data, pFrameYUV->linesize,out_buffer,
            AV_PIX_FMT_YUV420P,pCodecCtx->width, pCodecCtx->height,1);

    packet=av_packet_alloc();
    //Output Info-----------------------------
    printf("--------------- File Information ----------------\n");
    av_dump_format(pFormatCtx,0,filepath,0);
    printf("-------------------------------------------------\n");
    img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height,pCodecCtx->pix_fmt, 
            pCodecCtx->width, pCodecCtx->height, AV_PIX_FMT_YUV420P, SWS_BICUBIC, NULL, NULL, NULL); 

#if OUTPUT_YUV420P 
    fp_yuv=fopen("output.yuv","wb+");  
#endif  

    if(SDL_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER)) {  
        printf( "Could not initialize SDL - %s\n", SDL_GetError()); 
        return -1;
    } 

    screen_w = pCodecCtx->width;
    screen_h = pCodecCtx->height;
    printf("w:%d  h:%d\n",screen_w,screen_h);
    //SDL 2.0 Support for multiple windows
    screen = SDL_CreateWindow("Simplest ffmpeg player's Window", SDL_WINDOWPOS_UNDEFINED, SDL_WINDOWPOS_UNDEFINED,
            screen_w, screen_h,
            SDL_WINDOW_OPENGL);

    if(!screen) 
    {  
        printf("SDL: could not create window - exiting:%s\n",SDL_GetError());  
        return -1;
    }

    sdlRenderer = SDL_CreateRenderer(screen, -1, 0);  
    //IYUV: Y + U + V  (3 planes)
    //YV12: Y + V + U  (3 planes)
    sdlTexture = SDL_CreateTexture(sdlRenderer, SDL_PIXELFORMAT_IYUV, SDL_TEXTUREACCESS_STREAMING,pCodecCtx->width,pCodecCtx->height);  

    sdlRect.x=0;
    sdlRect.y=0;
    sdlRect.w=screen_w;
    sdlRect.h=screen_h;

    //SDL End----------------------
    while(av_read_frame(pFormatCtx, packet)>=0)
    {
        if(packet->stream_index==videoindex)
        {
            ret = avcodec_send_packet(pCodecCtx, packet);
            av_packet_unref(packet);
            if(ret != 0)
            {
                printf("Decode Error.\n");
                continue;
            }
            ret = avcodec_receive_frame(pCodecCtx, pFrame);
            if(ret!=0)
            {
                printf("avcodec_receive_frame failed ! ret = %d\n",ret);
                continue;
            }
            if(SHOW_PICTURE)
            {
                sws_scale(img_convert_ctx, (const unsigned char* const*)pFrame->data, pFrame->linesize, 0, pCodecCtx->height, 
                        pFrameYUV->data, pFrameYUV->linesize);

#if OUTPUT_YUV420P
                y_size=pCodecCtx->width*pCodecCtx->height;  
                fwrite(pFrameYUV->data[0],1,y_size,fp_yuv);    //Y 
                fwrite(pFrameYUV->data[1],1,y_size/4,fp_yuv);  //U
                fwrite(pFrameYUV->data[2],1,y_size/4,fp_yuv);  //V
#endif
                //SDL---------------------------
#if 0
                SDL_UpdateTexture( sdlTexture, NULL, pFrameYUV->data[0], pFrameYUV->linesize[0] );  
#else
                SDL_UpdateYUVTexture(sdlTexture, &sdlRect,
                        pFrameYUV->data[0], pFrameYUV->linesize[0],
                        pFrameYUV->data[1], pFrameYUV->linesize[1],
                        pFrameYUV->data[2], pFrameYUV->linesize[2]);
#endif  

                SDL_RenderClear( sdlRenderer );  
                SDL_RenderCopy( sdlRenderer, sdlTexture,  NULL, &sdlRect);  
                SDL_RenderPresent( sdlRenderer );  
                //SDL End-----------------------
                //Delay 40ms
                SDL_Delay(40);
            }
        }
    }
    //flush decoder
    //FIX: Flush Frames remained in Codec
#if 0
    while (1) 
    {
        ret = avcodec_send_packet(pCodecCtx,packet);
        av_packet_unref(packet);
        if (ret != 0)
        {
            continue;
        }
        if (!SHOW_PICTURE)
            break;
        ret = avcodec_receive_frame(pCodecCtx, pFrame);
        if(ret!=0)
        {
            printf("avcodec_receive_frame failed !");
        }
        sws_scale(img_convert_ctx, (const unsigned char* const*)pFrame->data, pFrame->linesize, 0, pCodecCtx->height, 
                pFrameYUV->data, pFrameYUV->linesize);
#if OUTPUT_YUV420P
        int y_size=pCodecCtx->width*pCodecCtx->height;  
        fwrite(pFrameYUV->data[0],1,y_size,fp_yuv);    //Y 
        fwrite(pFrameYUV->data[1],1,y_size/4,fp_yuv);  //U
        fwrite(pFrameYUV->data[2],1,y_size/4,fp_yuv);  //V
#endif
        //SDL---------------------------
        SDL_UpdateTexture( sdlTexture, &sdlRect, pFrameYUV->data[0], pFrameYUV->linesize[0] );  
        SDL_RenderClear( sdlRenderer );  
        SDL_RenderCopy( sdlRenderer, sdlTexture,  NULL, &sdlRect);  
        SDL_RenderPresent( sdlRenderer );  
        //SDL End-----------------------
        //Delay 40ms
        SDL_Delay(40);
    }
#endif

    sws_freeContext(img_convert_ctx);

#if OUTPUT_YUV420P 
    fclose(fp_yuv);
#endif 

    SDL_Quit();

    av_frame_free(&pFrameYUV);
    av_frame_free(&pFrame);
    av_packet_free(&packet);
    avcodec_free_context(&pCodecCtx);
    avformat_close_input(&pFormatCtx);

    return 0;
}

Makefile

CC       = gcc
CFLAGS   = -Wall -O -g
CXXFLAGS =
INCLUDE  = -I/usr/local/ffmpeg/include -I/usr/local/SDL2/include
TARGET   = play
LIBVAR   = -lavutil -lavformat -lavcodec -lSDL2 -lavfilter -lswscale
LIBPATH  = -L/usr/local/ffmpeg/lib -L/usr/local/SDL2/lib
SRC      = my_play.c

obj=$(patsubst %.c,%.o,$(SRC))

all:$(TARGET)


$(TARGET):$(obj)
	$(CC) $(SRC) -o  $@  $(INCLUDE) $(LIBVAR) $(LIBPATH) $(CFLAGS)

%.o:%.c
	$(CC) $(INCLUDE) $(LIBVAR) $(LIBPATH) $(CFLAGS)  -c $<


clean:
	rm -f *.o
	rm -f $(TARGET)

After make succeeds, test it (again, this must be run inside the virtual machine's desktop session):

./play bigbuckbunny_480x272.h265

Playback works! There is one problem, though: towards the end of playback the brightness of the source changes and the picture turns grayish. The cause has not been found yet.

2.1 Improved version: press space to pause

The code below fixes the problem above and also adds pause/resume:

#include <stdio.h>

#define __STDC_CONSTANT_MACROS

#ifdef __cplusplus
extern "C"
{
#endif
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libswscale/swscale.h"
#include "SDL2/SDL.h"
#include "libavutil/imgutils.h"
#ifdef __cplusplus
};
#endif

#define OUTPUT_YUV420P 0
#define SHOW_PICTURE 1


#define SFM_REFRESH_EVENT  (SDL_USEREVENT + 1)
 
#define SFM_BREAK_EVENT  (SDL_USEREVENT + 2)

int thread_exit=0;
int thread_pause=0;
 
int sfp_refresh_thread(void *opaque){
    thread_exit=0;
    thread_pause=0;

    while (!thread_exit) 
    {
        if(!thread_pause)
        {
            SDL_Event event;
            event.type = SFM_REFRESH_EVENT;
            SDL_PushEvent(&event);
        }
        SDL_Delay(40);
    }
    thread_exit=0;
    thread_pause=0;
    //Break
    SDL_Event event;
    event.type = SFM_BREAK_EVENT;
    SDL_PushEvent(&event);

    return 0;
}

int main(int argc, char* argv[])
{
    AVFormatContext *pFormatCtx;
    int             i, videoindex;
    AVCodecParameters  *pCodecCtx_s;
    AVCodecContext  *pCodecCtx = NULL;
    const AVCodec         *pCodec;
    AVFrame *pFrame,*pFrameYUV;
    unsigned char *out_buffer = NULL;
    AVPacket *packet = NULL;
    //int y_size;
    int ret;
    struct SwsContext *img_convert_ctx;

    //char filepath[]="bigbuckbunny_480x272.h265";
    char *filepath = argv[1];
    //SDL---------------------------
    int screen_w=0,screen_h=0;
    SDL_Window *screen; 
    SDL_Renderer* sdlRenderer;
    SDL_Texture* sdlTexture;
    SDL_Rect sdlRect;
    //SDL_Thread *video_tid = NULL;
    SDL_Event event;


    //FILE *fp_yuv;

    //av_register_all();
    avformat_network_init();
    pFormatCtx = avformat_alloc_context();

    if(avformat_open_input(&pFormatCtx,filepath,NULL,NULL)!=0)
    {
        printf("Couldn't open input stream.\n");
        return -1;
    }
    if(avformat_find_stream_info(pFormatCtx,NULL)<0)
    {
        printf("Couldn't find stream information.\n");
        return -1;
    }

    videoindex=-1;
    for(i=0; i<pFormatCtx->nb_streams; i++) 
    {
        if(pFormatCtx->streams[i]->codecpar->codec_type==AVMEDIA_TYPE_VIDEO)
        {
            videoindex=i;
            break;
        }
    }
    if(videoindex==-1)
    {
        printf("Didn't find a video stream.\n");
        return -1;
    }

    pCodecCtx_s=pFormatCtx->streams[videoindex]->codecpar;
    //pick the right decoder from the codec id
    pCodec=avcodec_find_decoder(pCodecCtx_s->codec_id);
    if(pCodec==NULL)
    {
        printf("Codec not found.\n");
        return -1;
    }
    // set up the decoder
    pCodecCtx = avcodec_alloc_context3(pCodec);
    //copy the parameters from the AVStream straight into the AVCodecContext
    avcodec_parameters_to_context(pCodecCtx, pFormatCtx->streams[videoindex]->codecpar);
    //open the codec, initializing the AVCodecContext
    if(avcodec_open2(pCodecCtx, pCodec,NULL)<0)
    {
        printf("Could not open codec.\n");
        return -1;
    }

    pFrame=av_frame_alloc();
    pFrameYUV=av_frame_alloc();
    out_buffer=(unsigned char *)av_malloc(av_image_get_buffer_size(AV_PIX_FMT_YUV420P,  pCodecCtx->width, pCodecCtx->height,1));
    if(out_buffer == NULL)
    {
        printf("av_malloc error\n");
        return -1;
    }
    //attach the allocated buffer to pFrameYUV's data/linesize arrays
    av_image_fill_arrays(pFrameYUV->data, pFrameYUV->linesize,out_buffer,
            AV_PIX_FMT_YUV420P,pCodecCtx->width, pCodecCtx->height,1);

    packet=av_packet_alloc();

    //Output Info-----------------------------
    printf("--------------- File Information ----------------\n");
    av_dump_format(pFormatCtx,0,filepath,0);
    printf("-------------------------------------------------\n");
    img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height,pCodecCtx->pix_fmt, 
            pCodecCtx->width, pCodecCtx->height, AV_PIX_FMT_YUV420P, SWS_BICUBIC, NULL, NULL, NULL); 

#if OUTPUT_YUV420P 
    fp_yuv=fopen("output.yuv","wb+");  
#endif  

    if(SDL_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER)) {  
        printf( "Could not initialize SDL - %s\n", SDL_GetError()); 
        return -1;
    } 

    screen_w = pCodecCtx->width;
    screen_h = pCodecCtx->height;
    printf("w:%d  h:%d\n",screen_w,screen_h);
    //SDL 2.0 Support for multiple windows
    //create a window
    screen = SDL_CreateWindow("Simplest ffmpeg player's Window", SDL_WINDOWPOS_UNDEFINED, SDL_WINDOWPOS_UNDEFINED,
            screen_w, screen_h,
            SDL_WINDOW_OPENGL);

    if(!screen) 
    {  
        printf("SDL: could not create window - exiting:%s\n",SDL_GetError());  
        return -1;
    }

    sdlRenderer = SDL_CreateRenderer(screen, -1, 0);  
    //IYUV: Y + U + V  (3 planes)
    //YV12: Y + V + U  (3 planes)
    sdlTexture = SDL_CreateTexture(sdlRenderer, SDL_PIXELFORMAT_IYUV, SDL_TEXTUREACCESS_STREAMING,pCodecCtx->width,pCodecCtx->height);  

    sdlRect.x=0;
    sdlRect.y=0;
    sdlRect.w=screen_w;
    sdlRect.h=screen_h;

    //video_tid = SDL_CreateThread(sfp_refresh_thread,NULL,NULL);
    SDL_CreateThread(sfp_refresh_thread,NULL,NULL);

    //SDL End----------------------
    for(;;)
    {
        SDL_WaitEvent(&event);
        if(event.type == SFM_REFRESH_EVENT)
        {
            while(1)
            {
                if(av_read_frame(pFormatCtx, packet) < 0)
                    thread_exit =1;
                if(packet->stream_index==videoindex)
                    break;
            }
            ret = avcodec_send_packet(pCodecCtx, packet);
            av_packet_unref(packet);
            if(ret != 0)
            {
                printf("Decode Error.\n");
                continue;
            }
            ret = avcodec_receive_frame(pCodecCtx, pFrame);
            if(ret!=0)
            {
                printf("avcodec_receive_frame failed ! ret = %d\n",ret);
                continue;
            }
            if(SHOW_PICTURE)
            {
                sws_scale(img_convert_ctx, (const unsigned char* const*)pFrame->data, pFrame->linesize, 0, pCodecCtx->height, 
                        pFrameYUV->data, pFrameYUV->linesize);

#if OUTPUT_YUV420P
                y_size=pCodecCtx->width*pCodecCtx->height;  
                fwrite(pFrameYUV->data[0],1,y_size,fp_yuv);    //Y 
                fwrite(pFrameYUV->data[1],1,y_size/4,fp_yuv);  //U
                fwrite(pFrameYUV->data[2],1,y_size/4,fp_yuv);  //V
#endif
                //SDL---------------------------
#if 0
                SDL_UpdateTexture( sdlTexture, NULL, pFrameYUV->data[0], pFrameYUV->linesize[0] );  
#else
                SDL_UpdateYUVTexture(sdlTexture, &sdlRect,
                        pFrameYUV->data[0], pFrameYUV->linesize[0],
                        pFrameYUV->data[1], pFrameYUV->linesize[1],
                        pFrameYUV->data[2], pFrameYUV->linesize[2]);
#endif  

                SDL_RenderClear( sdlRenderer );  
                SDL_RenderCopy( sdlRenderer, sdlTexture,  NULL, &sdlRect);  
                SDL_RenderPresent( sdlRenderer );  
            }
        }
        else if(event.type==SDL_KEYDOWN)
        {
            if(event.key.keysym.sym==SDLK_SPACE)
                thread_pause=!thread_pause;
        }
        else if(event.type==SDL_QUIT)
            thread_exit=1;
        else if(event.type==SFM_BREAK_EVENT)
            break;
    }

    sws_freeContext(img_convert_ctx);

#if OUTPUT_YUV420P 
    fclose(fp_yuv);
#endif 

    SDL_Quit();

    av_frame_free(&pFrameYUV);
    av_frame_free(&pFrame);
    av_packet_free(&packet);
    avcodec_free_context(&pCodecCtx);
    avformat_close_input(&pFormatCtx);

    return 0;
}
