I've looked at a number of examples from well-known experts online, and, how shall I put it, pile after pile of code dumped straight into main() is painful to read. Then I saw how nicely JUVRTMPClient is wrapped, and decided to do something about it: write a C++ wrapper class for FFMPEG.
The project described in this article has been shared at http://git.oschina.net/yangtf/cpp_ffmpeg
The class CFFMpeg wraps FFmpeg initialization, opening a stream and related operations. It provides:
OpenInput    open the input stream
FindAV       find the audio and video streams and obtain the corresponding decoders
Process      start processing the stream
CloseInput   close the input stream
Publishing a stream is not supported yet; the design above is meant for playing a stream.
A PacketListener interface is defined; its methods are called by CFFMpeg at the appropriate moments (a sketch of the interface follows this list):
onRecvPacket    a packet was received (not decoded yet)
onVideoFrame    a decoded video frame is available
onAudioFrame    a decoded audio frame is available
onVideoSize     size, pixel format and similar information obtained from the video decoder
onAudioParam    audio frame properties obtained from audio decoding (not implemented yet)
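The PacketListener header itself is not reproduced in this article. Purely as a reference, here is a minimal sketch of what it could look like, inferred from the callbacks listed above (the signatures of onVideoFrame and onVideoSize match MyListener below; onAudioParam is left out because it is not implemented yet):

#pragma once
extern "C"
{
#include "libavcodec/avcodec.h"   // AVPacket, AVFrame, AVPixelFormat
};
class PacketListener
{
public:
    virtual ~PacketListener(void) {}
    // a packet was received (not decoded yet)
    virtual void onRecvPacket(AVPacket* packet) {}
    // a decoded video frame is available
    virtual void onVideoFrame(AVFrame* frame) {}
    // a decoded audio frame is available
    virtual void onAudioFrame(AVFrame* frame) {}
    // size / pixel format reported by the video decoder
    virtual void onVideoSize(int width, int height, AVPixelFormat format) {}
};

The empty default bodies are deliberate: a derived class such as MyListener only needs to override the callbacks it actually cares about.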
Usage: derive a class from PacketListener and put whatever you want to do in it.
In the example below, that PacketListener subclass is MyListener.
#include "CFFMpeg.h"
#include <tchar.h>
#include "MyListener.h"
#define __STDC_CONSTANT_MACROS
int _tmain(int argc, _TCHAR* argv[]){
printf("ss");
MyListener listener;
CFFMpeg mpeg(&listener);
//mpeg.OpenInput("gdigrab","desktop");
mpeg.OpenInput(NULL,"rtmp://live.hkstv.hk.lxdns.com/live/hks live=1");
mpeg.FindAV();
mpeg.Process();
mpeg.CloseInput();
return 0;
}
#pragma once
#include "packetlistener.h"
#include "SDLVideo.h"
class MyListener :
    public PacketListener
{
public:
    MyListener(void);
    ~MyListener(void);
    virtual void onVideoFrame(AVFrame* frame);
    virtual void onVideoSize(int width, int height, AVPixelFormat format);
    CSDLVideo* win;   // SDL window used to render the decoded frames
};
#include "MyListener.h"
MyListener::MyListener(void)
{
win = new CSDLVideo();
}
MyListener::~MyListener(void)
{
}
void MyListener::onVideoFrame(AVFrame* frame){
printf("MyListener onVideoFrame");
win->ShowAVFrame(frame);
}
void MyListener::onVideoSize(int width, int height,AVPixelFormat format)
{
printf("MyListener onVideoSize");
win->OpenVideo(width,height,format);
}
The code for CFFMpeg is as follows:
#pragma once
#ifdef _WIN32
//Windows
extern "C"
{
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libswscale/swscale.h"
#include "libavdevice/avdevice.h"
#include "SDL/SDL.h"
};
#else
//Linux...
#ifdef __cplusplus
extern "C"
{
#endif
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libswscale/swscale.h>
#include <libavdevice/avdevice.h>
#include <SDL/SDL.h>
#ifdef __cplusplus
};
#endif
#endif
#include "PacketListener.h"
//Refresh Event
#define SFM_REFRESH_EVENT (SDL_USEREVENT + 1)
#define SFM_BREAK_EVENT (SDL_USEREVENT + 2)
class CFFMpeg
{
public:
    CFFMpeg(PacketListener* listener);
    ~CFFMpeg(void);
private:
    PacketListener* listener;        // our listener interface
    AVFormatContext *pFormatCtx;
    int videoindex;                  // index of the video stream in pFormatCtx->streams
    int audioindex;                  // index of the audio stream in pFormatCtx->streams
    AVCodecContext *pVideoCodecCtx;
    AVCodec *pVideoCodec;
    AVCodecContext *pAudioCodecCtx;
    AVCodec *pAudioCodec;
    SDL_Thread *video_tid;
    AVInputFormat *ifmt;
    AVFrame *pVideoFrame;
    AVFrame *pAudioFrame;
public:
    int OpenInput(char* format, char* fname);
    void FindAV(void);
    void Process();
private:
    void _ProcessVideoPacket(AVPacket* packet);
    void _ProcessAudioPacket(AVPacket* packet);
public:
    void CloseInput(void);
};
#include "CFFMpeg.h"
CFFMpeg::CFFMpeg(PacketListener* listener)
{
this->listener = listener;
av_register_all();
avformat_network_init();
pFormatCtx = avformat_alloc_context();
//Open File
//char filepath[]="src01_480x272_22.h265";
//avformat_open_input(&pFormatCtx,filepath,NULL,NULL)
//Register Device
avdevice_register_all();
//Windows
//初始化SDL
if(SDL_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER)) {
printf( "Could not initialize SDL - %s\n", SDL_GetError());
}
}
CFFMpeg::~CFFMpeg(void)
{
}
int CFFMpeg::OpenInput(char* format, char* fname)
{
    AVDictionary* options = NULL;
    //Set some options (see the standalone gdigrab sketch after this function)
    //grabbing frame rate
    //av_dict_set(&options,"framerate","5",0);
    //The distance from the left edge of the screen or desktop
    //av_dict_set(&options,"offset_x","20",0);
    //The distance from the top edge of the screen or desktop
    //av_dict_set(&options,"offset_y","40",0);
    //Video frame size. The default is to capture the full screen
    //av_dict_set(&options,"video_size","640x480",0);
    //Look up the requested input format (device), if any
    if(format != NULL){
        ifmt = av_find_input_format(format);
    }else{
        ifmt = NULL;
    }
    //Open the input; the result is stored in pFormatCtx
    if(avformat_open_input(&pFormatCtx,fname,ifmt,&options)!=0){
        printf("Couldn't open input stream.\n");
        return -1;
    }
    return 0;
}
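As a side note, if the commented-out options above were enabled for desktop grabbing, the same open step expressed with plain FFmpeg calls would look roughly like the sketch below. The option names come from the comments in OpenInput; everything else is only an illustration, not part of the shared project.

//Sketch: open the Windows desktop via gdigrab with a couple of options
AVDictionary* opts = NULL;
av_dict_set(&opts, "framerate", "5", 0);          //grabbing frame rate
av_dict_set(&opts, "video_size", "640x480", 0);   //capture area size
AVInputFormat* grab = av_find_input_format("gdigrab");
AVFormatContext* ctx = avformat_alloc_context();
if (avformat_open_input(&ctx, "desktop", grab, &opts) != 0) {
    printf("Couldn't open gdigrab input.\n");
}
av_dict_free(&opts);   //any options the demuxer did not consume are left in opts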
/**
 Find the stream indexes for audio and video and obtain the corresponding decoders.
*/
void CFFMpeg::FindAV(void)
{
    if(avformat_find_stream_info(pFormatCtx,NULL)<0)
    {
        printf("Couldn't find stream information.\n");
        return ;
    }
    videoindex = -1;
    audioindex = -1;
    //Walk all streams and find the video stream
    for(unsigned int i=0; i<pFormatCtx->nb_streams; i++){
        if(pFormatCtx->streams[i]->codec->codec_type==AVMEDIA_TYPE_VIDEO)
        {
            videoindex=i;
            break;
        }
    }
    //Walk all streams and find the audio stream
    for(unsigned int i=0; i<pFormatCtx->nb_streams; i++){
        if(pFormatCtx->streams[i]->codec->codec_type==AVMEDIA_TYPE_AUDIO)
        {
            audioindex=i;
            break;
        }
    }
    //If no video stream was found, report it
    if(videoindex==-1)
    {
        printf("Didn't find a video stream.\n");
        pVideoCodecCtx = NULL;
        pVideoCodec = NULL;
        //return;
    }else{
        //Remember the video decoder
        pVideoCodecCtx=pFormatCtx->streams[videoindex]->codec;
        pVideoCodec=avcodec_find_decoder(pVideoCodecCtx->codec_id);
        if(pVideoCodec==NULL)
        {
            printf("Video Codec not found.\n");
            //return;
        }else{
            printf("video codec %s %d \r\n",pVideoCodec->name,pVideoCodec->id);
            //Open the video decoder
            if(avcodec_open2(pVideoCodecCtx, pVideoCodec,NULL)<0)
            {
                printf("Could not open codec.\n");
            }else{
                listener->onVideoSize(pVideoCodecCtx->width,pVideoCodecCtx->height,pVideoCodecCtx->pix_fmt);
            }
        }
    }
    //If no audio stream was found, report it
    if(audioindex==-1)
    {
        printf("Didn't find an audio stream.\n");
        pAudioCodecCtx = NULL;
        pAudioCodec = NULL;
        //return;
    }else{
        //Remember the audio decoder
        pAudioCodecCtx=pFormatCtx->streams[audioindex]->codec;
        pAudioCodec=avcodec_find_decoder(pAudioCodecCtx->codec_id);
        if(pAudioCodec==NULL)
        {
            printf("Audio Codec not found.\n");
            //return;
        }else{
            printf("audio codec %s %d \r\n",pAudioCodec->name,pAudioCodec->id);
            //Open the audio decoder
            if(avcodec_open2(pAudioCodecCtx, pAudioCodec,NULL)<0)
            {
                printf("Could not open codec.\n");
            }
        }
    }
}
int thread_exit=0;
int sfp_refresh_thread(void *opaque)
{
    printf("sfp_refresh_thread called\r\n");
    thread_exit=0;
    while (!thread_exit) {
        //Post a refresh event roughly every 40 ms (about 25 fps)
        SDL_Event event;
        event.type = SFM_REFRESH_EVENT;
        printf("send %d\n",event.type);
        SDL_PushEvent(&event);
        SDL_Delay(40);
    }
    thread_exit=0;
    //Break
    SDL_Event event;
    event.type = SFM_BREAK_EVENT;
    SDL_PushEvent(&event);
    return 0;
}
void CFFMpeg::Process()
{
    AVPacket *packet=(AVPacket *)av_malloc(sizeof(AVPacket));
    pAudioFrame=av_frame_alloc();
    pVideoFrame=av_frame_alloc();
    //Refresh thread: paces the reading of packets via SFM_REFRESH_EVENT
    video_tid = SDL_CreateThread(sfp_refresh_thread,NULL);
    //Event Loop
    SDL_Event event;
    printf("SFM_REFRESH_EVENT = %d\r\n",SFM_REFRESH_EVENT);
    for (;;) {
        //Wait
        SDL_WaitEvent(&event);
        printf("event %d\n",event.type);
        if(event.type==SFM_REFRESH_EVENT){
            //------------------------------
            if(av_read_frame(pFormatCtx, packet)>=0){
                listener->onRecvPacket(packet);
                if(packet->stream_index==videoindex){
                    _ProcessVideoPacket(packet);
                }else if(packet->stream_index == audioindex){
                    _ProcessAudioPacket(packet);
                }
                av_free_packet(packet);
            }else{
                //End of input: ask the refresh thread to stop
                thread_exit=1;
            }
        }else if(event.type==SDL_QUIT){
            thread_exit=1;
        }else if(event.type==SFM_BREAK_EVENT){
            break;
        }
    }
}
void CFFMpeg::_ProcessVideoPacket(AVPacket* packet)
{
    int got_picture = -1;
    int ret = avcodec_decode_video2(pVideoCodecCtx, pVideoFrame, &got_picture, packet);
    if(ret < 0){
        printf("Decode Video Error.\n");
        return;
    }
    if(got_picture){
        listener->onVideoFrame(pVideoFrame);
    }
}
void CFFMpeg::_ProcessAudioPacket(AVPacket* packet)
{
    int got_frame = -1;
    int ret = avcodec_decode_audio4(pAudioCodecCtx, pAudioFrame, &got_frame, packet);
    if(ret < 0){
        printf("Decode Audio Error.\n");
        return;
    }
    if(got_frame){
        //Hand the decoded audio frame to the listener
        listener->onAudioFrame(pAudioFrame);
    }
}
void CFFMpeg::CloseInput(void)
{
    //av_free(pFrameYUV);
    if(pAudioCodecCtx!=NULL){
        avcodec_close(pAudioCodecCtx);
    }
    if(pVideoCodecCtx!=NULL){
        avcodec_close(pVideoCodecCtx);
    }
    avformat_close_input(&pFormatCtx);
}
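Note that the two AVFrames allocated in Process() are never released. A slightly fuller cleanup, sketched here under the assumption that pVideoFrame and pAudioFrame are initialized to NULL in the constructor, could look like this (not part of the shared project):

void CFFMpeg::CloseInput(void)
{
    //Sketch: also release the frames allocated in Process()
    if(pVideoFrame != NULL){
        av_frame_free(&pVideoFrame);
    }
    if(pAudioFrame != NULL){
        av_frame_free(&pAudioFrame);
    }
    if(pAudioCodecCtx != NULL){
        avcodec_close(pAudioCodecCtx);
    }
    if(pVideoCodecCtx != NULL){
        avcodec_close(pVideoCodecCtx);
    }
    avformat_close_input(&pFormatCtx);
}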
The SDL wrapper used for display is as follows:
#pragma once
#ifdef _WIN32
//Windows
extern "C"
{
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libswscale/swscale.h"
#include "libavdevice/avdevice.h"
#include "SDL/SDL.h"
};
#else
//Linux...
#ifdef __cplusplus
extern "C"
{
#endif
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libswscale/swscale.h>
#include <libavdevice/avdevice.h>
#include <SDL/SDL.h>
#ifdef __cplusplus
};
#endif
#endif
class CSDLVideo
{
public:
    CSDLVideo(void);
    ~CSDLVideo(void);
private:
    //Video information
    int width, height;
    AVPixelFormat pix_fmt;
    //Window information
    int screen_w, screen_h;
    SDL_Surface *screen;
    SDL_Overlay *bmp;
    //Converter
    struct SwsContext *img_convert_ctx;
    AVFrame* pFrameYUV;
    SDL_Rect rect;
public:
    int OpenVideo(int width, int height, AVPixelFormat pix_fmt);
    void ShowAVFrame(AVFrame* frame);
};
#include "SDLVideo.h"
CSDLVideo::CSDLVideo(void)
{
}
CSDLVideo::~CSDLVideo(void)
{
}
int CSDLVideo::OpenVideo(int width, int height,AVPixelFormat pix_fmt)
{
printf("Open Video Called");
if(SDL_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER)) {
printf( "Could not initialize SDL - %s\n", SDL_GetError());
return 1;
}
//设置显示模式
const SDL_VideoInfo *vi = SDL_GetVideoInfo();
//Half of the Desktop's width and height.
screen_w = vi->current_w/2;
screen_h = vi->current_h/2;
screen = SDL_SetVideoMode(screen_w, screen_h, 0,0);
if(!screen) {
printf("SDL: could not set video mode - exiting:%s\n",SDL_GetError());
return 1;
}
pFrameYUV=av_frame_alloc();
this->width= width;
this->height = height;
this->pix_fmt = pix_fmt;
bmp = SDL_CreateYUVOverlay(width, height,SDL_YV12_OVERLAY, screen);
printf("bmp seted \n");
rect.x = 0;
rect.y = 0;
rect.w = screen_w;
rect.h = screen_h;
img_convert_ctx = sws_getContext(width, height, pix_fmt, width, height, AV_PIX_FMT_YUV420P, SWS_BICUBIC, NULL, NULL, NULL);
SDL_WM_SetCaption("Simplest FFmpeg Grab Desktop",NULL);
return 0;
}
void CSDLVideo::ShowAVFrame(AVFrame* pFrame)
{
    SDL_LockYUVOverlay(bmp);
    //YV12 stores the planes as Y, V, U while YUV420P is Y, U, V, hence the swapped indexes
    pFrameYUV->data[0]=bmp->pixels[0];
    pFrameYUV->data[1]=bmp->pixels[2];
    pFrameYUV->data[2]=bmp->pixels[1];
    pFrameYUV->linesize[0]=bmp->pitches[0];
    pFrameYUV->linesize[1]=bmp->pitches[2];
    pFrameYUV->linesize[2]=bmp->pitches[1];
    sws_scale(img_convert_ctx, (const uint8_t* const*)pFrame->data, pFrame->linesize, 0, height, pFrameYUV->data, pFrameYUV->linesize);
    SDL_UnlockYUVOverlay(bmp);
    SDL_DisplayYUVOverlay(bmp, &rect);
}
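CSDLVideo never releases the scaler context, the overlay, or pFrameYUV either. A possible destructor, sketched under the assumption that these members are initialized to NULL in the constructor (not part of the shared project):

CSDLVideo::~CSDLVideo(void)
{
    //Sketch only: release what OpenVideo allocated
    if(img_convert_ctx != NULL){
        sws_freeContext(img_convert_ctx);
    }
    if(pFrameYUV != NULL){
        av_frame_free(&pFrameYUV);
    }
    if(bmp != NULL){
        SDL_FreeYUVOverlay(bmp);
    }
    //The surface returned by SDL_SetVideoMode is released by SDL_Quit(), not by the caller
}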