由于项目组重组,自己有幸开始做音视频编解码方面的研发工作,现将自己近期的工作收获以BLOG的方式记录下来,方便自己日后查阅和学习。说到H264编解码,不能不提到ffmpeg,据自己查证的资料显示,现在大部分软件的H264编解码基本都是使用ffmpeg作为自己的第三方库工具,关于ffmpeg有多牛,这里不作赘述。
按照之前查阅的资料,ffmpeg可以解码rtp网络流、从内存读取数据流来解码、读取文件流并解码,本篇主要介绍ffmpeg如何从内存读取h264数据流并解码显示,这里只重点关注成功解码h264视频流的关键步骤,关于视频显示与音视频同步部分将在后续内容继续更新!
先贴上部分代码:
/// Construct the decoder with every owned resource unset.
/// Also registers all FFmpeg muxers/demuxers/codecs, which is required
/// once per process on FFmpeg versions prior to 4.0.
CDecoder::CDecoder()
{
    m_avFmtContext = NULL;
    m_pFrame       = NULL;
    m_pCodecCtx    = NULL;
    m_bQuit        = false;
    m_openTd       = NULL;
    m_disPlayTd    = NULL;
    m_direcDraw    = NULL;

    av_register_all();
}
CDecoder::~CDecoder()
{
    // NOTE(review): this destructor releases nothing. m_pFrame,
    // m_avFmtContext (including its AVIOContext and av_mallocz'd buffer)
    // and m_direcDraw are leaked, and the openStream() worker thread is
    // neither signalled via m_bQuit nor joined before destruction --
    // TODO confirm teardown is handled elsewhere, otherwise add it here.
}
/// Prepare the decoder: (re)allocate the AVFormatContext, pre-select the
/// H.264 decoder, lazily create the DirectDraw renderer bound to the given
/// window, and spawn the demux/decode worker thread (CDecoder::openStream).
/// @param hwnd  native window handle (HWND) the video is rendered into
/// @return true on success, false if a required allocation or the H.264
///         decoder lookup failed (the original code ignored both failures
///         and returned true unconditionally).
bool CDecoder::init( void *hwnd )
{
    // Drop any format context left over from a previous session.
    if ( m_avFmtContext )
    {
        avformat_close_input( &m_avFmtContext );
        m_avFmtContext = NULL;
    }
    m_avFmtContext = avformat_alloc_context();
    if ( !m_avFmtContext )
        return false;               // out of memory
    AVCodec* pCodec = avcodec_find_decoder( AV_CODEC_ID_H264 );
    if ( !pCodec )
        return false;               // build lacks an H.264 decoder
    // Pin the expected video codec so stream probing favours H.264.
    av_format_set_video_codec( m_avFmtContext, pCodec );
    m_avFmtContext->video_codec_id = AV_CODEC_ID_H264;
    // Lazily create the renderer for the target window.
    if ( !m_direcDraw )
    {
        m_direcDraw = new CDirectDraw( (HWND)hwnd );
        m_direcDraw->dirrectDrawInit( (HWND)hwnd );
    }
    m_bQuit = false;
    // Start the demux/decode worker exactly once.
    if ( !m_spOpenThread )
    {
        m_spOpenThread.reset( new boost::thread( boost::bind( &CDecoder::openStream, this ) ) );
    }
    return true;
}
// Worker-thread entry point: wires an in-memory AVIO source (fed through
// readRawDataCB), probes and opens the H.264 stream, then loops reading
// packets, decoding frames and handing them to the DirectDraw renderer
// until m_bQuit is set. Started by init() on its own boost::thread.
void CDecoder::openStream()
{
    // Custom AVIO context: FFmpeg pulls raw bytes from memory via
    // readRawDataCB instead of reading a file/URL.
    uint8_t* pBuf = (uint8_t *)av_mallocz( sizeof(uint8_t) * BUF_SIZE );
    m_avFmtContext->pb = avio_alloc_context( pBuf, BUF_SIZE, 0, this, readRawDataCB, NULL, NULL );
    if ( !m_avFmtContext->pb )
    {
        // NOTE(review): pBuf is leaked on this path -- TODO av_free it.
        std::cout << "avio_alloc_context error!" << std::endl;
        return;
    }
    // Probe the stream to fill in the AVInputFormat, preparing for the
    // avformat_open_input() call below.
    AVInputFormat *pAvInputFmt = NULL;
    //AVInputFormat *pAvInputFmt = av_find_input_format("h264");
    if ( av_probe_input_buffer( m_avFmtContext->pb, &pAvInputFmt, NULL, NULL, 0, 0 ) < 0 )
    {
        std::cout << __FUNCTION__ << " : " << __LINE__ << " error! " << std::endl;
        //avio_close( m_avFmtContext->pb );
        //av_err2str
        // NOTE(review): the AVIOContext and its buffer are leaked here.
        m_avFmtContext->pb = NULL;
        return;
    }
    av_init_packet(&m_avpkt);
    if ( !m_pFrame )
    {
        m_pFrame = av_frame_alloc();
    }
    // Despite the "RGB" name, this frame holds the YUV420P output below.
    AVFrame *pFrameRGB = NULL;
    // Open the probed, memory-backed input stream.
    if ( avformat_open_input( &m_avFmtContext, NULL, pAvInputFmt, NULL ) < 0 )
    {
        std::cout << "avformat_open_input error!" << std::endl;
        return;
    }
    // Read and decode packets.
    int frameFinished = -1;
    int videoStreamNum = -1;
    AVCodecContext *pCodecCtx = NULL;
    AVCodec *pCodec = NULL;
    // NOTE(review): function-local static makes this thread non-reentrant,
    // and the context is never freed with sws_freeContext.
    static struct SwsContext *img_convert_ctx = NULL;
    while( true )
    {
        if ( m_bQuit )
            return;
        int ret = av_read_frame(m_avFmtContext, &m_avpkt);
        if ( ret < 0 )
        {
            // No data available yet: back off 10 ms and retry.
            boost::thread::sleep( boost::get_system_time() + boost::posix_time::milliseconds( 10) );
            continue;
        }
        if ( videoStreamNum == -1 )
        {
            // Lazily locate the video stream on the first packet.
            for(int i=0; i<(m_avFmtContext->nb_streams); i++)
            {
                if(m_avFmtContext->streams[i]->codec->codec_type==AVMEDIA_TYPE_VIDEO) // found the video stream
                {
                    videoStreamNum=i;
                    break;
                }
            }
            if(videoStreamNum==-1)
                return;
            // Grab the stream's codec context and matching decoder so the
            // AVPackets below can be decoded.
            pCodecCtx = m_avFmtContext->streams[videoStreamNum]->codec;
            // Find the decoder that matches this stream's codec id.
            pCodec=avcodec_find_decoder(m_avFmtContext->streams[videoStreamNum]->codec->codec_id);
            if(pCodec==NULL)
            {
                fprintf(stderr, "Unsupported codec!\n");
                return;
            }
            // Open the decoder located from the stream info.
            if(avcodec_open2(pCodecCtx, pCodec,NULL)<0)
                return; // Could not open codec
            // NOTE(review): dimensions and pixel format are forced to
            // 1920x1080 YUV420P *after* the decoder opened, overriding
            // whatever the stream reported -- assumes the source is always
            // 1080p; TODO confirm, otherwise sws_scale uses wrong sizes.
            pCodecCtx->width = 1920;
            pCodecCtx->height = 1080;
            pCodecCtx->pix_fmt = AV_PIX_FMT_YUV420P;
            if ( !pFrameRGB )
            {
                pFrameRGB = av_frame_alloc();
                if ( !pFrameRGB )
                    return;
                pFrameRGB->width = pCodecCtx->width;
                pFrameRGB->height = pCodecCtx->height;
                pFrameRGB->format = PIX_FMT_YUV420P;
                // Allocate the destination picture planes.
                // NOTE(review): never released with av_freep -- leaked
                // when the thread exits.
                if ( av_image_alloc( pFrameRGB->data, pFrameRGB->linesize,
                    pCodecCtx->width, pCodecCtx->height, PIX_FMT_YUV420P, 1 ) < 0 )
                {
                    av_frame_free( &pFrameRGB );
                    pFrameRGB = NULL;
                    return;
                }
            }
            // Same-format YUV420P->YUV420P scaler: used here to copy the
            // decoded planes into pFrameRGB's contiguous buffer.
            img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height,
                pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height,
                PIX_FMT_YUV420P, SWS_BICUBIC, NULL, NULL, NULL);
        }
        if(m_avpkt.stream_index==videoStreamNum)
        {
            // Decode one packet; frameFinished is nonzero once a complete
            // frame has been produced.
            avcodec_decode_video2( pCodecCtx, m_pFrame, &frameFinished, &m_avpkt );
            if ( frameFinished > 0 )
            {
                // Swap the U and V planes before scaling -- presumably the
                // renderer expects YV12-style chroma order; TODO confirm
                // against CDirectDraw's input format.
                static uint8_t *p = NULL;
                p = m_pFrame->data[1];
                m_pFrame->data[1] = m_pFrame->data[2];
                m_pFrame->data[2] = p;
                sws_scale(img_convert_ctx, m_pFrame->data, m_pFrame->linesize,
                    0, pCodecCtx->height,pFrameRGB->data,pFrameRGB->linesize);
                sPictureSize sPicSize;
                sPicSize.nHeight = pCodecCtx->height;
                sPicSize.nWidth = pCodecCtx->width;
                // Hand the frame to DirectDraw and trigger a repaint.
                m_direcDraw->dirrectDrawInputData(pFrameRGB->data[0], sPicSize);
                m_direcDraw->directDrawUpDateData();
                //SaveAsBMP( pFrameRGB, pCodecCtx->width, pCodecCtx->height, 1, 1 );
            }
        }
        av_free_packet(&m_avpkt);
    }
}