关闭

ffmpeg实时解码H264数据流

1091人阅读 评论(0) 收藏 举报
以下是一个测试程序,用的是读取h264文件数据然后用ffmpeg解码的方法,模拟实时数据的解码。测试已通过,解码正常。

至于如何编译ffmpeg、如何实现收发数据、如何拼帧这里不予说明,请查看相关文档。
 

 1、.h文件里面
#ifdef __cplusplus
extern "C" {
#endif

#include "libavformat/avformat.h"
#include "libavutil/opt.h"
#include "libavutil/imgutils.h"
#include "libavcodec/avcodec.h"
#include "libswscale/swscale.h"


#pragma comment(lib, "libgcc.a")
#pragma comment(lib, "libavcodec.a")
#pragma comment(lib, "libavformat.a")
#pragma comment(lib, "libavutil.a")
#pragma comment(lib, "libmingwex.a")


#ifdef __cplusplus
}
#endif 


2、.cpp文件里面

// Write YUV(420P) data.
 
FILE *g_fd_yuv420p = fopen("C:\\decodeTest\\420p_stream.yuv", "wb");

// Append one decoded frame to the global YUV420P dump file.
//   pFrame       - decoded frame; data[0..2] hold the Y/U/V planes, each row
//                  being linesize[i] bytes wide (may be padded past `width`).
//   width/height - visible picture dimensions; U/V planes are half size in
//                  each direction.
//   iFrame       - unused; kept for interface compatibility.
// Passing pFrame == NULL signals end-of-stream and closes the dump file.
static void SaveFrame_YUV420P_2(AVFrame *pFrame, int width, int height, int iFrame){
if(g_fd_yuv420p == NULL){
return;
}
if(pFrame == NULL){
// End of stream: close the file AND clear the handle. Bug fix: the
// original left g_fd_yuv420p pointing at a closed stream, so any later
// call would fwrite through a dangling FILE* (undefined behavior).
fclose(g_fd_yuv420p);
g_fd_yuv420p = NULL;
return;
}
// Y plane: full resolution, written row by row to skip linesize padding.
for(int y=0; y<height; y++){
fwrite(pFrame->data[0] + y * pFrame->linesize[0], 1, width, g_fd_yuv420p);
}
// U then V planes: quarter resolution (half width, half height).
for(int y=0; y<(height / 2); y++){
fwrite(pFrame->data[1] + y * pFrame->linesize[1], 1, width / 2, g_fd_yuv420p);
}
for(int y=0; y<(height / 2); y++){
fwrite(pFrame->data[2] + y * pFrame->linesize[2], 1, width / 2, g_fd_yuv420p);
}
}

// Find the position of the next NAL unit in an H.264 Annex-B byte stream.
// Scans pData starting at startPos for a 4-byte start code (00 00 00 01).
//   pPos (out) - offset of the start code relative to pData.
// Returns 1 for SPS, 2 for PPS, 0 for any other NAL type, -1 if no start
// code is found (or the arguments are invalid / fewer than 5 bytes remain).
int getNextNalPos(unsigned char *pData, int startPos, int dataLength, unsigned long *pPos){

if(pData != NULL && startPos >= 0 && (dataLength - startPos) > 4){
unsigned char *pTmp = pData + startPos;
int restLen = dataLength - startPos;
// Need 4 start-code bytes plus at least 1 NAL header byte.
while(restLen >= 5){

// Byte-wise comparison. The original cast pTmp to int* and compared
// against 0x01000000, which is undefined behavior (strict aliasing
// violation, possible misaligned load) and only worked on
// little-endian machines.
if(pTmp[0] == 0x00 && pTmp[1] == 0x00 && pTmp[2] == 0x00 && pTmp[3] == 0x01){

*pPos = (unsigned long)(pTmp - pData);
// nal_unit_type is the low 5 bits of the NAL header byte; masking
// makes detection independent of nal_ref_idc (the original raw
// 0x67/0x68 comparison assumed nal_ref_idc == 3).
int nalType = pTmp[4] & 0x1F;
if(nalType == 7){
return 1;//SPS
}else if(nalType == 8){
return 2;//PPS
}else{
return 0;// other NAL (slice, SEI, ...)
}
}

restLen --;
pTmp ++;
}
}
return -1;// no start code found
}

//测试函数
void CtestCodecDlg::OnBnClickedButton2()
{
AVCodec *pCodec = NULL;
AVCodecContext *pContext = NULL;
AVFrame *pFrame = NULL;
AVPacket packet = {0};


av_register_all();

pCodec = avcodec_find_decoder(CODEC_ID_H264);
if(pCodec == NULL){
LOG("没有找到H264 codec");
goto stream_decode_finish;
}

pContext = avcodec_alloc_context3(pCodec);
if(pContext == NULL){
LOG("avcodec_alloc_context3 失败");
goto stream_decode_finish;
}
pContext->time_base.num = 1;
pContext->time_base.den = 25;
pContext->bit_rate = 0;
pContext->frame_number = 1;
pContext->codec_type = AVMEDIA_TYPE_VIDEO;
        //图像宽高的求解看第三步 
pContext->width = 352;
pContext->height = 288;
/* pContext->pix_fmt = AV_PIX_FMT_YUV420P;

pContext->profile = 578;
pContext->level = 13;
pContext->pkt_timebase.num = 1;
pContext->pkt_timebase.den = 1200000;*/

if(avcodec_open2(pContext, pCodec, NULL) < 0){
LOG("avcodec_open2 失败");
goto stream_decode_finish;
}

pFrame = avcodec_alloc_frame();
if(pFrame == NULL){
LOG("avcodec_alloc_frame 失败");
goto stream_decode_finish;
}

unsigned long cacheSize = (1024 << 10);
unsigned char *pSrcData = (unsigned char *)malloc(cacheSize);
if(pSrcData == NULL){
LOG("malloc 失败");
goto stream_decode_finish;
}
memset(pSrcData, 0, cacheSize);

FILE *fd = fopen("C:\\out_2.h264", "rb");
// FILE *fd = fopen("C:\\gaoqing.h264", "rb");
if(fd == NULL){
LOG("fopen 失败");
goto stream_decode_finish;
}

/* unsigned char extraData[64] = {0};
fread(extraData, 1, 36, fd);
pContext->extradata = extraData;
pContext->extradata_size = 36;
fseek(fd, 0, SEEK_SET);*/

int fdPos = 0;//文件指针
int frameNum = 1;
while(true){

bool bBreak = false;
int readLen = fread(pSrcData, 1, cacheSize, fd);
if(readLen < cacheSize){
fseek(fd, 0, SEEK_SET);
bBreak = true;
}

int pos = 0;
int processedLen = 0;
unsigned long nextNalPos = 0;
int ret = getNextNalPos(pSrcData, processedLen, readLen, &nextNalPos);
int nalType = -1;
while(ret != -1){

//TRACE("nextPos: %0x\n", nextNalPos);

if(ret == 1){

int tmpLen = nextNalPos - pos;
if(tmpLen > 0){//解码
packet.data = pSrcData + pos;
packet.size = tmpLen;
fdPos += tmpLen;
int frameFinished = 0;
int result = avcodec_decode_video2(pContext, pFrame, &frameFinished, &packet);
if(ret < 0){
LOG("avcodec_decode_video2 failed");
}

if(frameFinished){
// TRACE("frameNum: %d, processedLen: %d\n", frameNum ++, processedLen);
SaveFrame_YUV420P_2(pFrame, pContext->width, pContext->height, 0);
}

pos = nextNalPos;
}
processedLen = nextNalPos + 5;
nalType = 1;
}else if(ret == 2){

if(nalType == 0){
packet.data = pSrcData + pos;
packet.size = nextNalPos - pos;
fdPos += packet.size;
int frameFinished = 0;
int result = avcodec_decode_video2(pContext, pFrame, &frameFinished, &packet);
if(ret < 0){
LOG("avcodec_decode_video2 failed");
}

if(frameFinished){
// TRACE("frameNum: %d, processedLen: %d\n", frameNum ++, processedLen);
SaveFrame_YUV420P_2(pFrame, pContext->width, pContext->height, 0);
}

pos = nextNalPos;
}
processedLen = nextNalPos + 5;
nalType = 2;
}else if(ret == 0){

{
packet.data = pSrcData + pos;
packet.size = nextNalPos - pos;
fdPos += packet.size;
int frameFinished = 0;
int result = avcodec_decode_video2(pContext, pFrame, &frameFinished, &packet);
if(ret < 0){
LOG("avcodec_decode_video2 failed");
}

if(frameFinished){
// TRACE("frameNum: %d, processedLen: %d\n", frameNum ++, processedLen);
SaveFrame_YUV420P_2(pFrame, pContext->width, pContext->height, 0);
}

pos = nextNalPos;
}
processedLen = nextNalPos + 5;
nalType = 0;
}
ret = getNextNalPos(pSrcData, processedLen, readLen, &nextNalPos);
}

//TRACE("重新读取缓存!!位置:%d,processedLength:%d\n", fdPos, processedLen);
fseek(fd, fdPos, SEEK_SET);

if(bBreak){
break;
}
}
SaveFrame_YUV420P_2(pFrame, pContext->width, pContext->height, 0);


stream_decode_finish:
if(pFrame != NULL){
av_freep(&pFrame);
avcodec_free_frame(&pFrame);
}
if(pContext != NULL){
avcodec_close(pContext);
av_free(pContext);
}

if(pSrcData != NULL){
free(pSrcData);
pSrcData = NULL;
}
if(fd != NULL){
fclose(fd);
fd = NULL;
}
return;
}

3、相关说明
图像的宽高可以从H264码流的SPS中获取,获取方法:
SPS结构里面有两个数据成员:pic_width_in_mbs_minus1 和 pic_height_in_map_units_minus1,分别以宏块(16x16)为单位表示图像的宽和高(数值为实际宏块数减 1)。因此图像的实际宽为 ((pic_width_in_mbs_minus1 + 1) << 4);当 frame_mbs_only_flag 为 1(逐行帧编码)时,实际高为 ((pic_height_in_map_units_minus1 + 1) << 4);若为场编码(frame_mbs_only_flag 为 0),高度还需再乘以 2,即 ((pic_height_in_map_units_minus1 + 1) << 4) * (2 - frame_mbs_only_flag)。
0
0

查看评论
* 以上用户言论只代表其个人观点,不代表CSDN网站的观点或立场
    个人资料
    • 访问:13251次
    • 积分:351
    • 等级:
    • 排名:千里之外
    • 原创:22篇
    • 转载:9篇
    • 译文:0篇
    • 评论:1条
    最新评论