最近因项目需要,要解码 MP4 文件或 H.264 裸流。这是在总结网友资料以及 FFmpeg 新旧版本 API 差异后写的测试代码,已成功解码 MP4 文件(H.264 裸流同样可解,只需改变源输入即可)。
/*
 * Write one frame of packed RGB24 pixel data to "d:\test\video\<iFrame>.ppm"
 * as a binary PPM (P6) image.
 *
 * pData_rgb: tightly packed RGB24 buffer, width*height*3 bytes.
 * width/height: frame dimensions in pixels.
 * iFrame: frame index; only frames 0..5 are saved (demo limit).
 *
 * Best-effort: silently returns on open failure; no return value.
 */
static void SaveFramePPM(uint8_t *pData_rgb, int width, int height, int iFrame)
{
    FILE *pFile = NULL;
    char szFilename[64] = {0};
    size_t want = 0;

    if (iFrame > 5)
        return;

    /* snprintf bounds the write; the original sprintf could overflow the buffer */
    snprintf(szFilename, sizeof szFilename, "d:\\test\\video\\%d.ppm", iFrame);
    if (NULL == (pFile = fopen(szFilename, "wb")))
        return;

    /* PPM binary header: magic, dimensions, max sample value */
    fprintf(pFile, "P6\n%d %d\n255\n", width, height);

    want = (size_t)height * (size_t)width * 3;
    if (fwrite(pData_rgb, 1, want, pFile) != want) {
        printf("Short write on %s\n", szFilename);
    }
    fclose(pFile);
}
int Mp4ToPPm(const char* filepath) {
// char filepath[] = "d:\\test\\video\\gop121.h264";
int nCount = 0;
int i = 0, videoIndex = -1, numBytes = 0, numBytes_rgb=0;
int outLinesize_rgb[4] = {0}, ret = 0, ret1 = 0;
uint8_t *out_buffer = NULL, *out_buffer_rgb = NULL;
uint8_t *outData_rgb[4] = {NULL,NULL,NULL,NULL};
AVFormatContext *pFormatCtx = NULL;
AVCodecContext *pCodecCtx = NULL;
AVCodec *pCodec = NULL;
AVFrame *pFrame = NULL, * pFrameYUV = NULL;
AVPacket packet;
static struct SwsContext *img_convert_ctx=NULL;
if (NULL == (pFormatCtx = avformat_alloc_context()))
return -1;
if (avformat_open_input(&pFormatCtx, filepath, NULL, NULL) != 0) {
printf("Can't open the file\n");
return -1;
}
if (avformat_find_stream_info(pFormatCtx, NULL) < 0) {
printf("Couldn't find stream information.\n");
return -1;
}
av_dump_format(pFormatCtx, 0, filepath, 0);
for (i = 0; i < pFormatCtx->nb_streams; i++) {
if (pFormatCtx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
videoIndex = i;
break;
}
}
if (videoIndex == -1)
return -1;
if (NULL == (pCodec = avcodec_find_decoder(pFormatCtx->streams[videoIndex]->codecpar->codec_id)))
{
printf("Unsupported codec!\n");
return -1;
}
if (NULL == (pCodecCtx = avcodec_alloc_context3(pCodec)))
return -1;
if (avcodec_parameters_to_context(pCodecCtx, pFormatCtx->streams[videoIndex]->codecpar) < 0)
{
printf("Could not copy parameters to codec.\n");
return -1;
}
if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
printf("Could not open codec.\n");
return -1;
}
if (NULL == (pFrame = av_frame_alloc()))
return -1;
if (NULL == (pFrameYUV = av_frame_alloc()))
return -1;
img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt,\
pCodecCtx->width, pCodecCtx->height, AV_PIX_FMT_RGB24, SWS_POINT, NULL, NULL, NULL);
numBytes_rgb = av_image_get_buffer_size(AV_PIX_FMT_RGB24, pCodecCtx->width,
pCodecCtx->height, 1);
out_buffer_rgb = (uint8_t*)av_malloc(numBytes_rgb * sizeof(uint8_t));
outData_rgb[0] = out_buffer_rgb;
outLinesize_rgb[0] = pCodecCtx->width * 3;
outLinesize_rgb[1] = outLinesize_rgb[2] = outLinesize_rgb[3] = 0;
av_new_packet(&packet, numBytes);
while (av_read_frame(pFormatCtx, &packet) >= 0)
{
if (packet.stream_index == videoIndex)
{
// ret = avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet);
// if (ret >= 0)
if ((ret = avcodec_send_packet(pCodecCtx, &packet)) >= 0
&& (ret1 = avcodec_receive_frame(pCodecCtx, pFrame)) >= 0)
{
sws_scale(img_convert_ctx, (const uint8_t* const*)pFrame->data,pFrame->linesize, 0,pCodecCtx->height, outData_rgb, outLinesize_rgb);
SaveFramePPM(out_buffer_rgb, pCodecCtx->width, pCodecCtx->height, nCount);
++ nCount;
}
}
av_packet_unref(&packet);
}
av_free(out_buffer_rgb);
av_frame_free(&pFrame);
av_frame_free(&pFrameYUV);
avcodec_close(pCodecCtx);
avformat_close_input(&pFormatCtx);
return 0;
}