// FfmpegCode.cpp: implementation of the CFfmpegCode class.
//
//
#include "stdafx.h"
#include "smartdev.h"
#include "FfmpegCode.h"
#ifdef _DEBUG
#undef THIS_FILE
static char THIS_FILE[]=__FILE__;
#define new DEBUG_NEW
#endif
//
// Construction/Destruction
//
// Default constructor: frame dimensions default to QVGA (320x240), the
// resolution the decode path in this class is written for.
CFfmpegCode::CFfmpegCode()
	: m_iWidth(320),
	  m_iHeight(240)
{
}
// Destructor: intentionally empty — decoder resources are released
// explicitly via FfmpegClose() (called at the end of H264DecodeTest).
CFfmpegCode::~CFfmpegCode()
{
}
// Initialize the FFmpeg H.264 decoder: register codecs, find the decoder,
// allocate the codec context and the frame that receives decoded pictures.
// Pair with FfmpegClose() to release everything.
void CFfmpegCode::FfmpegInit()
{
	// Register all codecs and locate the H.264 decoder.
	avcodec_init();
	avcodec_register_all();
	m_pAVCodec = avcodec_find_decoder(CODEC_ID_H264);
	// Allocate the decoder context.
	m_pAVCodecContext = avcodec_alloc_context();
	// BUGFIX: CODEC_FLAG_TRUNCATED must be set BEFORE avcodec_open() —
	// the original set it after opening, where it never takes effect
	// (see ffmpeg's api-example.c, which sets flags prior to open).
	// Also guard against avcodec_find_decoder() returning NULL.
	if (m_pAVCodec && (m_pAVCodec->capabilities & CODEC_CAP_TRUNCATED))
		m_pAVCodecContext->flags |= CODEC_FLAG_TRUNCATED;
	// Open the decoder with the fully configured context.
	avcodec_open(m_pAVCodecContext, m_pAVCodec);
	// Allocate the frame buffer used for decoded pictures.
	m_pAVFrame = avcodec_alloc_frame();
}
// Convert the most recently decoded frame (m_pAVFrame) into the caller's
// destination planes using the supplied libswscale context.
// context   - scaler created by SwsGetContext()
// dst       - destination plane pointers (e.g. one packed RGB plane)
// dstStride - bytes per row for each destination plane
// Returns sws_scale's result (height of the output slice).
int CFfmpegCode::SwsScale(struct SwsContext *context,uint8_t* dst[], int dstStride[])
{
return sws_scale(context,m_pAVFrame->data,m_pAVFrame->linesize,0, m_iHeight, dst, dstStride);
}
// Read a raw H.264 elementary stream from filePath in 4KB chunks, decode it,
// convert each picture to RGB32 and blit it into the supplied client DC.
// dc       - destination device context
// x, y     - destination position of the blit (now honored; they were
//            previously accepted but ignored)
// width    - destination bitmap width; high - destination bitmap height
// filePath - path of the .h264 file to play
// Fixes over the original: the FILE* is always closed, pRGB is freed on every
// exit path, the SwsContext is created once and freed (it leaked per frame),
// and a len==0 decoder result no longer spins forever.
void CFfmpegCode::H264DecodeTest(CClientDC &dc,int x,int y,int width,int high,CString filePath)
{
	FfmpegInit();

	// Off-screen drawing surface compatible with the target DC.
	CDC MemDC;
	CBitmap DrawBitmap;
	MemDC.CreateCompatibleDC(&dc);
	DrawBitmap.CreateCompatibleBitmap(&dc, width, high);
	MemDC.SelectObject(DrawBitmap);
	MemDC.SetBkMode(TRANSPARENT);

	FILE *pFile = fopen(filePath, "rb");
	if (!pFile)
	{
		fprintf(stderr, "could not open %s\n", filePath.GetBuffer(0));
		// FfmpegInit() already allocated decoder state; release it here
		// instead of leaking on the early return.
		FfmpegClose();
		return;
	}

	// One RGB32 frame worth of pixels; the decoder/scaler output is the
	// fixed m_iWidth x m_iHeight (320x240). Replaces the magic 153600*2.
	const int rgbSize = m_iWidth * m_iHeight * 4;
	unsigned char *pRGB = (unsigned char *)malloc(rgbSize);

	// Create the YUV420P -> RGB32 scaler once for the whole stream
	// (the original allocated a fresh SwsContext per frame and never freed it).
	struct SwsContext *pSwsCxt = SwsGetContext(PIX_FMT_YUV420P, PIX_FMT_RGB32, SWS_BILINEAR);

	uint8_t inbuf[4096 + FF_INPUT_BUFFER_PADDING_SIZE], *inbuf_ptr;
	/* set end of buffer to 0 (this ensures that no overreading happens for damaged mpeg streams) */
	memset(inbuf + 4096, 0, FF_INPUT_BUFFER_PADDING_SIZE);

	int frame = 0;
	int size, got_picture, len;
	bool decodeFailed = false;
	while (!decodeFailed)
	{
		size = fread(inbuf, 1, 4096, pFile);
		if (size == 0)
			break;
		inbuf_ptr = inbuf;
		// Feed the chunk to the decoder until it is fully consumed.
		while (size > 0)
		{
			len = DecodeVideo(&got_picture, inbuf_ptr, size);
			if (len < 0)
			{
				printf("Error while decoding frame %d\n", frame);
				decodeFailed = true;   // fall through to cleanup instead of leaking
				break;
			}
			if (got_picture)
			{
				printf("saving frame %3d\n", frame);
				BYTE *rgb_dst[3] = { pRGB, NULL, NULL };
				// Stride must match the scaler's destination width
				// (m_iWidth), not the window width (was 4*width).
				int rgb_stride[3] = { 4 * m_iWidth, 0, 0 };
				SwsScale(pSwsCxt, rgb_dst, rgb_stride);
				// Push the converted pixels into the bitmap and blit
				// to the requested destination position.
				DrawBitmap.SetBitmapBits(rgbSize, (void *)pRGB);
				dc.BitBlt(x, y, width, high, &MemDC, 0, 0, SRCCOPY);
				frame++;
			}
			if (len == 0)
				break;   // decoder consumed nothing: avoid an infinite loop
			size -= len;
			inbuf_ptr += len;
			Sleep(100);  // crude pacing (~10 fps)
		}
	}

	// Single cleanup path for every exit from the decode loops.
	sws_freeContext(pSwsCxt);
	fclose(pFile);   // the original never closed the file handle
	free(pRGB);
	FfmpegClose();
}
// Decode one chunk of the H.264 bitstream (pre-AVPacket ffmpeg API).
// got_picture_ptr - set non-zero by ffmpeg when a full frame is in m_pAVFrame
// buf / buf_size  - input bitstream chunk
// Returns the number of bytes consumed, or a negative value on error.
int CFfmpegCode::DecodeVideo(int *got_picture_ptr, uint8_t *buf, int buf_size)
{
return avcodec_decode_video(m_pAVCodecContext, m_pAVFrame, got_picture_ptr, buf, buf_size);
}
void CFfmpegCode::FfmpegClose()
{
//关闭解码器,释放解码器内存
if(m_pAVCodecContext)
{
avcodec_close(m_pAVCodecContext);
av_free(m_pAVCodecContext);
m_pAVCodecContext = NULL;
}
//释放解码画面内存
if(m_pAVFrame)
{
av_free(m_pAVFrame);
m_pAVFrame = NULL;
}
}
// Create a libswscale context converting srcFormat -> dstFormat at the fixed
// decoder resolution (m_iWidth x m_iHeight). Source and destination sizes are
// identical, so this performs a pixel-format conversion only, no resizing.
// The caller owns the returned context (free with sws_freeContext).
struct SwsContext* CFfmpegCode::SwsGetContext(int srcFormat,int dstFormat, int flags)
{
return sws_getContext(m_iWidth,m_iHeight,srcFormat,m_iWidth,m_iHeight,dstFormat,flags,NULL,NULL,NULL);
}
//
// NOTE(review): everything from here down is a byte-for-byte duplicate of the
// definitions above (lines 1-120). Two definitions of each CFfmpegCode member
// in one translation unit will not compile/link; one copy should be removed.
#include "stdafx.h"
#include "smartdev.h"
#include "FfmpegCode.h"
#ifdef _DEBUG
#undef THIS_FILE
static char THIS_FILE[]=__FILE__;
#define new DEBUG_NEW
#endif
//
// Construction/Destruction
//
// Duplicate definition (see the copy near the top of the file).
// Frame dimensions default to QVGA (320x240).
CFfmpegCode::CFfmpegCode()
{
m_iWidth = 320;
m_iHeight = 240;
}
// Duplicate definition (see the copy near the top of the file).
// Intentionally empty — cleanup is done explicitly via FfmpegClose().
CFfmpegCode::~CFfmpegCode()
{
}
// Duplicate definition (see the copy near the top of the file).
// Registers codecs, finds the H.264 decoder, and allocates the codec context
// and the frame that receives decoded pictures.
void CFfmpegCode::FfmpegInit()
{
// Register all codecs and locate the H.264 decoder.
avcodec_init();
avcodec_register_all();
m_pAVCodec = avcodec_find_decoder(CODEC_ID_H264);
// Allocate the decoder context.
m_pAVCodecContext = avcodec_alloc_context();
// Open the decoder.
avcodec_open(m_pAVCodecContext, m_pAVCodec);
// Allocate the frame buffer used for decoded pictures.
m_pAVFrame = avcodec_alloc_frame();
// NOTE(review): setting CODEC_FLAG_TRUNCATED after avcodec_open() has no
// effect — ffmpeg's api-example sets this flag before opening the codec.
if(m_pAVCodec->capabilities&CODEC_CAP_TRUNCATED)
m_pAVCodecContext->flags|= CODEC_FLAG_TRUNCATED;
}
// Duplicate definition (see the copy near the top of the file).
// Converts the most recently decoded frame (m_pAVFrame) into the caller's
// destination planes via libswscale; returns sws_scale's result.
int CFfmpegCode::SwsScale(struct SwsContext *context,uint8_t* dst[], int dstStride[])
{
return sws_scale(context,m_pAVFrame->data,m_pAVFrame->linesize,0, m_iHeight, dst, dstStride);
}
// Duplicate definition (see the copy near the top of the file).
// Reads a raw H.264 stream from filePath in 4KB chunks, decodes it, converts
// each picture to RGB32 and blits it into the supplied client DC.
// x, y are accepted but never used — the blit always targets (0,0).
void CFfmpegCode::H264DecodeTest(CClientDC &dc,int x,int y,int width,int high,CString filePath)
{
FfmpegInit();
// Off-screen drawing surface compatible with the target DC.
CDC MemDC;
CBitmap DrawBitmap;
MemDC.CreateCompatibleDC(&dc);
DrawBitmap.CreateCompatibleBitmap(&dc,width,high);
MemDC.SelectObject(DrawBitmap);
MemDC.SetBkMode(TRANSPARENT);
// One RGB32 frame, hard-coded to the 320x240 decoder output.
// NOTE(review): pRGB leaks on the early returns below, and pFile is
// never fclose()d on any path.
unsigned char* pRGB = (unsigned char*)malloc(320*240*4);
int frame, size, got_picture, len;
uint8_t inbuf[4096 + FF_INPUT_BUFFER_PADDING_SIZE], *inbuf_ptr;
/* set end of buffer to 0 (this ensures that no overreading happens for damaged mpeg streams) */
memset(inbuf + 4096, 0, FF_INPUT_BUFFER_PADDING_SIZE);
FILE *pFile;
pFile = fopen(filePath, "rb");
if (!pFile)
{
fprintf(stderr, "could not open %s\n", filePath.GetBuffer(0));
return;
}
frame = 0;
while (1)
{
size = fread(inbuf, 1, 4096, pFile);
if (size == 0)
break;
inbuf_ptr = inbuf;
// Feed the chunk to the decoder until it is fully consumed.
// NOTE(review): if DecodeVideo ever returns 0 this loop never advances.
while (size>0)
{
len = DecodeVideo(&got_picture,inbuf_ptr, size);
if (len < 0)
{
printf("Error while decoding frame %d\n",frame);
return;
}
if (got_picture)
{
printf("saving frame %3d\n", frame);
// NOTE(review): a new SwsContext is created per frame and never freed.
SwsContext* pSwsCxt = SwsGetContext(PIX_FMT_YUV420P,PIX_FMT_RGB32, SWS_BILINEAR);
BYTE *rgb_src[3]= {pRGB, NULL, NULL};
int rgb_stride[3]={4*width, 0, 0};
int iResult = SwsScale(pSwsCxt,rgb_src,rgb_stride);
// 153600*2 == 320*240*4, i.e. exactly one full RGB32 frame.
DrawBitmap.SetBitmapBits(153600*2,(void*)pRGB);
dc.BitBlt(0,0,width,high,&MemDC,0,0,SRCCOPY);
frame++;
}
size -= len;
inbuf_ptr += len;
Sleep(100);
}
}
free(pRGB);
FfmpegClose();
}
// Duplicate definition (see the copy near the top of the file).
// Decodes one bitstream chunk; returns bytes consumed or negative on error,
// and sets *got_picture_ptr when a complete frame is in m_pAVFrame.
int CFfmpegCode::DecodeVideo(int *got_picture_ptr, uint8_t *buf, int buf_size)
{
return avcodec_decode_video(m_pAVCodecContext, m_pAVFrame, got_picture_ptr, buf, buf_size);
}
// Duplicate definition (see the copy near the top of the file).
// Releases all decoder state created by FfmpegInit(); safe to call more than
// once since pointers are nulled after freeing.
void CFfmpegCode::FfmpegClose()
{
// Close the decoder and free its context.
if(m_pAVCodecContext)
{
avcodec_close(m_pAVCodecContext);
av_free(m_pAVCodecContext);
m_pAVCodecContext = NULL;
}
// Free the buffer holding decoded pictures.
if(m_pAVFrame)
{
av_free(m_pAVFrame);
m_pAVFrame = NULL;
}
}
// Duplicate definition (see the copy near the top of the file).
// Creates a format-conversion-only scaler at the fixed m_iWidth x m_iHeight;
// caller owns the returned context (free with sws_freeContext).
struct SwsContext* CFfmpegCode::SwsGetContext(int srcFormat,int dstFormat, int flags)
{
return sws_getContext(m_iWidth,m_iHeight,srcFormat,m_iWidth,m_iHeight,dstFormat,flags,NULL,NULL,NULL);
}