YUV420 displayed correctly on a virtual machine, but not on the 945 (D525) module. A later test showed that DirectDraw could not display YUV420 either, which pointed to the graphics chip not supporting the format, so the YUV420 data is converted to YUV422 for display.
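For reference, the core of the workaround is repacking planar YUV420 (I420) into packed YUY2, where every two horizontal pixels share one U and one V sample. A minimal standalone sketch of that repacking (function and parameter names are illustrative, not from the original program; the real in-loop version appears in the 422 listing below):

#include <stdint.h>
/* Illustrative helper: repack I420 planes into a packed YUY2 (Y0 U Y1 V) buffer.
   Assumes width is even and dst holds width*height*2 bytes. */
static void i420_to_yuy2(const uint8_t *y, int y_stride,
                         const uint8_t *u, int u_stride,
                         const uint8_t *v, int v_stride,
                         uint8_t *dst, int width, int height)
{
    int row, col, k = 0;
    for (row = 0; row < height; row++) {
        for (col = 0; col < width; col += 2) {
            dst[k++] = y[row * y_stride + col];           /* Y0 */
            dst[k++] = u[(row / 2) * u_stride + col / 2]; /* U  */
            dst[k++] = y[row * y_stride + col + 1];       /* Y1 */
            dst[k++] = v[(row / 2) * v_stride + col / 2]; /* V  */
        }
    }
}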
The YUV420 display version follows:
/*
Compile commands: arm-linux-gcc -o show2642 264showyuv2.c -I/usr/local/ffmpeg_arm/include/ -L/usr/local/ffmpeg_arm/lib/ -lswresample -lavformat -lavutil -lavcodec -lswscale -lx264 libSDL.a
gcc -o test test.c -I/usr/local/ffmpeg/include/ -L/usr/local/ffmpeg/lib/ -lswresample -lavformat -lavutil -lavcodec -lswscale -lx264 -lSDL
*/
#include "stdio.h"
#include "stdlib.h"
#include "libavformat/avformat.h"
#include "libavdevice/avdevice.h"
#include "libswresample/swresample.h"
#include "libavutil/opt.h"
#include "libavutil/channel_layout.h"
#include "libavutil/parseutils.h"
#include "libavutil/samplefmt.h"
#include "libavutil/fifo.h"
#include "libavutil/intreadwrite.h"
#include "libavutil/dict.h"
#include "libavutil/mathematics.h"
#include "libavutil/pixdesc.h"
#include "libavutil/avstring.h"
#include "libavutil/imgutils.h"
#include "libavutil/timestamp.h"
#include "libavutil/bprint.h"
#include "libavutil/time.h"
#include "libavutil/threadmessage.h"
#include "SDL/SDL.h"
//#include "libavfilter/avcodec.h"
#include "libavcodec/avcodec.h"
#if HAVE_SYS_RESOURCE_H
#include <sys/time.h>
#include <sys/types.h>
#include <sys/resource.h>
#elif HAVE_GETPROCESSTIMES
#include <windows.h>
#endif
#if HAVE_GETPROCESSMEMORYINFO
#include <windows.h>
#include <psapi.h>
#endif
#if HAVE_SYS_SELECT_H
#include <sys/select.h>
#endif
#if HAVE_TERMIOS_H
#include <fcntl.h>
#include <sys/ioctl.h>
#include <sys/time.h>
#include <termios.h>
#elif HAVE_KBHIT
#include <conio.h>
#endif
#if HAVE_PTHREADS
#include <pthread.h>
#endif
#include <time.h>
#include "libavutil/avassert.h"
#define MAX_LEN 1024 * 50
/* This helper follows the decoding example from the official FFmpeg documentation */
static void pgm_save(unsigned char *buf, int wrap, int xsize, int ysize,
FILE *f)
{
// FILE *f;
int i;
// f = fopen(filename,"w");
// fprintf(f, "P5\n%d %d\n%d\n", xsize, ysize, 255);
for (i = 0; i < ysize; i++)
;// fwrite(buf + i * wrap, 1, xsize, f);
// fclose(f);
}
int main()
{
//Initialize the H.264 decoder below
//avcodec_init();
int w = 720;
int h = 576,retu;
SDL_Rect rect;
av_register_all();
AVFrame *pFrame_ = NULL;
/* find the video encoder */
AVCodec *videoCodec = avcodec_find_decoder(AV_CODEC_ID_H264);//get the H.264 decoder
if(!videoCodec)
{
printf("avcodec_find_decoder error\n");
return -1;
}
AVCodecParserContext *avParserContext = av_parser_init(AV_CODEC_ID_H264);//parser context, used later to locate frame boundaries
if(!avParserContext)
{
printf("av_parser_init error\n");
return -1;
}
AVCodecContext *codec_ = avcodec_alloc_context3(videoCodec);//decoder context
if(!codec_)
{
printf("avcodec_alloc_context3 error\n");
return -1;
}
//Initialize parameters; in practice these values should come from the actual stream/application
codec_->time_base.num = 1;
codec_->frame_number = 1; //one video frame per packet (frame_number is normally maintained by libavcodec itself)
codec_->codec_type = AVMEDIA_TYPE_VIDEO;
codec_->bit_rate = 0;
codec_->time_base.den = 25;//frame rate
codec_->width = 720;//video width
codec_->height = 576;//video height
if(avcodec_open2(codec_, videoCodec, NULL) >= 0)//open the decoder
{
pFrame_ = av_frame_alloc();// Allocate the video frame once after the decoder is opened; it could also be allocated and freed per frame, but allocating it once up front is simpler
if (!pFrame_) {
fprintf(stderr, "Could not allocate video frame\n");
exit(1);
}
}
else
{
printf("avcodec_open2 error\n");
return -1;
}
AVPacket packet = {0};
int dwBufsize = 10;
int frameFinished = dwBufsize;//arbitrary initial value; avcodec_decode_video2 overwrites it
av_init_packet(&packet);
packet.data = NULL;//will later point to a complete H.264 frame
packet.size = 0;//size of that frame
FILE *myH264 = fopen("1.264", "rb");//the H.264 file to decode
if(myH264 == NULL)
{
perror("cant open 264 file\n");
return -1;
}
FILE *yuvfile = fopen("my264.yuv", "wb");//成功解码后保存成的YUV文件, 可以用YUV工具打开浏览
if(yuvfile == NULL)
{
perror("cant open YUV file\n");
return -1;
}
int readFileLen = 1;
char readBuf[MAX_LEN];
unsigned char *parseBuf = malloc(20*MAX_LEN);//This cost an afternoon: with a stack buffer (unsigned char parseBuf[20*MAX_LEN]) the program kept crashing; heap memory is needed here for decoding to work
int parseBufLen = 0;
int frameCount = 0;
printf("begin...\n");
printf("readBuf address is %x\n", readBuf);
/SDL init
SDL_Surface* hello = NULL;
SDL_Surface* screen = NULL;
//Start SDL
// SDL_Init( SDL_INIT_EVERYTHING );
SDL_Init(SDL_INIT_VIDEO);
//Set up screen
screen = SDL_SetVideoMode( 1024, 768, 32, SDL_SWSURFACE );
SDL_Overlay* overlay = SDL_CreateYUVOverlay(w, h, SDL_YV12_OVERLAY, screen);
SDL_LockSurface(screen);
SDL_LockYUVOverlay(overlay);
//
while(readFileLen > 0)//decoding loop
{
//printf("begin...\n");
readFileLen = fread(readBuf, 1, sizeof(readBuf), myH264);//read data from the file
if(readFileLen <= 0)
{
printf("read over\n");
break;
}
else
{
int handleLen = 0;
int handleFileLen = readFileLen;
while(handleFileLen > 0)
{
int nLength = av_parser_parse2(avParserContext, codec_, &parseBuf, &parseBufLen, readBuf + handleLen, handleFileLen, 0, 0, 0);//locate H.264 frame boundaries
handleFileLen -= nLength;
handleLen += nLength;
if(parseBufLen <= 0)//parseBufLen > 0 means a complete frame was found
{
continue;
}
packet.size = parseBufLen;//size of the parsed frame
packet.data = parseBuf;//data of the parsed frame
if(frameCount>100)break;
//printf("parseBuf address is %x\n", parseBuf);
while(packet.size > 0)
{//the actual decoding starts here
int decodeLen = avcodec_decode_video2(codec_, pFrame_, &frameFinished, &packet);
if(decodeLen < 0)
break;
packet.size -= decodeLen;
packet.data += decodeLen;
if(frameFinished > 0)//a frame was decoded
{
int picSize = codec_->height * codec_->width;
//int newSize = picSize * 1.5;
//allocate memory
//unsigned char *buf = malloc(newSize);
int height = pFrame_->height;
int width = pFrame_->width;
//printf("OK, get data\n");
//printf("Frame height is %d\n", height);
//printf("Frame width is %d\n", width);
frameCount ++;
printf("Frame count is %d\n", frameCount);
pgm_save(pFrame_->data[0], pFrame_->linesize[0],//save Y
codec_->width, codec_->height, yuvfile);
pgm_save(pFrame_->data[1], pFrame_->linesize[1],//save U
codec_->width/2, codec_->height/2, yuvfile);
pgm_save(pFrame_->data[2], pFrame_->linesize[2],//save V
codec_->width/2, codec_->height/2, yuvfile);
///With the YUV data available, FFmpeg's conversion routines can turn it into RGB for display or other image processing
//SDL display
int i;
for(i=0;i<576;i++)
{//copy the Y plane row by row, honoring the overlay pitch instead of a hard-coded stride
memcpy(overlay->pixels[0]+i*overlay->pitches[0], pFrame_->data[0]+i*pFrame_->linesize[0], 720);
}
for(i=0;i<288;i++)
{//YV12 plane order is Y, V, U: pixels[1] is V and pixels[2] is U
memcpy(overlay->pixels[2]+i*overlay->pitches[2], pFrame_->data[1]+i*pFrame_->linesize[1], 360);
memcpy(overlay->pixels[1]+i*overlay->pitches[1], pFrame_->data[2]+i*pFrame_->linesize[2], 360);
}
SDL_UnlockYUVOverlay(overlay);
SDL_UnlockSurface(screen);
rect.w = w;
rect.h = h;
rect.x = rect.y = 0;
SDL_DisplayYUVOverlay(overlay, &rect);
//sdl
SDL_Delay(40);
}
else
printf("failed to decodec\n");
}
}
}
}
//cleanup
avcodec_close(codec_);
av_free(codec_);
av_free_packet(&packet);
free(parseBuf);
av_frame_free(&pFrame_);
//SDL
SDL_FreeYUVOverlay(overlay);
SDL_FreeSurface(screen);
//Quit SDL
SDL_Quit();
fclose(yuvfile);
fclose(myH264);
}
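A quick way to check the suspicion that the graphics chip has no hardware YUV420 overlay: SDL 1.2's SDL_Overlay carries an hw_overlay bit that reports whether a hardware overlay was actually obtained. A small probe like the following (not part of the original program; the helper name is made up) can compare SDL_YV12_OVERLAY against SDL_YUY2_OVERLAY on the target board:

#include "SDL/SDL.h"
/* Returns 1 if SDL provides a hardware overlay for the requested format,
   0 if it falls back to a software overlay or creation fails. */
static int overlay_is_hw(SDL_Surface *screen, int w, int h, Uint32 format)
{
    SDL_Overlay *ov = SDL_CreateYUVOverlay(w, h, format, screen);
    int hw = 0;
    if (ov) {
        hw = ov->hw_overlay; /* set to 1 by SDL when the overlay is hardware accelerated */
        SDL_FreeYUVOverlay(ov);
    }
    return hw;
}
/* e.g. overlay_is_hw(screen, 720, 576, SDL_YV12_OVERLAY) vs. overlay_is_hw(screen, 720, 576, SDL_YUY2_OVERLAY) */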
The YUV422 (YUY2) display version follows:
/*
Compile commands: arm-linux-gcc -o show2642 264showyuv2.c -I/usr/local/ffmpeg_arm/include/ -L/usr/local/ffmpeg_arm/lib/ -lswresample -lavformat -lavutil -lavcodec -lswscale -lx264 libSDL.a
gcc -o test test.c -I/usr/local/ffmpeg/include/ -L/usr/local/ffmpeg/lib/ -lswresample -lavformat -lavutil -lavcodec -lswscale -lx264 -lSDL
*/
#include "stdio.h"
#include "stdlib.h"
#include "libavformat/avformat.h"
#include "libavdevice/avdevice.h"
#include "libswresample/swresample.h"
#include "libavutil/opt.h"
#include "libavutil/channel_layout.h"
#include "libavutil/parseutils.h"
#include "libavutil/samplefmt.h"
#include "libavutil/fifo.h"
#include "libavutil/intreadwrite.h"
#include "libavutil/dict.h"
#include "libavutil/mathematics.h"
#include "libavutil/pixdesc.h"
#include "libavutil/avstring.h"
#include "libavutil/imgutils.h"
#include "libavutil/timestamp.h"
#include "libavutil/bprint.h"
#include "libavutil/time.h"
#include "libavutil/threadmessage.h"
#include "SDL/SDL.h"
//#include "libavfilter/avcodec.h"
#include "libavcodec/avcodec.h"
#if HAVE_SYS_RESOURCE_H
#include <sys/time.h>
#include <sys/types.h>
#include <sys/resource.h>
#elif HAVE_GETPROCESSTIMES
#include <windows.h>
#endif
#if HAVE_GETPROCESSMEMORYINFO
#include <windows.h>
#include <psapi.h>
#endif
#if HAVE_SYS_SELECT_H
#include <sys/select.h>
#endif
#if HAVE_TERMIOS_H
#include <fcntl.h>
#include <sys/ioctl.h>
#include <sys/time.h>
#include <termios.h>
#elif HAVE_KBHIT
#include <conio.h>
#endif
#if HAVE_PTHREADS
#include <pthread.h>
#endif
#include <time.h>
#include "libavutil/avassert.h"
#define MAX_LEN 1024 * 50
/* This helper follows the decoding example from the official FFmpeg documentation */
static void pgm_save(unsigned char *buf, int wrap, int xsize, int ysize,
FILE *f)
{
// FILE *f;
int i;
// f = fopen(filename,"w");
// fprintf(f, "P5\n%d %d\n%d\n", xsize, ysize, 255);
for (i = 0; i < ysize; i++)
;// fwrite(buf + i * wrap, 1, xsize, f);
// fclose(f);
}
int main()
{
//Initialize the H.264 decoder below
//avcodec_init();
int w = 720;
int h = 576,retu;
SDL_Rect rect;
av_register_all();
AVFrame *pFrame_ = NULL;
/* find the video encoder */
AVCodec *videoCodec = avcodec_find_decoder(AV_CODEC_ID_H264);//get the H.264 decoder
if(!videoCodec)
{
printf("avcodec_find_decoder error\n");
return -1;
}
AVCodecParserContext *avParserContext = av_parser_init(AV_CODEC_ID_H264);//parser context, used later to locate frame boundaries
if(!avParserContext)
{
printf("av_parser_init error\n");
return -1;
}
AVCodecContext *codec_ = avcodec_alloc_context3(videoCodec);//decoder context
if(!codec_)
{
printf("avcodec_alloc_context3 error\n");
return -1;
}
//Initialize parameters; in practice these values should come from the actual stream/application
codec_->time_base.num = 1;
codec_->frame_number = 1; //one video frame per packet (frame_number is normally maintained by libavcodec itself)
codec_->codec_type = AVMEDIA_TYPE_VIDEO;
codec_->bit_rate = 0;
codec_->time_base.den = 25;//frame rate
codec_->width = 720;//video width
codec_->height = 576;//video height
if(avcodec_open2(codec_, videoCodec, NULL) >= 0)//open the decoder
{
pFrame_ = av_frame_alloc();// Allocate the video frame once after the decoder is opened; it could also be allocated and freed per frame, but allocating it once up front is simpler
if (!pFrame_) {
fprintf(stderr, "Could not allocate video frame\n");
exit(1);
}
}
else
{
printf("avcodec_open2 error\n");
return -1;
}
AVPacket packet = {0};
int dwBufsize = 10;
int frameFinished = dwBufsize;//arbitrary initial value; avcodec_decode_video2 overwrites it
av_init_packet(&packet);
packet.data = NULL;//will later point to a complete H.264 frame
packet.size = 0;//size of that frame
FILE *myH264 = fopen("1.264", "rb");//the H.264 file to decode
if(myH264 == NULL)
{
perror("cant open 264 file\n");
return -1;
}
FILE *yuvfile = fopen("my264.yuv", "wb");//成功解码后保存成的YUV文件, 可以用YUV工具打开浏览
if(yuvfile == NULL)
{
perror("cant open YUV file\n");
return -1;
}
int readFileLen = 1;
char readBuf[MAX_LEN];
unsigned char *parseBuf = malloc(20*MAX_LEN);//This cost an afternoon: with a stack buffer (unsigned char parseBuf[20*MAX_LEN]) the program kept crashing; heap memory is needed here for decoding to work
int parseBufLen = 0;
int frameCount = 0;
printf("begin...\n");
printf("readBuf address is %x\n", readBuf);
/SDL init
SDL_Surface* hello = NULL;
SDL_Surface* screen = NULL;
//Start SDL
// SDL_Init( SDL_INIT_EVERYTHING );
SDL_Init(SDL_INIT_VIDEO);
//Set up screen
screen = SDL_SetVideoMode( 720, 576, 32, SDL_SWSURFACE );
SDL_Overlay* overlay = SDL_CreateYUVOverlay(w, h, SDL_YUY2_OVERLAY, screen);
SDL_LockSurface(screen);
SDL_LockYUVOverlay(overlay);
unsigned char yuv422[768*576*2];//packed YUY2 frame buffer (only 720*576*2 bytes are actually used)
//
while(readFileLen > 0)//decoding loop
{
//printf("begin...\n");
readFileLen = fread(readBuf, 1, sizeof(readBuf), myH264);//read data from the file
if(readFileLen <= 0)
{
printf("read over\n");
break;
}
else
{
int handleLen = 0;
int handleFileLen = readFileLen;
while(handleFileLen > 0)
{
int nLength = av_parser_parse2(avParserContext, codec_, &parseBuf, &parseBufLen, readBuf + handleLen, handleFileLen, 0, 0, 0);//locate H.264 frame boundaries
handleFileLen -= nLength;
handleLen += nLength;
if(parseBufLen <= 0)//parseBufLen > 0 means a complete frame was found
{
continue;
}
packet.size = parseBufLen;//size of the parsed frame
packet.data = parseBuf;//data of the parsed frame
if(frameCount>100)break;
//printf("parseBuf address is %x\n", parseBuf);
while(packet.size > 0)
{//the actual decoding starts here
int decodeLen = avcodec_decode_video2(codec_, pFrame_, &frameFinished, &packet);
if(decodeLen < 0)//without this check a decode error would make packet.size grow and loop forever
break;
packet.size -= decodeLen;
packet.data += decodeLen;
if(frameFinished > 0)//a frame was decoded
{
int picSize = codec_->height * codec_->width;
//int newSize = picSize * 1.5;
//allocate memory
//unsigned char *buf = malloc(newSize);
int height = pFrame_->height;
int width = pFrame_->width;
//printf("OK, get data\n");
//printf("Frame height is %d\n", height);
//printf("Frame width is %d\n", width);
frameCount ++;
printf("Frame count is %d\n", frameCount);
pgm_save(pFrame_->data[0], pFrame_->linesize[0],//save Y
codec_->width, codec_->height, yuvfile);
pgm_save(pFrame_->data[1], pFrame_->linesize[1],//save U
codec_->width/2, codec_->height/2, yuvfile);
pgm_save(pFrame_->data[2], pFrame_->linesize[2],//save V
codec_->width/2, codec_->height/2, yuvfile);
///With the YUV data available, FFmpeg's conversion routines can turn it into RGB for display or other image processing
//SDL display
int i;
/* for(i=0;i<576;i++)
{//fwrite(buf + i * wrap, 1, xsize, f);
memcpy(overlay->pixels[0]+i*720, pFrame_->data[0]+i*pFrame_->linesize[0], 720);
}
for(i=0;i<288;i++)
{
memcpy(overlay->pixels[2]+i*360, pFrame_->data[1]+i*pFrame_->linesize[1], 360);
memcpy(overlay->pixels[1]+i*360, pFrame_->data[2]+i*pFrame_->linesize[2], 360); }*/
int k = 0, y, x; //repack planar YUV420 into packed YUY2 (Y0 U Y1 V)
for( y=0;y<576;y++)
{
for( x=0;x<720;x++)
{
yuv422[k++] = pFrame_->data[0][y*pFrame_->linesize[0]+x];//Y
yuv422[k++] = x%2==0?pFrame_->data[1][(y/2)*pFrame_->linesize[1]+x/2]:pFrame_->data[2][(y/2)*pFrame_->linesize[2]+x/2];//U on even x, V on odd x
}
}
memcpy(overlay->pixels[0],yuv422, codec_->width*codec_->height*2);//assumes overlay->pitches[0] == width*2
SDL_UnlockYUVOverlay(overlay);
SDL_UnlockSurface(screen);
rect.w = w;
rect.h = h;
rect.x = rect.y = 0;
SDL_DisplayYUVOverlay(overlay, &rect);
//sdl
SDL_Delay(40);
}
else
printf("failed to decodec\n");
}
}
}
}
//cleanup
avcodec_close(codec_);
av_free(codec_);
av_free_packet(&packet);
free(parseBuf);
av_frame_free(&pFrame_);
//SDL
SDL_FreeYUVOverlay(overlay);
SDL_FreeSurface(screen);
//Quit SDL
SDL_Quit();
fclose(yuvfile);
fclose(myH264);
}
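The memcpy into overlay->pixels[0] above assumes the overlay pitch equals width*2. If SDL pads the YUY2 overlay rows, a row-by-row copy that honors overlay->pitches[0] is safer; a sketch under that assumption (the helper name is illustrative):

#include <string.h>
#include "SDL/SDL.h"
/* Copy a tightly packed YUY2 buffer into an SDL YUY2 overlay row by row,
   so that any padding in overlay->pitches[0] is respected. */
static void blit_yuy2(SDL_Overlay *overlay, const unsigned char *yuy2_src,
                      int width, int height)
{
    int row;
    for (row = 0; row < height; row++) {
        memcpy(overlay->pixels[0] + row * overlay->pitches[0],
               yuy2_src + row * width * 2,
               width * 2); /* 2 bytes per pixel in YUY2 */
    }
}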
The same conversion implemented with sws_scale:
/*
Compile commands: arm-linux-gcc -o show2642 264showyuv2.c -I/usr/local/ffmpeg_arm/include/ -L/usr/local/ffmpeg_arm/lib/ -lswresample -lavformat -lavutil -lavcodec -lswscale -lx264 libSDL.a
gcc -o test test.c -I/usr/local/ffmpeg/include/ -L/usr/local/ffmpeg/lib/ -lswresample -lavformat -lavutil -lavcodec -lswscale -lx264 -lSDL
*/
#include "stdio.h"
#include "stdlib.h"
#include "libavformat/avformat.h"
#include "libavdevice/avdevice.h"
#include "libswresample/swresample.h"
#include "libavutil/opt.h"
#include "libavutil/channel_layout.h"
#include "libavutil/parseutils.h"
#include "libavutil/samplefmt.h"
#include "libavutil/fifo.h"
#include "libavutil/intreadwrite.h"
#include "libavutil/dict.h"
#include "libavutil/mathematics.h"
#include "libavutil/pixdesc.h"
#include "libavutil/avstring.h"
#include "libavutil/imgutils.h"
#include "libavutil/timestamp.h"
#include "libavutil/bprint.h"
#include "libavutil/time.h"
#include "libavutil/threadmessage.h"
#include "SDL/SDL.h"
//#include "libavfilter/avcodec.h"
#include "libavcodec/avcodec.h"
#if HAVE_SYS_RESOURCE_H
#include <sys/time.h>
#include <sys/types.h>
#include <sys/resource.h>
#elif HAVE_GETPROCESSTIMES
#include <windows.h>
#endif
#if HAVE_GETPROCESSMEMORYINFO
#include <windows.h>
#include <psapi.h>
#endif
#if HAVE_SYS_SELECT_H
#include <sys/select.h>
#endif
#if HAVE_TERMIOS_H
#include <fcntl.h>
#include <sys/ioctl.h>
#include <sys/time.h>
#include <termios.h>
#elif HAVE_KBHIT
#include <conio.h>
#endif
#if HAVE_PTHREADS
#include <pthread.h>
#endif
#include <time.h>
#include "libavutil/avassert.h"
#define MAX_LEN 1024 * 50
/* This helper follows the decoding example from the official FFmpeg documentation */
static void pgm_save(unsigned char *buf, int wrap, int xsize, int ysize,
FILE *f)
{
// FILE *f;
int i;
// f = fopen(filename,"w");
// fprintf(f, "P5\n%d %d\n%d\n", xsize, ysize, 255);
for (i = 0; i < ysize; i++)
;// fwrite(buf + i * wrap, 1, xsize, f);
// fclose(f);
}
int main()
{
//Initialize the H.264 decoder below
//avcodec_init();
int w = 720;
int h = 576,retu;
SDL_Rect rect;
av_register_all();
AVFrame *pFrame_ = NULL, *pFrameYUV;
struct SwsContext *img_convert_ctx = NULL;
pFrameYUV = av_frame_alloc();
/* find the video encoder */
AVCodec *videoCodec = avcodec_find_decoder(AV_CODEC_ID_H264);//get the H.264 decoder
if(!videoCodec)
{
printf("avcodec_find_decoder error\n");
return -1;
}
AVCodecParserContext *avParserContext = av_parser_init(AV_CODEC_ID_H264);//parser context, used later to locate frame boundaries
if(!avParserContext)
{
printf("av_parser_init error\n");
return -1;
}
AVCodecContext *codec_ = avcodec_alloc_context3(videoCodec);//decoder context
if(!codec_)
{
printf("avcodec_alloc_context3 error\n");
return -1;
}
//Initialize parameters; in practice these values should come from the actual stream/application
codec_->time_base.num = 1;
codec_->frame_number = 1; //one video frame per packet (frame_number is normally maintained by libavcodec itself)
codec_->codec_type = AVMEDIA_TYPE_VIDEO;
codec_->bit_rate = 0;
codec_->time_base.den = 25;//frame rate
codec_->width = 720;//video width
codec_->height = 576;//video height
if(avcodec_open2(codec_, videoCodec, NULL) >= 0)//open the decoder
{
pFrame_ = av_frame_alloc();// Allocate the video frame once after the decoder is opened; it could also be allocated and freed per frame, but allocating it once up front is simpler
if (!pFrame_) {
fprintf(stderr, "Could not allocate video frame\n");
exit(1);
}
}
else
{
printf("avcodec_open2 error\n");
return -1;
}
AVPacket packet = {0};
int dwBufsize = 10;
int frameFinished = dwBufsize;//arbitrary initial value; avcodec_decode_video2 overwrites it
av_init_packet(&packet);
packet.data = NULL;//will later point to a complete H.264 frame
packet.size = 0;//size of that frame
FILE *myH264 = fopen("1.264", "rb");//the H.264 file to decode
if(myH264 == NULL)
{
perror("cant open 264 file\n");
return -1;
}
FILE *yuvfile = fopen("my264.yuv", "wb");//成功解码后保存成的YUV文件, 可以用YUV工具打开浏览
if(yuvfile == NULL)
{
perror("cant open YUV file\n");
return -1;
}
int readFileLen = 1;
char readBuf[MAX_LEN];
unsigned char *parseBuf = malloc(20*MAX_LEN);//This cost an afternoon: with a stack buffer (unsigned char parseBuf[20*MAX_LEN]) the program kept crashing; heap memory is needed here for decoding to work
int parseBufLen = 0;
int frameCount = 0;
printf("begin...\n");
printf("readBuf address is %x\n", readBuf);
/SDL init
SDL_Surface* hello = NULL;
SDL_Surface* screen = NULL;
//Start SDL
// SDL_Init( SDL_INIT_EVERYTHING );
SDL_Init(SDL_INIT_VIDEO);
//Set up screen
screen = SDL_SetVideoMode( 720, 576, 32, SDL_SWSURFACE );
SDL_Overlay* overlay = SDL_CreateYUVOverlay(w, h, SDL_YUY2_OVERLAY, screen);
SDL_LockSurface(screen);
SDL_LockYUVOverlay(overlay);
//
//
//allocate a packed YUY2 buffer and wrap it in pFrameYUV so sws_scale can write into it
int numBytes = avpicture_get_size(AV_PIX_FMT_YUYV422, codec_->width,
codec_->height);
uint8_t* yuv422 = (uint8_t *)av_malloc(numBytes*sizeof(uint8_t));
avpicture_fill((AVPicture *)pFrameYUV, yuv422, AV_PIX_FMT_YUYV422,
codec_->width, codec_->height);
///
while(readFileLen > 0)//decoding loop
{
//printf("begin...\n");
readFileLen = fread(readBuf, 1, sizeof(readBuf), myH264);//read data from the file
if(readFileLen <= 0)
{
printf("read over\n");
break;
}
else
{
int handleLen = 0;
int handleFileLen = readFileLen;
while(handleFileLen > 0)
{
int nLength = av_parser_parse2(avParserContext, codec_, &parseBuf, &parseBufLen, readBuf + handleLen, handleFileLen, 0, 0, 0);//locate H.264 frame boundaries
handleFileLen -= nLength;
handleLen += nLength;
if(parseBufLen <= 0)//parseBufLen > 0 means a complete frame was found
{
continue;
}
packet.size = parseBufLen;//size of the parsed frame
packet.data = parseBuf;//data of the parsed frame
if(frameCount>100)break;
//printf("parseBuf address is %x\n", parseBuf);
while(packet.size > 0)
{//the actual decoding starts here
int decodeLen = avcodec_decode_video2(codec_, pFrame_, &frameFinished, &packet);
if(decodeLen < 0)
break;
packet.size -= decodeLen;
packet.data += decodeLen;
if(frameFinished > 0)//a frame was decoded
{
int picSize = codec_->height * codec_->width;
//int newSize = picSize * 1.5;
//allocate memory
//unsigned char *buf = malloc(newSize);
int height = pFrame_->height;
int width = pFrame_->width;
//printf("OK, get data\n");
//printf("Frame height is %d\n", height);
//printf("Frame width is %d\n", width);
frameCount ++;
printf("Frame count is %d\n", frameCount);
pgm_save(pFrame_->data[0], pFrame_->linesize[0],//save Y
codec_->width, codec_->height, yuvfile);
pgm_save(pFrame_->data[1], pFrame_->linesize[1],//save U
codec_->width/2, codec_->height/2, yuvfile);
pgm_save(pFrame_->data[2], pFrame_->linesize[2],//save V
codec_->width/2, codec_->height/2, yuvfile);
///With the YUV data available, FFmpeg's conversion routines can turn it into RGB for display or other image processing
//SDL display
int i;
/* for(i=0;i<576;i++)
{//fwrite(buf + i * wrap, 1, xsize, f);
memcpy(overlay->pixels[0]+i*720, pFrame_->data[0]+i*pFrame_->linesize[0], 720);
}
for(i=0;i<288;i++)
{
memcpy(overlay->pixels[2]+i*360, pFrame_->data[1]+i*pFrame_->linesize[1], 360);
memcpy(overlay->pixels[1]+i*360, pFrame_->data[2]+i*pFrame_->linesize[2], 360); }*/
if(img_convert_ctx == NULL)//create the scaler context only once instead of leaking one per frame
img_convert_ctx = sws_getContext(codec_->width, codec_->height, codec_->pix_fmt, codec_->width, codec_->height, AV_PIX_FMT_YUYV422, SWS_BILINEAR, NULL, NULL, NULL);
sws_scale(img_convert_ctx, (const uint8_t* const*) pFrame_->data, pFrame_->linesize, 0, codec_->height, pFrameYUV->data, pFrameYUV->linesize);
memcpy(overlay->pixels[0], yuv422, codec_->width*codec_->height*2);//assumes overlay->pitches[0] == width*2
SDL_UnlockYUVOverlay(overlay);
SDL_UnlockSurface(screen);
rect.w = w;
rect.h = h;
rect.x = rect.y = 0;
SDL_DisplayYUVOverlay(overlay, &rect);
//sdl
SDL_Delay(40);
}
else
printf("failed to decodec\n");
}
}
}
}
//cleanup
avcodec_close(codec_);
av_free(codec_);
av_free_packet(&packet);
free(parseBuf);
sws_freeContext(img_convert_ctx);
av_free(yuv422);
av_frame_free(&pFrameYUV);
av_frame_free(&pFrame_);
//SDL
SDL_FreeYUVOverlay(overlay);
SDL_FreeSurface(screen);
//Quit SDL
SDL_Quit();
fclose(yuvfile);
fclose(myH264);
}
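A follow-up note on the scaler: instead of guarding sws_getContext with a NULL check, libswscale also provides sws_getCachedContext, which reuses an existing context as long as the parameters have not changed. A sketch of that pattern wrapped in a hypothetical helper (parameter values match the listing above; free the returned context once with sws_freeContext at shutdown):

#include "libavcodec/avcodec.h"
#include "libswscale/swscale.h"
/* Hypothetical per-frame conversion helper: the cached context is only
   (re)created when the source/destination parameters change. */
static struct SwsContext *convert_to_yuy2(struct SwsContext *ctx,
                                          AVCodecContext *dec,
                                          AVFrame *src, AVFrame *dst)
{
    ctx = sws_getCachedContext(ctx,
                               dec->width, dec->height, dec->pix_fmt,
                               dec->width, dec->height, AV_PIX_FMT_YUYV422,
                               SWS_BILINEAR, NULL, NULL, NULL);
    if (ctx)
        sws_scale(ctx, (const uint8_t * const *)src->data, src->linesize,
                  0, dec->height, dst->data, dst->linesize);
    return ctx; /* pass back in on the next frame */
}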