/* Reference discussion: http://bbs.csdn.net/topics/350024712
 * (was a bare URL, which is not valid C and broke compilation) */
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libswscale/swscale.h>
#include <stdio.h>
#include <stdlib.h>
#include <stdbool.h>
#include <string.h>
#include <fcntl.h>
#include <unistd.h>
#include <errno.h>
#include <ctype.h>
#include <sys/mman.h>
#include <sys/ioctl.h>
#include <sys/kd.h>
#include <linux/fb.h>
/* Framebuffer device state shared between fb_init()/fb_uninit()/ShowFrame(). */
char *fb_dev_name = NULL;               /* device node path, e.g. "/dev/fb0" */
static int fb_dev_fd;                   /* open fd for the framebuffer device */
static int fb_tty_fd = -1;              /* NOTE(review): never used in this file */
static size_t fb_size;                  /* smem_len reported by the driver */
static uint8_t *frame_buffer;           /* mmap()ed framebuffer memory */
static uint8_t *center;                 /* draw origin inside frame_buffer */
static struct fb_fix_screeninfo fb_finfo;       /* fixed screen info (line_length, smem_len) */
static struct fb_var_screeninfo fb_orig_vinfo;  /* saved original variable info */
static struct fb_var_screeninfo fb_vinfo;       /* current variable info (xres/yres/bpp) */
static int fb_line_len;                 /* bytes per framebuffer scanline */
static char *fbp = 0;                   /* NOTE(review): never assigned; see fb_init/fb_uninit */
static int xres = 400;                  /* overwritten with real values in fb_init() */
static int yres = 240;
static int bits_per_pixel = 0;
static long int screenSize = 0;         /* total framebuffer size in bytes */
static void fb_init(void);
static void fb_uninit(void);
/* Blit one decoded frame into the framebuffer. */
static void ShowFrame(AVFrame *pFrame, int width, int height);
/* Strided rectangular copy (row-by-row memcpy). */
static inline void *memcpy_pic2(void *dst, const void *src,
int bytesPerLine, int height,
int dstStride,int srcStride,int limit2width);
int main (int argc, const char * argv[])
{
AVFormatContext *pFormatCtx;
int i, videoStream;
AVCodecContext *pCodecCtx;
AVCodec *pCodec;
AVFrame *pFrame;
AVFrame *pFrameRGB;
AVPacket packet;
int frameFinished;
int numBytes;
uint8_t *buffer;
fb_init();
// Register all formats and codecs
av_register_all();
// Open video file
if(av_open_input_file(&pFormatCtx, argv[1], NULL, 0, NULL)!=0)
return -1; // Couldn't open file
// Retrieve stream information
if(av_find_stream_info(pFormatCtx)<0)
return -1; // Couldn't find stream information
// Dump information about file onto standard error
dump_format(pFormatCtx, 0, argv[1], false);
// Find the first video stream
videoStream=-1;
for(i=0; i<pFormatCtx->nb_streams; i++)
if(pFormatCtx->streams[i]->codec->codec_type==CODEC_TYPE_VIDEO)
{
videoStream=i;
break;
}
if(videoStream==-1)
return -1; // Didn't find a video stream
// Get a pointer to the codec context for the video stream
pCodecCtx=pFormatCtx->streams[videoStream]->codec;
// Find the decoder for the video stream
pCodec=avcodec_find_decoder(pCodecCtx->codec_id);
if(pCodec==NULL)
return -1; // Codec not found
// Open codec
if(avcodec_open(pCodecCtx, pCodec)<0)
return -1; // Could not open codec
// Hack to correct wrong frame rates that seem to be generated by some codecs
if(pCodecCtx->time_base.num>1000 && pCodecCtx->time_base.den==1)
pCodecCtx->time_base.den=1000;
// Allocate video frame
pFrame=avcodec_alloc_frame();
// Allocate an AVFrame structure
pFrameRGB=avcodec_alloc_frame();
if(pFrameRGB==NULL)
return -1;
// Determine required buffer size and allocate buffer
numBytes=avpicture_get_size(PIX_FMT_RGB565, pCodecCtx->width,
pCodecCtx->height);
buffer=malloc(numBytes);
// Assign appropriate parts of buffer to image planes in pFrameRGB
avpicture_fill((AVPicture *)pFrameRGB, buffer, PIX_FMT_RGB565,
pCodecCtx->width, pCodecCtx->height);
// Read frames and save first five frames to disk
i=0;
while(av_read_frame(pFormatCtx, &packet)>=0)
{
// Is this a packet from the video stream?
if(packet.stream_index==videoStream)
{
// Decode video frame
// avcodec_decode_video(pCodecCtx, pFrame, &frameFinished,
// packet.data, packet.size);
avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished,
&packet);
// Did we get a video frame?
if(frameFinished)
{
static struct SwsContext *img_convert_ctx;
#if 0
// Older removed code
// Convert the image from its native format to RGB swscale
img_convert((AVPicture *)pFrameRGB, PIX_FMT_RGB24,
(AVPicture*)pFrame, pCodecCtx->pix_fmt, pCodecCtx->width,
pCodecCtx->height);
// function template, for reference
int sws_scale(struct SwsContext *context, uint8_t* src[], int srcStride[], int srcSliceY,
int srcSliceH, uint8_t* dst[], int dstStride[]);
#endif
// Convert the image into YUV format that SDL uses
if(img_convert_ctx == NULL) {
int w = pCodecCtx->width;
int h = pCodecCtx->height;
img_convert_ctx = sws_getContext(w, h,
pCodecCtx->pix_fmt,
w, h, PIX_FMT_RGB565,SWS_FAST_BILINEAR,// SWS_BICUBIC,
NULL, NULL, NULL);//我想在这里通过修改后两个w,h变量实现缩放。。但是效果不行。。会变成双屏显示 :(
if(img_convert_ctx == NULL) {
fprintf(stderr, "Cannot initialize the conversion context!\n");
exit(1);
}
}
int ret = sws_scale(img_convert_ctx, pFrame->data, pFrame->linesize, 0,
pCodecCtx->height, pFrameRGB->data, pFrameRGB->linesize);
// int ret = sws_scale(img_convert_ctx, pFrame->data, pFrame->linesize, 0,
// 240, pFrameRGB->data, pFrameRGB->linesize);
#if 0 // this use to be true, as of 1/2009, but apparently it is no longer true in 3/2009
if(ret) {
fprintf(stderr, "SWS_Scale failed [%d]!\n", ret);
exit(-1);
}
#endif
// Save the frame to disk
// if(i++<=5)
// printf("the pCodecCtx width is :%d height is :%d\n",pCodecCtx->width, pCodecCtx->height);
// printf("the linesize is %d the fb_line_len is %d\n",pFrame->linesize[0],fb_line_len);
ShowFrame(pFrameRGB, pCodecCtx->width, pCodecCtx->height);
// ShowFrame(pFrameRGB, 400, 240);
//int y;
// for(y=0; y< pCodecCtx->height; y++)
// write(fb_dev_fd,pFrameRGB->data[0] + y*pFrameRGB->linesize[0],pCodecCtx->width*2);
#if 1
//if(write(fb_dev_fd, (void *)buffer, numBytes) == -1)
// {
// printf ("fb write date error.\n");
// perror ("fb write");
// usleep (40000);
// }
// memcpy(center, (void *)buffer,numBytes);
#endif
}
}
// Free the packet that was allocated by av_read_frame
av_free_packet(&packet);
}
// Free the RGB image
free(buffer);
av_free(pFrameRGB);
// Free the YUV frame
av_free(pFrame);
// Close the codec
avcodec_close(pCodecCtx);
// Close the video file
av_close_input_file(pFormatCtx);
fb_uninit();
// return 0;
}
/*
 * Open the framebuffer device (FRAMEBUFFER env var or /dev/fb0), query its
 * geometry, and mmap it into frame_buffer.  Exits the process on any failure.
 *
 * Fills the file-scope globals: fb_dev_fd, fb_vinfo, fb_finfo, xres, yres,
 * bits_per_pixel, screenSize, frame_buffer, fb_line_len, fb_size, center.
 */
static void fb_init(void)
{
    /* The FRAMEBUFFER environment variable overrides the default node.
     * NOTE(review): the strdup'ed fallback is never freed — acceptable for
     * a process-lifetime global. */
    if (!(fb_dev_name = getenv("FRAMEBUFFER")))
        fb_dev_name = strdup("/dev/fb0");

    if (-1 == (fb_dev_fd = open(fb_dev_name, O_RDWR))) {
        printf("Can't open %s:%s\n", fb_dev_name, strerror(errno));
        exit(1);
    }
    if (ioctl(fb_dev_fd, FBIOGET_VSCREENINFO, &fb_vinfo)) {
        printf("Can't get VSCREENINFO:%s\n", strerror(errno));
        exit(2);
    }
    if (ioctl(fb_dev_fd, FBIOGET_FSCREENINFO, &fb_finfo)) {
        printf("Error : reading fixed information.\n");
        exit(3);
    }
    fb_orig_vinfo = fb_vinfo;
    printf("%dx%d, %dbpp\n", fb_vinfo.xres, fb_vinfo.yres,
           fb_vinfo.bits_per_pixel);
    xres = fb_vinfo.xres;
    yres = fb_vinfo.yres;
    bits_per_pixel = fb_vinfo.bits_per_pixel;

    /* Total screen size in bytes. */
    screenSize = fb_vinfo.xres * fb_vinfo.yres * fb_vinfo.bits_per_pixel / 8;
    /* Fixed format specifier: screenSize is long int, %d was UB. */
    printf("screensize=%ld\n", screenSize);

    /* Map the framebuffer into our address space. */
    frame_buffer = mmap(0, screenSize, PROT_READ | PROT_WRITE, MAP_SHARED,
                        fb_dev_fd, 0);
    /* Fixed: the original tested `(int)fbp == -1`, but fbp is never
     * assigned, so a failed mmap was never detected.  mmap failure is
     * signalled by MAP_FAILED on the pointer it returned. */
    if (frame_buffer == MAP_FAILED) {
        printf("Error: failed to map framebuffer device to memory.\n");
        exit(4);
    }
    fb_line_len = fb_finfo.line_length;
    fb_size = fb_finfo.smem_len;
    center = frame_buffer; /* use for double buffer in future */
}
static void fb_uninit()
{
munmap(fbp, screenSize);
close(fb_dev_fd);
return 0;
}
/*
 * Copy a rectangular pixel region row by row.
 *
 * dst/src        destination/source base pointers (must not overlap)
 * bytesPerLine   number of bytes copied from each source row
 * height         number of rows
 * dstStride      bytes between consecutive destination rows
 * srcStride      bytes between consecutive source rows
 * limit2width    unused; kept for interface compatibility
 *
 * Returns dst.
 *
 * NOTE(review): removed a dead `#if 0` fast path that contained the typo
 * `srcStride =- srcStride` (assignment of the negation to nothing useful,
 * instead of `srcStride = -srcStride`); the always-compiled row loop below
 * is the behavior this file has always had.
 */
static inline void *memcpy_pic2(void *dst, const void *src,
                                int bytesPerLine, int height,
                                int dstStride, int srcStride, int limit2width)
{
    uint8_t *d = dst;
    const uint8_t *s = src;
    int row;

    (void)limit2width; /* intentionally unused */

    for (row = 0; row < height; row++) {
        memcpy(d, s, bytesPerLine);
        s += srcStride;
        d += dstStride;
    }
    return dst;
}
/*
 * Blit one converted frame into the framebuffer at a fixed position
 * (row 150, 200 pixels in from the left).  Assumes the frame in
 * pFrame->data[0] is 2 bytes per pixel (RGB565) — hence width * 2
 * bytes per line; TODO confirm against the conversion in main().
 */
static void ShowFrame(AVFrame *pFrame, int width, int height)
{
    const int bytes_per_pixel = fb_vinfo.bits_per_pixel / 8;
    uint8_t *dest = center + 150 * fb_line_len + 200 * bytes_per_pixel;

    memcpy_pic2(dest, pFrame->data[0], width * 2, height,
                fb_line_len, pFrame->linesize[0], 0);
}
/*
 * NOTE(review): the following GDB backtrace was pasted directly into the
 * source file, which broke compilation.  Preserved here as a comment for
 * reference (it traces a crash inside libswscale's swScale from sws_scale).
 *
 * #0 swScale (c=0x814fa60, src=0xbfffef64, srcStride=0xbfffef44, srcSliceY=0,
 * srcSliceH=288, dst=0xbfffef54, dstStride=0xbfffef34)
 * at libswscale/swscale.c:334
 * #1 0xb6fc22d4 in sws_scale (c=0x814fa60, srcSlice=0xbffff044,
 * srcStride=0xbffff024, srcSliceY=0, srcSliceH=288, dst=0xbffff034,
 * dstStride=0xbffff014) at libswscale/swscale.c:920
 * #2 0xb7fa761b in scale_slice (link=0x8099d00, out_buf=0x8083e40,
 * cur_pic=0x8162060, sws=0x814fa60, y=0, h=288, mul=1, field=0)
 * at libavfilter/vf_scale.c:352
 * #3 0xb7fa7b77 in filter_frame (link=0x8099d00, in=0x8162060)
 * at libavfilter/vf_scale.c:409
 * #4 0xb7fb189d in default_end_frame (inlink=0x8099d00)
 * at libavfilter/video.c:332
 * #5 0xb7fb194c in ff_end_frame (link=0x8099d00) at libavfilter/video.c:354
 * #6 0xb7f78199 in ff_filter_frame (link=0x8099d00, frame=0x8162060)
 * at libavfilter/avfilter.c:649
 * #7 0xb7fb1901 in default_end_frame (inlink=0x8099e00)
 * at libavfilter/video.c:338
 * #8 0xb7fb194c in ff_end_frame (link=0x8099e00) at libavfilter/video.c:354
 * #9 0xb7f78199 in ff_filter_frame (link=0x8099e00, frame=0x8162060)
 * at libavfilter/avfilter.c:649
 * #10 0xb7f7ce7b in request_frame (link=0x8099e00) at libavfilter/buffersrc.c:374
 * #11 0xb7f7c57b in av_buffersrc_add_ref (s=0x8084d00, buf=0x8162060, flags=7)
 * ---Type <return> to continue, or q <return> to quit---
 * at libavfilter/buffersrc.c:150
 * #12 0x08060761 in decode_video (ist=0x8092ea0, pkt=0xbffff3f0,
 * got_output=0xbffff434) at ffmpeg.c:1656
 * #13 0x08060fec in output_packet (ist=0x8092ea0, pkt=0xbffff5b0)
 * at ffmpeg.c:1776
 * #14 0x08066589 in process_input (file_index=0) at ffmpeg.c:2870
 * #15 0x080668e8 in transcode_step () at ffmpeg.c:2966
 * #16 0x08066a0d in transcode () at ffmpeg.c:3018
 * #17 0x08066fcf in main (argc=8, argv=0xbffffbe4) at ffmpeg.c:3202
 */