Cross-Compiling the FFmpeg Libraries for MIPS on Linux, and Using Them to Grab a Frame from a Video

Reference: http://www.ffmpeg.com.cn

I. Cross-compiling the FFmpeg libraries for MIPS on Linux

1. Download the FFmpeg source code

(http://sourceforge.net/project/showfiles.php?group_id=205275&package_id=248632):

[root@localhost ffmpeg]# ls

ffmpeg-laster.tar.gz

2. Unpack it:

[root@localhost ffmpeg]# tar zxvf ffmpeg-laster.tar.gz

3. Configure, build, and install:

[root@localhost ffmpeg]# mkdir ffmpeg-release

[root@localhost ffmpeg]# cd ffmpeg

[root@localhost ffmpeg]# ./configure --enable-cross-compile --target-os=linux --cross-prefix=mipsel-linux- --cc=mipsel-linux-gcc --enable-shared --arch=mips --prefix=/opt/brcm/ffmpeg/ffmpeg-release

[root@localhost ffmpeg]# make

[root@localhost ffmpeg]# make install

[root@localhost ffmpeg]# cd ../ffmpeg-release

[root@localhost ffmpeg-release]# tar -zcvf ffjpeg_lib.tar.gz ./lib

Then, on the target board (if the dynamic linker cannot find the shared libraries afterwards, make sure /usr/local/lib is on its search path, e.g. via LD_LIBRARY_PATH):

# cp ffjpeg_lib.tar.gz /usr/local/lib/

# cd /usr/local/lib/

# tar -zxvf ffjpeg_lib.tar.gz -C ../

# rm ffjpeg_lib.tar.gz

# cp ffmpeg-release/bin/* /bin/

# ffmpeg

FFmpeg version SVN-r21694, Copyright (c) 2000-2010 Fabrice Bellard, et al.

built on Nov 17 2012 02:25:17 with gcc 4.5.3

configuration: --enable-cross-compile --target-os=linux --cross-prefix=mipsel-linux- --cc=mipsel-linux-gcc --enable-shared --arch=mips --prefix=/opt/brcm/ffmpeg/ffmpeg-release

libavutil     50. 9. 0 / 50. 9. 0

libavcodec    52.52. 0 / 52.52. 0

libavformat   52.51. 0 / 52.51. 0

libavdevice   52. 2. 0 / 52. 2. 0

libswscale     0.10. 0 /  0.10. 0

Hyper fast Audio and Video encoder

usage: ffmpeg [options] [[infile options] -i infile]... {[outfile options] outfile}...

Use -h to get full help or, even better, run 'man ffmpeg'

#

At this point, ffmpeg has been successfully ported to our development board.

II. Using the FFmpeg libraries on Linux

1. Command-line usage:

ffmpeg -i test.mp4 -y -f image2 -ss 30 -s 95x95 -vframes 1 a.jpg

Take any MP4 file, say test.mp4: the command above grabs the frame at the 30-second mark and saves it as a 95x95-pixel JPEG.

ffmpeg -i 000094.mp4 -vframes 30 -pix_fmt rgb24  -y -f gif a.gif

This converts the first 30 frames into a GIF.

2. Grabbing a video frame from within a program:

First, add library support: in the Makefile of our executable, add:

CFLAGS += -I/opt/brcm/ffmpeg/ffmpeg-release/include/

LFLAGS += -L/opt/brcm/ffmpeg/ffmpeg-release/lib/ -lavutil -lavformat -lavcodec -lswscale

(Note: all four trailing -l options are required; omitting any of them makes the build fail.)

As for how to call the FFmpeg libraries from code, a working demo is attached below (test_snap.c and its Makefile).

III. Problems encountered

1. No keyframe found ("first frame is no keyframe"): the snapshot comes out as a transparent/blank image, which does not meet the requirement.

Workaround: check the return value of the snapshot function; if it reports no keyframe, push the start time back by one second and try again, repeating until it succeeds (see the retry snippet after test_snap.c below).

test_snap.c:

/* Reconstructed includes: standard C headers plus the FFmpeg headers this demo uses */
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <libavformat/avformat.h>
#include <libavcodec/avcodec.h>
#include <libswscale/swscale.h>

/* Stand-ins for the project-specific decorations used later in this file */
#define IN
typedef unsigned long ulong;
#define JPRINTF(args) printf args

#undef sprintf

#undef uint8_t

#undef uint16_t

#undef uint32_t

#define uint8_t unsigned char

#define uint16_t unsigned short

#define uint32_t unsigned long
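/* On 32-bit mipsel-linux, unsigned long is 4 bytes, so these mappings give
   exactly the field widths the BMP structures below require. */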

#pragma pack(2)

typedef struct BMPHeader

{

uint16_t identifier;

uint32_t file_size;

uint32_t reserved;

uint32_t data_offset;

} BMPHeader;

typedef struct BMPMapInfo

{

uint32_t header_size;

uint32_t width;

uint32_t height;

uint16_t n_planes;

uint16_t bits_per_pixel;

uint32_t compression;

uint32_t data_size;

uint32_t hresolution;

uint32_t vresolution;

uint32_t n_colors_used;

uint32_t n_important_colors;

}BMPMapInfo;
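/* With #pragma pack(2), sizeof(BMPHeader) is 14 and sizeof(BMPMapInfo) is 40,
   matching the on-disk BMP file header and info header layout. Without the
   pragma, BMPHeader would be padded to 16 bytes and image viewers would
   reject the output file. */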

int CreateBmpImg(AVFrame *pFrame, int width, int height, int iFrame)

{

BMPHeader bmpheader;

BMPMapInfo bmpinfo;

FILE *fp;

int y;

char filename[32];

// Open file

memset(filename, 0x0, sizeof(filename));

sprintf(filename, "%d.bmp", iFrame+1);

JPRINTF(("Create BMP File : [%s] \n",filename));

fp = fopen(filename, "wb");

if(!fp)return -1;

bmpheader.identifier = ('M'<<8)|'B';

bmpheader.reserved = 0;

bmpheader.data_offset = sizeof(BMPHeader) + sizeof(BMPMapInfo);

bmpheader.file_size = bmpheader.data_offset + width*height*24/8;

bmpinfo.header_size = sizeof(BMPMapInfo);

bmpinfo.width = width;

bmpinfo.height = height;

bmpinfo.n_planes = 1;

bmpinfo.bits_per_pixel = 24;

bmpinfo.compression = 0;

bmpinfo.data_size = height*((width*3 + 3) & ~3);

bmpinfo.hresolution = 0;

bmpinfo.vresolution = 0;

bmpinfo.n_colors_used = 0;

bmpinfo.n_important_colors = 0;

fwrite(&bmpheader,sizeof(BMPHeader),1,fp);

fwrite(&bmpinfo,sizeof(BMPMapInfo),1,fp);

for(y=height-1; y>=0; y--)

fwrite(pFrame->data[0]+y*pFrame->linesize[0], 1, width*3, fp);
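/* Rows are written bottom-up, width*3 bytes each. BMP requires every row to
   be padded to a multiple of 4 bytes, which holds here only because the
   snapshots below use a 96-pixel width (96*3 = 288). */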

fclose(fp);

return 0;

}

// Decode the given video stream and store the decoded picture in pFrame
// Returns 0 on success, non-zero on failure

int DecodeVideoFrame(AVFormatContext *pFormatCtx, AVCodecContext *pCodecCtx,

int videoStream, int64_t endtime, AVFrame *pFrame, int *keyframe_err)

{

static AVPacket packet;

static uint8_t *rawData;

static int bytesRemaining = 0;

int bytesDecoded;

int frameFinished;

static int firstTimeFlag = 1;

int snap_cnt=0;

*keyframe_err=0;

if (firstTimeFlag)

{

firstTimeFlag = 0;

packet.data = NULL; // first call: initialise packet.data to NULL

}

while (1)

{

do

{

if (packet.data != NULL) av_free_packet(&packet); // free the previous packet

if (av_read_frame(pFormatCtx, &packet) < 0)

{

// av_read_frame() copies the next packet from the file; a negative return means end of stream

printf("-->av_read_frame end\n");

goto exit_decode;

}

} while (packet.stream_index != videoStream); // keep reading until the packet belongs to the requested video stream

// If the current packet has reached endtime, stop and report failure
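// (endtime has already been rescaled into this stream's time_base by the caller,
//  so it can be compared to packet.pts directly)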

if (packet.pts >= endtime) return -1;

bytesRemaining = packet.size;

rawData = packet.data;

while (bytesRemaining > 0)

{

++snap_cnt;

bytesDecoded = avcodec_decode_video(pCodecCtx, pFrame, &frameFinished, rawData, bytesRemaining);

if (bytesDecoded < 0) return -1;

bytesRemaining -= bytesDecoded;

rawData += bytesDecoded;

// if (frameFinished) return 0;

if (frameFinished)

{

if(snap_cnt<=1)

*keyframe_err=1;

return 0;

}

}

}

exit_decode:

bytesDecoded = avcodec_decode_video(pCodecCtx, pFrame, &frameFinished, rawData, bytesRemaining);

if(packet.data != NULL) av_free_packet(&packet);

if (frameFinished != 0) return 0;

return -1;

}

void usage(const char *function)

{

printf("Usage: %s [File Name] [Start Time] [End Time]\n", function);

printf("Ex: ./railgun panda.mpg 003005 003010\n");

printf("Time Format: HrsMinsSecs. Ex 003005 means 00 hours 30 minutes 05 senconds\n");

printf("\n");

}

void ParseTime(IN ulong snap_start_time, int64_t *pStartSec,

IN ulong snap_end_time, int64_t *pEndSec)

{

int64_t starttime = 0, endtime = 0;

if (pStartSec)

{

starttime = snap_start_time;

*pStartSec = (starttime/10000)*3600 +
             ((starttime%10000)/100)*60 +
             (starttime%100);

}

if (pEndSec)

{

endtime = snap_end_time;

*pEndSec = (endtime/10000)*3600 +
           ((endtime%10000)/100)*60 +
           (endtime%100);

}

}
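/* ParseTime example: ParseTime(3005, &s, 3010, &e) yields
   s = 0*3600 + 30*60 + 5 = 1805 and e = 1810, i.e. the HHMMSS-encoded
   arguments 003005 and 003010 become seconds from the start of the file.
   (Writing such a literal with leading zeros in C source would make it
   octal; see the note in the retry snippet below.) */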

int __navi_snap_mtv_file(IN char *snap_file_name, IN ulong snap_start_time,

IN ulong snap_end_time,IN int snap_w, IN int snap_h)

{

// JPRINTF(("Open snap_file_name == [%s]\n", snap_file_name));

const char *filename;

AVFormatContext *ic = NULL;

AVCodecContext *dec = NULL;

AVCodec *codec = NULL;

AVFrame *frame = NULL;

AVFrame *frameRGB = NULL;

uint8_t *buffer = NULL;

int numBytes;

int i, videoStream;

int64_t startTime = 0;

int64_t endTime = 0;

int keyframe_err=0;

static struct SwsContext *img_convert_ctx = NULL;

// Register all formats and codecs

av_register_all();

filename = snap_file_name;

// parse begin time and end time

ParseTime(snap_start_time, &startTime, NULL, NULL);

ParseTime(snap_start_time, &startTime, snap_end_time, &endTime);

startTime *= AV_TIME_BASE;

endTime *= AV_TIME_BASE;

// Open video file

if(av_open_input_file(&ic, filename, NULL, 0, NULL)!=0)

{

printf("Cannt open input file\n");

goto exit_err;

}

// Retrieve stream information

if(av_find_stream_info(ic)<0)

{

printf("Cannt find stream info\n");

goto exit_err;

}

// Dump information about file onto standard error

dump_format(ic, 0, filename, 0);

// Find the first video stream

videoStream=-1;

for(i=0; i<ic->nb_streams; i++)

if(ic->streams[i]->codec->codec_type==CODEC_TYPE_VIDEO)

{

videoStream=i;

break;

}

if(videoStream==-1)

{

printf("No video stream\n");

goto exit_err;

}

// Get a pointer to the codec context for the video stream

dec=ic->streams[videoStream]->codec;

// Find the decoder for the video stream

codec=avcodec_find_decoder(dec->codec_id);

if(codec==NULL)

{

printf("Found no codec\n");

goto exit_err;

}

// Open codec

if(avcodec_open(dec, codec)<0)

{

printf("Cannt open avcodec\n");

goto exit_err;

}

// Allocate video frame

frame=avcodec_alloc_frame();

// Allocate an AVFrame structure

frameRGB=avcodec_alloc_frame();

if(frameRGB==NULL)

{

av_free(frame);

printf("Cannt alloc frame buffer for RGB\n");

goto exit_err;

}

// Determine required buffer size and allocate buffer

numBytes=avpicture_get_size(PIX_FMT_RGB24, dec->width, dec->height);

buffer=(uint8_t *)av_malloc(numBytes);

if (!buffer)

{

av_free(frame);

av_free(frameRGB);

printf("Cannt alloc picture buffer\n");

goto exit_err;

}

// Assign appropriate parts of buffer to image planes in pFrameRGB

avpicture_fill((AVPicture *)frameRGB, buffer, PIX_FMT_RGB24, dec->width, dec->height);
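/* avpicture_fill() points frameRGB->data[0] at buffer with a stride of
   dec->width*3 bytes; the buffer covers the full decoded resolution, which
   is more than enough room for the snap_w x snap_h output written by
   sws_scale() below. */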

img_convert_ctx = sws_getContext(dec->width, dec->height, dec->pix_fmt, snap_w, snap_h, PIX_FMT_BGR24, SWS_BICUBIC, NULL, NULL, NULL);

if (img_convert_ctx == NULL) {

printf("Cannot initialize the conversion context\n");

goto exit_err;

}
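/* The scaler converts from the decoder's native size and pixel format to
   snap_w x snap_h BGR24, the byte order CreateBmpImg() expects when it
   writes the 24-bit BMP pixel rows. */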

// Seek frame

startTime = av_rescale_q(startTime, AV_TIME_BASE_Q, ic->streams[videoStream]->time_base);

endTime = av_rescale_q(endTime, AV_TIME_BASE_Q, ic->streams[videoStream]->time_base);

avformat_seek_file(ic, videoStream, INT64_MIN, startTime, startTime, 0);
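/* With these arguments avformat_seek_file() lands on a seek point (normally
   a keyframe) at or before startTime, so decoding resumes slightly earlier
   than the exact requested timestamp. */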

// Read frames and save the first successfully converted one to disk

i=0;

while(!DecodeVideoFrame(ic, dec, videoStream, endTime, frame, &keyframe_err))

{

// Save the frame to disk

i++;

sws_scale(img_convert_ctx, frame->data, frame->linesize,
          0, dec->height, frameRGB->data, frameRGB->linesize);

if(0 == CreateBmpImg(frameRGB, snap_w, snap_h, i))

{

break; // we only want one BMP; remove this line to keep saving more frames

}

}

exit_err:

// Free the RGB image

if (buffer)

av_free(buffer);

if (frameRGB)

av_free(frameRGB);

// Free the YUV frame

if (frame)

av_free(frame);

// Close the codec

if (dec)

avcodec_close(dec);

// Close the video file

if (ic)

av_close_input_file(ic);

if (img_convert_ctx)

sws_freeContext(img_convert_ctx);

if(keyframe_err==1)// if first frame is no keyframe.

return -1;

else

return 0;

}

Code to work around the keyframe problem:

if (-1 == __navi_snap_mtv_file(pXPlayTempFileName, 50, 130, 96, 96))
{ // first frame was not a keyframe
    /* note: HHMMSS literals written with leading zeros (000050) would be octal in C,
       so 00:00:50 and 00:01:30 are given as plain decimal 50 and 130 here */
    int i, ret = -1;
    for (i = 1; i <= 30 && ret != 0; ++i)
    {
        // push the start time back one second and retry
        ret = __navi_snap_mtv_file(pXPlayTempFileName, 50 + i, 130, 96, 96);
    }
}

Makefile:

CC = mipsel-linux-gcc

TARGET = test_snap

CFLAGS += -I/opt/ffmpeg/ffmpeg-release/include/

LFLAGS += -L/opt/ffmpeg/ffmpeg-release/lib/ -lavutil -lavformat -lavcodec -lswscale

$(TARGET):
	$(CC) -o $(TARGET) test_snap.c $(LFLAGS) $(CFLAGS)

A few sites I found useful myself:

http://ray.imiddle.net/2008/10/ffmpeg-install-and-usage/

http://blog.csdn.net/menuconfig/article/details/2600890

http://bbs.chinaunix.net/thread-1932536-1-1.html

OK, that's all for today; off to see my girlfriend, the boss is really pressing me... O(∩_∩)O haha~
