Use ffmpeg to extract 30 seconds of raw RGB data from the video 1.mp4 with the following command:
ffmpeg -i 1.mp4 -t 30 -pix_fmt rgb24 -s 960x640 data24.rgb
-t 30 limits the duration to 30 seconds; -pix_fmt rgb24 and -s 960x640 produce packed 24-bit RGB frames at 960x640. The .rgb extension maps to the rawvideo muxer, so the output file contains nothing but raw pixel data, one frame being 960 * 640 * 3 bytes.
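To sanity-check the dump before encoding it, the raw file can be played back directly; since raw video has no header, the pixel format and frame size have to be given on the command line:
ffplay -f rawvideo -pixel_format rgb24 -video_size 960x640 -framerate 25 data24.rgb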
Below is the rgb -> yuv -> h264 -> mp4 pipeline: libswscale converts the packed RGB24 frames to planar YUV420P, libavcodec encodes the YUV frames to H.264, and libavformat muxes the H.264 packets into an MP4 container.
/*
ffmpeg video encoding: raw RGB to MP4 (RGB -> YUV420P -> H.264 -> MP4)
*/
#include <iostream>
extern "C"
{
#include <libavformat/avformat.h>
#include <libswscale/swscale.h>
}
using namespace std;
#pragma comment(lib,"avformat.lib")
#pragma comment(lib,"avcodec.lib")
#pragma comment(lib,"avutil.lib")
#pragma comment(lib,"swscale.lib")
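//The #pragma comment lines are MSVC auto-linking directives; with gcc/clang link the FFmpeg
//libraries explicitly instead, e.g. -lavformat -lavcodec -lavutil -lswscale.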
#define CHANNEL 3 //packed RGB24: 3 bytes per pixel
int main()
{
char infile[] = "D:/VideoTest/data24.rgb";
char outfile[] = "D:/VideoTest/rgb.mp4";
av_register_all();      //deprecated since FFmpeg 4.0 (removed in 5.0); only needed on older versions
avcodec_register_all();
FILE *fp = fopen(infile, "rb");
if (!fp)
{
cout << infile << " open failed!" << endl;
system("pause");
return -1;
}
int width = 960;
int height = 640;
int fps = 25;
//1 Find the H.264 encoder
AVCodec *codec = avcodec_find_encoder(AV_CODEC_ID_H264);
if (!codec)
{
cout << " avcodec_find_encoder AV_CODEC_ID_H264 failed!" << endl;
system("pause");
return -1;
}
//Allocate the encoder context
AVCodecContext *c = avcodec_alloc_context3(codec);
if (!c)
{
cout << " avcodec_alloc_context3 failed!" << endl;
system("pause");
return -1;
}
//Target bitrate in bits per second
c->bit_rate = 4000000;
c->width = width;
c->height = height;
//Encoder time base: the unit of the pts values handed to the encoder (1/25 s per tick)
c->time_base = { 1, fps };
c->framerate = { fps, 1 };
//GOP size: one keyframe every 50 frames
c->gop_size = 50;
//No B-frames, so frames are not reordered (pts == dts)
c->max_b_frames = 0;
c->pix_fmt = AV_PIX_FMT_YUV420P;
c->codec_id = AV_CODEC_ID_H264;
c->thread_count = 4;
//Put SPS/PPS into extradata (global header) instead of repeating them in the bitstream; MP4 expects this.
//More generally this should be gated on (oc->oformat->flags & AVFMT_GLOBALHEADER).
c->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
//Open the encoder
int ret = avcodec_open2(c, codec, NULL);
if (ret < 0)
{
cout << " avcodec_open2 failed!" << endl;
system("pause");
return -1;
}
cout << "avcodec_open2 success!" << endl;
//2 Create the output (muxer) context for the MP4 file
AVFormatContext *oc = NULL;
avformat_alloc_output_context2(&oc, NULL, NULL, outfile); //the container format is guessed from the .mp4 extension
if (!oc)
{
cout << " avformat_alloc_output_context2 failed!" << endl;
system("pause");
return -1;
}
//3 Create the video stream that will carry the encoded data
AVStream *st = avformat_new_stream(oc, NULL);
st->id = 0;
st->codecpar->codec_tag = 0; //let the muxer pick the codec tag
//Copy the encoder parameters (codec id, dimensions, extradata, ...) into the stream
avcodec_parameters_from_context(st->codecpar, c);
cout << "Dumping the output file info" << endl;
av_dump_format(oc, 0, outfile, 1);
//4 RGB -> YUV conversion context. The raw dump is packed RGB24 (3 bytes per pixel), so the
//source format must be AV_PIX_FMT_RGB24; a 4-byte format such as AV_PIX_FMT_BGRA does not
//match the data and produces wrong colors.
SwsContext *ctx = NULL;
ctx = sws_getCachedContext(ctx,
width, height, AV_PIX_FMT_RGB24,
width, height, AV_PIX_FMT_YUV420P,
SWS_BICUBIC,
NULL, NULL, NULL
);
//Input buffer: one raw RGB24 frame (3 bytes per pixel)
unsigned char *rgb = new unsigned char[width * height * CHANNEL];
//Destination frame for the converted YUV420P data
AVFrame *yuv = av_frame_alloc();
yuv->format = AV_PIX_FMT_YUV420P;
yuv->width = width;
yuv->height = height;
ret = av_frame_get_buffer(yuv, 0); //0 lets FFmpeg pick a suitable buffer alignment
if (ret < 0)
{
cout << " av_frame_get_buffer failed!" << endl;
system("pause");
return -1;
}
//5 Open the output file and write the MP4 header
ret = avio_open(&oc->pb, outfile, AVIO_FLAG_WRITE);
if (ret < 0)
{
cout << " avio_open failed!" << endl;
system("pause");
return -1;
}
ret = avformat_write_header(oc, NULL);
if (ret < 0)
{
cout << " avformat_write_header failed!" << endl;
system("pause");
return -1;
}
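//Note: avformat_write_header() may adjust st->time_base; with the MP4 muxer and the defaults used
//here it ends up as 1/90000, which is what the pts step of 3600 below relies on (see the note after
//the code for a version that does not depend on this).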
int p = 0;
for(;;)
{
int len = fread(rgb, 1, width * height * CHANNEL, fp);
if (len <= 0)
{
break;
}
uint8_t *indata[AV_NUM_DATA_POINTERS] = { 0 };
indata[0] = rgb;
int inlinesize[AV_NUM_DATA_POINTERS] = { 0 };
inlinesize[0] = width * CHANNEL;
//Convert the packed RGB data (indata) into the planar YUV420P frame (yuv)
int h = sws_scale(ctx, indata, inlinesize, 0, height, yuv->data, yuv->linesize);
if (h <= 0)
break;
yuv->pts = p;
p = p + 3600; //3600 = 90000 / 25: one frame step in the stream time_base of 1/90000
ret = avcodec_send_frame(c, yuv); //feed the YUV frame to the encoder
if (ret != 0)
{
continue;
}
AVPacket pkt; //packet for the encoded (compressed) data
av_init_packet(&pkt);
ret = avcodec_receive_packet(c, &pkt);
if (ret != 0)
continue; //usually AVERROR(EAGAIN): the encoder buffers a few frames before it emits packets
cout << "pkt.size = " << pkt.size << endl;
//av_interleaved_write_frame takes ownership of the packet reference and unrefs it internally
av_interleaved_write_frame(oc, &pkt);
}
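//Drain the encoder: the H.264 encoder buffers frames internally (lookahead), so without a flush
//the last frames of the video would be lost. A NULL frame signals end of stream.
avcodec_send_frame(c, NULL);
for (;;)
{
AVPacket pkt;
av_init_packet(&pkt);
if (avcodec_receive_packet(c, &pkt) != 0)
break; //AVERROR_EOF: no more buffered packets
av_interleaved_write_frame(oc, &pkt);
}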
//Write the MP4 trailer (the moov index)
av_write_trailer(oc);
avio_close(oc->pb);
avformat_free_context(oc);
avcodec_close(c);
avcodec_free_context(&c);
sws_freeContext(ctx);
delete[] rgb;
fclose(fp);
system("pause");
return 0;
}
Notes on av_interleaved_write_frame:
After writing, this function owns the packet's reference and unreferences it itself, so the caller does not need to free the packet. With av_write_frame, by contrast, the caller keeps ownership and must call av_packet_unref afterwards to drop the reference.
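For example, the equivalent write with av_write_frame would look like this:
av_write_frame(oc, &pkt);  //the caller still owns the packet here
av_packet_unref(&pkt);     //so it has to be unreferenced explicitly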
If the source pixel format given to sws_getCachedContext does not match the raw data (for example declaring the 4-byte AV_PIX_FMT_BGRA while the dump is 3-byte RGB24), the picture comes out with wrong or washed-out colors; the cause is the format mismatch, not a missing alpha channel. AV_PIX_FMT_RGB24, as used above, matches the extracted data.
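The pts handling above (stepping by 3600) works only because the MP4 muxer's stream time_base happens to be 1/90000 here. A more robust pattern, sketched below with the same variables as in the program (c, st, oc, yuv, p, pkt), is to count frames in the encoder's own time_base and rescale every packet to the stream time_base before writing:
yuv->pts = p++; //pts in encoder time_base units (1/25 s per tick)
avcodec_send_frame(c, yuv);
while (avcodec_receive_packet(c, &pkt) == 0)
{
av_packet_rescale_ts(&pkt, c->time_base, st->time_base); //convert pts/dts/duration to the muxer's time_base
pkt.stream_index = st->index;
av_interleaved_write_frame(oc, &pkt);
}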