ffmpeg rgb_to_mp4

71 篇文章 4 订阅

视频编码
1.读取RGB文件转换为yuv
2.压缩为h264
3.封装为MP4

ffmpeg -i test.mp4 -pix_fmt bgra out.rgb

av_interleaved_write_frame


1.AVFormatContext *s
2.AVPacket *pkt
3.按照dts排序
4.与 av_write_frame 不同:内部会缓冲 packet,按 dts 交错排序后再写入(适合多路流封装)

rgb_to_mp4.cpp


extern "C"
{
	#include <libavformat/avformat.h>
	#include <libswscale/swscale.h>
}
#include <iostream>
using namespace std;
int main()
{
	char infile[] = "out.rgb";
	char outfile[] = "rgb.mp4";
	//muxer,demuters
	av_register_all();
	avcodec_register_all();

	FILE *fp = fopen(infile,"rb");
	if(!fp)
	{
		cout<<infile<<"open file!"<<endl;
		return -1;
	}
	int width = 848;
	int height = 480;
	int fps = 25;
	//1. create codec
	AVCodec *codec = avcodec_find_encoder(AV_CODEC_ID_H264);
	if(!codec)
	{
		cout<<" avcodec_find_encoder AV_CODEC_ID_H264 failed!"<<endl;
		return -1;
	}
	AVCodecContext *c = avcodec_alloc_context3(codec);
	if(!c)
	{
		cout<<" avcodec_alloc_context3 failed!"<<endl;
		return -1;
	}
	//压缩比特率
	c->bit_rate = 400000000;

	c->width = width;
	c->height = height;
	c->time_base = {1,fps};
	c->framerate = {fps,1};
	//画面组大小
	c->gop_size = 50;
	c->max_b_frames = 0;
	c->pix_fmt = AV_PIX_FMT_YUV420P;
	c->codec_id = AV_CODEC_ID_H264;
	c->thread_count = 4;

	//全局的编码信息
	c->flags |= AV_CODEC_FLAG_GLOBAL_HEADER; 
	int ret = avcodec_open2(c,codec,NULL);
	if(ret < 0)
	{
		cout<<" avcodec_open2 failed"<<endl;
		return -1;
	}
	cout<<"avcodec_open2 success!"<<endl;

	//2 create out context
	AVFormatContext *oc = NULL;
	avformat_alloc_output_context2(&oc,0,0,outfile);

	//3 add video stream
	AVStream *st = avformat_new_stream(oc,NULL);
	//st->codec = c;
	st->id = 0;
	st->codecpar->codec_tag = 0;
	avcodec_parameters_from_context(st->codecpar,c);
	cout<<"==========================================="<<endl;
	av_dump_format(oc,0,outfile,1);
	cout<<"==========================================="<<endl;

	//4 rgb to yuv
	SwsContext *ctx = NULL;
	ctx = sws_getCachedContext(
		ctx,width,height,AV_PIX_FMT_RGBA,
        width,height,AV_PIX_FMT_YUV420P,
						 SWS_BICUBIC,
						 NULL,NULL,NULL);
	//输入的空间
	unsigned char *rgb = new unsigned char[width*height*4];
	
	//输出的空间
	AVFrame *yuv = av_frame_alloc();
	yuv->format = AV_PIX_FMT_YUV420P;
	yuv->height = height;
	yuv->width = width;
	ret = av_frame_get_buffer(yuv,32);
	if(ret < 0)
	{
		cout<<" av_frame_get_buffer failed!"<<endl;
		return -1;
	}

	//5 write mp4 head
	ret = avio_open(&oc->pb,outfile,AVIO_FLAG_WRITE);
	if(ret < 0)
	{
		cout<<" avio_open failed!"<<endl;
		return -1;
	}
	ret = avformat_write_header(oc,NULL);
	if(ret < 0)
	{
		cout<<" avformat_write_header failed"<<endl;
		return -1;
	}

	int p = 0;
	for(;;)
	{
		int len = fread(rgb,1,width*height*4,fp);
		if(len <= 0) break;
		uint8_t *indata[AV_NUM_DATA_POINTERS] = {0};
		indata[0] = rgb;
		int inlinesize[AV_NUM_DATA_POINTERS] = {0};
		inlinesize[0] = width * 4;
		int h = sws_scale(ctx,indata,inlinesize,0,height,
			yuv->data,yuv->linesize);
		if(h <= 0)
			break;

		//6 encode frame
		yuv->pts = p;
//		yuv->pict_type = AV_PICTURE_TYPE_I;
		p = p + 3600;
		ret = avcodec_send_frame(c,yuv);
		if(ret != 0)
		{
			continue;
		}
		AVPacket pkt;
		av_init_packet(&pkt);
		ret = avcodec_receive_packet(c,&pkt);
		if(ret != 0)
		{
			continue;
		}
		// av_write_frame(oc,&pkt);
		// av_packet_unref(&pkt);
		av_interleaved_write_frame(oc,&pkt);
		cout<<"<"<<pkt.size<<">";
	}

	//写入视频索引
	av_write_trailer(oc);
	//关闭视频输出io
	avio_close(oc->pb);
	//清理封装输出上下文
	avformat_free_context(oc);
	//关闭编码器
	avcodec_close(c);
	//清理编码器上下文
	avcodec_free_context(&c);
	//清理视频冲采样上下文
	sws_freeContext(ctx);
	cout<<"\n=======================end======================\n";

	return 0;
}

这里是引用

  • 0
    点赞
  • 0
    收藏
    觉得还不错? 一键收藏
  • 1
    评论
要将 RGB 图像换为 H.264 视频,您可以使用 FFmpeg 库。下面是一个简单的 C++ 代码示例,演示如何使用 FFmpeg 库将 RGB 图像编码为 H.264 视频: ```cpp #include <iostream> #include <fstream> #include <cstring> #include <cmath> #include <stdint.h> extern "C" { #include "libavcodec/avcodec.h" #include "libavformat/avformat.h" #include "libswscale/swscale.h" #include "libavutil/opt.h" #include "libavutil/imgutils.h" } using namespace std; int main(int argc, char** argv) { // 1. Initialize FFmpeg av_register_all(); // 2. Open output file AVFormatContext* formatContext = nullptr; avformat_alloc_output_context2(&formatContext, nullptr, nullptr, "output.mp4"); if (!formatContext) { cout << "Failed to open output file" << endl; return -1; } // 3. Find video encoder AVCodec* codec = avcodec_find_encoder(AV_CODEC_ID_H264); if (!codec) { cout << "Failed to find video encoder" << endl; return -1; } // 4. Create new video stream AVStream* stream = avformat_new_stream(formatContext, codec); if (!stream) { cout << "Failed to create new video stream" << endl; return -1; } // 5. Set video stream parameters stream->codecpar->codec_id = codec->id; stream->codecpar->codec_type = AVMEDIA_TYPE_VIDEO; stream->codecpar->width = 640; stream->codecpar->height = 480; stream->codecpar->format = AV_PIX_FMT_YUV420P; stream->time_base = { 1, 25 }; // 25 fps // 6. Open video encoder AVCodecContext* codecContext = avcodec_alloc_context3(codec); avcodec_parameters_to_context(codecContext, stream->codecpar); if (avcodec_open2(codecContext, codec, nullptr) < 0) { cout << "Failed to open video encoder" << endl; return -1; } // 7. 
Allocate frame buffers AVFrame* frame = av_frame_alloc(); frame->format = AV_PIX_FMT_RGB24; frame->width = 640; frame->height = 480; av_image_alloc(frame->data, frame->linesize, frame->width, frame->height, AV_PIX_FMT_RGB24, 1); AVFrame* frameYUV = av_frame_alloc(); frameYUV->format = AV_PIX_FMT_YUV420P; frameYUV->width = 640; frameYUV->height = 480; av_image_alloc(frameYUV->data, frameYUV->linesize, frameYUV->width, frameYUV->height, AV_PIX_FMT_YUV420P, 1); // 8. Convert RGB to YUV SwsContext* swsContext = sws_getContext(frame->width, frame->height, AV_PIX_FMT_RGB24, frameYUV->width, frameYUV->height, AV_PIX_FMT_YUV420P, SWS_BILINEAR, nullptr, nullptr, nullptr); if (!swsContext) { cout << "Failed to create SwsContext" << endl; return -1; } // 9. Write header to output file avformat_write_header(formatContext, nullptr); // 10. Encode and write video frames uint8_t* buffer = new uint8_t[640 * 480 * 3]; for (int i = 0; i < 100; i++) { // Generate RGB image for (int y = 0; y < 480; y++) { for (int x = 0; x < 640; x++) { buffer[y * 640 * 3 + x * 3 + 0] = (uint8_t)(sin(x / 10.0 + i / 10.0) * 128 + 128); buffer[y * 640 * 3 + x * 3 + 1] = (uint8_t)(sin(y / 10.0 + i / 7.0) * 128 + 128); buffer[y * 640 * 3 + x * 3 + 2] = (uint8_t)(sin(x / 7.0 + y / 10.0 + i / 5.0) * 128 + 128); } } // Convert RGB to YUV memcpy(frame->data[0], buffer, 640 * 480 * 3); sws_scale(swsContext, frame->data, frame->linesize, 0, frame->height, frameYUV->data, frameYUV->linesize); // Encode and write video frame AVPacket packet; av_init_packet(&packet); packet.data = nullptr; packet.size = 0; frameYUV->pts = i; avcodec_send_frame(codecContext, frameYUV); while (avcodec_receive_packet(codecContext, &packet) == 0) { av_interleaved_write_frame(formatContext, &packet); av_packet_unref(&packet); } } // 11. Write trailer to output file av_write_trailer(formatContext); // 12. 
Cleanup avformat_free_context(formatContext); avcodec_free_context(&codecContext); av_frame_free(&frame); av_frame_free(&frameYUV); sws_freeContext(swsContext); delete[] buffer; return 0; } ``` 在上面的代码中,我们首先初始化 FFmpeg 库。然后打开一个输出文件,指定 H.264 视频编码器,并创建一个新的视频流。接下来,我们设置视频流的参数,包括视频分辨率,帧率和像素格式等。然后打开视频编码器,并分配 RGB 和 YUV 帧缓冲区。我们使用 SwsContext 将 RGB换为 YUV 帧,并将它们编码为 H.264 视频帧,并将它们写入输出文件。最后,我们清理并关闭所有资源。 请注意,这只是一个简单的示例,实际使用中还需要进行更多的错误检查和异常处理。
评论 1
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值