Using FFmpeg to read an H.264 file into memory and then write it out as a container-format file
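
The program below reads a raw H.264 elementary stream with standard C file I/O, feeds it to FFmpeg through a custom AVIOContext read callback (fill_iobuffer), and remuxes the parsed packets into the container implied by the output file name (here an MKV). Because a raw H.264 stream carries no timestamps, PTS/DTS values are generated from the stream's frame rate before each packet is written.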

/**
 * Read an H.264 file into memory first,
 * then write it out as a container-format file.
 */
#include "stdafx.h"

#define __STDC_CONSTANT_MACROS

extern "C"
{
#include "libavformat/avformat.h"
}

#define IO_BUFFER_SIZE 32768

FILE *fp_open;

/**
 * This callback is invoked for the first time inside avformat_open_input();
 * from the second call onward it is invoked repeatedly inside
 * avformat_find_stream_info(). Each call reads up to IO_BUFFER_SIZE bytes of
 * the file into memory; after FFmpeg has parsed the data, the packets are
 * stored frame by frame, in order, in an AVPacketList.
 * This describes the behaviour with the buffer set to 32 KB; with a different
 * buffer size the call pattern also differs.
 */
int fill_iobuffer(void *opaque, uint8_t *buf, int buf_size)
{
	if (!feof(fp_open)){
		int true_size = fread(buf, 1, buf_size, fp_open);
		return true_size;
	}
	else{
		return AVERROR_EOF; //signal end of stream so the demuxer stops cleanly
	}
}
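
/*
 * A minimal sketch (not part of the original program) of the same read-callback
 * technique when the whole bitstream already sits in a memory buffer instead of
 * a FILE*. BufferData and fill_iobuffer_mem are illustrative names; pass the
 * address of a BufferData as the opaque argument of avio_alloc_context() to use
 * this variant. memcpy() requires <string.h>.
 */
struct BufferData {
	uint8_t *ptr;   //current read position
	size_t   left;  //bytes remaining
};

static int fill_iobuffer_mem(void *opaque, uint8_t *buf, int buf_size)
{
	struct BufferData *bd = (struct BufferData *)opaque;
	if (bd->left == 0)
		return AVERROR_EOF;                      //nothing left: signal end of stream
	int copy = FFMIN(buf_size, (int)bd->left);   //hand at most buf_size bytes to FFmpeg
	memcpy(buf, bd->ptr, copy);
	bd->ptr  += copy;
	bd->left -= copy;
	return copy;
}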

int convert(const char *in_filename_v, const char *out_filename)
{
	AVInputFormat *ifmt_v = NULL;
	AVOutputFormat *ofmt = NULL;
	AVFormatContext *ifmt_ctx_v = NULL, *ofmt_ctx = NULL;
	AVPacket pkt;
	int ret, i;
	int videoindex_v = -1, videoindex_out = -1;
	int frame_index = 0;
	int64_t cur_pts_v = 0;

	av_register_all();

	fp_open = fopen(in_filename_v, "rb");
	if (!fp_open){
		printf("Could not open input file '%s'.\n", in_filename_v);
		return -1;
	}
	ifmt_ctx_v = avformat_alloc_context();
	unsigned char *iobuffer = (unsigned char *)av_malloc(IO_BUFFER_SIZE);
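	//avio_alloc_context(buffer, buffer_size, write_flag, opaque, read_packet, write_packet, seek):
	//write_flag = 0 makes the context read-only, fill_iobuffer supplies the data,
	//and the opaque pointer, write and seek callbacks are not needed here.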
	AVIOContext *avio = avio_alloc_context(iobuffer, IO_BUFFER_SIZE, 0, NULL, fill_iobuffer, NULL, NULL);
	ifmt_ctx_v->pb = avio;

	ifmt_v = av_find_input_format("h264");
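	//The URL string passed to avformat_open_input() is only a placeholder;
	//all data is pulled through the custom AVIOContext attached above.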
	if ((ret = avformat_open_input(&ifmt_ctx_v, "nothing", ifmt_v, NULL)) < 0) {
		printf("Could not open input file.");
		goto end;
	}

	if ((ret = avformat_find_stream_info(ifmt_ctx_v, 0)) < 0) {
		printf("Failed to retrieve input stream information");
		goto end;
	}
	printf("===========Input Information==========\n");
	av_dump_format(ifmt_ctx_v, 0, in_filename_v, 0);
	printf("======================================\n");
	avformat_alloc_output_context2(&ofmt_ctx, NULL, NULL, out_filename);
	if (!ofmt_ctx) {
		printf("Could not create output context\n");
		ret = AVERROR_UNKNOWN;
		goto end;
	}
	ofmt = ofmt_ctx->oformat;

	AVStream *in_stream = ifmt_ctx_v->streams[0];
	AVStream *out_stream = avformat_new_stream(ofmt_ctx, NULL);
	videoindex_v = 0;
	if (!out_stream) {
		printf("Failed allocating output stream\n");
		ret = AVERROR_UNKNOWN;
		goto end;
	}
	videoindex_out = out_stream->index;
	//Copy the settings of AVCodecContext
	if (avcodec_copy_context(out_stream->codec, in_stream->codec) < 0) {
		printf("Failed to copy context from input to output stream codec context\n");
		goto end;
	}
	out_stream->codec->codec_tag = 0;
	/* Some formats want stream headers to be separate. */
	if (ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER)
		out_stream->codec->flags |= CODEC_FLAG_GLOBAL_HEADER;

	printf("==========Output Information==========\n");
	av_dump_format(ofmt_ctx, 0, out_filename, 1);
	printf("======================================\n");
	//Open output file
	if (!(ofmt->flags & AVFMT_NOFILE)) {
		if (avio_open(&ofmt_ctx->pb, out_filename, AVIO_FLAG_WRITE) < 0) {
			printf("Could not open output file '%s'", out_filename);
			goto end;
		}
	}
	//Write file header
	if (avformat_write_header(ofmt_ctx, NULL) < 0) {
		printf("Error occurred when opening output file\n");
		goto end;
	}

	while (1) {
		AVFormatContext *ifmt_ctx;
		int stream_index = 0;
		AVStream *in_stream, *out_stream;

		//Get an AVPacket
		ifmt_ctx = ifmt_ctx_v;
		stream_index = videoindex_out;

		if (av_read_frame(ifmt_ctx, &pkt) >= 0){
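			//Keep reading until a packet from the video stream is returned;
			//a raw H.264 stream has no timestamps, so they are generated below.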
			do{
				in_stream = ifmt_ctx->streams[pkt.stream_index];
				out_stream = ofmt_ctx->streams[stream_index];

				if (pkt.stream_index == videoindex_v){
					//FIX:No PTS (Example: Raw H.264)
					//Simple Write PTS
					if (pkt.pts == AV_NOPTS_VALUE){
						//Write PTS
						AVRational time_base1 = in_stream->time_base;
						//Duration between 2 frames (μs)
						int64_t calc_duration = (double)AV_TIME_BASE / av_q2d(in_stream->r_frame_rate);
						//Parameters
						pkt.pts = (double)(frame_index*calc_duration) / (double)(av_q2d(time_base1)*AV_TIME_BASE);
						pkt.dts = pkt.pts;
						pkt.duration = (double)calc_duration / (double)(av_q2d(time_base1)*AV_TIME_BASE);
						frame_index++;
					}
					cur_pts_v = pkt.pts;
					break;
				}
			} while (av_read_frame(ifmt_ctx, &pkt) >= 0);
		}
		else{
			break;
		}

		//Convert PTS/DTS
		pkt.pts = av_rescale_q_rnd(pkt.pts, in_stream->time_base, out_stream->time_base, (AVRounding)(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
		pkt.dts = av_rescale_q_rnd(pkt.dts, in_stream->time_base, out_stream->time_base, (AVRounding)(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
		pkt.duration = av_rescale_q(pkt.duration, in_stream->time_base, out_stream->time_base);
		pkt.pos = -1;
		pkt.stream_index = stream_index;

		printf("Write 1 Packet. size:%5d\tpts:%lld\n", pkt.size, pkt.pts);
		//Write
		if (av_interleaved_write_frame(ofmt_ctx, &pkt) < 0) {
			printf("Error muxing packet\n");
			break;
		}
		av_free_packet(&pkt);

	}

	//Write file trailer
	av_write_trailer(ofmt_ctx);

end:
	avformat_close_input(&ifmt_ctx_v);
	/* the custom AVIOContext and its buffer are not freed by
	 * avformat_close_input(), so release them here */
	if (avio){
		av_freep(&avio->buffer);
		av_freep(&avio);
	}
	/* close output */
	if (ofmt_ctx && !(ofmt->flags & AVFMT_NOFILE))
		avio_close(ofmt_ctx->pb);
	avformat_free_context(ofmt_ctx);
	fclose(fp_open);
	if (ret < 0 && ret != AVERROR_EOF) {
		printf("Error occurred.\n");
		return -1;
	}
	return 0;
}

int main(int argc, char* argv[])
{
	//const char *in_filename_v = argv[1]; //Input file URL
	//const char *out_filename = argv[2]; //Output file URL
	const char *in_filename_v = "media files/JINUSEAN_17s.h264"; //Input file URL
	const char *out_filename = "media files/JINUSEAN_17s.mkv"; //Output file URL
	convert(in_filename_v, out_filename);
	return 0;
}
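
The listing above targets older FFmpeg APIs (av_register_all(), avcodec_copy_context(), CODEC_FLAG_GLOBAL_HEADER, av_free_packet()). As a rough, non-authoritative sketch, on FFmpeg 4.x and later the stream-copy setup would instead go through AVCodecParameters, along these lines (ofmt_ctx and in_stream are the same variables as in the listing; error handling omitted):

/* Sketch for newer FFmpeg (4.x+), replacing the avcodec_copy_context() block above. */
AVStream *out_stream = avformat_new_stream(ofmt_ctx, NULL);
if (avcodec_parameters_copy(out_stream->codecpar, in_stream->codecpar) < 0)
	printf("Failed to copy codec parameters\n");
out_stream->codecpar->codec_tag = 0;
/* av_register_all() is no longer required, and packets are released with
 * av_packet_unref(&pkt) rather than av_free_packet(&pkt). */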

 
