A C++ implementation of FFmpeg-based YUV420-to-H.264 software encoding

As the saying goes, of every 100 code snippets you find on Baidu, only 1 actually works. As long as FFmpeg is installed, this one runs the moment you drop it in, with no fancy dependencies. It's made for copy-pasters, the perpetually confused, and anyone with no environment set up at all.

Features

Software-encodes YUV420 data to H.264 in real time and writes it to a local out.h264 file. The functionality is encapsulated in a class, so it can also be built into a library.

Usage

Create an H264_ENCODER instance from another thread and call Init; this starts the encoding thread, which waits for frame data in the queue. To use it, push YUV420 data (an unsigned char* yuvbuf) with the Push method and it is encoded automatically. When you need to quit and release memory, call End.
Some settings can be changed in the configuration section at the top of the cpp file. If the encoder cannot keep up and keeps dropping frames, lower the bitrate a bit.
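For reference, a minimal main() against this interface could look like the sketch below. It is not from the original post: it assumes a hypothetical raw YUV420 file named input.yuv whose resolution matches the WIDTH/HEIGHT configured in h264_encoder.cpp, and it paces the Push calls at roughly the configured frame rate. The trailing sleeps are only there because the encoding thread is detached (see the note in Init further down).

// main.cpp - usage sketch (hypothetical input.yuv; resolution must match WIDTH/HEIGHT in h264_encoder.cpp)
#include <chrono>
#include <fstream>
#include <thread>
#include <vector>
#include "h264_encoder.h"

int main() {
	const int width = 2688, height = 1520, fps = 50;      // must match the values in h264_encoder.cpp
	const int frame_size = width * height * 3 / 2;        // bytes per YUV420 frame

	H264_ENCODER encoder;
	encoder.Init();                                        // starts the background encoding thread

	std::ifstream yuv("input.yuv", std::ios::binary);
	std::vector<unsigned char> frame(frame_size);
	while (yuv.read(reinterpret_cast<char *>(frame.data()), frame_size)) {
		encoder.Push(frame.data(), frame_size);            // hand one frame to the encoder
		std::this_thread::sleep_for(std::chrono::milliseconds(1000 / fps)); // crude real-time pacing
	}

	std::this_thread::sleep_for(std::chrono::seconds(2));  // let queued frames drain
	encoder.End();                                         // ask the encoding thread to stop
	std::this_thread::sleep_for(std::chrono::seconds(1));  // thread is detached, so give it time to finish the file
	return 0;
}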

Enough talk, here's the code.

// h264_encoder.h
// FFmpeg software encoder
// Create an H264_ENCODER instance; Init starts the thread, End stops it. Push YUV420 frames to encode.
#ifndef __H264_ENCODER_H__
#define __H264_ENCODER_H__

#include <thread>
#include <mutex>
#include <condition_variable>
#include <queue>

class H264_ENCODER {
public:
	int Run();
	void End();
	void Init();
	void Push(unsigned char *buf, int len);
	void Pop(unsigned char *resbuf, int &len);
private:
	std::thread thread_id;
	std::mutex do_mutex;
	std::condition_variable condition;
	bool run_enable;
	std::queue<unsigned char*> framebuf_queue;
	std::queue<int> framelen_queue;
};

#endif // __H264_ENCODER_H__
// h264_encoder.cpp
#include <iostream>
#include <cstring> // memcpy
#include <unistd.h>

extern "C"
{
#include <libavformat/avformat.h>
#include <libswscale/swscale.h>
}

#include "h264_encoder.h"

// Configuration
char outfile[] = "out.h264";
// Source image parameters
const int WIDTH = 2688;
const int HEIGHT = 1520;
const int FPS = 50;
const int MAX_QUEUE_LEN = 16;
const int BITRATE = 100000;

void H264_ENCODER::Init() {
	run_enable = true;
	thread_id = std::thread(&H264_ENCODER::Run, this);
	thread_id.detach(); // detach so the encoder runs as a background thread
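	// Note: because the thread is detached, End() only asks it to stop and does not wait for it;
	// give the encoder a moment before process exit so the trailer gets written.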
}
void H264_ENCODER::Push(unsigned char *buf, int len) {
	std::unique_lock<std::mutex> lock(do_mutex);

	unsigned char *yuvbuf = new unsigned char[len];
	memcpy(yuvbuf, buf, len);
	framebuf_queue.push(yuvbuf);
	framelen_queue.push(len);

	if (framebuf_queue.size() > MAX_QUEUE_LEN) {
		std::cout << "encoder queue full, dropping a frame" << std::endl;
		delete[] framebuf_queue.front();
		framelen_queue.pop();
		framebuf_queue.pop();
	}

	condition.notify_one();
}
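// Note: Push copies the buffer, so the caller may reuse its own buffer immediately after the call;
// when the queue exceeds MAX_QUEUE_LEN the oldest frame is dropped.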
void H264_ENCODER::Pop(unsigned char *resbuf, int &len) {
	if (resbuf != NULL && !framebuf_queue.empty() && !framelen_queue.empty()) {
		len = framelen_queue.front();
		memcpy(resbuf, framebuf_queue.front(), len);
		delete[] framebuf_queue.front();
		framelen_queue.pop();
		framebuf_queue.pop();
	}
}
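// Note: Pop assumes the caller already holds do_mutex (Run calls it with the lock held) and that
// resbuf is at least one full frame (WIDTH * HEIGHT * 3 / 2 bytes).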
int H264_ENCODER::Run() {
	// Allocate a buffer for one YUV420 frame
	unsigned char *yuv420Buf = new unsigned char[WIDTH * HEIGHT * 3 / 2];
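	// (YUV420 layout: Y plane is WIDTH*HEIGHT bytes, plus U and V planes of WIDTH*HEIGHT/4 bytes each)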

	// Register all formats and codecs (deprecated since FFmpeg 4.0 and no longer needed; drop this call on newer releases)
	av_register_all();

	// Find the H.264 encoder
	AVCodec *codec = avcodec_find_encoder(AV_CODEC_ID_H264);
	if (!codec) {
		std::cout << "avcodec_find_encoder AV_CODEC_ID_H264 failed!" << std::endl;
		return -1;
	}
	// Allocate the encoder context
	AVCodecContext *codecCtx = avcodec_alloc_context3(codec);
	if (!codecCtx) {
		std::cout << "avcodec_alloc_context3 failed!" << std::endl;
		return -1;
	}
	// Configure the encoder context
	codecCtx->width = WIDTH;                // encoded frame width
	codecCtx->height = HEIGHT;              // encoded frame height
	codecCtx->time_base.num = 1;
	codecCtx->time_base.den = FPS;          // time base as num/den, e.g. 1/25 means 25 frames per second
	codecCtx->framerate = {FPS, 1};         // frame rate
	codecCtx->pix_fmt = AV_PIX_FMT_YUV420P; // pixel format of the frames fed to the encoder
	codecCtx->bit_rate = BITRATE;           // 100000 is roughly 100 kbit/s
	codecCtx->gop_size = 25;                // one I-frame every 25 frames
	codecCtx->codec_type = AVMEDIA_TYPE_VIDEO;
	codecCtx->codec_id = AV_CODEC_ID_H264;
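	// (Optional, not in the original code) libx264 private options can be set here, before avcodec_open2,
	// e.g. to trade compression efficiency for lower encoder latency:
	//     av_opt_set(codecCtx->priv_data, "preset", "ultrafast", 0);
	//     av_opt_set(codecCtx->priv_data, "tune", "zerolatency", 0);
	// (uncommenting these requires #include <libavutil/opt.h>)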
	// Open the encoder
	int ret = avcodec_open2(codecCtx, codec, NULL);
	if (ret < 0) {
		std::cout << "avcodec_open2  failed!" << std::endl;
		return -1;
	}
	std::cout << "avcodec_open2 success!" << std::endl;

	// Allocate and configure the YUV frame
	AVFrame *yuvFrame = av_frame_alloc();
	yuvFrame->format = AV_PIX_FMT_YUV420P;
	yuvFrame->width = WIDTH;
	yuvFrame->height = HEIGHT;
	ret = av_frame_get_buffer(yuvFrame, 32);
	if (ret < 0) {
		std::cout << "av_frame_get_buffer  failed!" << std::endl;
		return -1;
	}

	// Set up the output file
	AVFormatContext* formatContext = nullptr;
	AVOutputFormat* outputFormat = av_guess_format(nullptr, outfile, nullptr);
	if (!outputFormat) {
		std::cout << "Could not determine output format." << std::endl;
		return 1;
	}
	if (avformat_alloc_output_context2(&formatContext, outputFormat, nullptr, outfile) < 0) {
		std::cout << "Failed to allocate output context." << std::endl;
		return 1;
	}
	AVStream* videoStream = avformat_new_stream(formatContext, nullptr);
	if (!videoStream) {
		std::cerr << "Failed to create video stream." << std::endl;
		return 1;
	}
	videoStream->codecpar->codec_id = AV_CODEC_ID_H264;
	videoStream->codecpar->codec_type = AVMEDIA_TYPE_VIDEO;
	videoStream->codecpar->width = codecCtx->width;
	videoStream->codecpar->height = codecCtx->height;
	videoStream->time_base = codecCtx->time_base;
	videoStream->avg_frame_rate = codecCtx->framerate;
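	// (Not in the original code) avcodec_parameters_from_context(videoStream->codecpar, codecCtx) would
	// copy every encoder parameter, including extradata, in one call; for this raw .h264 output the
	// manual assignments above appear to be enough.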
	if (avio_open(&formatContext->pb, outfile, AVIO_FLAG_WRITE) < 0) {
		std::cout << "Failed to open output file." << std::endl;
		return 1;
	}
	if ((ret = avformat_write_header(formatContext, nullptr)) < 0) {
		std::cout << "Failed to write output file header. ret: " << ret << std::endl;
		return -1;
	}

	// Main loop: pull frames from the queue, encode, and write to the file
	int count = 0;
	while (run_enable) {
		std::unique_lock<std::mutex> lock(do_mutex);
		// Read one frame at a time into yuv420Buf
		if (!framebuf_queue.empty()) {
			while (!framebuf_queue.empty()) {
				int len = 0;
				Pop(yuv420Buf, len);
				lock.unlock(); // release through the unique_lock (unlocking do_mutex directly would desync its ownership state)

				// Bind the YUV buffer to the frame (avpicture_fill initializes some fields of yuvFrame and fills
				// data/linesize automatically; it is deprecated in newer FFmpeg, where av_image_fill_arrays is the replacement)
				avpicture_fill((AVPicture *)yuvFrame, yuv420Buf, AV_PIX_FMT_YUV420P, WIDTH, HEIGHT);

				// H.264 encoding: hand the uncompressed (YUV) AVFrame to the encoder; encoding runs asynchronously
				yuvFrame->pts = count++; // pts counts in time_base units, i.e. one tick (1/FPS s) per frame
				ret = avcodec_send_frame(codecCtx, yuvFrame);
				if (ret != 0) {
					lock.lock(); // re-acquire before the next queue check
					continue;
				}
				// Collect the encoded data into an AVPacket
				AVPacket pkt;
				av_init_packet(&pkt);

				while (avcodec_receive_packet(codecCtx, &pkt) >= 0) { // fetch encoder output; encoding is pipelined, so the first few iterations yield nothing
					if (av_write_frame(formatContext, &pkt) < 0) { // write the packet to the file
						std::cout << "Error writing video frame." << std::endl;
					}
				}

				av_packet_unref(&pkt);
				lock.lock(); // re-acquire so the next queue check is protected when frames have piled up
			}
			lock.unlock();
		} else {
			condition.wait(lock);
		}
	}

	std::cout << "======================encoder stop...=========================" << std::endl;
	// Finalize output file
	av_write_trailer(formatContext);
	avio_close(formatContext->pb);
	avformat_free_context(formatContext);

	// Free the YUV420 buffer
	delete[] yuv420Buf;
	av_frame_free(&yuvFrame);
	// Close the encoder (redundant but harmless: avcodec_free_context below also closes it)
	avcodec_close(codecCtx);
	// Free the encoder context
	avcodec_free_context(&codecCtx);

	std::cout << "======================encoder end=========================" << std::endl;
	return 0;
}

void H264_ENCODER::End() {
	std::unique_lock<std::mutex> lock(do_mutex);
	run_enable = false;
	condition.notify_one();
}

Build command (write your own main, or adapt the sketch in the Usage section). For easy copying it is not put inside a code block:
g++ -Wall -g -o main main.cpp h264_encoder.cpp -I/usr/include/aarch64-linux-gnu -lavformat -lavcodec -lavutil -lswscale -lpthread -lstdc++ -ldl -lrt

Note:
-I/usr/include/aarch64-linux-gnu
You still need to figure out where your FFmpeg headers are installed. Mine is 64-bit ARM Linux; on x86 they usually live under /usr/include/x86_64-linux-gnu, so replace the flag with
-I/usr/include/x86_64-linux-gnu

Other notes

avcodec_receive_packet returns -11 (EAGAIN) for the first few frames, so output only starts appearing after roughly 2 seconds. For details, see:
关于FFmpeg编码时,avcodec_receive_packet返回-11的解决办法 (How to handle avcodec_receive_packet returning -11 when encoding with FFmpeg)
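A practical consequence: when End() is called, the encoder still holds the last batch of frames internally, and the code above never drains them, so out.h264 ends slightly early. If that matters, a flush step along these lines (a sketch, not part of the original code) can be inserted in Run() right before av_write_trailer:

	// Flush: a NULL frame puts the encoder into draining mode, then receive the buffered packets.
	avcodec_send_frame(codecCtx, NULL);
	AVPacket flushPkt;
	av_init_packet(&flushPkt);
	flushPkt.data = NULL;
	flushPkt.size = 0;
	while (avcodec_receive_packet(codecCtx, &flushPkt) >= 0) {
		if (av_write_frame(formatContext, &flushPkt) < 0) {
			std::cout << "Error writing flushed frame." << std::endl;
		}
		av_packet_unref(&flushPkt);
	}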

