OpenCV调用FFmpeg库推送RTSP视频流

// Encoder configuration for AvH264. Field defaults target a low-latency
// 1280x720 stream; callers normally override them before AvH264::open().
typedef struct AvH264EncConfig_T
{

	int width = 1280;        // output frame width in pixels
	int height = 720;        // output frame height in pixels
	int frame_rate = 12;     // frames per second (used for time_base/framerate)
	int64_t bit_rate = 1024; // target bitrate passed to AVCodecContext::bit_rate
	                         // NOTE(review): 1024 bits/s looks implausibly low — confirm intended unit
	int gop_size = 60;       // distance between keyframes (GOP length)
	int max_b_frames = 0;    // 0 disables B-frames (helps zerolatency tuning)
} AvH264EncConfig;

// Thin wrapper around an FFmpeg H.264 encoder: feeds BGR cv::Mat frames in,
// returns encoded AVPackets out. Not thread-safe; one instance per stream.
class AvH264
{

public:
	AvH264();
	// Allocates and opens the encoder from the given config.
	// Returns a negative value on failure, >= 0 on success.
	int open(AvH264EncConfig h264_config);
	// Encodes one BGR frame; returns an internally-owned packet
	// (valid until the next encode() call) or NULL when no packet is ready.
	AVPacket *encode(cv::Mat mat);
	// Releases codec context, frame, and packet. Safe to call more than once.
	void close();

private:
	AVCodec *cdc_;        // encoder descriptor (owned by FFmpeg, not freed here)
	AVCodecContext *cdc_ctx_; // owned; freed in close()
	AVFrame *avf_;        // reusable input frame; owned
	AVPacket *avp_;       // reusable output packet; owned
	int frame_size_;      // width * height (luma plane size in bytes for YUV420P)
	int pts_;             // monotonically increasing presentation timestamp
};

// Default-construct with all FFmpeg handles cleared so that close() is
// always safe to call, even before open() succeeds.
AvH264::AvH264()
	: cdc_(nullptr),
	  cdc_ctx_(nullptr),
	  avf_(nullptr),
	  avp_(nullptr)
{
}

// Opens an H.264 encoder configured from h264_config.
// Returns >= 0 on success, a negative value on failure. On failure all
// partially-allocated resources are released, so the instance may be reused.
int AvH264::open(AvH264EncConfig h264_config)
{

	pts_ = 0;
	cdc_ = avcodec_find_encoder(AV_CODEC_ID_H264);
	if (!cdc_)
	{
		return -1;
	}
	cdc_ctx_ = avcodec_alloc_context3(cdc_);
	if (!cdc_ctx_)
	{
		return -1;
	}
	cdc_ctx_->bit_rate = h264_config.bit_rate;
	cdc_ctx_->width = h264_config.width;
	cdc_ctx_->height = h264_config.height;
	cdc_ctx_->time_base = { 1, h264_config.frame_rate };
	cdc_ctx_->framerate = { h264_config.frame_rate, 1 };
	cdc_ctx_->gop_size = h264_config.gop_size;
	cdc_ctx_->max_b_frames = h264_config.max_b_frames;
	cdc_ctx_->pix_fmt = AV_PIX_FMT_YUV420P;
	cdc_ctx_->codec_id = AV_CODEC_ID_H264;
	cdc_ctx_->codec_type = AVMEDIA_TYPE_VIDEO;
	cdc_ctx_->qmin = 10;
	cdc_ctx_->qmax = 51;
	//cdc_ctx_->qcompress = 0.6;
	// libx264 private options: slow preset, zero-latency tuning, main profile.
	AVDictionary *dict = NULL;
	av_dict_set(&dict, "preset", "slow", 0);
	av_dict_set(&dict, "tune", "zerolatency", 0);
	av_dict_set(&dict, "profile", "main", 0);
	// av_dict_set(&dict, "rtsp_transport", "tcp", 0);
	avf_ = av_frame_alloc();
	avp_ = av_packet_alloc();
	if (!avf_ || !avp_)
	{
		// BUGFIX: the dictionary and any allocations made above were leaked
		// on every early-exit path; release them before returning.
		av_dict_free(&dict);
		close();
		return -1;
	}
	frame_size_ = cdc_ctx_->width * cdc_ctx_->height;
	avf_->format = cdc_ctx_->pix_fmt;
	avf_->width = cdc_ctx_->width;
	avf_->height = cdc_ctx_->height;
	// Allocate the frame's plane buffers (default alignment).
	int r = av_frame_get_buffer(avf_, 0);
	if (r < 0)
	{
		av_dict_free(&dict);
		close();
		return -1;
	}
	r = av_frame_make_writable(avf_);
	if (r < 0)
	{
		av_dict_free(&dict);
		close();
		return -1;
	}
	r = avcodec_open2(cdc_ctx_, cdc_, &dict);
	// BUGFIX: avcodec_open2 leaves unconsumed entries in dict; the dictionary
	// must always be freed by the caller or it leaks.
	av_dict_free(&dict);
	if (r < 0)
	{
		close();
	}
	return r;
}

// Releases every FFmpeg resource owned by this instance. The av_* free
// helpers null out the pointers, so calling close() twice is harmless.
void AvH264::close()
{
	if (cdc_ctx_ != nullptr)
	{
		avcodec_free_context(&cdc_ctx_);
	}
	if (avf_ != nullptr)
	{
		av_frame_free(&avf_);
	}
	if (avp_ != nullptr)
	{
		av_packet_free(&avp_);
	}
}

// Encodes one BGR frame. Returns a pointer to the internal packet (valid
// until the next encode() call) or NULL when the encoder produced nothing.
AVPacket *AvH264::encode(cv::Mat mat)
{
	if (mat.empty())
		return NULL;
	cv::resize(mat, mat, cv::Size(cdc_ctx_->width, cdc_ctx_->height));
	cv::Mat yuv;
	cv::cvtColor(mat, yuv, cv::COLOR_BGR2YUV_I420);
	// BUGFIX: the original replaced avf_->data[] with pointers into the
	// stack-local `yuv` Mat, abandoning the buffers allocated by
	// av_frame_get_buffer() and ignoring avf_->linesize[] padding (corrupted
	// output whenever linesize != width). Copy plane-by-plane, row-by-row
	// into the frame's own buffers instead.
	if (av_frame_make_writable(avf_) < 0)
		return NULL;
	const int w = cdc_ctx_->width;
	const int h = cdc_ctx_->height;
	const unsigned char *y_src = yuv.data;                       // I420: Y plane first
	const unsigned char *u_src = y_src + frame_size_;            // then U (w/2 x h/2)
	const unsigned char *v_src = y_src + frame_size_ * 5 / 4;    // then V (w/2 x h/2)
	for (int row = 0; row < h; ++row)
	{
		memcpy(avf_->data[0] + row * avf_->linesize[0], y_src + row * w, w);
	}
	for (int row = 0; row < h / 2; ++row)
	{
		memcpy(avf_->data[1] + row * avf_->linesize[1], u_src + row * (w / 2), w / 2);
		memcpy(avf_->data[2] + row * avf_->linesize[2], v_src + row * (w / 2), w / 2);
	}
	avf_->pts = pts_++;
	// BUGFIX: release the payload of the previously returned packet before
	// reusing it; the original leaked one packet buffer per frame.
	av_packet_unref(avp_);
	int r = avcodec_send_frame(cdc_ctx_, avf_);
	if (r >= 0)
	{
		r = avcodec_receive_packet(cdc_ctx_, avp_);
		if (r == 0)
		{
			//avp_->stream_index = 0;
			return avp_;
		}
		// EAGAIN: encoder needs more input; EOF: encoder fully drained.
		if (r == AVERROR(EAGAIN) || r == AVERROR_EOF)
		{
			return NULL;
		}
	}
	return NULL;
}


// Capture thread: reads frames from `link` (file/camera/URL), encodes them
// to H.264, and pushes the encoded packets to the RTSP session until the
// source runs out of frames.
void SendFrameThread(xop::RtspServer *rtsp_server, xop::MediaSessionId session_id, char *link)
{
	cv::VideoCapture capture(link);
	if (!capture.isOpened())
	{
		// BUGFIX: the original only printed and then kept running with an
		// unopened capture; bail out instead.
		std::cout << "failed to open " << link << std::endl;
		return;
	}
	cv::Mat frame;
	AvH264 h264;
	AvH264EncConfig conf;
	conf.bit_rate = 1024;
	conf.width = 1280;
	conf.height = 720;
	conf.gop_size = 60;
	conf.max_b_frames = 0;
	conf.frame_rate = 12;
	if (h264.open(conf) < 0)
	{
		std::cout << "failed to open H.264 encoder" << std::endl;
		return;
	}
	while (1)
	{
		capture >> frame;
		if (!frame.data)
		{
			break;
		}
		// BUGFIX: imshow() was called before the empty-frame check and would
		// throw on end-of-stream.
		imshow("frame", frame);
		cv::waitKey(1);
		AVPacket *pkt = h264.encode(frame);
		// BUGFIX: the original copied sizeof-raw-BGR bytes starting at the
		// AVPacket struct itself (out-of-bounds read, garbage on the wire)
		// and dereferenced pkt without a NULL check. Copy the encoded
		// payload pkt->data with its real size pkt->size.
		if (pkt && pkt->size > 0)
		{
			xop::AVFrame videoFrame = { 0 };
			videoFrame.size = pkt->size;
			videoFrame.timestamp = xop::H264Source::GetTimestamp();
			videoFrame.buffer.reset(new uint8_t[videoFrame.size]);
			memcpy(videoFrame.buffer.get(), pkt->data, videoFrame.size);
			rtsp_server->PushFrame(session_id, xop::channel_0, videoFrame);
		}

		xop::Timer::Sleep(40);
	};
	h264.close();
}


int main()
{
	std::string suffix = "live";
	std::string ip = "127.0.0.1";
	std::string port = "554";
	std::string rtsp_url = "rtsp://" + ip + ":" + port + "/" + suffix;
	std::shared_ptrxop::EventLoop event_loop(new xop::EventLoop());
	std::shared_ptrxop::RtspServer server = xop::RtspServer::Create(event_loop.get());

	if (!server->Start("0.0.0.0", atoi(port.c_str())))
	{
		printf("RTSP Server listen on %s failed.\n", port.c_str());
		return 0;
	}

	xop::MediaSession *session = xop::MediaSession::CreateNew("live");
	session->AddSource(xop::channel_0, xop::H264Source::CreateNew());
	//session->StartMulticast();
	session->SetNotifyCallback([](xop::MediaSessionId session_id, uint32_t clients) {
		std::cout << "The number of rtsp clients: " << clients << std::endl;
	});

	xop::MediaSessionId session_id = server->AddSession(session);

	std::thread t1(SendFrameThread, server.get(), session_id, argv[1]);
	t1.detach();

	std::cout << "Play URL: " << rtsp_url << std::endl;

	while (1)
	{
		xop::Timer::Sleep(100);
	}

	getchar();
}

 

  • 1
    点赞
  • 19
    收藏
    觉得还不错? 一键收藏
  • 5
    评论
要使用 OpenCV 播放 RTSP 视频流,您需要用 FFmpeg 获取视频流,并将每一帧转换为 OpenCV 可读取的格式。以下是一个简单的 Python 代码示例,展示了如何使用 FFmpeg 和 OpenCV 播放 RTSP 视频流: ```python import cv2 import numpy import ffmpeg # RTSP 地址 url = 'rtsp://xxx.xxx.xxx.xxx:xxxx' # 使用 FFmpeg 获取视频流 stream = ffmpeg.input(url) stream = ffmpeg.output(stream, 'pipe:', format='rawvideo', pix_fmt='rgb24') process = ffmpeg.run_async(stream, pipe_stdout=True) # 使用 OpenCV 播放视频流 while True: # 读取一帧视频流 in_bytes = process.stdout.read(1920*1080*3) if not in_bytes: break # 将视频流转换为 OpenCV 格式 frame = numpy.frombuffer(in_bytes, numpy.uint8).reshape([1080, 1920, 3]) # 显示视频帧 cv2.imshow('frame', frame) if cv2.waitKey(1) & 0xFF == ord('q'): break # 释放资源 process.stdout.close() cv2.destroyAllWindows() ``` 在这个示例中,我们首先使用 FFmpeg 获取 RTSP 视频流,并将其转换为裸视频流格式(rawvideo),然后通过管道(pipe)将视频流传输给 OpenCV。在循环中,我们不断读取视频帧,并将其转换为 OpenCV 可读取的格式,最后使用 `cv2.imshow()` 函数显示视频帧。 需要注意的是,这个示例仅适用于 RGB24 格式的视频流,如果您需要处理其他格式的视频流,可能需要修改一些参数和代码。另外,这个示例还没有加入异常处理和错误处理,您需要自行添加以保证程序的稳定性和可靠性。

“相关推荐”对你有帮助么?

  • 非常没帮助
  • 没帮助
  • 一般
  • 有帮助
  • 非常有帮助
提交
评论 5
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值