异步udpserver接收rtp转html5(一)

本文探讨了如何将通过UDP接收的RTP流(如GB28181协议)转换为HTML5播放兼容的格式。利用FFmpeg发送RTP流到服务器,服务器端使用asio库接收并处理UDP数据,解码H264视频流,并可能转化为FLV或FMP4等适合Web播放的格式。此外,还提到了解包和帧处理的细节,以及可能的HTML5播放实现方式。
摘要由CSDN通过智能技术生成

第二篇在这里
第二篇 rtsp rtp gb28181转html5播放

udpserver

假定服务端有一个端口接收rtp over udp,接收到数据以后转成html5能展示的,这种效果有作用否?

我们知道ffmpeg是可以直接发送rtp流的,gb28181 也是可以发送rtp流的,那么我们制作一个简洁的服务器,在udp端口接收到以后转成tcp,然后在网页上展示。

ffmpeg发送命令如下:

ffmpeg -re -i video.mp4 -an -c:v copy -f rtp -sdp_file video.sdp "rtp://192.168.1.109:5004"
使用ffmpeg来发送文件,去除音频,当然也可以接收自己写的程序发送rtp,下面,我们使用asio来制作一个异步的服务器,来接收udp流。

code

#pragma once

#include <cstdlib>
#include <iostream>
#include <memory>
#include <unordered_map>
#include <boost/asio.hpp>
#include <boost/asio/spawn.hpp>
#include "c_rtp.h"
using boost::asio::ip::udp;

using namespace boost;

#include "c_hub.h"
#include "c_rtp.h"

#define DEFINE_EC \
boost::system::error_code ec;

class c_flvserver;

class c_udpserver:public std::enable_shared_from_this<c_udpserver>
{
	//asio::strand<asio::io_context::executor_type> v_strand;
	asio::io_context &v_context;

	asio::ip::udp::socket v_socket;
	c_flvserver *v_flvserver = NULL;
	unordered_map<uint32_t, s_rtp_h264_context*> v_ctxs;
	//s_rtp_h264_context v_rtp;
	s_rtp_h264_context * getctx(uint32_t ssrc)
	{
		auto it = v_ctxs.find(ssrc);
		if (it != v_ctxs.end())
			return it->second;
		else
		{
			s_rtp_h264_context *sc = new s_rtp_h264_context();
			v_ctxs[ssrc] = sc;
			return sc;
		}
	}
	void data_call(s_rtp_h264_context *ctx);
	s_flvhub * func_context(uint32_t ssrc);

	int live_rtp_unpack_h264(s_rtp_h264_context *ctx, uint8_t *data, int inlen);
	//int live_rtp_unpack_h265(s_rtp_h264_context *ctx, uint8_t *data, int inlen);
	//int live_rtp_unpack_aac(s_rtp_h264_context *ctx, uint8_t *data, int inlen);
	//int live_rtp_unpack_opus(s_rtp_h264_context *ctx, uint8_t *data, int inlen);
	//int live_rtp_unpack(s_rtp_h264_context *ctx, uint8_t *data, int inlen);

public:
	c_udpserver(asio::io_context& io_context, short port, c_flvserver *flvserver):
		//v_strand(io_context.get_executor()),
		v_context(io_context), v_flvserver(flvserver),
		v_socket(v_context)
		 //v_socket(v_context, udp::endpoint(udp::v4(), port))
	{
		//boost::asio::ip::udp::endpoint ep(boost::asio::ip::address::from_string("192.168.1.206"),port);
		udp::endpoint ep(udp::v4(), port);
		v_socket.open(ep.protocol());
		v_socket.set_option(boost::asio::ip::udp::socket::reuse_address(true));
		boost::asio::socket_base::receive_buffer_size recv_option(1*1024*1024);
		v_socket.set_option(recv_option);
		v_socket.bind(ep);
	}
	~c_udpserver()
	{
		//v_context_maps.clear();
	}

	
	//to flv server
	void do_receive();

#ifdef _WEBRTC_
	void do_receive_webrtc()
	{
		//std::cout << "start to receive" << std::endl;
		//auto self(shared_from_this());
		udp::endpoint from;
		v_socket.async_receive_from(asio::buffer(data_, max_length),
			from, [this](boost::system::error_code ec, std::size_t len)
		{
			uint8_t *d = (uint8_t*)&(data_[0]);
			if (len < 12)   //如果RTP包的长度小于12,说明包传输有错误
				return;
			if ((*d >> 6) != 2) /* RTP version number(rtp版本号必须为2) */
				return;
			if (*d & 0x20)
			{
				std::cout << "padding" << std::endl;
			}
			RTP_FIXED_HEADER *rtp_hdr = (RTP_FIXED_HEADER*)d;
			uint32_t ssrc = *((uint32_t*)(d + 8));
			int type = (*(d+1)) & 0x7f;
			ssrc = ntohl(ssrc);
			//std::cout << "seq:" << ntohs(rtp_hdr->seq_no) << 
			//	"--payload-->"<<type<<std::endl;
			std::cout<<"len is:"<<len<<"-->" << "the ssrc:""--" << ssrc << std::endl;

			s_hub * hub = func_context(ssrc);
			auto iter = hub->v_list.begin();
			while (iter != hub->v_list.end())
			{
				std::cout << "send data" << std::endl;
				bool ret = (*iter)->send_data_need_protect(
					1, 1, len, data_);

				if (!ret)

				{
					hub->v_list.erase(iter++);
				}
				else
					iter++;
			}
			do_receive_webrtc();
		});
#endif
#if 0
		boost::asio::spawn(v_context,
			[this](asio::yield_context yield)
		{
			std::cout << "ddd" << std::endl;
			udp::endpoint se;
			DEFINE_EC

			for (;;)
			{
				if (v_socket.is_open())
				{
					size_t len = v_socket.async_receive_from(boost::asio::buffer(data_, max_length),
						se, yield[ec]);
					RTP_FIXED_HEADER *rtp_hdr = (RTP_FIXED_HEADER*)&data_[0];
					uint32_t ssrc = rtp_hdr->ssrc;
					std::cout << "the ssrc:" << ssrc << std::endl;
					s_hub * hub = func_context(ssrc);
					auto iter = hub->v_list.begin();
					while (iter != hub->v_list.end())
					{
						bool ret = (*iter)->send_data_need_protect(
							1, 1, len, data_);

						if (!ret)
						{
							hub->v_list.erase(iter++);
						}
						else
							iter++;
					}
				}
				else
					v_context.post(yield);
			}
		});
#endif

	

	void do_send(std::size_t length);
	
	void do_judge_spspps(s_rtp_h264_context *ctx);
private:
	udp::endpoint sender_endpoint_;
	enum { max_length = 1500};
	char data_[max_length];
};

//int main(int argc, char* argv[])
//{
//	try
//	{
//		if (argc != 2)
//		{
//			std::cerr << "Usage: async_udp_echo_server <port>\n";
//			return 1;
//		}
//
//		boost::asio::io_context io_context;
//
//		server s(io_context, 6000);
//
//		io_context.run();
//	}
//	catch (std::exception& e)
//	{
//		std::cerr << "Exception: " << e.what() << "\n";
//	}
//
//	return 0;
//}

udp 接收细节

void c_udpserver::do_receive()
{
	udp::endpoint from;
	v_socket.async_receive_from(asio::buffer(data_, max_length),
		from, [this](boost::system::error_code ec, std::size_t len)
	{
		uint8_t *data = (uint8_t*)&(data_[0]);
		if (len < 12)   //if rtp payload length <12,the error occur
			return;
		if ((*data >> 6) != 2) /* RTP version number is 2*/
			return;
		if (*data & 0x20)
		{
			std::cout << "padding" << std::endl;
		}
		rtp_header *rtp = (rtp_header *)data;
		//big(net) to little(home)
		uint32_t ssrc = b2l(rtp->ssrc);
		
		uint8_t payload = rtp->type;
		s_rtp_h264_context *ctx = getctx(ssrc);
		ctx->v_payload = payload;
		switch (ctx->v_payload)
		{
		case 96:
			live_rtp_unpack_h264(ctx, data, (int)len);
			break;
		case 97:
			//live_rtp_unpack_aac(ctx, data, len);
			break;
		}

		//0 :no frame 1:one frame
		/*if (live_rtp_unpack(ctx, data, (int)len) == 1)
		{
			data_call(ctx);
		}*/
		do_receive();
	});
}

解包函数

// Depacketize one RTP packet carrying H.264 (RFC 6184) into ctx->v_buffer,
// prefixing each NAL with an Annex-B start code (00 00 00 01).
// Handles: single NAL units (types 1..23), STAP-A aggregates (type 24) and
// FU-A fragments (types 28/29). When the RTP marker bit is set the
// accumulated access unit is delivered via data_call() and ctx is reset.
// Returns 0 on success, -1 on a malformed/lost packet (ctx reset).
// NOTE(review): ctx->v_buffer bounds are assumed large enough for one access
// unit - confirm against s_rtp_h264_context's buffer size.
int c_udpserver::live_rtp_unpack_h264(s_rtp_h264_context *ctx, uint8_t *data, int inlen)
{
	int jump = 0;
	int len = 0;
	// strip the RTP header (and extensions) - returns the payload pointer
	uint8_t *buffer = rtp_payload(data, inlen, &len, ctx->v_last_ts, ctx->v_ssrc,
		ctx->v_seq, ctx->v_payload);
	if (len < 1) {
		printf("nalu is null\n");
		return -1;
	}
	// marker bit (second RTP header byte, MSB): set on the last packet of a frame
	uint8_t m = *(data + 1) & 0x80;
	/* H.264 depay: low 5 bits of the first payload byte are the NAL/packet type */
	uint8_t fragment = *buffer & 0x1F;

	/* Frame manipulation */
	if ((fragment > 0) && (fragment < 24)) {
		// single NAL unit packet (fits in one MTU); may be SPS/PPS
		switch (fragment)
		{
		case 0x07:  // SPS: stash it, do not append to the frame buffer
			ctx->set_sps(buffer, len);
			break;
		case 0x08:  // PPS: stash it; with SPS+PPS known, judge the stream
			ctx->set_pps(buffer, len);
			do_judge_spspps(ctx);
			break;
		default:
		{
			// append start code + NAL to the current access unit
			uint8_t *temp = ctx->v_buffer + ctx->v_len;
			*temp = 0;
			*(temp + 1) = 0;
			*(temp + 2) = 0;
			*(temp + 3) = 1;
			ctx->v_len += 4;
			memcpy(ctx->v_buffer + ctx->v_len, buffer, len);
			ctx->v_len += len;
		}
		break;
		}
	}
	else if (fragment == 24) {	/* STAP-A */
		/* De-aggregate the NALs and handle each of them separately.
		 * A single STAP-A often carries SPS + PPS + slice (ffmpeg does this). */
		buffer++;
		int tot = len - 1;
		uint16_t psize = 0;
		while (tot > 2) {  // fix: was 'tot > 0', which could read a 2-byte size past the end
			memcpy(&psize, buffer, 2);
			psize = b2l(psize);  // NAL size is big-endian on the wire
			buffer += 2;
			tot -= 2;
			if (psize > tot) {  // fix: truncated/malformed aggregate - bail out
				ctx->reset();
				return -1;
			}
			/* Now we have a single NAL */
			uint8_t nal = *(buffer) & 0x1F;
			// fix: is_add is now reset per NAL; previously it stayed 1 after
			// the first data NAL, so a following SPS/PPS was appended twice
			int is_add = 0;
			switch (nal)
			{
			case 0x07:
				ctx->set_sps(buffer, psize);
				break;
			case 0x08:
				ctx->set_pps(buffer, psize);
				do_judge_spspps(ctx);
				break;
			default:
				is_add = 1;
				break;
			}
			if (is_add)
			{
				// append start code + NAL to the current access unit
				uint8_t *temp = ctx->v_buffer + ctx->v_len;
				*temp = 0x00;
				*(temp + 1) = 0x00;
				*(temp + 2) = 0x00;
				*(temp + 3) = 0x01;
				ctx->v_len += 4;
				memcpy(ctx->v_buffer + ctx->v_len, buffer, psize);
				ctx->v_len += psize;
			}
			/* Go on */
			buffer += psize;
			tot -= psize;
		}
	}
	else if ((fragment == 28) || (fragment == 29)) {
		/* FU-A only; FU-B (29) is treated the same - fix me if FU-B appears */
		uint8_t indicator = *buffer;       // FU indicator: F/NRI + type 28
		uint8_t header = *(buffer + 1);    // FU header: S|E|R + original NAL type
		jump = 2;                          // skip indicator + header
		len -= 2;

		if ((header & 0x80) && !(header & 0x40)) {
			/* First fragment (S bit set): write start code and reconstruct
			 * the original NAL header from indicator NRI + FU header type */
			ctx->v_current_FU = 1;
			uint8_t *temp = ctx->v_buffer + ctx->v_len;
			*temp = 0x00;
			*(temp + 1) = 0x00;
			*(temp + 2) = 0x00;
			*(temp + 3) = 0x01;
			*(temp + 4) = (indicator & 0xE0) | (header & 0x1F);
			ctx->v_len += 5;
			memcpy(ctx->v_buffer + ctx->v_len, buffer + jump, len);
			ctx->v_len += len;
		}
		else if (!(header & 0x80) && !(header & 0x40))
		{
			/* Middle fragment: only valid while a FU is in progress */
			if (ctx->v_current_FU)
			{
				ctx->v_current_FU++;
				memcpy(ctx->v_buffer + ctx->v_len, buffer + jump, len);
				ctx->v_len += len;
			}
			else  // first fragment was lost (RTP packet loss) - drop the frame
			{
				ctx->reset();
				return -1;
			}
		}
		else if (!(header & 0x80) && (header & 0x40)) {
			/* Last fragment (E bit set) */
			if (ctx->v_current_FU) {
				ctx->v_current_FU = 0;
				memcpy(ctx->v_buffer + ctx->v_len, buffer + jump, len);
				ctx->v_len += len;
			}
			else  // lost the start of this FU - drop the frame
			{
				ctx->reset();
				return -1;
			}
		}
		else if ((header & 0x80) && (header & 0x40))
		{
			// S and E both set is illegal - corrupt packet / RTP loss
			cout << "error rtp" << endl;
			ctx->reset();
			return -1;
		}
	}

	if (m > 0)
	{
		// marker bit: the access unit is complete - deliver and start anew
		data_call(ctx);
		ctx->reset();
	}
	return 0;
}

转html5

三种方式:
1、一种是直接的es流
2、第二是转成fmp4
3 、转flv,使用flv.js去播放
三种都可以选择,
等待后续。。。

Spring Boot并不直接支持UDP服务,但我们可以使用Java原生的DatagramSocket类来实现UDP服务,并结合Spring Boot进行开发。 首先,我们可以创建一个UDP服务的类,例如: ```java @Component public class UDPServer { private DatagramSocket socket; private byte[] buffer = new byte[2048]; @PostConstruct public void start() throws IOException { socket = new DatagramSocket(1234); // 监听端口1234 new Thread(this::receive).start(); // 启动接收线程 } @PreDestroy public void stop() { if (socket != null) { socket.close(); } } private void receive() { DatagramPacket packet = new DatagramPacket(buffer, buffer.length); while (!socket.isClosed()) { try { socket.receive(packet); // 接收数据包 byte[] data = packet.getData(); // 获取数据 // 在这里进行数据格式化等操作 } catch (IOException e) { e.printStackTrace(); } } } } ``` 上述代码中,我们使用@PostConstruct注解在Spring Boot启动时启动UDP服务,使用@PreDestroy注解在Spring Boot关闭时关闭UDP服务。同时,我们启动一个线程接收UDP数据包,并在接收到数据包后进行数据格式化等操作。 当我们需要进行数据格式化时,可以使用Java原生的javax.sound.sampled包中的类进行操作。例如,我们可以将RTP流数据化为PCM数据: ```java private void receive() { DatagramPacket packet = new DatagramPacket(buffer, buffer.length); while (!socket.isClosed()) { try { socket.receive(packet); // 接收数据包 byte[] data = packet.getData(); // 获取数据 // 将RTP流数据化为PCM数据 AudioInputStream ais = new AudioInputStream(new ByteArrayInputStream(data), new AudioFormat(AudioFormat.Encoding.PCM_SIGNED, 8000, 16, 1, 2, 8000, false), data.length); ByteArrayOutputStream baos = new ByteArrayOutputStream(); AudioSystem.write(ais, AudioFileFormat.Type.WAVE, baos); byte[] pcmData = baos.toByteArray(); // 在这里进行其他操作 } catch (IOException e) { e.printStackTrace(); } } } ``` 上述代码中,我们使用AudioInputStream类将RTP流数据化为PCM数据,并使用ByteArrayOutputStream类将PCM数据写入字节数组中。 最后,我们可以在Spring Boot应用中注入UDPServer类,并通过它来接收UDP数据包并进行格式化等操作。 ```java @RestController @RequestMapping("/api") public class MyController { @Autowired private UDPServer udpServer; // 处理请求 } ```
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包

打赏作者

qianbo_insist

你的鼓励将是我创作的最大动力

¥1 ¥2 ¥4 ¥6 ¥10 ¥20
扫码支付:¥1
获取中
扫码支付

您的余额不足,请更换扫码支付或充值

打赏作者

实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值