SRS通过SrsSourceManager管理RTMP各种流资源,它管理着SrsSource类,而SrsSource是一个FLV-tag队列,rtmp根据ID查找全局SrsSource对象,获得音视频数据,因此搞懂数据如何在SrsSource中流转,就搞懂了srs如何将GB28181转化为RTMP。
一、GB何时创建SrsSource
1. 媒体服务器向SRS-UDP端口(默认5060)发送register注册信息。
2. srs收到注册信息后创建SrsGb28181SipSession对象,作为一个与对端交互的会话,随后回复OK。
3. SrsGb28181SipSession会话定时发送设备查询指令。
4. 对端回复OK
5. 针对步骤3发出的设备查询指令,对端返回详细设备列表信息,SrsGb28181SipSession会话收到后,创建与设备ID绑定的channel对象,其中就包含了SrsSource。
6. 回复对端Ok消息
7. srs发送播放请求,并附带接收端口
8. 对端返回SDP确认信息
9. 对端开始向接收端口发送媒体数据
如上分析,可以知道,当媒体服务器返回设备信息列表时【步骤5】,srs根据设备ID创建了全局资源,用于沟通国标协议与RTMP协议。以下是资源创建的关键代码:
// Initialize the muxer: set up the video/audio RTP jitter buffers and, when
// source publishing is enabled, fetch-or-create the global SrsSource that
// bridges GB28181 media into the RTMP pipeline.
//
// @param s the server, used as the ISrsSourceHandler for the source.
// @param r the request describing the stream url.
// @return srs_success, or a wrapped error from source creation/publish.
srs_error_t SrsGb28181RtmpMuxer::initialize(SrsServer *s, SrsRequest* r)
{
    srs_error_t err = srs_success;

    // Lazily create and configure both jitter buffers with identical
    // NACK settings; both are keyed by the channel id.
    SrsRtpJitterBuffer** buffers[] = { &jitter_buffer, &jitter_buffer_audio };
    for (int i = 0; i < (int)(sizeof(buffers) / sizeof(buffers[0])); i++) {
        SrsRtpJitterBuffer*& jb = *buffers[i];
        if (!jb) {
            jb = new SrsRtpJitterBuffer(channel_id);
        }
        jb->SetDecodeErrorMode(kSelectiveErrors);
        jb->SetNackMode(kNack, -1, -1);
        jb->SetNackSettings(250, 450, 0);
    }

    // Without source publishing there is nothing more to initialize.
    if (!source_publish) return err;

    req = r;
    server = s;

    // Fetch or create the global SrsSource for this stream url: this is the
    // object RTMP consumers read from.
    if ((err = _srs_sources->fetch_or_create(req, static_cast<ISrsSourceHandler*>(server), &source)) != srs_success) {
        return srs_error_wrap(err, "create source");
    }

    // TODO: ???
    // if (!source->can_publish(false)) {
    //     return srs_error_new(ERROR_GB28181_SESSION_IS_EXIST, "stream %s busy", req->get_stream_url().c_str());
    // }

    // Switch the source into publishing state so consumers can attach.
    if ((err = source->on_publish()) != srs_success) {
        return srs_error_wrap(err, "on publish");
    }

    return err;
}
// Initialize the muxer: set up the video/audio RTP jitter buffers and, when
// source publishing is enabled, fetch-or-create the global SrsSource.
// NOTE(review): this second listing of initialize() was truncated in the
// original text (the final return and closing brace were missing); it is
// restored here so the excerpt is complete.
srs_error_t SrsGb28181RtmpMuxer::initialize(SrsServer *s, SrsRequest* r)
{
    srs_error_t err = srs_success;

    // Create the video jitter buffer on first use.
    if (!jitter_buffer) {
        jitter_buffer = new SrsRtpJitterBuffer(channel_id);
    }
    jitter_buffer->SetDecodeErrorMode(kSelectiveErrors);
    jitter_buffer->SetNackMode(kNack, -1, -1);
    jitter_buffer->SetNackSettings(250, 450, 0);

    // Create the audio jitter buffer on first use, with the same settings.
    if (!jitter_buffer_audio) {
        jitter_buffer_audio = new SrsRtpJitterBuffer(channel_id);
    }
    jitter_buffer_audio->SetDecodeErrorMode(kSelectiveErrors);
    jitter_buffer_audio->SetNackMode(kNack, -1, -1);
    jitter_buffer_audio->SetNackSettings(250, 450, 0);

    // Without source publishing there is nothing more to initialize.
    if (!source_publish) return err;

    req = r;
    server = s;

    // Fetch or create the global SrsSource for this stream url: the bridge
    // between the GB28181 side and RTMP consumers.
    if ((err = _srs_sources->fetch_or_create(req, (ISrsSourceHandler*)server, &source)) != srs_success) {
        return srs_error_wrap(err, "create source");
    }

    // TODO: ???
    // if (!source->can_publish(false)) {
    //     return srs_error_new(ERROR_GB28181_SESSION_IS_EXIST, "stream %s busy", req->get_stream_url().c_str());
    // }

    // Switch the source into publishing state.
    if ((err = source->on_publish()) != srs_success) {
        return srs_error_wrap(err, "on publish");
    }

    return err;
}
二、接收RTP数据到SrsSource
这一部分会解释SRS如何处理国标RTP数据的,以及如何把处理好的数据转移到SrsSource对象中。
如前述流程7-9步所示,srs请求实时点播时会附带上接收端口,随后的RTP数据会被发送到此端口上。关于SRS接收RTP的流程,前一节已说明,这里不再赘述。现在来看看SRS通过on_udp_packet收到RTP数据后是如何处理的。
// Entry point for a received UDP datagram: route the raw RTP payload to the
// jitter-buffered handler when enabled in config, otherwise to the plain one.
srs_error_t SrsGb28181PsRtpProcessor::on_udp_packet(const sockaddr* from, const int fromlen, char* buf, int nb_buf)
{
    return config->jitterbuffer_enable
        ? on_rtp_packet_jitter(from, fromlen, buf, nb_buf)
        : on_rtp_packet(from, fromlen, buf, nb_buf);
}
on_rtp_packet_jitter与on_rtp_packet都是处理RTP的,功能大同小异,这里只分析on_rtp_packet_jitter。
// Handle one RTP datagram on the jitter-buffer path: decode the RTP header
// and hand the packet to the muxer bound to this channel. Malformed packets
// and bad peer addresses are tolerated (warn + drop) so one bad datagram
// never fails the whole UDP receive loop.
//
// @param from/fromlen the peer address of the datagram.
// @param buf/nb_buf   the raw RTP packet bytes.
// @return always srs_success on the drop paths; srs_success otherwise.
srs_error_t SrsGb28181PsRtpProcessor::on_rtp_packet_jitter(const sockaddr* from, const int fromlen, char* buf, int nb_buf)
{
    srs_error_t err = srs_success;
    pprint->elapse();

    // Resolve the numeric peer address/port, used for logging and enqueueing.
    char address_string[64];
    char port_string[16];
    if (getnameinfo(from, fromlen,
        (char*)&address_string, sizeof(address_string),
        (char*)&port_string, sizeof(port_string),
        NI_NUMERICHOST|NI_NUMERICSERV)){
        // Tolerate the bad address: drop this packet, keep the loop alive.
        srs_warn("gb28181 ps rtp: bad address");
        return srs_success;
    }
    int peer_port = atoi(port_string);

    if (true) {
        SrsBuffer stream(buf, nb_buf);
        SrsPsRtpPacket *pkt = new SrsPsRtpPacket();
        // Free the packet when this scope exits, on every path below.
        SrsAutoFree(SrsPsRtpPacket, pkt);

        if ((err = pkt->decode(&stream)) != srs_success) {
            // Tolerate the malformed packet: warn and drop.
            srs_warn("gb28181 ps rtp: decode error");
            srs_freep(err);
            return srs_success;
        }

        // The RTP marker bit flags the last packet of a frame.
        pkt->completed = pkt->marker;

        if (pprint->can_print()) {
            srs_trace("<- " SRS_CONSTS_LOG_GB28181_CASTER " gb28181: client_id %s, peer(%s, %d) ps rtp packet %dB, age=%d, vt=%d/%u, sts=%u/%u/%#x, paylod=%dB",
                channel_id.c_str(), address_string, peer_port, nb_buf, pprint->age(), pkt->version,
                pkt->payload_type, pkt->sequence_number, pkt->timestamp, pkt->ssrc,
                pkt->payload->length()
            );
        }

        // Hand the packet to the muxer bound to this channel/ssrc; the enqueue
        // presumably copies what it needs, since pkt is freed at scope exit
        // (same lifetime as the original code) — TODO confirm.
        SrsGb28181RtmpMuxer *muxer = fetch_rtmpmuxer(channel_id, pkt->ssrc);
        if (muxer){
            rtmpmuxer_enqueue_data(muxer, pkt->ssrc, peer_port, address_string, pkt);
        }
    }
    return err;
}
on_rtp_packet_jitter做了两件事,
1.解码RTP包
2.将解码后的数据插入缓存。
要说明的是,插入的缓存是在全局资源SrsGb28181RtmpMuxer中的,它就是第一章获取设备时创建的channel,其中包含了关键的SrsSource成员变量。也就是说,插入的缓存被处理后,会被全部转移到该成员对象中,等待rtmp请求。所以,我们接下来研究下SrsGb28181RtmpMuxer。
// Per-muxer consume loop (excerpt): repeatedly pulls complete PS frames out
// of the jitter buffer and feeds them to the PS demuxer. The listing below
// is truncated in the original article.
srs_error_t SrsGb28181RtmpMuxer::do_cycle()
{
srs_error_t err = srs_success;
// Record loop start as the initial receive/send timestamps, presumably for
// stream-timeout bookkeeping elsewhere — TODO confirm against full source.
recv_rtp_stream_time = srs_get_system_time();
send_rtmp_stream_time = srs_get_system_time();
uint32_t cur_timestamp = 0;
int buffer_size = 0;
bool keyframe = false;
//consume ps stream, and check status
while (true) {
pprint->elapse();
// Stop promptly when the coroutine is interrupted.
if ((err = trd->pull()) != srs_success) {
return srs_error_wrap(err, "gb28181 rtmp muxer cycle");
}
// Config is re-read every iteration, so runtime reloads take effect.
SrsGb28181Config config = gb28181_manger->get_gb28181_config();
if (config.jitterbuffer_enable){
// Once a complete frame is available, copy it into ps_buffer and
// hand it to the PS demuxer.
if(jitter_buffer->FoundFrame(cur_timestamp)){
jitter_buffer->GetFrame(&ps_buffer, ps_buflen, buffer_size, keyframe, cur_timestamp);
if (buffer_size > 0){
// Demux errors are tolerated (warn + continue) so one bad
// frame does not kill the whole muxer loop.
if ((err = ps_demixer->on_ps_stream(ps_buffer, buffer_size, cur_timestamp, 0)) != srs_success){
srs_warn("gb28181: demix ps stream error:%s", srs_error_desc(err).c_str());
srs_freep(err);
};
}
}
// (remainder of the loop omitted in the original excerpt)
.......
}
这是SrsGb28181RtmpMuxer内循环的一小段代码,on_rtp_packet_jitter插入的数据会在这里得到处理,每完整地取出一段PS帧,就被送入on_ps_stream,此函数会去除PS的同步码,最后通过rtmp_write_packet_by_source将处理后的数据送入SrsSource中。
// Deliver one demuxed audio/video frame directly into the SrsSource, without
// an internal rtmp client. Frames of any other type are ignored.
//
// @param type      SrsFrameTypeAudio or SrsFrameTypeVideo.
// @param timestamp frame timestamp; masked to 30 bits as RTMP requires.
// @param data/size the frame payload (copied by SrsCommonMessage::create).
// @return srs_success, or a wrapped error from message creation or delivery
//         (the original silently ignored these per its FIXME notes).
srs_error_t SrsGb28181RtmpMuxer::rtmp_write_packet_by_source(char type, uint32_t timestamp, char* data, int size)
{
    srs_error_t err = srs_success;

    send_rtmp_stream_time = srs_get_system_time();

    // Only audio and video frames are forwarded to the source.
    if (type != SrsFrameTypeAudio && type != SrsFrameTypeVideo) {
        return err;
    }

    //create a source that will process stream without the need for internal rtmpclient
    SrsMessageHeader header;
    header.message_type = (type == SrsFrameTypeAudio) ? RTMP_MSG_AudioMessage : RTMP_MSG_VideoMessage;
    // TODO: FIXME: Maybe the tbn is not 90k.
    header.timestamp = timestamp & 0x3fffffff;

    // Fix: the original named this "shared_video" even for audio frames.
    SrsCommonMessage* msg = new SrsCommonMessage();
    SrsAutoFree(SrsCommonMessage, msg);

    // Fix: check the errors the original marked "TODO: FIXME: Check error".
    if ((err = msg->create(&header, data, size)) != srs_success) {
        return srs_error_wrap(err, "create message");
    }

    if (type == SrsFrameTypeAudio) {
        if ((err = source->on_audio(msg)) != srs_success) {
            return srs_error_wrap(err, "source on audio");
        }
    } else {
        if ((err = source->on_video(msg)) != srs_success) {
            return srs_error_wrap(err, "source on video");
        }
    }

    return err;
}