Media Server Core Implementation (Forwarding)


 The media service forwards incoming pushed streams to the players that consume them. For a WebRTC stream this is mainly a signaling exchange: once the connection is established, the stream is published and then forwarded to every subscribing peer.
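The signaling types used throughout this post are not defined here; the sketch below reconstructs them from how the fields are used in the code that follows, so the field types (especially Mode, Data, and Suuid) are assumptions rather than the project's real declarations.

// Sketch of the signaling types as inferred from their usage below.
// Field names match the code; types marked "assumed" are guesses.
type Message struct {
	SeqID              string                    // request sequence id, echoed back in the answer
	Suuid              string                    // subscriber/session uuid (assumed string)
	Describestreamname string                    // name of the requested stream
	Topicprefix        string                    // MQTT topic prefix
	Mode               int                       // MODE_OFFER / MODE_ANSWER (assumed int)
	Video              bool                      // request video
	Audio              bool                      // request audio
	ICEServers         []webrtc.ICEServer        // ICE servers for the peer connection
	RtcSession         webrtc.SessionDescription // offer or answer SDP
}

type Session struct {
	Type     int    // CMDMSG_OFFER / CMDMSG_ANSWER / CMDMSG_ERROR (assumed int)
	DeviceId string // device or server identity
	Msg      string // human-readable error text
	Data     []byte // encoded payload, e.g. the local SDP (assumed []byte)
}

type PublishMsg struct {
	WEB_SEQID string   // sequence id of the request being answered
	Topic     string   // MQTT topic to publish on
	Msg       *Session // payload
	BTodevice bool     // true when the message targets the device side
}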

 

 

 The player establishes a WebRTC PeerConnection with the server:

func createPeerConnection(msg Message) {
	log.Debug("Incoming CMD message Request")
	req := &Session{}
	req.Type = CMDMSG_ANSWER
	req.DeviceId = config.Config.Mqtt.CLIENTID
	m := media_interface.GetGlobalStreamM()
	s, err := m.GetStream(msg.Describestreamname)
	if err != nil || s == nil {
		resultstr := fmt.Sprintf("no stream %s", msg.Describestreamname)
		log.Debugf("error %s no stream %s", msg.SeqID, resultstr)
		req.Msg = resultstr
		req.Type = CMDMSG_ERROR
		answermsg := PublishMsg{
			WEB_SEQID: msg.SeqID,
			Topic:     TOPIC_ERROR,
			Msg:       req,
		}
		log.Debugf("error %s", msg.SeqID)
		SendMsg(answermsg)
		return
	}
	p := media_interface.Peer{}
	p.InitPeer(msg.Suuid, msg.SeqID, "", "")
	peerConnection, err := webrtc.NewPeerConnection(webrtc.Configuration{
		ICEServers:   msg.ICEServers,
		SDPSemantics: webrtc.SDPSemanticsUnifiedPlanWithFallback,
	})
	if err != nil {
		panic(err)
	}
	peerConnection.OnICEConnectionStateChange(func(connectionState webrtc.ICEConnectionState) {
		if connectionState == webrtc.ICEConnectionStateDisconnected {
			if err := peerConnection.Close(); err != nil {
				log.Debugf("peerConnection.Close error %v", err)
				return
			}
			log.Debug("peerConnection.Closed")
		} else if connectionState == webrtc.ICEConnectionStateConnected {
			log.Debug("peerConnection.Connected")
		}
	})
	if s.IsRtpStream() {
		// Forward the publisher's RTP tracks, mirroring each remote track's
		// RTPCodecCapability on the local side.
		var videoRTPTrack, audioRTPTrack *webrtc.TrackLocalStaticRTP
		rmtrack, err := s.GetRemoteTrack()
		streamname := msg.Describestreamname

		if err == nil && len(rmtrack) > 0 {
			for _, v := range rmtrack {
				if v.Kind().String() == "video" {
					videoRTPTrack, err = webrtc.NewTrackLocalStaticRTP(v.Codec().RTPCodecCapability, streamname+"-"+v.Kind().String()+v.ID(), streamname)
					if err != nil {
						panic(err)
					}
					p.AddVideoRTPTrack(videoRTPTrack)
					if _, err = peerConnection.AddTrack(videoRTPTrack); err != nil {
						panic(err)
					}
				} else if v.Kind().String() == "audio" {
					audioRTPTrack, err = webrtc.NewTrackLocalStaticRTP(v.Codec().RTPCodecCapability, streamname+"-"+v.Kind().String()+v.ID(), streamname)
					if err != nil {
						panic(err)
					}
					if _, err = peerConnection.AddTrack(audioRTPTrack); err != nil {
						panic(err)
					}
					p.AddAudioRTPTrack(audioRTPTrack)
				}
			}
		} else {
			// No remote track information is available; fall back to default
			// H264/Opus RTP tracks.
			videoRTPTrack, err = webrtc.NewTrackLocalStaticRTP(webrtc.RTPCodecCapability{MimeType: webrtc.MimeTypeH264}, "video", "pion")
			if err != nil {
				panic(err)
			}
			if _, err = peerConnection.AddTrack(videoRTPTrack); err != nil {
				panic(err)
			}
			p.AddVideoRTPTrack(videoRTPTrack)
			audioRTPTrack, err = webrtc.NewTrackLocalStaticRTP(webrtc.RTPCodecCapability{MimeType: webrtc.MimeTypeOpus}, "audio", "pion")
			if err != nil {
				panic(err)
			}
			if _, err = peerConnection.AddTrack(audioRTPTrack); err != nil {
				panic(err)
			}
			p.AddAudioRTPTrack(audioRTPTrack)
		}

		p.AddConnect(msg.Describestreamname, peerConnection)

	} else {
		// Sample-based stream: frames are fed in via WriteSample, so create
		// TrackLocalStaticSample tracks for H264 video and Opus audio.
		videoTrack, err := webrtc.NewTrackLocalStaticSample(webrtc.RTPCodecCapability{MimeType: webrtc.MimeTypeH264}, "video", "pion")
		if err != nil {
			panic(err)
		}
		if _, err = peerConnection.AddTrack(videoTrack); err != nil {
			panic(err)
		}

		audioTrack, err := webrtc.NewTrackLocalStaticSample(webrtc.RTPCodecCapability{MimeType: webrtc.MimeTypeOpus}, "audio", "pion")
		if err != nil {
			panic(err)
		}
		if _, err = peerConnection.AddTrack(audioTrack); err != nil {
			panic(err)
		}

		p.AddConnect(msg.Describestreamname, peerConnection)
		p.AddAudioTrack(audioTrack)
		p.AddVideoTrack(videoTrack)
	}
	if err = s.AddPeer(&p); err != nil {
		resultstr := fmt.Sprintf("failed to add peer to stream %s: %v", msg.Describestreamname, err)
		log.Debugf("error %s: %s", msg.SeqID, resultstr)
		req.Msg = resultstr
		req.Type = CMDMSG_ERROR
		answermsg := PublishMsg{
			WEB_SEQID: msg.SeqID,
			Topic:     TOPIC_ERROR,
			Msg:       req,
		}
		SendMsg(answermsg)
		return
	}

	offer := msg.RtcSession
	if err := peerConnection.SetRemoteDescription(offer); err != nil {
		panic(err)
	}

	gatherComplete := webrtc.GatheringCompletePromise(peerConnection)
	answer, err := peerConnection.CreateAnswer(nil)
	if err != nil {
		panic(err)
	} else if err = peerConnection.SetLocalDescription(answer); err != nil {
		panic(err)
	}
	<-gatherComplete

	req.Data = enc.Encode(*peerConnection.LocalDescription())
	answermsg := PublishMsg{
		WEB_SEQID: msg.SeqID,
		Topic:     TOPIC_ANSWER,
		Msg:       req,
	}
	log.Debug("answer ", answermsg)
	SendMsg(answermsg)

}
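createPeerConnection only wires the subscriber's local tracks into the Peer; the packet forwarding itself lives in the media_interface package and is not shown in this post. A minimal sketch of what that forwarding loop presumably looks like for the RTP path (forwardRTP is a hypothetical helper name, and the io and errors packages are assumed to be imported):

// forwardRTP copies RTP packets from the publisher's remote track into every
// subscriber's TrackLocalStaticRTP registered via AddVideoRTPTrack /
// AddAudioRTPTrack. Hypothetical sketch, not the project's actual code.
func forwardRTP(remote *webrtc.TrackRemote, locals []*webrtc.TrackLocalStaticRTP) {
	for {
		pkt, _, err := remote.ReadRTP()
		if err != nil {
			log.Debug("remote track closed: ", err)
			return
		}
		for _, l := range locals {
			// io.ErrClosedPipe just means this subscriber has disconnected.
			if err := l.WriteRTP(pkt); err != nil && !errors.Is(err, io.ErrClosedPipe) {
				log.Debug("forward to subscriber failed: ", err)
			}
		}
	}
}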

 The server performs signaling with the WebRTC device and establishes the connection:

func apiRtcRequest(msg *Message, sdp *webrtc.SessionDescription, device *livekitclient.Device, time_out time.Duration) (string, error) {
	req := &Session{}

	req.DeviceId = device.Deviceid
	msg.SeqID = config.Config.Mqtt.CLIENTID
	msg.RtcSession = *sdp
	sdp_topic := TOPIC_OFFER
	// Strip the trailing two characters of the subscription topic (e.g. a
	// "/#" wildcard) to get the bare topic prefix.
	msg.Topicprefix = config.Config.Mqtt.SUBTOPIC[:len(config.Config.Mqtt.SUBTOPIC)-2]
	if sdp.Type == webrtc.SDPTypeOffer {
		req.Type = CMDMSG_OFFER
		msg.Mode = MODE_OFFER
		sdp_topic = TOPIC_OFFER
	} else if sdp.Type == webrtc.SDPTypeAnswer {
		req.Type = CMDMSG_ANSWER
		msg.Mode = MODE_ANSWER
		sdp_topic = TOPIC_ANSWER
	}
	msg.Video = true
	msg.Audio = true
	msg.Describestreamname = device.Streamname
	req.Data = enc.Encode(msg)
	sdpmsg := PublishMsg{
		WEB_SEQID: config.Config.Mqtt.CLIENTID,
		Topic:     sdp_topic,
		Msg:       req,
		BTodevice: true,
	}
	log.Debugf("sdp signal %s topicprefix %s", msg.SeqID, msg.Topicprefix)
	SendMsg(sdpmsg)
	if sdp.Type == webrtc.SDPTypeOffer {
		// Wait for the device's answer (delivered on SDPCh) or time out.
		select {
		case <-time.After(time_out):
			log.Debugf("sdp wait timeout")
			return "", errors.New("timeout")
		case sdpanswer := <-SDPCh:
			log.Debugf("got sdp answer %v", sdpanswer)
			return sdpanswer.RtcSession.SDP, nil
		}
	}
	return "", nil

}
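apiRtcRequest blocks on SDPCh until the device's answer arrives; the MQTT receive path that feeds SDPCh is not shown in this post. A minimal sketch of that side, assuming the subscriber already decodes incoming answer payloads into a Message (handleDeviceAnswer is a hypothetical name):

// handleDeviceAnswer hands a decoded answer Message to the goroutine blocked
// in apiRtcRequest. Hypothetical sketch of the MQTT callback side.
func handleDeviceAnswer(answer *Message) {
	select {
	case SDPCh <- answer:
		// apiRtcRequest picks it up and continues SDP negotiation.
	default:
		log.Debug("no pending offer is waiting for this answer, dropping it")
	}
}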
func NewStream(host, room, display, savePath string, device *livekitclient.Device, msg *Message) (*Stream, error) {
	var err error
	pion_stream := &Stream{
		Host:          host,
		Room:          room,
		Display:       display,
		rtcUrl:        "webrtc://" + host + "/" + room + "/" + display,
		savePath:      savePath,
		hasAudioTrack: false,
		hasVideoTrack: false,
		videoFinish:   make(chan struct{}, 1),
		audioFinish:   make(chan struct{}, 1),
		device:        device,
	}
	pion_stream.ctx, pion_stream.cancel = context.WithCancel(context.Background())

	// Create the PeerConnection.
	pion_stream.pc, err = newPeerConnection(webrtc.Configuration{
		ICEServers: (*msg).ICEServers,
	})
	if err != nil {
		return nil, errors.Wrap(err, "failed to create PeerConnection")
	}

	// Receive only: the server consumes the device's audio and video.
	pion_stream.pc.AddTransceiverFromKind(webrtc.RTPCodecTypeAudio, webrtc.RTPTransceiverInit{
		Direction: webrtc.RTPTransceiverDirectionRecvonly,
	})
	pion_stream.pc.AddTransceiverFromKind(webrtc.RTPCodecTypeVideo, webrtc.RTPTransceiverInit{
		Direction: webrtc.RTPTransceiverDirectionRecvonly,
	})

	// Create the local offer.
	offer, err := pion_stream.pc.CreateOffer(nil)
	if err != nil {
		return nil, errors.Wrap(err, "failed to create local offer")
	}

	// Set the local SDP.
	if err = pion_stream.pc.SetLocalDescription(offer); err != nil {
		return nil, errors.Wrap(err, "failed to set local SDP")
	}
	// Send the offer to the device over MQTT and wait for its answer.
	timeout := 10 * time.Second
	answer, err := apiRtcRequest(msg, &offer, device, timeout)
	if err != nil {
		return nil, errors.Wrap(err, "SDP negotiation failed")
	}

	if err = pion_stream.pc.SetRemoteDescription(webrtc.SessionDescription{
		Type: webrtc.SDPTypeAnswer, SDP: answer,
	}); err != nil {
		return nil, errors.Wrap(err, "failed to set remote SDP")
	}
	var pstream *media_interface.Stream
	pion_stream.pc.OnTrack(func(track *webrtc.TrackRemote, receiver *webrtc.RTPReceiver) {
		log.Debug("OnTrack: new remote track ", track.Kind())

		if !bWrite {
			pstream, err = publishStream(track, device.Deviceid, device.Streamname)
			if err != nil {
				log.Debug("publishStream fail", err)
				return
			}
		}
		// onTrack blocks, forwarding packets until the track stops.
		if terr := pion_stream.onTrack(track, receiver, pstream); terr != nil {
			codec := track.Codec()
			logrus.Errorf("Handle track %v, pt=%v\nerr %v", codec.MimeType, codec.PayloadType, terr)
			pion_stream.cancel()
		}
		pion_stream.pc.Close()
	})

	pion_stream.pc.OnICEConnectionStateChange(func(state webrtc.ICEConnectionState) {
		logrus.Infof("ICE state %v", state)

		if state == webrtc.ICEConnectionStateFailed || state == webrtc.ICEConnectionStateClosed {
			if pion_stream.ctx.Err() != nil {
				return
			}

			logrus.Warnf("Close for ICE state %v", state)
			pion_stream.cancel()
			pion_stream.pc.Close()
			m := media_interface.GetGlobalStreamM()
			err := m.DeleteStream(device.Streamname)
			if err != nil {
				log.Debug("DeleteStream error", err)
			}
		}
	})
	key := "webrtc://" + host + "/" + room
	Streams.Store(key, pion_stream)
	// Block until both the audio and video forwarding loops have stopped or
	// the stream context is cancelled, then tear down the peer connection.
	for pion_stream.ctx.Err() == nil {
		if pion_stream.bAudioStop && pion_stream.bVideoStop {
			break
		}
		time.Sleep(time.Second)
	}
	pion_stream.pc.Close()
	time.Sleep(time.Second)
	log.Debug("forward task end")
	return pion_stream, nil
}
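NewStream blocks until both forwarding loops stop, so it is typically driven from its own goroutine inside whatever startup code handles the device session. A usage sketch with illustrative values (the host, room, save path, and STUN server are placeholders, not the project's configuration):

	// Illustrative only: pull a device's stream and forward it until it stops.
	go func() {
		device := &livekitclient.Device{
			Deviceid:   "device_1",
			Streamname: "testdevicestream",
		}
		msg := &Message{
			ICEServers: []webrtc.ICEServer{
				{URLs: []string{"stun:stun.l.google.com:19302"}}, // placeholder STUN server
			},
		}
		if _, err := NewStream("media.example.com", "room1", "device_1", "/tmp/record/", device, msg); err != nil {
			log.Debug("NewStream error: ", err)
		}
	}()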

When a WebRTC track arrives, publish the stream and forward it to every player subscribed to this stream:

func (pps *Stream) onTrack(track *webrtc.TrackRemote, receiver *webrtc.RTPReceiver, stream *media_interface.Stream) error {
	// Send a PLI on an interval so that the publisher is pushing a keyframe
	codec := track.Codec()

	trackDesc := fmt.Sprintf("channels=%v", codec.Channels)
	if track.Kind() == webrtc.RTPCodecTypeVideo {
		trackDesc = fmt.Sprintf("fmtp=%v", codec.SDPFmtpLine)
	}
	logrus.Infof("Got track %v, pt=%v tbn=%v, %v", codec.MimeType, codec.PayloadType, codec.ClockRate, trackDesc)
	if bWrite {
		var err error
		if codec.MimeType == "audio/opus" {
			var da media.Writer
			defer func() {
				if da != nil {
					da.Close()
				}
			}()
			audiopath := pps.savePath + pps.Display + "_audio.ogg"
			if da, err = oggwriter.New(audiopath, codec.ClockRate, codec.Channels); err != nil {
				return errors.Wrapf(err, "failed to create %s", audiopath)
			}
			pps.hasAudioTrack = true
			logrus.Infof("Open ogg writer file=%v , tbn=%v, channels=%v", audiopath, codec.ClockRate, codec.Channels)
			if err = pps.writeTrackToDisk(da, track); err != nil {
				return err
			}
			pps.audioFinish <- struct{}{}
		} else if codec.MimeType == "video/H264" {
			var dv_h264 media.Writer
			videopath := pps.savePath + pps.Display + "_video.h264"

			if dv_h264, err = h264writer.New(videopath); err != nil {
				return err
			}
			logrus.Infof("Open h264 writer file=%v", videopath)
			pps.hasVideoTrack = true
			if err = pps.writeTrackToDisk(dv_h264, track); err != nil {
				return err
			}
			pps.videoFinish <- struct{}{}
		} else {
			logrus.Warnf("Ignore track %v pt=%v", codec.MimeType, codec.PayloadType)
		}
	} else {
		if strings.EqualFold(codec.MimeType, webrtc.MimeTypeOpus) {
			// Forward audio packets to the published stream.
			b := make([]byte, 1500)
			pps.bAudioStop = false
			for !pps.bAudioStop {
				if !stream.IsRtpStream() {
					// Sample path: forward only the RTP payload.
					pkt, _, err := track.ReadRTP()
					if err != nil {
						log.Debug(err)
						pps.bAudioStop = true
						continue
					}
					stream.SendStreamAudioFromWebrtc(pkt.Payload)
				} else {
					// RTP path: forward the raw RTP packet bytes.
					n, _, readErr := track.Read(b)
					if readErr != nil {
						pps.bAudioStop = true
						continue
					}
					stream.SendStreamAudioFromWebrtc(b[:n])
				}
			}
		} else if strings.EqualFold(codec.MimeType, webrtc.MimeTypeH264) {
			// Forward video packets to the published stream.
			b := make([]byte, 1500)
			// Periodically request a keyframe (PLI) from the publisher so
			// that late-joining subscribers can start decoding quickly.
			go func() {
				ticker := time.NewTicker(time.Millisecond * 200)
				defer ticker.Stop()
				for range ticker.C {
					errSend := pps.pc.WriteRTCP([]rtcp.Packet{&rtcp.PictureLossIndication{MediaSSRC: uint32(track.SSRC())}})
					if errSend != nil {
						log.Debug(errSend)
						pps.bVideoStop = true
					}
					if pps.bVideoStop {
						break
					}
				}
			}()
			pps.bVideoStop = false
			for !pps.bVideoStop {
				if stream.IsRtpStream() {
					// RTP path: forward the raw RTP packet bytes.
					n, _, readErr := track.Read(b)
					if readErr != nil {
						log.Debug(readErr)
						pps.bVideoStop = true
						continue
					}
					stream.SendStreamVideo(b[:n])
				} else {
					// Sample path: depacketize the RTP payload into raw H264
					// before handing it to the stream.
					pkt, _, err := track.ReadRTP()
					if err != nil {
						log.Debug(err)
						pps.bVideoStop = true
						continue
					}
					h264packet := H264Packet{}
					datas, err := h264packet.GetRTPRawH264(pkt)
					if err != nil {
						log.Debug(err)
						pps.bVideoStop = true
						continue
					}
					stream.SendStreamVideo(datas)
				}
			}
		}
	}
	return nil
}
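SendStreamVideo and SendStreamAudioFromWebrtc belong to media_interface.Stream, whose implementation is not shown here. For the sample-track path created in createPeerConnection, the fan-out presumably writes each depacketized H264 access unit to every subscriber's TrackLocalStaticSample; a sketch under that assumption (the mu/peers/videoTrack internals and the fixed 1/30 s duration are assumptions):

// Sketch of the fan-out inside the media_interface package (hypothetical internals).
func (s *Stream) SendStreamVideo(data []byte) {
	s.mu.RLock()
	defer s.mu.RUnlock()
	for _, peer := range s.peers {
		if peer.videoTrack == nil {
			continue
		}
		// media.Sample comes from github.com/pion/webrtc/v3/pkg/media; the
		// fixed 30 fps duration is an assumption, the real code may derive
		// it from RTP timestamps instead.
		if err := peer.videoTrack.WriteSample(media.Sample{Data: data, Duration: time.Second / 30}); err != nil {
			log.Debug("write sample to subscriber failed: ", err)
		}
	}
}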

func (pps *Stream) writeTrackToDisk(w media.Writer, track *webrtc.TrackRemote) error {
	for pps.ctx.Err() == nil {
		pkt, _, err := track.ReadRTP()
		if err != nil {
			if pps.ctx.Err() != nil {
				return nil
			}
			log.Debug("writeTrackToDisk error ", err, w, track)
			return err
		}

		if w == nil {
			continue
		}

		if err := w.WriteRTP(pkt); err != nil {
			if len(pkt.Payload) <= 2 {
				continue
			}
			logrus.Warnf("Ignore write RTP %vB err %+v\n", len(pkt.Payload), err)
		} else {
			log.Debug("WriteRTP track pkt type->", pkt.PayloadType, "pkt", pkt)
		}
	}

	return pps.ctx.Err()
}

func (pps *Stream) Stop() bool {
	pps.cancel()
	if pps.hasAudioTrack {
		<-pps.audioFinish
	}
	if pps.hasVideoTrack {
		<-pps.videoFinish
	}

	if pps.hasVideoTrack && pps.hasAudioTrack {
		audiopath := pps.savePath + pps.Display + "_audio.ogg"
		videopath := pps.savePath + pps.Display + "_video.h264"

		cmd := exec.Command("ffmpeg",
			"-i",
			audiopath,
			"-i",
			videopath,
			pps.savePath+pps.Display+".ts",
			"-y")
		if err := cmd.Run(); err != nil {
			logrus.Errorf("拼接音频和视频失败:%v", err)
			return false
		}
		return true
	}
	return false
}
