1.还是基于h5stream的h5splayer.js学习,还是基于websocket。
与原来的不同,在onopen里面要发送一些open指令
ws.onopen = function () {
inc.innerHTML += '.. connection open<br/>';
var t = {
type: "open"
};
ws.send(JSON.stringify(t))
};
然后在onmessage回调里面会收到一些信息。
connecting to server ..
.. connection open
receive:{
"sdp" : "v=0\r\no=- 2612379890657624089 2 IN IP4 127.0.0.1\r\ns=-\r\nt=0 0\r\na=group:BUNDLE video\r\na=msid-semantic: WMS token1\r\nm=video 9 UDP/TLS/RTP/SAVPF 96 97 98 99 100 101 127\r\nc=IN IP4 0.0.0.0\r\na=rtcp:9 IN IP4 0.0.0.0\r\na=ice-ufrag:M7/v\r\na=ice-pwd:s+xvE6/hacBd5++3xPE+1qpT\r\na=ice-options:trickle\r\na=fingerprint:sha-256 44:D2:9C:A6:A3:9F:01:5C:AD:CE:86:E6:2F:E8:EF:C0:6D:26:68:F5:2E:6A:82:89:C8:87:74:42:C8:FC:7F:F5\r\na=setup:actpass\r\na=mid:video\r\na=extmap:2 urn:ietf:params:rtp-hdrext:toffset\r\na=extmap:3 http://www.webrtc.org/experiments/rtp-hdrext/abs-send-time\r\na=extmap:4 urn:3gpp:video-orientation\r\na=extmap:5 http://www.ietf.org/id/draft-holmer-rmcat-transport-wide-cc-extensions-01\r\na=extmap:6 http://www.webrtc.org/experiments/rtp-hdrext/playout-delay\r\na=extmap:7 http://www.webrtc.org/experiments/rtp-hdrext/video-content-type\r\na=extmap:8 http://www.webrtc.org/experiments/rtp-hdrext/video-timing\r\na=extmap:10 http://tools.ietf.org/html/draft-ietf-avtext-framemarking-07\r\na=sendonly\r\na=rtcp-mux\r\na=rtcp-rsize\r\na=rtpmap:96 H264/90000\r\na=rtcp-fb:96 goog-remb\r\na=rtcp-fb:96 transport-cc\r\na=rtcp-fb:96 ccm fir\r\na=rtcp-fb:96 nack\r\na=rtcp-fb:96 nack pli\r\na=fmtp:96 level-asymmetry-allowed=1;packetization-mode=1;profile-level-id=42001f\r\na=rtpmap:97 rtx/90000\r\na=fmtp:97 apt=96\r\na=rtpmap:98 H264/90000\r\na=rtcp-fb:98 goog-remb\r\na=rtcp-fb:98 transport-cc\r\na=rtcp-fb:98 ccm fir\r\na=rtcp-fb:98 nack\r\na=rtcp-fb:98 nack pli\r\na=fmtp:98 level-asymmetry-allowed=1;packetization-mode=1;profile-level-id=42e01f\r\na=rtpmap:99 rtx/90000\r\na=fmtp:99 apt=98\r\na=rtpmap:100 red/90000\r\na=rtpmap:101 rtx/90000\r\na=fmtp:101 apt=100\r\na=rtpmap:127 ulpfec/90000\r\na=ssrc-group:FID 3833096967 1073194061\r\na=ssrc:3833096967 cname:C1L/zoQ17dXx0SDt\r\na=ssrc:3833096967 msid:token1 video_label\r\na=ssrc:3833096967 mslabel:token1\r\na=ssrc:3833096967 label:video_label\r\na=ssrc:1073194061 cname:C1L/zoQ17dXx0SDt\r\na=ssrc:1073194061 
msid:token1 video_label\r\na=ssrc:1073194061 mslabel:token1\r\na=ssrc:1073194061 label:video_label\r\n",
"type" : "offer"
}
type:string length:2130
receive:{
"candidate" : "candidate:725387133 1 tcp 1518280447 169.254.250.71 50001 typ host tcptype passive generation 0 ufrag M7/v network-id 1",
"sdpMLineIndex" : 0,
"sdpMid" : "video",
"type" : "remoteice"
}
type:string length:215
receive:{
"candidate" : "candidate:3525817373 1 tcp 1518214911 192.168.6.12 50001 typ host tcptype passive generation 0 ufrag M7/v network-id 2",
"sdpMLineIndex" : 0,
"sdpMid" : "video",
"type" : "remoteice"
}
type:string length:214
receive:{
"candidate" : "candidate:2564955588 1 tcp 1518149375 192.168.1.16 50001 typ host tcptype passive generation 0 ufrag M7/v network-id 3",
"sdpMLineIndex" : 0,
"sdpMid" : "video",
"type" : "remoteice"
}
type:string length:214
从type来看,第一个是offer,后面三个是remoteice
对应不同的消息,做不同的处理
对于offer(请求),创建RTCPeerConnection,参考:https://developer.mozilla.org/zh-CN/docs/Web/API/RTCPeerConnection
一个基本的RTCPeerConnection使用需要协调本地机器以及远端机器的连接,它可以通过在两台机器间生成Session Description的数据交换协议来实现。呼叫方发送一个offer(请求),被呼叫方发出一个answer(应答)来回答请求。双方-呼叫方以及被呼叫方,最开始的时候都要建立他们各自的RTCPeerConnection对象。
setRemoteDescription=>onaddstream=>URL.createObjectURL,video.play()
createAnswer,setLocalDescription
代码 rtc.html,虽然功能还没有实现。
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.0 Transitional//EN">
<html>
<head>
<title>websocket client</title>
<script src="js/jquery-3.1.1.js"></script>
<script src="js/adapter.js"></script>
<script type="text/javascript">
var count=0;
// Convert a raw ArrayBuffer of image bytes into a base64 data URI that can be
// assigned directly to an <img>.src. The declared subtype (png vs jpg) does
// not matter for browser decoding, so "image/png" is used unconditionally.
// FIX: the original prefix string had been mangled by the blog platform into
// an image-hosting URL ("https://img-blog.csdnimg.cn/...png"); the intended
// value is the standard base64 data-URI prefix.
function getImageString(arrayBuffer) {
    var bytes = new Uint8Array(arrayBuffer);
    var binary = "";
    var len = bytes.byteLength;
    // Build a binary string one char per byte; btoa() only accepts latin-1 input.
    for (var i = 0; i < len; i++) {
        binary += String.fromCharCode(bytes[i]);
    }
    var src = "data:image/png;base64," + window.btoa(binary);
    return src;
}
// Asynchronously read a Blob and, once its bytes are available, render them
// into the <img> element with the given id via a base64 data string.
function loadImage(imgId, data) {
    var fileReader = new FileReader();
    fileReader.onload = function () {
        document.getElementById(imgId).src = getImageString(fileReader.result);
    };
    fileReader.readAsArrayBuffer(data);
}
// Handle the initial "offer" message from the h5stream signaling server:
// create the local RTCPeerConnection, apply the remote SDP, create and send
// an answer over the websocket, and attach the incoming media stream to the
// <video> element. The connection is stashed on `ws` so that the later
// "remoteice" messages (see processRemoteIce) can reach it.
function processRTCOffer(offer){
    var description=new RTCSessionDescription(offer);
    console.log("ProcessRTCOffer", offer);
    // NOTE(review): "M" is not a documented RTCConfiguration key —
    // presumably {iceServers:[]} was intended; verify against h5splayer.js.
    var config={M:[]};
    var option={optional:[{DtlsSrtpKeyAgreement: true}]};
    var connection=new RTCPeerConnection(config,option);
    connection.onicecandidate=function(event){// fired whenever the local ICE agent needs the signaling server to relay information to the remote peer
        console.log("------------- RTCPeerConnection.onicecandidate",event);
        // if (event.candidate) {
        // var candidate;
        // //console.log("onIceCandidate currentice", event.candidate);
        // candidate = event.candidate;
        // //console.log("onIceCandidate currentice",candidate, JSON.stringify(candidate));
        // var candidateObj = JSON.parse(JSON.stringify(candidate));
        // candidateObj.type = "remoteice";
        // //console.log("onIceCandidate currentice new", candidateObj, JSON.stringify(candidateObj));
        // //ws.send(JSON.stringify(candidateObj));//h5splayer.js sends this, but testing showed the video still arrives without it.
        // }
        // else {
        // console.log("End of candidates.");
        // }
    };
    connection.onaddstream=function(mediaStreamEvent){// triggered by setRemoteDescription below once the remote stream is announced
        console.log("------------- RTCPeerConnection.onaddstream",mediaStreamEvent);
        var stream=mediaStreamEvent.stream;
        var video=document.getElementById('video1');
        video.src=URL.createObjectURL(stream);
        // Requires adapter.js: the native URL.createObjectURL does not accept a MediaStream.
        // Without it: Uncaught TypeError: Failed to execute 'createObjectURL' on 'URL': No function was found that matched the signature provided.
        video.play();// start playback
        // May raise: Uncaught (in promise) DOMException —
        // the video must be muted, or playback must be started by a user gesture.
    };
    connection.oniceconnectionstatechange=function(event){
        console.log("------------- RTCPeerConnection.oniceconnectionstatechange state: " + connection.iceConnectionState)
    };
    connection.setRemoteDescription(description); // triggers onaddstream
    var offerOptions={mandatory:{offerToReceiveAudio: true, offerToReceiveVideo: true}};
    connection.createAnswer(offerOptions).then(function(answer){
        console.log("Create answer:",answer);// answer is an RTCSessionDescription
        connection.setLocalDescription(answer,function(){ // triggers onicecandidate
            console.log("ProcessRTCOffer createAnswer", answer);
            ws.send(JSON.stringify(answer));// reply to the signaling server with our answer
        });
    });
    // Expose the connection so processRemoteIce can add candidates to it.
    ws.connection=connection;
}
// Handle a "remoteice" message from the signaling server: wrap the received
// candidate in an RTCIceCandidate and register it with the RTCPeerConnection
// that processRTCOffer stashed on `ws`.
function processRemoteIce(remoteIce){
    var connection=ws.connection;
    try {
        var iceCandidate = new RTCIceCandidate({
            sdpMLineIndex: remoteIce.sdpMLineIndex, candidate: remoteIce.candidate
        });
        console.log("ProcessRemoteIce", iceCandidate);
        //console.log("Adding ICE candidate :" + JSON.stringify(iceCandidate));
        // When this page's RTCPeerConnection receives a new ICE candidate sent by
        // the remote peer over the signaling channel, it hands the candidate to
        // the local ICE agent via RTCPeerConnection.addIceCandidate().
        connection.addIceCandidate(iceCandidate, function () {
            console.log(" addIceCandidate OK")
        },
        function (error) {
            console.log("addIceCandidate error:" + JSON.stringify(error))
        })
    }
    catch (err) {
        alert("connect ProcessRemoteIce error: " + err)
    }
}
// Open the signaling websocket for the given ws:// url and drive the whole
// WebRTC setup from its callbacks: send {type:"open"} once connected, then
// dispatch incoming "offer" / "remoteice" messages to the handlers above.
var startPlay = function (url) {
    var inc = document.getElementById('incomming');
    var wsImpl = window.WebSocket || window.MozWebSocket;
    // NOTE(review): form/input are never used below — apparently left over
    // from an earlier demo page.
    var form = document.getElementById('sendForm');
    var input = document.getElementById('sendText');
    inc.innerHTML += "connecting to server ..<br/>";
    window.video=document.getElementById('video1');
    window.ws = new wsImpl(url);
    //To receive the image as ArrayBuffer on the client side, you have to specify the binaryType after creating a WebSocket:
    ws.binaryType = "arraybuffer";// with this set, no FileReader is needed to read Blob contents
    // when data is coming from the server, this method is called
    ws.onmessage = function (evt) {
        console.log('>>>>> websocket.onmessage');
        //console.log('onmessage',evt.data);
        var dataObj = JSON.parse(evt.data);
        if(dataObj.type =='offer'){
            processRTCOffer(dataObj);// first message
        }else if (dataObj.type=='remoteice'){
            processRemoteIce(dataObj);// the following 3 messages
        }
    };
    window.isVideoPlaying=false;
    // when the connection is established, this method is called
    ws.onopen = function () {
        console.log('>>>>> websocket.onopen',video);
        inc.innerHTML += '.. connection open<br/>';
        ws.send(JSON.stringify({type: "open"})); // the "open" command is mandatory — without it nothing proceeds and onmessage never receives the offer
    };
    // when the connection is closed, this method is called
    ws.onclose = function () {
        inc.innerHTML += '.. connection closed<br/>';
    }
}
// Wire up the play button once the page has loaded.
// FIX: the url is now read inside the click handler. The original read
// $('#inputUrl').val() once at load time, so any edit the user made to the
// input field AFTER page load was silently ignored.
window.onload = function(){
    $('#btnPlay').click(function(){
        var url = $('#inputUrl').val();
        console.log('play', url);
        var inc = document.getElementById('incomming');
        inc.innerHTML = "";
        startPlay(url);
    });
}
</script>
</head>
<body>
<input id='inputUrl' value='ws://localhost:8085/api/v1/h5srtcapi?token=token1&profile=main&session=null' style='width:100%;'/>
<button id='btnPlay'>play</button>
<pre id="incomming"></pre>
<video class="h5video1" id="video1" autoplay webkit-playsinline playsinline>
</video>
</body>
</html>
其中在video.src=URL.createObjectURL(stream);卡了一下,原来需要引用adapter.js,不然不支持MediaStream类型的参数,导致提示,URL没有createObjectURL函数。
adapter.js里面有个
// Excerpt quoted from adapter.js: shims URL.createObjectURL so it also
// accepts a MediaStream (detected via 'getTracks' in stream). For streams it
// returns a synthetic "polyblob:N" url registered in an internal map and logs
// a deprecation notice; anything else falls through to the native function.
// (newId / streams / utils / nativeCreateObjectURL are closure variables
// defined elsewhere in adapter.js — this snippet is not standalone.)
URL.createObjectURL = function(stream) {
    if ('getTracks' in stream) {
        var url = 'polyblob:' + (++newId);
        streams.set(url, stream);
        utils.deprecated('URL.createObjectURL(stream)',
            'elem.srcObject = stream');
        return url;
    }
    return nativeCreateObjectURL(stream);
};
结果是src="polyblob:1"
------------------------------------------------------------------------
然后在video.play卡住了。
然后不知怎么,多刷新几次,可以了,有一定概率可以播放。
此时:chrome://webrtc-internals/
还是那些代码。
----------------------------------------------------------------------------------------
Uncaught (in promise) DOMException问题处理
参考1:Uncaught (in promise) DOMException谷歌浏览器js报错分析
输入chrome://flags/#autoplay-policy后,我的和它的结果不一样
参考2:解决Chrome浏览器无法自动播放音频视频的问题,Uncaught (in promise) DOMException
解决办法 1.静音 2.让用户手动点击
感觉现在的方式都是让用户点击吧
界面上加个按钮,而之所以我的代码会出错,h5s的代码没这个问题是它原本就是让人点击再播放的。
window.onload = function(){
var url=$('#inputUrl').val();
$('#btnPlay').click(function(){
console.log('play',url);
var inc = document.getElementById('incomming');
inc.innerHTML="";
startPlay(url);
});
}
</script>
</head>
<body>
<input id='inputUrl' value='ws://localhost:8085/api/v1/h5srtcapi?token=token1&profile=main&session=null' style='width:100%;'/>
<button id='btnPlay'>play</button>
<pre id="incomming"></pre>
<video class="h5video1" id="video1" autoplay webkit-playsinline playsinline>
</video>
</body>
</html>
------------------------------------------------------------------------------------
总之就是通过几次请求(offer)、应答(answer),建立的webrtc连接。
最后wireshark里面发现有RTP协议的包
就是不知道是发给浏览器的呢,还是发给H5Stream的呢。
-----------------------------------------------------------------------------------
整理一下代码的过程,
1.通过websocket连接一个ws地址
window.ws = new wsImpl('ws://localhost:8085/api/v1/h5srtcapi?token=token1&profile=main&session=null');
//To receive the image as ArrayBuffer on the client side, you have to specify the binaryType after creating a WebSocket:
ws.binaryType = "arraybuffer";//有了这个,就不用FileReader读取Blob的内容了。
2.在onopen里面发送open消息
ws.onopen = function () {
console.log('>>>>> websocket.onopen',video);
inc.innerHTML += '.. connection open<br/>';
ws.send(JSON.stringify({type: "open"})); //必须发送open指令,不然后续的都无法开始,onmessage将收不到offer
};
3.在onmessage中处理收到的消息,建立webrtc通道
ws.onmessage = function (evt) {
console.log('>>>>> websocket.onmessage');
//console.log('onmessage',evt.data);
var dataObj = JSON.parse(evt.data);
if(dataObj.type =='offer'){
processRTCOffer(dataObj);//第一次
}else if (dataObj.type=='remoteice'){
processRemoteIce(dataObj);//后面3次
}
};
3.1 第一次返回的类型是offer,处理
创建RTCPeerConnection,
3.1.1.根据offer,setRemoteDescription,触发onaddstream,用mediastream设置video.src。
3.1.2.createAnswer,根据新的answer设置setLocalDescription,ws发送answer
3.1.3.触发onicecandidate,ws发送candidate(发现不发送也可以)
3.2 后面有3次返回remoteice,处理
3.2.1 RTCPeerConnection添加addIceCandidate
整个过程的事件在chrome://webrtc-internals/里面其实就列出来了。
--------------------------------------------------------------------
我的代码在Firefox中无法使用,H5Stream的可以。而IE则都不行。