一、捕获本地媒体流getUserMedia
1.index.html
<!DOCTYPE html>
<html lang="zh-Hans">
<head>
  <meta charset="UTF-8">
  <meta name="viewport" content="width=device-width, initial-scale=1.0">
  <title>getUserMedia 示例</title>
</head>
<body>
  <div>
    <button id="start">开始录制</button>
    <button id="stop">停止录制</button>
  </div>
  <div>
    <video autoplay controls id="stream"></video>
  </div>
  <script>
    // Capture video only, no audio.
    const constraints = {audio: false, video: true};
    const startBtn = document.getElementById('start');
    const stopBtn = document.getElementById('stop');
    const video = document.getElementById('stream');

    startBtn.onclick = function() {
      // navigator.getUserMedia is deprecated and removed from the spec;
      // use the promise-based mediaDevices API instead. It is only exposed
      // on secure origins (HTTPS or localhost).
      navigator.mediaDevices.getUserMedia(constraints)
        .then(function(stream) {
          video.srcObject = stream;
          window.stream = stream; // expose for console debugging
        })
        .catch(function(err) {
          console.log(err);
        });
    };

    stopBtn.onclick = function() {
      // Stop every track so the camera is actually released
      // (pausing alone leaves the capture device active).
      if (window.stream) {
        window.stream.getTracks().forEach(function(track) {
          track.stop();
        });
      }
      video.pause();
    };
  </script>
</body>
</html>
部署本地服务器后,chrome中访问192.168.11.129:9050
会报错:navigator.getUserMedia is not a function。
原因是,Chrome 47以后,getUserMedia API只允许来自“安全可信”的客户端的视频音频请求,如HTTPS和本地的localhost。所以将访问地址改成localhost:9050即可。
二、同网页示例
例子来源:https://codelabs.developers.google.com/codelabs/webrtc-web/#4
1.index.html
<!DOCTYPE html>
<html lang="en">
<head>
  <meta charset="UTF-8">
  <title>Realtime communication with WebRTC</title>
  <style>
    body {
      font-family: sans-serif;
    }
    video {
      max-width: 100%;
      width: 320px;
    }
  </style>
</head>
<body>
  <h1>Realtime communication with WebRTC</h1>
  <video id="localVideo" autoplay playsinline></video>
  <video id="remoteVideo" autoplay playsinline></video>
  <div>
    <button id="startButton">开始</button>
    <button id="callButton">调用</button>
    <button id="hangupButton">挂断</button>
  </div>
  <!-- adapter.js normalizes WebRTC API differences across browsers -->
  <script src="https://webrtc.github.io/adapter/adapter-latest.js"></script>
  <!-- Load the page logic defined in section 2 (the original page had a
       stray </script> here and never loaded main.js at all). -->
  <script src="main.js"></script>
</body>
</html>
2.main.js
'use strict';
//log
// Logging helper: prints a message prefixed with the number of seconds
// elapsed since page load (3 decimal places), for timing diagnostics.
function trace(text) {
  const message = text.trim();
  const timestamp = (window.performance.now() / 1000).toFixed(3);
  console.log(timestamp, message);
}
// Two <video> elements: one shows the local camera stream, the other the
// remote peer's stream.
const localVideo = document.getElementById('localVideo');
const remoteVideo = document.getElementById('remoteVideo');
// Log intrinsic dimensions once each stream's metadata arrives.
localVideo.addEventListener('loadedmetadata', logVideoLoaded);
remoteVideo.addEventListener('loadedmetadata', logVideoLoaded);
// Bug fix: addEventListener takes the event name WITHOUT the "on" prefix.
// The original 'onresize' event name never fires; 'resize' is the event that
// signals remote media has started flowing (used to measure setup time).
remoteVideo.addEventListener('resize', logResizedVideo);
// Log a video element's id and intrinsic pixel dimensions; bound as the
// handler for 'loadedmetadata' (and reused when the remote video resizes).
function logVideoLoaded(event) {
  const {id, videoWidth, videoHeight} = event.target;
  trace(`${id} videoWidth: ${videoWidth}px, ` +
      `videoHeight: ${videoHeight}px.`);
}
// Handler for the remote video's resize event: log the new dimensions and,
// on the first resize after the call starts, report the call-setup time.
// NOTE(review): `startTime` is declared elsewhere in this file — presumably
// set when the call begins; confirm against the rest of main.js.
function logResizedVideo(event) {
  logVideoLoaded(event);
  if (!startTime) {
    return;
  }
  const elapsedTime = window.performance.now() - startTime;
  startTime = null; // report setup time only once
  trace(`Setup time: ${elapsedTime.toFixed(3)}ms.`);
}