什么是视频
了解媒体数据存储和应用的基础原理。
格式与内容
- 文件扩展名≈媒体封装格式(媒体容器类型)
- 媒体封装格式≠音视频编码格式(使用了谁家的编码器)
- 文件内容:
1) 头信息(格式、时长、帧率、码率、分辨率...)
2) 索引信息
3) 视频数据
4) 音频数据
5) 附加增强数据...
视频数据
- 显示器颜色呈现基于RGB(红绿蓝)颜色空间模型
- 视频领域大多基于YUV颜色空间做抽样存储
- 帧内预测&帧间预测复用,进一步有效地压缩数据
- P帧(前向预测帧)、B帧(双向预测帧)、I帧(关键帧,可独立解码,作为参考帧)
- 基于通用标准集N多技术于一身 --- 视频编码器
H.264(AVC)、H.265(HEVC)、VP8、VP9...
音频数据
- 声音:不同振幅&频率而产生的机械波;数字形式是一维波形
- 对自然中连续的声波采样,做数字化PCM存储
- 扬声器还原PCM(脉冲编码调制)数字信号为模拟音频信号
- 音频压缩基本算法:预测、变换
- 基于通用标准集N多技术于一身 --- 音频编码器
AAC、MP3...
传输协议
- 传统场景
- 流媒体(直播)
- HLS:苹果为利用现有CDN设施而发明的"流媒体"协议
- HTTP(S)-FLV:基于HTTP的流媒体协议
- RTMP、RTP/RTSP、TS、MMS...
- 点播传输
  - HTTP(S):通过Range方式或参数方式完成Seek
- 流媒体(直播)
  - Web端:HTTP(S)、WS(S)、P2P...
播放器原理
- 解协议(加载数据)
- 解封装(解复用)
- 解码
- 渲染
小结
本节介绍了:视频格式&内容容器、视音频编码、传输协议、播放器原理。
通过这些信息的了解,能大致掌握视频存储和应用的基本原理。
好玩的Web端API
了解通过Web端接口可以实现哪些方向的具体应用
判断浏览器端视频兼容情况
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width">
<title>JS Bin</title>
</head>
<body>
</body>
</html>
// Probe which container/codec combinations this browser claims to play,
// via HTMLMediaElement#canPlayType.
const probeEl = document.createElement("video");
// NOTE(review): the lowercase 'mp4' entry probes the *audio* MP4 container
// type — presumably intentional for the demo; confirm.
const types = {
'mp4': 'audio/mp4',
'MP4': 'video/mp4; codecs="avc1.42E01E, mp4a.40.2"',
'webm': 'video/webm; codecs="vp8, vorbis"',
'ogg': 'video/ogg; codecs="theora, vorbis"',
'm3u8': 'application/vnd.apple.mpegURL',
'ts': 'video/mp2t; codecs="avc1.42E01E,mp4a.40.2"'
};
for (const [key, type] of Object.entries(types)) {
  // canPlayType returns '' | 'maybe' | 'probably'; '' means unsupported.
  const verdict = probeEl.canPlayType(type) || '不支持';
  console.log(key + ': ' + verdict);
}
基于Video时间轴控制实现交互式视频
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width">
<title>video</title>
</head>
<body>
<video id="video" src="http://m.h5mkt.com/2017/biz/yaha_s01e01/resource/audio/liaotian.mp4">
您的浏览器不支持Video标签。
</video>
<p>
<button id="paly_pause">播放</button>
<button id="start" class="s1">跟妹子聊天</button>
<button id="step" class="s1">跳过跟妹子聊天</button>
<button id="dream" class="s2">跟陌生男聊聊梦想</button>
<button id="drink" class="s2">喝口饮料先</button>
</p>
</body>
</html>
// Interactive branching video: watch the timeline and swap the visible
// choice buttons as the story reaches each decision point.
let video = $('video');
video.ontimeupdate = () => {
  const { currentTime } = video;
  // Past 64s the story is in "act two": show the .s2 choices instead of .s1.
  const inActTwo = currentTime > 64;
  show(inActTwo ? '.s2' : '.s1');
  hide(inActTwo ? '.s1' : '.s2');
  // Pause inside the two one-second decision windows (64-65s, 113-114s)
  // so the viewer can pick a branch.
  const atFirstStop = currentTime > 64 && currentTime < 65;
  const atSecondStop = currentTime > 113 && currentTime < 114;
  if (atFirstStop || atSecondStop) {
    video.pause();
  }
};
// Play/pause toggle button; its label tracks the element's actual state.
// (The id typo "paly_pause" matches the markup — keep them in sync.)
let ppBtn = $('paly_pause');
video.onplay = () => {
  ppBtn.innerText = '暂停';
};
video.onpause = () => {
  ppBtn.innerText = '播放';
};
ppBtn.onclick = () => {
  if (video.paused) {
    video.play();
  } else {
    video.pause();
  }
};
// Each branch button seeks to its segment start and resumes playback.
const seekAndPlay = seconds => {
  video.currentTime = seconds;
  video.play();
};
$('start').onclick = () => { seekAndPlay(1); };
$('step').onclick = () => { seekAndPlay(60); };
$('dream').onclick = () => { seekAndPlay(83); };
$('drink').onclick = () => { seekAndPlay(116); };
// Act-two choices start hidden.
hide('.s2');
// Show every element matching `sel` (inline display).
function show(sel){
  for (const el of document.querySelectorAll(sel)) {
    el.style.display = 'inline';
  }
}
// Hide every element matching `sel`.
function hide(sel){
  for (const el of document.querySelectorAll(sel)) {
    el.style.display = 'none';
  }
}
// Tiny getElementById shorthand.
function $(id){
  return document.getElementById(id);
}
基于 FileReader API 播放本地文件
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width">
<title>本地文件应用</title>
</head>
<body>
<video controls></video>
<p>
MP4:<input type="file" accept="video/*">
</p>
</body>
</html>
// Wire the file input to the <video>: picking a local file loads it into
// the player as a data: URL via FileReader.
let iptFileEl = document.querySelector('input[type="file"]');
let videoEl = document.querySelector('video');
iptFileEl.onchange = e =>{
  let file = iptFileEl.files && iptFileEl.files[0];
  playFile(file);
};
// Read `file` as a data: URL and play it; clears the player when no file
// is given (e.g. the user cancelled the picker).
function playFile(file){
  if(file){
    let fileReader = new FileReader();
    // onload only fires on success (readyState is always DONE inside it),
    // so no status check is needed here.
    fileReader.onload = () => {
      videoEl.src = fileReader.result;
    };
    // BUG FIX: read failures never reach onload — the old `else` branch
    // there was unreachable; failures must be handled in onerror.
    fileReader.onerror = evt => {
      console.log('FileReader Error:', evt);
    };
    fileReader.readAsDataURL(file);
  }else{
    videoEl.src = '';
  }
}
基于 getUserMedia 调用摄像头或麦克风
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width">
<title>JS Bin</title>
</head>
<body>
<div>
<video autoplay></video>
</div>
<button id="play">play</button>
<button id="stop">stop</button>
<button id="sketch">sketch</button>
</body>
</html>
video{
display:block;
width:100%;
background:#000;
}
.sketch{
position: relative;
filter:grayscale(1) brightness(3) blur(1px);
overflow: hidden;
}
.sketch:after {
content: '';
position: absolute;
top: -50%;
left: -50%;
width: 200%;
height: 200%;
background:
linear-gradient(to top, transparent 3px, rgba(255, 255, 255, 0.51) 3px),
linear-gradient(to left, transparent 3px, rgba(255, 255, 255, 0.37) 3px);
background-size: 3px 4px;
transform: rotate(-45deg);
pointer-events: none;
}
// Promisified getUserMedia: prefers the modern navigator.mediaDevices API
// and falls back to the legacy prefixed callback APIs.
// options: MediaStreamConstraints, e.g. { audio: false, video: true }.
// Resolves with a MediaStream; rejects with the underlying error, or with a
// string message when no capture API is available.
const getUserMediaPromise = options => new Promise((resolve, reject) => {
  const nvgt = window.navigator;
  if(nvgt) {
    if(nvgt.mediaDevices && nvgt.mediaDevices.getUserMedia) {
      return nvgt.mediaDevices.getUserMedia(options).then(resolve, reject);
    }
    const getUserMedia = nvgt.getUserMedia || nvgt.webkitGetUserMedia || nvgt.mozGetUserMedia;
    if(getUserMedia) {
      // BUG FIX: the legacy API must be invoked with the navigator as
      // `this`; an unbound call throws "Illegal invocation" in browsers.
      return getUserMedia.call(nvgt, options, resolve, reject);
    }
  }
  reject('当前环境不支持获取媒体设备。');
});
// Camera demo wiring: #play opens the camera into the <video>, #stop ends
// the captured track, #sketch toggles the CSS "sketch" filter effect.
let streamTrack;
const video = document.querySelector('video');
document.querySelector('#play').onclick = () => {
  const constraints = { audio: false, video: true };
  getUserMediaPromise(constraints).then(
    stream => {
      video.srcObject = stream;
      // Keep the first track around so #stop can end the capture later.
      streamTrack = stream.getTracks()[0];
    },
    err => {
      console.log('getUserMedia error: [' + err.name + '] ' + err.message)
    }
  );
};
document.querySelector('#stop').onclick = () => {
  if (streamTrack) {
    streamTrack.stop();
  }
};
const box = document.querySelector('div');
document.querySelector('#sketch').onclick = () => {
  box.className = box.className === '' ? 'sketch' : '';
};
基于 getUserMedia、MediaRecorder 实现录像
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width">
<title>recorded</title>
</head>
<body>
<video id="preview" autoplay muted></video>
<video id="recorded" autoplay loop></video>
<div>
<button id="opencamera">Open camera</button>
<button id="closecamera" disabled>Close camera</button>
<br />
<button id="record" disabled>Start Recording</button>
<button id="stoprecord" disabled>Stop Recording</button>
<br />
<button id="play" disabled>Play</button>
<button id="download" disabled>Download</button>
</div>
</body>
</html>
video {
height: 232px;
margin: 0 12px 20px 0;
vertical-align: top;
width: calc(20em - 10px);
background: #ccc;
}
// Promisified getUserMedia: prefers the modern navigator.mediaDevices API
// and falls back to the legacy prefixed callback APIs.
// options: MediaStreamConstraints, e.g. { audio: false, video: true }.
// Resolves with a MediaStream; rejects with the underlying error, or with a
// string message when no capture API is available.
const getUserMediaPromise = options => new Promise((resolve, reject) => {
  const nvgt = window.navigator;
  if(nvgt) {
    if(nvgt.mediaDevices && nvgt.mediaDevices.getUserMedia) {
      return nvgt.mediaDevices.getUserMedia(options).then(resolve, reject);
    }
    const getUserMedia = nvgt.getUserMedia || nvgt.webkitGetUserMedia || nvgt.mozGetUserMedia;
    if(getUserMedia) {
      // BUG FIX: the legacy API must be invoked with the navigator as
      // `this`; an unbound call throws "Illegal invocation" in browsers.
      return getUserMedia.call(nvgt, options, resolve, reject);
    }
  }
  reject('当前环境不支持获取媒体设备。');
});
// Live camera preview element; the recording plays back in #recorded.
const video = document.querySelector('#preview');
// Active camera MediaStream; null when the camera is closed.
let cameraStream;
// Control buttons — their `disabled` flags encode the recorder state machine.
const opencameraBtn = document.querySelector('#opencamera');
const closecameraBtn = document.querySelector('#closecamera');
const recordBtn = document.querySelector('#record');
const stopRecordBtn = document.querySelector('#stoprecord');
const playBtn = document.querySelector('#play');
const downloadBtn = document.querySelector('#download');
// Open the camera and route its stream into the preview element; button
// states advance to "camera open, ready to record".
opencameraBtn.onclick = () => {
  return getUserMediaPromise({ audio: false, video: true }).then(
    stream => {
      cameraStream = video.srcObject = stream;
      opencameraBtn.disabled = true;
      closecameraBtn.disabled = false;
      recordBtn.disabled = false;
    },
    err => {
      console.log('getUserMedia error: [' + err.name + '] ' + err.message)
    }
  );
};
// Close the camera: end the capture track, reset the buttons, and finish
// any in-progress recording by reusing the stop-record handler directly.
closecameraBtn.onclick = () => {
  if (cameraStream) {
    cameraStream.getTracks()[0].stop();
  }
  cameraStream = null;
  opencameraBtn.disabled = false;
  closecameraBtn.disabled = true;
  stopRecordBtn.onclick();
};
// Recorder state: the active MediaRecorder and the chunks it has produced.
let mediaRecorder;
let recordedBlobs;
// Pick the first webm flavour this browser can record ('' = browser default).
const candidateTypes = ['video/webm;codecs=vp9', 'video/webm;codecs=vp8', 'video/webm', ''];
const mimeType = candidateTypes.find(candidate => MediaRecorder.isTypeSupported(candidate));
// console.log('mimeType', mimeType);
// Start recording the open camera stream into `recordedBlobs`.
recordBtn.onclick = () => {
  recordedBlobs = [];
  try {
    mediaRecorder = new MediaRecorder(cameraStream, { mimeType });
  } catch(e) {
    alert('Exception while creating MediaRecorder: ' + e + '. mimeType: ' + mimeType);
    return;
  }
  // Recording in progress: only "stop" is meaningful now.
  recordBtn.disabled = true;
  stopRecordBtn.disabled = false;
  playBtn.disabled = true;
  downloadBtn.disabled = true;
  mediaRecorder.onstop = () => {
    console.log('Recorder stopped');
  };
  mediaRecorder.ondataavailable = event => {
    const chunk = event.data;
    if (chunk && chunk.size > 0) {
      recordedBlobs.push(chunk);
    }
  };
  // Emit a data chunk (ondataavailable) every 20 ms.
  mediaRecorder.start(20);
};
// Playback element for the finished recording.
const recordedVideo = document.querySelector('#recorded');
// Stop recording and enable playback/download of the captured chunks.
stopRecordBtn.onclick = () => {
  if (mediaRecorder) {
    mediaRecorder.stop();
  }
  mediaRecorder = null;
  // console.log('Recorded Blobs: ', recordedBlobs);
  recordedVideo.controls = true;
  playBtn.disabled = false;
  downloadBtn.disabled = false;
  stopRecordBtn.disabled = true;
  // With the camera closed there is nothing left to record.
  if (!cameraStream) {
    recordBtn.disabled = true;
  }
};
// Join the recorded chunks into a single Blob and return an object URL
// for it (the caller is responsible for revoking the URL).
const getRecordedBlobUrl = () => {
  const containerType = mimeType.split(';')[0];
  const recording = new Blob(recordedBlobs, { type: containerType });
  return window.URL.createObjectURL(recording);
};
// Play the recording (the #recorded element loops it via its `loop` attr).
playBtn.onclick = () => {
  recordedVideo.src = getRecordedBlobUrl();
};
// Download the recording by clicking a transient <a download> link.
downloadBtn.onclick = () => {
  // BUG FIX: capture the object URL — the old code revoked an undefined
  // `url` (ReferenceError in the timeout) and leaked the object URL.
  const url = getRecordedBlobUrl();
  var a = document.createElement('a');
  a.style.display = 'none';
  a.href = url;
  a.download = 'test.webm';
  document.body.appendChild(a);
  a.click();
  setTimeout(function() {
    document.body.removeChild(a);
    window.URL.revokeObjectURL(url);
  }, 100);
}
基于MediaSource播放JS拉取的媒体数据
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width">
<title>JS Bin</title>
</head>
<body>
<video controls></video>
</body>
</html>
video{
width:100%;
display:block;
background:#000;
}
// The <video> element that the MediaSource below will feed.
const video = document.querySelector('video');
// Fetch `url` as an ArrayBuffer via XHR and pass the response to `cb`.
const fetchMp4 = (url, cb) => {
  const request = new XMLHttpRequest();
  request.open('get', url);
  request.responseType = 'arraybuffer';
  request.onload = () => {
    cb(request.response);
  };
  request.send();
};
const assetURL = 'https://nickdesaulniers.github.io/netfix/demo/frag_bunny.mp4';
const mimeCodec = 'video/mp4; codecs="avc1.42E01E, mp4a.40.2"';
// Create a dynamic media source and attach it to the <video> element.
const mediaSource = new MediaSource();
video.src = URL.createObjectURL(mediaSource);
mediaSource.addEventListener('sourceopen', function onSourceOpen() {
  const sourceBuffer = mediaSource.addSourceBuffer(mimeCodec);
  // Pull the media data.
  fetchMp4(assetURL, function onData(buf) {
    sourceBuffer.addEventListener('updateend', function onUpdateEnd() {
      // The whole stream has been delivered.
      mediaSource.endOfStream();
      // video.play();
    });
    // Feed the data to the video — note the entire MP4 is appended in one
    // shot here rather than in streamed segments.
    sourceBuffer.appendBuffer(buf);
  });
});
小结
本节通过具体的实例介绍了:视频播放控制、数据获取&采集、录制存储、创建动态媒体源播放。
本章节涉及内容配合网络存储、实时传输,可以试着实现好玩的应用,而且不限于点播或直播。
Web端点播直播&播放方案
点播直播的区别
- 应用流程
- 点播:创作者 => 上传 => 转码 => 存储 <=> CDN分发 <=> 观众
- 直播:创作者 => 推流 <=> 存储 <=> 转码 <=> CDN分发 <=> 观众
- 媒体类型的选择
  - HTTP(S)-MP4:点播服务
  - HTTP(S)-FLV:点播、直播
  - HTTP(S)-HLS:点播、直播(高延迟)
播放器解决方案
- 原生浏览器支持的
- 直接走原生Video播放
- 原生浏览器不支持的
  - 协议或容器类型不支持
    - JS解协议下载数据、解容器、重新封装,然后通过MSE喂给Video解码、渲染播放
      例如Web端播放FLV、HLS:http://chimee.org
  - 解码器不支持
    - JS下载数据,WASM 解容器、解码,通过 WebGL&WebAudio 渲染播放
      例如Web端播放HEVC编码视频:https://zyun.360.cn/developer/doc?did=QHWWPlayer
  - 有解密需求的
    - 参考前两条,在解容器之后对每帧数据启用解密逻辑。
小结
本节通过具体的实例介绍了:点播直播业务流程的不同,Web端播放器解决方案的差异。
目前来说Web端媒体选型,必须基于浏览器所支持的能力边界:如协议类型、容器类型、解码能力,亦或可自行转容器(HLS\FLV)、自行解码(WASM)的能力。
参考资料