在前面的章节中,我们搭建了emcc环境,并使用emcc编译了ffmpeg,得到了网页开发中可以使用的js库。本章节,我们就来实现一个简单的播放器。
视频课程以及源码下载:
https://edu.csdn.net/course/detail/35615
章节列表:
- 搭建webassembly网页播放器(一)—CentOS 虚拟环境搭建
- 搭建webassembly网页播放器(二)—emcc环境搭建
- 搭建webassembly网页播放器(三)—emcc编译ffmpeg编译库
- 搭建webassembly网页播放器(四)—网页调用ffmpeg.js单元测试用例
- 搭建webassembly网页播放器(五)—网页播放器开发
- 搭建webassembly网页播放器(六)—websocket后台服务程序
播放效果如下:
c程序定义的接口如何在JS层调用
前端我们使用websocket从server端来不断的拉取h264数据流(Server端功能在第六章节进行介绍!)
// Connect to the websocket server that pushes the raw H264 stream
// (the server side is covered in chapter six).
var ws_url = "ws://192.168.5.6:6789"
let ptr; // decoder handle; corresponds to the HANDLE on the C/C++ side
var websock = new WebSocket(ws_url);
websock.binaryType = "arraybuffer"; // deliver frames as ArrayBuffer, not text
websock.onopen = function(event)
{
console.log("websocket 连接成功.")
// Initialise the ffmpeg H264 decoder on the C/C++ side and keep its handle.
ptr = Module._initDecoder();
var canvas_width = 640;
var canvas_height = 480;
// Create the WebGL renderer bound to the page's canvas element.
renderer = initialCanvas(video,canvas_width,canvas_height);
}
在我们连接上websocket server的时候,我们调用了初始化ffmpeg的函数:
Module._initDecoder();
在C/C++层这个定义如下:
/**
 * Initialize the H264 decoder.
 * EMSCRIPTEN_KEEPALIVE prevents emcc dead-code elimination so the symbol
 * stays callable from JS as Module._initDecoder().
 * @return decoder handle on success, NULL on failure
 */
LPJSDecodeHandle EMSCRIPTEN_KEEPALIVE initDecoder();
JS层调用C/C++接口的方式极为简单:Module._ + 函数名即可。
JS向C/C++传递数据
到了这一步,我们在JS层通过websocket协议获取到了H264数据,但是我们的解码是在C/C++ 层,因此我们需要把数据从JS层传递到C/C++ 层.
// One websocket message carries one chunk of the H264 stream as an ArrayBuffer.
var js_h264_buf = new Uint8Array(event.data);
var js_h264_len = js_h264_buf.length;
console.log("收到一帧数据,长度为:" ,js_h264_len);
var dst = Module._GetBuffer(ptr,js_h264_len); // ask C/C++ to allocate a buffer for the data; returns its address in the wasm heap
HEAPU8.set(js_h264_buf,dst); // copy the JS bytes into the wasm heap at that address, i.e. hand them over to C/C++
这里大家要理解句柄的概念,本质上在C/C++层,指针就是变量在内存中的地址. 通过指针我们就可以找到变量。
上述的流程是 :
- 调用接口函数 GetBuffer,告诉C/C++层分配好一块内存,用来接收JS传递过来的数据.
- 通过HEAPU8.set() 把js层的二进制数据传递到C/C++
HEAPU8 是 Emscripten 运行时导出的 wasm 线性内存视图(一个 Uint8Array),set() 是 Uint8Array 的标准方法;该视图的定义包含在 ffmpeg.js 中。
C/C++向JS传递数据
C/C++在解码成功后,得到了YUV数据,这个时候需要反向把数据从C/C++层传递给JS层.
// Decode the frame previously copied into the wasm heap; >= 0 means success.
if( Module._Decode(ptr,js_h264_len) >= 0 )
{
var width = Module._GetWidth(ptr);
var height = Module._GetHeight(ptr);
// YUV420p frame size: Y plane (w*h) plus U and V planes (w*h/4 each).
var renderlength = width * height * 3 / 2;
console.log("解码成功 , width:%d height:%d" ,width,height);
var yuv_wasm_data = Module._GetRenderData(ptr); // address of the decoded YUV data inside the wasm heap
// Copy the frame out of the C/C++ heap into a JS-owned buffer
// (new Uint8Array(...) of a subarray makes a real copy).
var RenderBuffer = new Uint8Array ( Module.HEAPU8.subarray(yuv_wasm_data,yuv_wasm_data + renderlength) );
render(RenderBuffer,width,height);
}
else
{
console.log("解码失败.");
}
- Module._GetRenderData(ptr) 通过接口函数,我们提取C/C++ YUV数据的句柄.
- Module.HEAPU8.subarray 实现数据从C/C++到JS层的传递.
- JS层收到数据后,直接调用webGL进行渲染,这种方式比获取RGB贴图快N倍。
webGL 渲染
webGL模块直接封装为了控件
//代码摘自:https://github.com/ivan-94/video-push/blob/master/yuv/index.html#L312
<script>
// Render-target canvas; `renderer` is assigned by initialCanvas() once the
// websocket connection opens.
const video = document.getElementById('glcanvas');
let renderer;
/**
 * WebGL-based YUV420p frame renderer.
 *
 * Each frame's Y/U/V planes are uploaded as three LUMINANCE textures and
 * converted to RGB inside the fragment shader, so no per-pixel conversion
 * happens on the JS side.
 */
class WebglScreen {
    /**
     * @param {HTMLCanvasElement} canvas - target canvas element.
     */
    constructor(canvas) {
        this.canvas = canvas;
        this.gl =
            canvas.getContext('webgl') ||
            canvas.getContext('experimental-webgl');
        this._init();
    }

    /** Compile shaders, link the program and create the three plane textures. */
    _init() {
        let gl = this.gl;
        if (!gl) {
            console.log('gl not support!');
            return;
        }
        // Plane rows are tightly packed — disable the default 4-byte row alignment.
        gl.pixelStorei(gl.UNPACK_ALIGNMENT, 1);
        // Vertex shader (GLSL): pass-through position and texture coordinate.
        let vertexShaderSource = `
            attribute lowp vec4 a_vertexPosition;
            attribute vec2 a_texturePosition;
            varying vec2 v_texCoord;
            void main() {
                gl_Position = a_vertexPosition;
                v_texCoord = a_texturePosition;
            }
        `;
        // Fragment shader (GLSL): sample the three planes and convert YUV -> RGB.
        let fragmentShaderSource = `
            precision lowp float;
            uniform sampler2D samplerY;
            uniform sampler2D samplerU;
            uniform sampler2D samplerV;
            varying vec2 v_texCoord;
            void main() {
                float r,g,b,y,u,v,fYmul;
                y = texture2D(samplerY, v_texCoord).r;
                u = texture2D(samplerU, v_texCoord).r;
                v = texture2D(samplerV, v_texCoord).r;
                fYmul = y * 1.1643828125;
                r = fYmul + 1.59602734375 * v - 0.870787598;
                g = fYmul - 0.39176171875 * u - 0.81296875 * v + 0.52959375;
                b = fYmul + 2.01723046875 * u - 1.081389160375;
                gl_FragColor = vec4(r, g, b, 1.0);
            }
        `;
        let vertexShader = this._compileShader(
            vertexShaderSource,
            gl.VERTEX_SHADER,
        );
        let fragmentShader = this._compileShader(
            fragmentShaderSource,
            gl.FRAGMENT_SHADER,
        );
        let program = this._createProgram(vertexShader, fragmentShader);
        this._initVertexBuffers(program);
        // Bind one texture unit per plane and wire the sampler uniforms.
        // The textures are stashed on the gl object so renderImg() can reach them.
        gl.activeTexture(gl.TEXTURE0);
        gl.y = this._createTexture();
        gl.uniform1i(gl.getUniformLocation(program, 'samplerY'), 0);
        gl.activeTexture(gl.TEXTURE1);
        gl.u = this._createTexture();
        gl.uniform1i(gl.getUniformLocation(program, 'samplerU'), 1);
        gl.activeTexture(gl.TEXTURE2);
        gl.v = this._createTexture();
        gl.uniform1i(gl.getUniformLocation(program, 'samplerV'), 2);
    }

    /**
     * Initialise the vertex buffers.
     * @param {WebGLProgram} program - linked shader program.
     */
    _initVertexBuffers(program) {
        let gl = this.gl;
        let vertexBuffer = gl.createBuffer();
        // Full-screen quad as a triangle strip (x, y, z per vertex).
        let vertexRectangle = new Float32Array([
            1.0, 1.0, 0.0,
            -1.0, 1.0, 0.0,
            1.0, -1.0, 0.0,
            -1.0, -1.0, 0.0,
        ]);
        gl.bindBuffer(gl.ARRAY_BUFFER, vertexBuffer);
        // Upload the vertex data.
        gl.bufferData(gl.ARRAY_BUFFER, vertexRectangle, gl.STATIC_DRAW);
        // Locate the vertex-position attribute.
        let vertexPositionAttribute = gl.getAttribLocation(
            program,
            'a_vertexPosition',
        );
        // Tell the GPU how to read vertices from the bound buffer.
        gl.vertexAttribPointer(
            vertexPositionAttribute,
            3,
            gl.FLOAT,
            false,
            0,
            0,
        );
        // Connect the attribute to its buffer object.
        gl.enableVertexAttribArray(vertexPositionAttribute);
        // Matching texture coordinates (u, v per vertex).
        let textureRectangle = new Float32Array([
            1.0, 0.0,
            0.0, 0.0,
            1.0, 1.0,
            0.0, 1.0,
        ]);
        let textureBuffer = gl.createBuffer();
        gl.bindBuffer(gl.ARRAY_BUFFER, textureBuffer);
        gl.bufferData(gl.ARRAY_BUFFER, textureRectangle, gl.STATIC_DRAW);
        let textureCoord = gl.getAttribLocation(program, 'a_texturePosition');
        gl.vertexAttribPointer(textureCoord, 2, gl.FLOAT, false, 0, 0);
        gl.enableVertexAttribArray(textureCoord);
    }

    /**
     * Create and compile a shader.
     * @param {string} shaderSource - GLSL source code.
     * @param {number} shaderType - VERTEX_SHADER or FRAGMENT_SHADER.
     * @return {WebGLShader|undefined} the shader, or undefined on compile failure.
     */
    _compileShader(shaderSource, shaderType) {
        let gl = this.gl;
        // Create the shader object.
        let shader = gl.createShader(shaderType);
        // Attach the source and compile it.
        gl.shaderSource(shader, shaderSource);
        gl.compileShader(shader);
        const success = gl.getShaderParameter(shader, gl.COMPILE_STATUS);
        if (!success) {
            let err = gl.getShaderInfoLog(shader);
            gl.deleteShader(shader);
            console.error('could not compile shader', err);
            return;
        }
        return shader;
    }

    /**
     * Link the two shaders into a program.
     * @param {WebGLShader} vertexShader - vertex shader.
     * @param {WebGLShader} fragmentShader - fragment shader.
     * @return {WebGLProgram|undefined} the program, or undefined on link failure.
     */
    _createProgram(vertexShader, fragmentShader) {
        const gl = this.gl;
        let program = gl.createProgram();
        // Attach the shaders and link.
        gl.attachShader(program, vertexShader);
        gl.attachShader(program, fragmentShader);
        gl.linkProgram(program);
        // Make the program part of the current rendering state.
        gl.useProgram(program);
        const success = gl.getProgramParameter(program, gl.LINK_STATUS);
        if (!success) {
            // BUGFIX: was console.err (not a function) and getShaderInfoLog on a
            // program object; a link failure would itself have thrown a TypeError.
            console.error(
                'program fail to link' + gl.getProgramInfoLog(program),
            );
            return;
        }
        return program;
    }

    /**
     * Create a texture configured for plane uploads.
     * @param {number} [filter] - min/mag filter, defaults to gl.LINEAR.
     * @return {WebGLTexture} the new texture.
     */
    _createTexture(filter = this.gl.LINEAR) {
        let gl = this.gl;
        let t = gl.createTexture();
        // Bind the texture to the TEXTURE_2D target.
        gl.bindTexture(gl.TEXTURE_2D, t);
        // Clamp so non-power-of-two plane sizes are renderable.
        gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
        gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
        // Filtering mode for minification/magnification.
        gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, filter);
        gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, filter);
        return t;
    }

    /**
     * Render one YUV420p frame.
     * @param {number} width - frame width in pixels.
     * @param {number} height - frame height in pixels.
     * @param {Uint8Array} data - packed Y, U, V planes (length = w*h*3/2).
     */
    renderImg(width, height, data) {
        let gl = this.gl;
        // Map normalized device coordinates to the full canvas.
        gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
        // Clear to transparent black.
        gl.clearColor(0, 0, 0, 0);
        gl.clear(gl.COLOR_BUFFER_BIT);
        // Plane offsets inside the packed buffer: Y, then U, then V.
        let uOffset = width * height;
        let vOffset = (width >> 1) * (height >> 1);
        gl.bindTexture(gl.TEXTURE_2D, gl.y);
        // Upload the Y plane (full resolution).
        gl.texImage2D(
            gl.TEXTURE_2D,
            0,
            gl.LUMINANCE,
            width,
            height,
            0,
            gl.LUMINANCE,
            gl.UNSIGNED_BYTE,
            data.subarray(0, uOffset),
        );
        gl.bindTexture(gl.TEXTURE_2D, gl.u);
        // Upload the U plane (half resolution in both dimensions).
        gl.texImage2D(
            gl.TEXTURE_2D,
            0,
            gl.LUMINANCE,
            width >> 1,
            height >> 1,
            0,
            gl.LUMINANCE,
            gl.UNSIGNED_BYTE,
            data.subarray(uOffset, uOffset + vOffset),
        );
        gl.bindTexture(gl.TEXTURE_2D, gl.v);
        // Upload the V plane (half resolution in both dimensions).
        gl.texImage2D(
            gl.TEXTURE_2D,
            0,
            gl.LUMINANCE,
            width >> 1,
            height >> 1,
            0,
            gl.LUMINANCE,
            gl.UNSIGNED_BYTE,
            data.subarray(uOffset + vOffset, data.length),
        );
        gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);
    }

    /**
     * Resize the canvas, preserving aspect ratio.
     * @param {number} width - source width.
     * @param {number} height - source height.
     * @param {number} maxWidth - maximum allowed canvas width.
     */
    setSize(width, height, maxWidth) {
        let canvasWidth = Math.min(maxWidth, width);
        this.canvas.width = canvasWidth;
        this.canvas.height = (canvasWidth * height) / width;
    }

    /** Clear all buffers; call when tearing the player down. */
    destroy() {
        const { gl } = this;
        gl.clear(
            gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT | gl.STENCIL_BUFFER_BIT,
        );
    }
} // end of webgl
/**
 * Size the target canvas and wrap it in a WebGL renderer.
 * @param {HTMLCanvasElement} canvas - canvas element to draw on.
 * @param {number} width - desired canvas width in pixels.
 * @param {number} height - desired canvas height in pixels.
 * @return {WebglScreen} renderer bound to the canvas.
 */
const initialCanvas = (canvas, width, height) => {
    Object.assign(canvas, { width, height });
    return new WebglScreen(canvas);
};
/**
 * Draw one decoded YUV frame; silently drops frames that arrive
 * before the renderer has been created.
 * @param {Uint8Array} buff - packed YUV420p frame data.
 * @param {number} width - frame width in pixels.
 * @param {number} height - frame height in pixels.
 */
const render = (buff, width, height) => {
    // `?.` skips the call exactly when renderer is null/undefined,
    // matching the original `renderer == null` guard.
    renderer?.renderImg(width, height, buff);
};
</script>