利用 Camera、Canvas 以及 WebGL 的相关 API,在微信小程序中实时获取摄像头画面并渲染到画布上。
// Vertex shader: passes clip-space positions straight through and hands
// the texture coordinate to the fragment stage via a varying.
const vs = `
attribute vec3 aPos;
attribute vec2 aVertexTextureCoord;
varying highp vec2 vTextureCoord;
void main(void){
gl_Position = vec4(aPos, 1);
vTextureCoord = aVertexTextureCoord;
}
`
// Fragment shader: samples the camera-frame texture at the interpolated
// coordinate.
const fs = `
varying highp vec2 vTextureCoord;
uniform sampler2D uSampler;
void main(void) {
gl_FragColor = texture2D(uSampler, vTextureCoord);
}
`
// A full-screen quad in clip space (x, y, z per vertex), starting at the
// bottom-left corner and going counter-clockwise.
const vertex = [
-1, -1, 0.0,
1, -1, 0.0,
1, 1, 0.0,
-1, 1, 0.0
]
// Two triangles (0-1-2 and 0-2-3) that cover the quad above.
const vertexIndice = [
0, 1, 2,
0, 2, 3
]
// Texture coordinates matching the quad vertices one-to-one.
const texCoords = [
0.0, 0.0,
1.0, 0.0,
1.0, 1.0,
0.0, 1.0
]
/**
 * Compile a GLSL shader of the given type.
 *
 * @param {WebGLRenderingContext} gl - Context to compile against.
 * @param {string} src - GLSL source code.
 * @param {number} type - gl.VERTEX_SHADER or gl.FRAGMENT_SHADER.
 * @returns {WebGLShader|null} The compiled shader, or null when
 *   compilation fails (the invalid shader object is deleted so it
 *   cannot leak; the compile log is written to the console).
 */
function createShader(gl, src, type) {
  const shader = gl.createShader(type)
  gl.shaderSource(shader, src)
  gl.compileShader(shader)
  if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
    console.error('Error compiling shader: ' + gl.getShaderInfoLog(shader))
    // Don't hand back a broken shader object; release it instead.
    // (Linking with it would fail anyway, and the caller already checks
    // LINK_STATUS.)
    gl.deleteShader(shader)
    return null
  }
  return shader
}
// Module-level cache for the GL buffer objects created in createRenderer.
const buffers = {}
/**
 * Set up a WebGL pipeline that draws a full-screen textured quad and
 * return a per-frame render function.
 *
 * @param {object} canvas - Canvas node of a <canvas type="webgl">.
 * @param {number} width - Logical canvas width in px.
 * @param {number} height - Logical canvas height in px.
 * @param {function} [callback] - Invoked after every frame is drawn.
 * @returns {((arrayBuffer: Uint8Array, width: number, height: number) => void)|undefined}
 *   A renderer that uploads an RGBA frame as a texture and draws it, or
 *   undefined when the context/shaders/program could not be created.
 */
function createRenderer(canvas, width, height, callback) {
  const gl = canvas.getContext("webgl")
  if (!gl) {
    console.error('Unable to get webgl context.')
    return
  }
  // Size the drawing buffer in physical pixels so the image stays sharp
  // on high-DPI screens.
  const info = wx.getSystemInfoSync()
  gl.canvas.width = info.pixelRatio * width
  gl.canvas.height = info.pixelRatio * height
  // Match the viewport to the actual drawing buffer. drawingBufferWidth
  // is used instead of the requested size because the implementation may
  // not honor the exact dimensions asked for. See:
  // https://developer.mozilla.org/zh-CN/docs/Web/API/WebGLRenderingContext/viewport
  // https://developer.mozilla.org/zh-CN/docs/Web/API/WebGLRenderingContext/drawingBufferWidth
  gl.viewport(0, 0, gl.drawingBufferWidth, gl.drawingBufferHeight)
  // Compile and link the pass-through shader program.
  const vertexShader = createShader(gl, vs, gl.VERTEX_SHADER)
  const fragmentShader = createShader(gl, fs, gl.FRAGMENT_SHADER)
  if (!vertexShader || !fragmentShader) {
    // createShader already logged the compile error.
    return
  }
  const program = gl.createProgram()
  gl.attachShader(program, vertexShader)
  gl.attachShader(program, fragmentShader)
  gl.linkProgram(program)
  if (!gl.getProgramParameter(program, gl.LINK_STATUS)) {
    // Include the link log so failures are actually diagnosable.
    console.error('Unable to initialize the shader program: ' + gl.getProgramInfoLog(program))
    return
  }
  gl.useProgram(program)
  // The linked program keeps the compiled code; the shader objects
  // themselves are no longer needed and can be flagged for deletion.
  gl.deleteShader(vertexShader)
  gl.deleteShader(fragmentShader)
  // Texture that will receive each camera frame. NEAREST filtering and
  // CLAMP_TO_EDGE wrapping make non-power-of-two frame sizes legal.
  const texture = gl.createTexture()
  gl.activeTexture(gl.TEXTURE0)
  gl.bindTexture(gl.TEXTURE_2D, texture)
  // Camera frames arrive top-to-bottom; flip on upload so texcoord (0,0)
  // maps to the bottom-left as the quad expects.
  gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, true)
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST)
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST)
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE)
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE)
  gl.bindTexture(gl.TEXTURE_2D, null)
  // Quad positions (aPos): ARRAY_BUFFER stays bound while the attribute
  // pointer below is set, which is what associates them.
  buffers.vertexBuffer = gl.createBuffer()
  gl.bindBuffer(gl.ARRAY_BUFFER, buffers.vertexBuffer)
  gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(vertex), gl.STATIC_DRAW)
  buffers.vertexIndiceBuffer = gl.createBuffer()
  gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, buffers.vertexIndiceBuffer)
  gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint16Array(vertexIndice), gl.STATIC_DRAW)
  const aVertexPosition = gl.getAttribLocation(program, 'aPos')
  gl.vertexAttribPointer(aVertexPosition, 3, gl.FLOAT, false, 0, 0)
  gl.enableVertexAttribArray(aVertexPosition)
  // Texture coordinates (aVertexTextureCoord).
  buffers.trianglesTexCoordBuffer = gl.createBuffer()
  gl.bindBuffer(gl.ARRAY_BUFFER, buffers.trianglesTexCoordBuffer)
  gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(texCoords), gl.STATIC_DRAW)
  const vertexTexCoordAttribute = gl.getAttribLocation(program, "aVertexTextureCoord")
  gl.enableVertexAttribArray(vertexTexCoordAttribute)
  gl.vertexAttribPointer(vertexTexCoordAttribute, 2, gl.FLOAT, false, 0, 0)
  // The sampler reads from texture unit 0, bound above.
  const samplerUniform = gl.getUniformLocation(program, 'uSampler')
  gl.uniform1i(samplerUniform, 0)
  // Decide once whether the callback is invocable instead of throwing on
  // every frame if a non-function was passed.
  const afterDraw = typeof callback === 'function' ? callback : null
  // Per-frame renderer: upload the RGBA pixels and draw the quad.
  return (arrayBuffer, width, height) => {
    gl.bindTexture(gl.TEXTURE_2D, texture)
    gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, width, height, 0, gl.RGBA, gl.UNSIGNED_BYTE, arrayBuffer)
    gl.drawElements(gl.TRIANGLES, 6, gl.UNSIGNED_SHORT, 0)
    if (afterDraw) afterDraw()
  }
}
Page({
  data: {
    // Logical canvas size in px; mirrored into the WXML style binding.
    width: 288,
    height: 358,
  },
  onReady: function () {
    // Resolve the <canvas type="webgl"> node, then initialise rendering.
    const selector = wx.createSelectorQuery()
    selector.select('#webgl')
      .node(this.init.bind(this))
      .exec()
  },
  init(res) {
    const canvas = res.node
    const context = wx.createCameraContext()
    // BUGFIX: getCameraImgs reads this.data, so it must be bound to the
    // page instance — passing it bare would invoke it with
    // this === undefined and throw on the first rendered frame.
    const render = createRenderer(canvas, this.data.width, this.data.height, this.getCameraImgs.bind(this))
    if (!render || typeof render !== 'function') return
    // Stream camera frames in real time; frame.data is a raw RGBA
    // pixel buffer.
    const listener = context.onCameraFrame((frame) => {
      render(new Uint8Array(frame.data), frame.width, frame.height)
    })
    listener.start()
  },
  getCameraImgs() {
    // Export a centred square crop (46% of the canvas width) of the
    // rendered frame to a temp file.
    // NOTE(review): this runs once per camera frame, which is expensive —
    // consider throttling. Also, canvasId-based export targets the old
    // canvas API; for a type="webgl" node canvas the `canvas` option may
    // be required instead — verify on a real device.
    const xw = this.data.width * 0.46
    const xh = xw
    const l = (this.data.width - xw) / 2
    const t = (this.data.height - xh) / 2
    wx.canvasToTempFilePath({
      canvasId: 'canvas',
      x: l,
      y: t,
      width: xw,
      height: xh,
      destWidth: xw,
      destHeight: xh,
      success: (e) => {
        console.log(e.tempFilePath, 162)
      },
      fail: (e) => {
        console.log(e, 168)
      }
    })
  },
})
<!-- WebGL canvas the camera frames are drawn onto. Its CSS size is driven
     by the page data; the drawing-buffer size is set in createRenderer. -->
<canvas
id="webgl"
type="webgl"
canvas-id="canvas"
style="width: {{width}}px; height: {{height}}px;"
>
</canvas>
<!-- Camera preview; frame-size="small" keeps onCameraFrame buffers cheap. -->
<camera
device-position="back"
flash="off"
binderror="error"
style="width: 192px; height: 144px;"
frame-size="small"
></camera>
参考
https://mp.weixin.qq.com/s/4_p2bAcp3OkTQizceFvMkQ
https://developers.weixin.qq.com/miniprogram/dev/api/media/camera/CameraContext.html
https://developers.weixin.qq.com/miniprogram/dev/component/canvas.html
2020.3.31
要代码的同学有点多,没时间挨个发邮件了。