Android OpenGLES + Camera1 相机预览

/**
 * Hook invoked after the viewport size has been applied; subclasses may override.
 */
public void onChangeAfter() {

}

/**
 * Preparation check before drawing.
 *
 * @return true when the render is ready to draw this frame (default: always ready)
 */
public boolean onReadyToDraw() {
    return true;
}

/**
 * Hook invoked right before drawing; subclasses may override (e.g. to update matrices).
 */
public void onDrawPre() {

}

/**
 * Clears the color buffer of the current framebuffer.
 */
public void onClear() {
    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
}

/**
 * Installs this render's shader program as part of the current rendering state.
 */
public void onUseProgram() {
    GLES20.glUseProgram(program);
}

/**

  • 初始化着色器各个位置
    */
    public void onInitLocation() {
    aPosLocation = GLES20.glGetAttribLocation(program, “aPos”);
    aCoordinateLocation = GLES20.glGetAttribLocation(program, “aCoordinate”);
    uSamplerLocation = GLES20.glGetUniformLocation(program, “uSampler”);
    }

/**
 * Binds the FBO and attaches its color texture, redirecting drawing into the
 * off-screen texture. No-op unless this render was configured with setBindFbo(true).
 */
public void onBindFbo() {
    if (!isBindFbo) {
        return;
    }
    GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, fboId);
    GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0,
            GLES20.GL_TEXTURE_2D, fboTextureId, 0);
    // Match the viewport to the render size so the FBO texture is fully covered.
    GLES20.glViewport(0, 0, width, height);
}

/**
 * Binds the VBO holding the vertex and texture coordinates.
 */
public void onBindVbo() {
    GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, vboId);
}

/**
 * Activates texture unit 0, binds the given 2D texture to it, and points the
 * sampler uniform at that unit.
 *
 * @param textureId the 2D texture to sample from this frame
 */
public void onActiveTexture(int textureId) {
    this.textureId = textureId;
    GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId);
    GLES20.glUniform1i(uSamplerLocation, 0);
}

/**
 * Enables the position and texture-coordinate vertex attribute arrays.
 */
public void onEnableVertexAttributeArray() {
    GLES20.glEnableVertexAttribArray(aPosLocation);
    GLES20.glEnableVertexAttribArray(aCoordinateLocation);
}

/**
 * Points the position attribute at the start of the bound VBO
 * (offset 0, tightly packed floats).
 */
public void onSetVertexData() {
    GLES20.glVertexAttribPointer(aPosLocation, vertexSize, GLES20.GL_FLOAT, false, vertexStride, 0);
}

/**
 * Points the texture-coordinate attribute into the bound VBO. The byte offset
 * skips past the vertex data (limit() floats * 4 bytes each).
 */
public void onSetCoordinateData() {
    GLES20.glVertexAttribPointer(aCoordinateLocation, coordinateSize, GLES20.GL_FLOAT, false, coordinateStride, vertexBuffer.limit() * 4);
}

/**
 * Hook for uploading additional uniforms/data; subclasses may override
 * (e.g. BaseOesRender uploads its transform matrices here).
 */
public void onSetOtherData() {

}

/**
 * Issues the draw call as a triangle strip over the configured vertex count.
 */
public void onDraw() {
    GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, vertexCount);
}

/**
 * Disables the vertex attribute arrays enabled in onEnableVertexAttributeArray().
 */
public void onDisableVertexAttributeArray() {
    GLES20.glDisableVertexAttribArray(aPosLocation);
    GLES20.glDisableVertexAttribArray(aCoordinateLocation);
}

/**
 * Unbinds texture, framebuffer and VBO (binds id 0 to each target),
 * restoring default GL binding state after the draw.
 */
public void onUnBind() {
    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
    GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
    GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, 0);
}

/**
 * Hook invoked after drawing completes; subclasses may override.
 */
public void onDrawAfter() {

}

/**
 * Deletes the given shader program.
 *
 * @param program program object name to delete
 */
public void onDeleteProgram(int program) {
    GLES20.glDeleteProgram(program);
}

/**
 * Deletes the given shader object.
 *
 * @param shader shader object name to delete
 */
public void onDeleteShader(int shader) {
    GLES20.glDeleteShader(shader);
}

/**
 * Deletes the given texture.
 *
 * @param textureId texture name to delete
 */
public void onDeleteTexture(int textureId) {
    GLES20.glDeleteTextures(1, new int[]{textureId}, 0);
}

/**
 * Deletes the given framebuffer object.
 *
 * @param fboId framebuffer name to delete
 */
public void onDeleteFbo(int fboId) {
    GLES20.glDeleteFramebuffers(1, new int[]{fboId}, 0);
}

/**
 * Deletes the given vertex buffer object.
 *
 * @param vboId buffer name to delete
 */
public void onDeleteVbo(int vboId) {
    GLES20.glDeleteBuffers(1, new int[]{vboId}, 0);
}

// ---------------------------------------------------------------------------
// Plain accessors for the render state. Pure boilerplate: each getter/setter
// reads or writes one field with no side effects.
// ---------------------------------------------------------------------------

public Context getContext() {
    return context;
}

public void setContext(Context context) {
    this.context = context;
}

public FloatBuffer getVertexBuffer() {
    return vertexBuffer;
}

public void setVertexBuffer(FloatBuffer vertexBuffer) {
    this.vertexBuffer = vertexBuffer;
}

public FloatBuffer getCoordinateBuffer() {
    return coordinateBuffer;
}

public void setCoordinateBuffer(FloatBuffer coordinateBuffer) {
    this.coordinateBuffer = coordinateBuffer;
}

public int getVertexSize() {
    return vertexSize;
}

public void setVertexSize(int vertexSize) {
    this.vertexSize = vertexSize;
}

public int getCoordinateSize() {
    return coordinateSize;
}

public void setCoordinateSize(int coordinateSize) {
    this.coordinateSize = coordinateSize;
}

public int getVertexStride() {
    return vertexStride;
}

public void setVertexStride(int vertexStride) {
    this.vertexStride = vertexStride;
}

public int getCoordinateStride() {
    return coordinateStride;
}

public void setCoordinateStride(int coordinateStride) {
    this.coordinateStride = coordinateStride;
}

public int getVertexCount() {
    return vertexCount;
}

public void setVertexCount(int vertexCount) {
    this.vertexCount = vertexCount;
}

public int getCoordinateCount() {
    return coordinateCount;
}

public void setCoordinateCount(int coordinateCount) {
    this.coordinateCount = coordinateCount;
}

public int getProgram() {
    return program;
}

public void setProgram(int program) {
    this.program = program;
}

public int getFboTextureId() {
    return fboTextureId;
}

public void setFboTextureId(int fboTextureId) {
    this.fboTextureId = fboTextureId;
}

public int getFboId() {
    return fboId;
}

public void setFboId(int fboId) {
    this.fboId = fboId;
}

public int getVboId() {
    return vboId;
}

public void setVboId(int vboId) {
    this.vboId = vboId;
}

public String getVertexFilename() {
    return vertexFilename;
}

public void setVertexFilename(String vertexFilename) {
    this.vertexFilename = vertexFilename;
}

public String getFragFilename() {
    return fragFilename;
}

public void setFragFilename(String fragFilename) {
    this.fragFilename = fragFilename;
}

public int getWidth() {
    return width;
}

public void setWidth(int width) {
    this.width = width;
}

public int getHeight() {
    return height;
}

public void setHeight(int height) {
    this.height = height;
}

public boolean isBindFbo() {
    return isBindFbo;
}

public void setBindFbo(boolean bindFbo) {
    isBindFbo = bindFbo;
}

public int getPosLocation() {
    return aPosLocation;
}

public void setPosLocation(int aPosLocation) {
    this.aPosLocation = aPosLocation;
}

public int getCoordinateLocation() {
    return aCoordinateLocation;
}

public void setCoordinateLocation(int aCoordinateLocation) {
    this.aCoordinateLocation = aCoordinateLocation;
}

public int getSamplerLocation() {
    return uSamplerLocation;
}

public void setSamplerLocation(int uSamplerLocation) {
    this.uSamplerLocation = uSamplerLocation;
}

public boolean isCreate() {
    return isCreate;
}

public void setCreate(boolean create) {
    isCreate = create;
}

public boolean isChange() {
    return isChange;
}

public void setChange(boolean change) {
    isChange = change;
}

public BaseRenderBean getRenderBean() {
    return renderBean;
}

public void setRenderBean(BaseRenderBean renderBean) {
    this.renderBean = renderBean;
}

// Convenience alias for setRenderBean; kept for API compatibility.
public void updateRenderBean(BaseRenderBean renderBean) {
    setRenderBean(renderBean);
}
} // end of class (declaration begins earlier in the file)

代码有点长,但是里面尽可能地考虑到了渲染和扩展的需求

顶点着色器 vertex.frag

attribute vec4 aPos;
attribute vec2 aCoordinate;
varying vec2 vCoordinate;
void main(){
vCoordinate = aCoordinate;
gl_Position = aPos;
}

片元着色器 frag.frag

precision mediump float;
uniform sampler2D uSampler;
varying vec2 vCoordinate;
void main(){
gl_FragColor = texture2D(uSampler, vCoordinate);
}

注意到,里面有用到一个工具类OpenGLESUtils和实体类BaseRenderBean具体就不贴出来了,可以到Github上查看

四、BaseOesRender

注意到,BaseRender里面绑定的纹理是2D纹理,而如果想实现相机预览,则需要使用Oes纹理,所以需要创建一个BaseOesRender为相机做渲染

package com.yk.media.opengles.render.base;

import android.content.Context;
import android.graphics.SurfaceTexture;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;

import com.yk.media.utils.OpenGLESUtils;

public class BaseOesRender extends BaseRender {
/**

  • oes纹理id
    */
    private int oesTextureId;

/**

  • 顶点变换矩阵位置
    */
    private int uMatrixLocation;

/**

  • 纹理变换矩阵位置
    */
    private int uOesMatrixLocation;

/**

  • oes尺寸
    */
    private int oesW = -1;
    private int oesH = -1;

/**

  • 顶点变换矩阵
    */
    private float[] mMVPMatrix = new float[16];

/**

  • 纹理变换矩阵
    */
    private float[] mOesMatrix = {
    1, 0, 0, 0,
    0, 1, 0, 0,
    0, 0, 1, 0,
    0, 0, 0, 1
    };

/**

  • 是否准备好绘制
    */
    private boolean isReadyToDraw = false;

/**

  • SurfaceTexture
    */
    private SurfaceTexture surfaceTexture;

/**

  • SurfaceTexture回调
    */
    private OnSurfaceTextureListener onSurfaceTextureListener;

public BaseOesRender(Context context) {
super(context, “render/base/oes/vertex.frag”, “render/base/oes/frag.frag”);
setBindFbo(true);
oesTextureId = OpenGLESUtils.getOesTexture();
}

@Override
public void onInitCoordinateBuffer() {
setCoordinateBuffer(OpenGLESUtils.getSquareCoordinateBuffer());
}

@Override
public boolean onReadyToDraw() {
if (!isReadyToDraw) {
if (onSurfaceTextureListener != null) {
if (surfaceTexture != null) {
surfaceTexture.release();
surfaceTexture = null;
}
surfaceTexture = new SurfaceTexture(oesTextureId);
onSurfaceTextureListener.onSurfaceTexture(surfaceTexture);
isReadyToDraw = true;
} else if (surfaceTexture != null) {
surfaceTexture.attachToGLContext(oesTextureId);
isReadyToDraw = true;
} else {
return false;
}
}
return oesW != -1 && oesH != -1;
}

@Override
public void onDrawPre() {
super.onDrawPre();
mMVPMatrix = OpenGLESUtils.getMatrix(getWidth(), getHeight(), oesW, oesH);

surfaceTexture.updateTexImage();

float[] oesMatrix = new float[16];
surfaceTexture.getTransformMatrix(oesMatrix);
if (!OpenGLESUtils.isIdentityM(oesMatrix)) {
mOesMatrix = oesMatrix;
}
}

@Override
public void onInitLocation() {
super.onInitLocation();
uMatrixLocation = GLES20.glGetUniformLocation(getProgram(), “uMatrix”);
uOesMatrixLocation = GLES20.glGetUniformLocation(getProgram(), “uOesMatrix”);
}

@Override
public void onActiveTexture(int textureId) {
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureId);
GLES20.glUniform1i(getSamplerLocation(), 0);
}

@Override
public void onSetOtherData() {
super.onSetOtherData();
GLES20.glUniformMatrix4fv(uMatrixLocation, 1, false, mMVPMatrix, 0);
GLES20.glUniformMatrix4fv(uOesMatrixLocation, 1, false, mOesMatrix, 0);
}

@Override
public void onRelease() {
super.onRelease();
onDeleteTexture(oesTextureId);
}

/**

  • 绘制
    */
    public void onDrawSelf() {
    super.onDraw(oesTextureId);
    }

/**

  • 设置oes尺寸
    */
    public void setOesSize(int width, int height) {
    oesW = width;
    oesH = height;
    }

/**

  • 设置SurfaceTexture
    */
    public void setSurfaceTexture(SurfaceTexture surfaceTexture) {
    this.surfaceTexture = surfaceTexture;
    isReadyToDraw = false;
    }

/**

  • 设置SurfaceTexture回调
    */
    public void setOnSurfaceTextureListener(OnSurfaceTextureListener onSurfaceTextureListener) {
    this.onSurfaceTextureListener = onSurfaceTextureListener;
    isReadyToDraw = false;
    }
    }

顶点着色器 vertex.frag

attribute vec4 aPos;
attribute vec4 aCoordinate;
uniform mat4 uMatrix;
uniform mat4 uOesMatrix;
varying vec2 vCoordinate;
void main(){
vCoordinate = (uOesMatrix * aCoordinate).xy;
gl_Position = uMatrix * aPos;
}

片元着色器 frag.frag

#extension GL_OES_EGL_image_external:require
precision mediump float;
uniform samplerExternalOES uSampler;
varying vec2 vCoordinate;
void main(){
gl_FragColor = texture2D(uSampler, vCoordinate);
}

BaseOesRender是继承BaseRender

加入了一些变量

  • oesW、oesH

用于计算矩阵

  • mMVPMatrix

通过width、height、oesW、oesH计算的矩阵

  • mOesMatrix

通过SurfaceTexture获取的矩阵

  • isReadyToDraw

是否准备好渲染

重写了一些方法

  • onInitCoordinateBuffer

BaseOesRender需要绑定FBO,所以纹理坐标需要设置好,不然会出现倒立

  • onReadyToDraw

此处做一些SurfaceTexture绑定纹理和回调

  • onDrawPre

此处做矩阵相关的计算

  • onInitLocation

获取矩阵的Location

  • onActiveTexture

前面有说到,预览相机使用的是SurfaceTexture,所以需要修改绑定为Oes纹理

  • onSetOtherData

此处传入矩阵的数据

  • onRelease

此处做一些释放资源的工作

OnSurfaceTextureListener

public interface OnSurfaceTextureListener {
    /**
     * Invoked when the render has created its SurfaceTexture, so the caller can
     * hand it to the camera.
     */
    void onSurfaceTexture(SurfaceTexture surfaceTexture);
}

onDrawSelf

public void onDrawSelf() {
super.onDraw(oesTextureId);
}

因为oesTextureId是内部创建的,故渲染的话,直接调用super.onDraw(oesTextureId),方便外部调用

五、OesRender

前面有说过,EGLTextureView的setRenderer只能调用一次,故才会创建BaseRender,现在来创建Renderer调度者

public class OesRender implements Renderer {
    private Context context;

    /** Input stage: renders the camera OES texture into an FBO. */
    private BaseOesRender inputRender;

    /** Output stage: draws the FBO texture to the screen. */
    private BaseRender outputRender;

    private int width;
    private int height;

    public OesRender(Context context) {
        this.context = context;
        inputRender = new BaseOesRender(context);
        outputRender = new BaseRender(context);
    }

    @Override
    public void onCreate() {
        inputRender.onCreate();
        outputRender.onCreate();
    }

    @Override
    public void onChange(int width, int height) {
        this.width = width;
        this.height = height;
        inputRender.onChange(width, height);
        outputRender.onChange(width, height);
    }

    @Override
    public void onDraw() {
        // Input renders into its FBO first; output then samples that FBO texture.
        inputRender.onDrawSelf();
        outputRender.onDraw(inputRender.getFboTextureId());
    }

    /** Forwards the camera frame size to the input render. */
    public void setOesSize(int width, int height) {
        inputRender.setOesSize(width, height);
    }

    public void setOnSurfaceTextureListener(OnSurfaceTextureListener onSurfaceTextureListener) {
        inputRender.setOnSurfaceTextureListener(onSurfaceTextureListener);
    }

    public void setSurfaceTexture(SurfaceTexture surfaceTexture) {
        inputRender.setSurfaceTexture(surfaceTexture);
    }

    /** Exposes the FBO texture so further effects can consume the camera frame. */
    public int getFboTextureId() {
        return inputRender.getFboTextureId();
    }
}

注意到,OesRender里面,有两个Render

  • BaseOesRender

渲染Oes纹理,并绑定了Fbo

  • BaseRender

做为输出显示

六、CameraView

完成了上面的工作,接下来就可以进入正题,如何在Android中,使用OpenGLES + Camera1实现相机预览?

既然我们已经完成了“容器”和Renderer的创建,那么下面的工作就比较轻松

还记得上一章的Android Camera1相机预览,不记得的同学可以点击链接进入查看,因为接下来需要用到上一章介绍的CameraManager

public class CameraView extends EGLTextureView implements OnCameraListener {
    private final CameraManager cameraManager = new CameraManager();

    private Activity activity;

    public OesRender render;

    public CameraView(Context context) {
        this(context, null);
    }

    public CameraView(Context context, AttributeSet attrs) {
        super(context, attrs);
        init(context);
    }

    /**
     * Sets up the GL context (ES 2.0), installs the renderer in on-demand mode,
     * and registers this view for camera callbacks.
     */
    private void init(Context context) {
        setEGLContextClientVersion(2);
        render = new OesRender(context);
        setRenderer(render);
        // Only render when requestRender() is called (frame-driven, not continuous).
        setRenderMode(EGLTextureView.RENDERMODE_WHEN_DIRTY);

        // NOTE(review): assumes the hosting Context is an Activity — confirm callers.
        activity = (Activity) context;
        cameraManager.addOnCameraListener(this);
    }

    /**
     * Opens the camera once the renderer's SurfaceTexture is available, and
     * requests a render for every new camera frame.
     */
    public void openCamera() {
        render.setOnSurfaceTextureListener(new OnSurfaceTextureListener() {
            @Override
            public void onSurfaceTexture(SurfaceTexture surfaceTexture) {
                surfaceTexture.setOnFrameAvailableListener(new SurfaceTexture.OnFrameAvailableListener() {
                    @Override
                    public void onFrameAvailable(SurfaceTexture surfaceTexture) {
                        requestRender();
                    }
                });
                cameraManager.openCamera(activity, surfaceTexture);
            }
        });
        // Kick off a render so the SurfaceTexture gets created on the GL thread.
        requestRender();
    }

    public void closeCamera() {
        cameraManager.closeCamera();
    }

    public void switchCamera() {
        cameraManager.switchCamera();
        openCamera();
    }

    public void switchCamera(int facing) {
        cameraManager.switchCamera(facing);
        openCamera();
    }

    public void addOnCameraListener(OnCameraListener onCameraListener) {
        cameraManager.addOnCameraListener(onCameraListener);
    }

    @Override
    public void onCameraOpened(Size cameraSize, int facing) {
        // Width/height deliberately swapped — presumably because the camera sensor
        // reports landscape dimensions for a portrait view; TODO confirm.
        render.setOesSize(cameraSize.getHeight(), cameraSize.getWidth());
        requestRender();
    }

    @Override
    public void onCameraClosed() {

    }

    @Override
    public void onCameraError(Exception e) {

    }

    public CameraManager getCameraManager() {
        return cameraManager;
    }

    public int getFboTextureId() {
        return render.getFboTextureId();
    }
}

新建CameraView,继承EGLTextureView

初始化

private void init(Context context) {
setEGLContextClientVersion(2);
render = new OesRender(context);
setRenderer(render);
setRenderMode(EGLTextureView.RENDERMODE_WHEN_DIRTY);
activity = (Activity) context;
cameraManager.addOnCameraListener(this);
}

此处做了一些初始化工作

  • setEGLContextClientVersion

此处传入的是2,表示使用的是OpenGLES 2.0

  • setRenderer

此处将上面创建的OesRender传入

  • setRenderMode

此处传入EGLTextureView.RENDERMODE_WHEN_DIRTY,表示只有在调用requestRender()方法时才会渲染

对应的,还有一个是EGLTextureView.RENDERMODE_CONTINUOUSLY,表示会不停的渲染

  • addOnCameraListener

设置相机的回调

打开相机

public void openCamera() {
render.setOnSurfaceTextureListener(new OnSurfaceTextureListener() {
@Override
public void onSurfaceTexture(SurfaceTexture surfaceTexture) {
surfaceTexture.setOnFrameAvailableListener(new SurfaceTexture.OnFrameAvailableListener() {
@Override
public void onFrameAvailable(SurfaceTexture surfaceTexture) {
requestRender();
}
});
cameraManager.openCamera(activity, surfaceTexture);
}
});
requestRender();
}

首先,设置SurfaceTexture的回调监听,并调用requestRender()方法进行渲染请求

在拿到SurfaceTexture后,需要对SurfaceTexture进行设置setOnFrameAvailableListener监听,表示当SurfaceTexture每帧数据到来的时候,都会调用requestRender()请求渲染

接下来,就是使用CameraManager的openCamera方法,传入activity和surfaceTexture进行打开摄像头的操作

关闭相机

public void closeCamera() {
cameraManager.closeCamera();
}

关闭相机比较简单,直接调用CameraManager的closeCamera()方法即可

切换相机

文末

我总结了一些Android核心知识点,以及一些最新的大厂面试题、知识脑图和视频资料解析。

以后的路也希望我们能一起走下去。(谢谢大家一直以来的支持)

部分资料一览:

  • 330页PDF Android学习核心笔记(内含8大板块)

  • Android学习的系统对应视频

  • Android进阶的系统对应学习资料

  • Android BAT大厂面试题(有解析)


《Android学习笔记总结+移动架构视频+大厂面试真题+项目实战源码》点击传送门,即可获取!

首先,设置SurfaceTexture的回调监听,并调用requestRender()方法进行渲染请求

在拿到SurfaceTexture后,需要对SurfaceTexture进行设置setOnFrameAvailableListener监听,表示当SurfaceTexture每帧数据到来的时候,都会调用requestRender()请求渲染

接下来,就是使用CameraManager的openCamera方法,传入activity和surfaceTexture进行打开摄像头的操作

关闭相机

public void closeCamera() {
cameraManager.closeCamera();
}

关闭相机比较简单,直接调用CameraManager的closeCamera()方法即可

切换相机

文末

我总结了一些Android核心知识点,以及一些最新的大厂面试题、知识脑图和视频资料解析。

以后的路也希望我们能一起走下去。(谢谢大家一直以来的支持)

部分资料一览:

  • 330页PDF Android学习核心笔记(内含8大板块)

[外链图片转存中…(img-zgpvoGQ7-1714550727406)]

[外链图片转存中…(img-RzZmIpHd-1714550727408)]

  • Android学习的系统对应视频

  • Android进阶的系统对应学习资料

[外链图片转存中…(img-LM4isAlM-1714550727410)]

  • Android BAT大厂面试题(有解析)

[外链图片转存中…(img-AGi3p9iq-1714550727411)]
《Android学习笔记总结+移动架构视频+大厂面试真题+项目实战源码》点击传送门,即可获取!

  • 29
    点赞
  • 28
    收藏
    觉得还不错? 一键收藏
  • 0
    评论

“相关推荐”对你有帮助么?

  • 非常没帮助
  • 没帮助
  • 一般
  • 有帮助
  • 非常有帮助
提交
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值