Android基于共享texture(surfaceTexture)实现glSurfaceView高效camera预览

    处理和渲染视频需要较高的性能,而移动端的cpu性能有限,所以在处理和渲染视频的时候尽量要用gpu的能力,在移动设备里主要是opengles api的使用,今天主要想介绍一下通过共享texture(surfacetexture)来实现Android camera的高效预览。

    在本文中主要介绍Android camera预览,用opengl shader对camera数据进行简单的对比度处理,然后在glSurfaceView预览,其中避免直接操作yuv裸数据,通过共享texture完成数据传递,大致原理图见下:

    


    本方案的思路就是camera预览不采用直接到View和数据回调的方式,而是采用setPreviewTexture方式,将camera数据预览到surfaceTexture,surfaceTexture与opengl中的texture相关联,数据准备好后surfaceTexture的onFrameAvailable回调通知Render开始处理,通过updateTexImage将surfaceTexture数据更新到texture,然后进行处理,最后显示到glSurfaceView上。

    Demo资源下载:https://download.csdn.net/download/cmshao/10371209 

    Demo中实现了一个类MyGLSurfaceView,继承自GLSurfaceView,实现GLSurfaceView.Renderer和SurfaceTexture.OnFrameAvailableListener接口,GLSurfaceView rendermode为RENDERMODE_WHEN_DIRTY,靠onFrameAvailable中调用requestRender驱动,texture绑定需要GLES11Ext.GL_TEXTURE_EXTERNAL_OES类型,Fragment shader中需要声明#extension GL_OES_EGL_image_external : require。

    引用参考链接:

    https://developer.android.com/training/graphics/opengl/index.html

    https://github.com/muojie/PlayCamera_V3.0.0

  • 1
    点赞
  • 3
    收藏
    觉得还不错? 一键收藏
  • 1
    评论
要在 Android 应用中使用 GLSurfaceViewCamera2 API 实现预览,可以参考以下步骤: 1. 在你的 Android 项目中添加 GLSurfaceView 控件,并在应用程序中初始化它。 2. 通过 Camera2 API 打开相机,并将相机输出连接到 GLSurfaceView 控件上。 3. 在 GLSurfaceView 控件中实现自定义的 Renderer,并在 Renderer 中实现图像渲染和处理逻辑。 4. 将渲染结果显示在 GLSurfaceView 控件上。 以下是一个简单的代码示例,演示如何使用 GLSurfaceViewCamera2 API 实现预览: ```java public class PreviewActivity extends AppCompatActivity { private CameraManager cameraManager; private CameraDevice cameraDevice; private CameraCaptureSession captureSession; private CaptureRequest.Builder previewRequestBuilder; private CaptureRequest previewRequest; private Size previewSize; private SurfaceTexture surfaceTexture; private GLSurfaceView glSurfaceView; private CameraDevice.StateCallback stateCallback = new CameraDevice.StateCallback() { @Override public void onOpened(@NonNull CameraDevice camera) { cameraDevice = camera; createCameraPreviewSession(); } @Override public void onDisconnected(@NonNull CameraDevice camera) { cameraDevice.close(); cameraDevice = null; } @Override public void onError(@NonNull CameraDevice camera, int error) { cameraDevice.close(); cameraDevice = null; } }; private void openCamera() { cameraManager = (CameraManager) getSystemService(Context.CAMERA_SERVICE); try { String cameraId = cameraManager.getCameraIdList()[0]; CameraCharacteristics characteristics = cameraManager.getCameraCharacteristics(cameraId); StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP); previewSize = map.getOutputSizes(SurfaceTexture.class)[0]; surfaceTexture.setDefaultBufferSize(previewSize.getWidth(), previewSize.getHeight()); Surface previewSurface = new Surface(surfaceTexture); previewRequestBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); previewRequestBuilder.addTarget(previewSurface); cameraDevice.createCaptureSession(Arrays.asList(previewSurface), new CameraCaptureSession.StateCallback() { @Override public void onConfigured(@NonNull CameraCaptureSession session) 
{ captureSession = session; updatePreview(); } @Override public void onConfigureFailed(@NonNull CameraCaptureSession session) { } }, null); } catch (CameraAccessException e) { e.printStackTrace(); } } private void createCameraPreviewSession() { try { surfaceTexture = glSurfaceView.getSurfaceTexture(); surfaceTexture.setDefaultBufferSize(previewSize.getWidth(), previewSize.getHeight()); openCamera(); } catch (CameraAccessException e) { e.printStackTrace(); } } private void updatePreview() { previewRequestBuilder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO); previewRequest = previewRequestBuilder.build(); try { captureSession.setRepeatingRequest(previewRequest, null, null); } catch (CameraAccessException e) { e.printStackTrace(); } } @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); glSurfaceView = new GLSurfaceView(this); glSurfaceView.setEGLContextClientVersion(2); glSurfaceView.setRenderer(new PreviewRenderer()); setContentView(glSurfaceView); } @Override protected void onResume() { super.onResume(); if (glSurfaceView != null) { glSurfaceView.onResume(); } if (cameraDevice == null) { try { cameraManager.openCamera(cameraManager.getCameraIdList()[0], stateCallback, null); } catch (CameraAccessException e) { e.printStackTrace(); } } } @Override protected void onPause() { if (glSurfaceView != null) { glSurfaceView.onPause(); } if (cameraDevice != null) { cameraDevice.close(); cameraDevice = null; } super.onPause(); } private class PreviewRenderer implements GLSurfaceView.Renderer { private final float[] vertexData = { -1f, -1f, 1f, -1f, -1f, 1f, 1f, 1f }; private final float[] textureData = { 0f, 1f, 1f, 1f, 0f, 0f, 1f, 0f }; private int textureId; private int program; private int aPositionLocation; private int aTextureLocation; private int uTextureMatrixLocation; @Override public void onSurfaceCreated(GL10 gl, EGLConfig config) { textureId = createTexture(); program = createProgram(); 
aPositionLocation = glGetAttribLocation(program, "aPosition"); aTextureLocation = glGetAttribLocation(program, "aTextureCoord"); uTextureMatrixLocation = glGetUniformLocation(program, "uTextureMatrix"); glClearColor(0f, 0f, 0f, 0f); } @Override public void onSurfaceChanged(GL10 gl, int width, int height) { glViewport(0, 0, width, height); Matrix.scaleM(textureMatrix, 0, 1f, -1f, 1f); Matrix.translateM(textureMatrix, 0, 0f, -1f, 0f); Matrix.rotateM(textureMatrix, 0, 90f, 0f, 0f, 1f); } @Override public void onDrawFrame(GL10 gl) { glClear(GL_COLOR_BUFFER_BIT); glUseProgram(program); glEnableVertexAttribArray(aPositionLocation); glVertexAttribPointer(aPositionLocation, 2, GL_FLOAT, false, 0, vertexBuffer); glEnableVertexAttribArray(aTextureLocation); glVertexAttribPointer(aTextureLocation, 2, GL_FLOAT, false, 0, textureBuffer); glUniformMatrix4fv(uTextureMatrixLocation, 1, false, textureMatrix, 0); glDrawArrays(GL_TRIANGLE_STRIP, 0, 4); glDisableVertexAttribArray(aPositionLocation); glDisableVertexAttribArray(aTextureLocation); } private int createTexture() { int[] textures = new int[1]; glGenTextures(1, textures, 0); int textureId = textures[0]; glBindTexture(GL_TEXTURE_EXTERNAL_OES, textureId); glTexParameterf(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MIN_FILTER, GL_NEAREST); glTexParameterf(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MAG_FILTER, GL_NEAREST); glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); return textureId; } private int createProgram() { String vertexShaderCode = "attribute vec4 aPosition;\n" + "attribute vec4 aTextureCoord;\n" + "uniform mat4 uTextureMatrix;\n" + "varying vec2 vTextureCoord;\n" + "void main() {\n" + " vTextureCoord = (uTextureMatrix * aTextureCoord).xy;\n" + " gl_Position = aPosition;\n" + "}"; String fragmentShaderCode = "#extension GL_OES_EGL_image_external : require\n" + "precision mediump float;\n" + "uniform samplerExternalOES 
uTexture;\n" + "varying vec2 vTextureCoord;\n" + "void main() {\n" + " gl_FragColor = texture2D(uTexture, vTextureCoord);\n" + "}"; int vertexShader = loadShader(GL_VERTEX_SHADER, vertexShaderCode); int fragmentShader = loadShader(GL_FRAGMENT_SHADER, fragmentShaderCode); int program = glCreateProgram(); glAttachShader(program, vertexShader); glAttachShader(program, fragmentShader); glLinkProgram(program); glUseProgram(program); return program; } private int loadShader(int type, String code) { int shader = glCreateShader(type); glShaderSource(shader, code); glCompileShader(shader); return shader; } } } ``` 需要注意的是,这只是一个简单的示例,并且可能需要进行进一步的优化和改进,以满足你的实际需求和性能要求。同时,为了确保应用程序的稳定性,还需要进行充分的测试和错误处理。

“相关推荐”对你有帮助么?

  • 非常没帮助
  • 没帮助
  • 一般
  • 有帮助
  • 非常有帮助
提交
评论 1
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值