Previewing the Camera Feed

How It Works

Generate a texture in OpenGL and bind it to a SurfaceTexture, then hand that SurfaceTexture to the camera as its preview target. Every preview frame the camera produces then lands in that texture, so the camera image can be processed and drawn entirely in OpenGL: here it is first rendered into an FBO and then drawn to the screen.
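
In code, the wiring is just a handful of calls. A minimal sketch of the idea (error handling omitted; the classes below split this work between a GL renderer and a camera wrapper):

// 1. Generate an OES (external) texture on the GL thread
int[] tex = new int[1];
GLES20.glGenTextures(1, tex, 0);
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, tex[0]);

// 2. Wrap it in a SurfaceTexture the camera can render into
SurfaceTexture surfaceTexture = new SurfaceTexture(tex[0]);

// 3. Point the camera preview at the SurfaceTexture
Camera camera = Camera.open(Camera.CameraInfo.CAMERA_FACING_BACK);
camera.setPreviewTexture(surfaceTexture); // throws IOException
camera.startPreview();

// 4. Each frame (on the GL thread), latch the newest image into the
//    OES texture, then sample it in a shader via samplerExternalOES
surfaceTexture.updateTexImage();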


vertex_shader.glsl

// Vertex position (clip space)
attribute vec4 v_Position;
// Texture coordinate
attribute vec2 f_Position;
// Texture coordinate passed through to the fragment shader
varying vec2 ft_Position;
void main() {
    ft_Position = f_Position;
    gl_Position = v_Position;
}
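
This vertex shader is shared by both passes: the camera-to-FBO pass and the FBO-to-screen pass both load R.raw.vertex_shader and differ only in their fragment shaders.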

fragment_shader.glsl

#extension GL_OES_EGL_image_external : require

precision mediump float;
varying vec2 ft_Position;
// The camera frame, backed by the SurfaceTexture (OES external texture)
uniform samplerExternalOES sTexture;
void main() {
    gl_FragColor = texture2D(sTexture, ft_Position);
}
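
samplerExternalOES is the sampler type for textures backed by a SurfaceTexture, and it only exists once the GL_OES_EGL_image_external extension has been declared. The on-screen shader below is otherwise identical but samples a regular sampler2D, because the second pass reads from the FBO's ordinary 2D color texture rather than from the camera.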

fragment_shader_screen.glsl

precision mediump float;
varying vec2 ft_Position;
// The FBO's color attachment (an ordinary 2D texture)
uniform sampler2D sTexture;
void main() {
    gl_FragColor = texture2D(sTexture, ft_Position);
}

GCamera

package com.example.glivepush.camera;

import android.graphics.ImageFormat;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;

import java.io.IOException;

public class GCamera {

    private Camera camera;

    public GCamera() {

    }

    private SurfaceTexture surfaceTexture;

    public void initCamera(SurfaceTexture surfaceTexture, int cameraId){
        this.surfaceTexture = surfaceTexture;
        setCameraParm(cameraId);
    }

    // Configure the camera and start the preview
    private void setCameraParm(int cameraId){
        try {
            camera = Camera.open(cameraId);
            camera.setPreviewTexture(surfaceTexture);
            Camera.Parameters parameters = camera.getParameters();

            parameters.setFlashMode(Camera.Parameters.FLASH_MODE_OFF);
            // NV21 is a preview format, so set it on the preview rather than the picture
            parameters.setPreviewFormat(ImageFormat.NV21);
            // Index 0 of the supported-size lists is device-dependent; see the note after this class
            parameters.setPictureSize(parameters.getSupportedPictureSizes().get(0).width,
                    parameters.getSupportedPictureSizes().get(0).height);
            parameters.setPreviewSize(parameters.getSupportedPreviewSizes().get(0).width,
                    parameters.getSupportedPreviewSizes().get(0).height);

            camera.setParameters(parameters);

            camera.startPreview();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    public void stopPreview(){
        if(camera != null){
            camera.stopPreview();
            camera.release();
            camera = null;
        }
    }

    public void changeCamera(int cameraId){
        if(camera != null){
            stopPreview();
        }
        setCameraParm(cameraId);
    }
}
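
A note on the size selection above: getSupportedPreviewSizes().get(0) just takes whatever entry the driver lists first, which varies by device and may not fit the view at all. A more robust approach is to pick the supported size closest to the target dimensions. A minimal sketch (the helper name and closest-area policy are my own; assumes java.util.List is imported):

    // Hypothetical helper: pick the supported size whose area is closest
    // to the requested width x height
    private Camera.Size chooseBestSize(List<Camera.Size> sizes, int width, int height) {
        Camera.Size best = sizes.get(0);
        long target = (long) width * height;
        long bestDiff = Math.abs((long) best.width * best.height - target);
        for (Camera.Size s : sizes) {
            long diff = Math.abs((long) s.width * s.height - target);
            if (diff < bestDiff) {
                best = s;
                bestDiff = diff;
            }
        }
        return best;
    }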

GCameraRender

package com.example.glivepush.camera;

import android.content.Context;
import android.graphics.SurfaceTexture;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.util.Log;

import com.example.glivepush.R;
import com.example.glivepush.egl.GEGLSurfaceView;
import com.example.glivepush.egl.GShaderUtil;

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;

public class GCameraRender implements GEGLSurfaceView.GGLRender, SurfaceTexture.OnFrameAvailableListener {

    private Context context;

    private FloatBuffer vertexBuffer;
    private FloatBuffer fragmentBuffer;

    private int program;

    private int vPosition;
    private int fPosition;

    private int vboId;
    private int fboId;

    private int fboTextureid;
    private int cameraTextureid;

    private GCameraFboRender gCameraFboRender;

    private SurfaceTexture surfaceTexture;
    private OnSurfaceCreateListener onSurfaceCreateListener;

    public void setOnSurfaceCreateListener(OnSurfaceCreateListener onSurfaceCreateListener) {
        this.onSurfaceCreateListener = onSurfaceCreateListener;
    }

    // Full-screen quad in normalized device coordinates
    private final float[] vertexData = {
            -1f, -1f,
            1f, -1f,
            -1f, 1f,
            1f, 1f
    };

    // Texture coordinates, vertically flipped relative to the vertices:
    // the camera image in the SurfaceTexture has its origin at the top-left,
    // while GL texture coordinates have theirs at the bottom-left
    private final float[] fragmentData = {
            0f, 1f,
            1f, 1f,
            0f, 0f,
            1f, 0f
    };

    public GCameraRender(Context context) {
        this.context = context;

        gCameraFboRender = new GCameraFboRender(context);

        vertexBuffer = ByteBuffer.allocateDirect(vertexData.length * 4)
                .order(ByteOrder.nativeOrder())
                .asFloatBuffer()
                .put(vertexData);
        vertexBuffer.position(0);

        fragmentBuffer = ByteBuffer.allocateDirect(fragmentData.length * 4)
                .order(ByteOrder.nativeOrder())
                .asFloatBuffer()
                .put(fragmentData);
        fragmentBuffer.position(0);
    }

    @Override
    public void onSurfaceCreated() {

        gCameraFboRender.onCreate();

        // Load the shader sources and link the program
        String vertexSource = GShaderUtil.getRawResource(context, R.raw.vertex_shader);
        String fragmentSource = GShaderUtil.getRawResource(context, R.raw.fragment_shader);
        program = GShaderUtil.createProgram(vertexSource, fragmentSource);
        // Vertex position attribute
        vPosition = GLES20.glGetAttribLocation(program, "v_Position");
        // Texture coordinate attribute
        fPosition = GLES20.glGetAttribLocation(program, "f_Position");

        /***********************************    vbo    ********************************************/
        int[] vbos = new int[1];
        GLES20.glGenBuffers(1, vbos, 0);
        vboId = vbos[0];
        // Bind the VBO
        GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, vboId);
        // Allocate one buffer big enough for both vertex and texture coordinates
        GLES20.glBufferData(GLES20.GL_ARRAY_BUFFER, vertexData.length * 4 + fragmentData.length * 4,
                null, GLES20.GL_STATIC_DRAW);
        // Upload both arrays, texture coordinates packed after the vertices
        GLES20.glBufferSubData(GLES20.GL_ARRAY_BUFFER, 0, vertexData.length * 4, vertexBuffer);
        GLES20.glBufferSubData(GLES20.GL_ARRAY_BUFFER, vertexData.length * 4, fragmentData.length * 4,
                fragmentBuffer);
        // Unbind
        GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, 0);
        /***********************************    vbo    ********************************************/

        /***********************************    fbo    ********************************************/
        int[] fbos = new int[1];
        // Framebuffer objects have their own generator (glGenBuffers would hand out VBO names)
        GLES20.glGenFramebuffers(1, fbos, 0);
        fboId = fbos[0];
        // Bind the FBO
        GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, fboId);
        /***********************************    fbo    ********************************************/

        // Generate the FBO's color texture
        int[] textureIds = new int[1];
        GLES20.glGenTextures(1, textureIds, 0);
        fboTextureid = textureIds[0];
        // Bind it
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, fboTextureid);

        // Wrap and filter modes
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_REPEAT);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_REPEAT);

        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);

        // Allocate the texture storage the FBO renders into
        // (900x1600 is hardcoded here; ideally this would match the surface size)
        GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, 900, 1600, 0,
                GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, null);
        // Attach the texture to the FBO as its color attachment
        GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0,
                GLES20.GL_TEXTURE_2D, fboTextureid, 0);
        // Check that the FBO is complete
        if(GLES20.glCheckFramebufferStatus(GLES20.GL_FRAMEBUFFER) != GLES20.GL_FRAMEBUFFER_COMPLETE){
            Log.e("godv", "fbo error");
        }else {
            Log.e("godv", "fbo success");
        }

        // Unbind the texture
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
        // Unbind the FBO
        GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);

        /***********************************    camera   ******************************************/
        // Generate the camera's OES texture
        int[] textureIdsoes = new int[1];
        GLES20.glGenTextures(1, textureIdsoes, 0);
        cameraTextureid = textureIdsoes[0];
        // Bind it to the external (OES) target
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, cameraTextureid);

        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_REPEAT);
        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_REPEAT);

        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
        /***********************************    camera    *****************************************/

        // Wrap the OES texture in a SurfaceTexture for the camera to draw into
        surfaceTexture = new SurfaceTexture(cameraTextureid);
        // Get notified whenever a new camera frame arrives
        surfaceTexture.setOnFrameAvailableListener(this);

        if(onSurfaceCreateListener != null){
            onSurfaceCreateListener.onSurfaceCreate(surfaceTexture);
        }

        // Unbind the texture
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
    }

    @Override
    public void onSurfaceChanged(int width, int height) {
        gCameraFboRender.onChange(width, height);
        GLES20.glViewport(0,0,width,height);
    }

    @Override
    public void onDrawFrame() {
        // Latch the newest camera frame into the OES texture
        surfaceTexture.updateTexImage();

        GLES20.glUseProgram(program);

        // First pass: render the camera frame into the FBO
        // (bind the FBO before clearing, and set the clear color before glClear)
        GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, fboId);
        GLES20.glClearColor(1f, 0f, 0f, 1f);
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);

        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, cameraTextureid);
        GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, vboId);

        GLES20.glEnableVertexAttribArray(vPosition);
        GLES20.glVertexAttribPointer(vPosition, 2, GLES20.GL_FLOAT, false, 8,
                0);

        GLES20.glEnableVertexAttribArray(fPosition);
        GLES20.glVertexAttribPointer(fPosition, 2, GLES20.GL_FLOAT, false, 8,
                vertexData.length * 4);

        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
        GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, 0);

        GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);

        // Second pass: draw the FBO texture to the screen
        gCameraFboRender.onDraw(fboTextureid);
    }

    // Called when a new camera frame is available
    @Override
    public void onFrameAvailable(SurfaceTexture surfaceTexture) {

    }

    public interface OnSurfaceCreateListener{

        void onSurfaceCreate(SurfaceTexture surfaceTexture);
    }
}
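
One thing this renderer ignores is the SurfaceTexture transform matrix: the frame inside the OES texture may be rotated or mirrored depending on the sensor, and SurfaceTexture.getTransformMatrix() reports the matrix that corrects the texture coordinates. A sketch of how it could be applied, assuming vertex_shader.glsl is extended with a uniform mat4 uTexMatrix (a name of my choosing) that is multiplied with f_Position:

    private final float[] texMatrix = new float[16];

    // In onDrawFrame(), after updateTexImage() and glUseProgram(program):
    surfaceTexture.getTransformMatrix(texMatrix);
    int uTexMatrix = GLES20.glGetUniformLocation(program, "uTexMatrix");
    GLES20.glUniformMatrix4fv(uTexMatrix, 1, false, texMatrix, 0);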

GCameraFboRender

package com.example.glivepush.camera;

import android.content.Context;
import android.opengl.GLES20;

import com.example.glivepush.R;
import com.example.glivepush.egl.GShaderUtil;

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;

public class GCameraFboRender {

    private Context context;

    private float[] vertexData = {
            -1f, -1f,
            1f, -1f,
            -1f, 1f,
            1f, 1f
    };
    private FloatBuffer vertexBuffer;

    private float[] fragmentData = {
            0f, 1f,
            1f, 1f,
            0f, 0f,
            1f, 0f
    };
    private FloatBuffer fragmentBuffer;

    private int program;
    private int vPosition;
    private int fPosition;
    private int textureid;
    private int sampler;

    private int vboId;

    public GCameraFboRender(Context context) {
        this.context = context;

        vertexBuffer = ByteBuffer.allocateDirect(vertexData.length * 4)
                .order(ByteOrder.nativeOrder())
                .asFloatBuffer()
                .put(vertexData);
        vertexBuffer.position(0);

        fragmentBuffer = ByteBuffer.allocateDirect(fragmentData.length * 4)
                .order(ByteOrder.nativeOrder())
                .asFloatBuffer()
                .put(fragmentData);
        fragmentBuffer.position(0);

    }

    public void onCreate()
    {
        String vertexSource = GShaderUtil.getRawResource(context, R.raw.vertex_shader);
        String fragmentSource = GShaderUtil.getRawResource(context, R.raw.fragment_shader_screen);

        program = GShaderUtil.createProgram(vertexSource, fragmentSource);

        vPosition = GLES20.glGetAttribLocation(program, "v_Position");
        fPosition = GLES20.glGetAttribLocation(program, "f_Position");
        sampler = GLES20.glGetUniformLocation(program, "sTexture");

        int [] vbos = new int[1];
        GLES20.glGenBuffers(1, vbos, 0);
        vboId = vbos[0];

        GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, vboId);
        GLES20.glBufferData(GLES20.GL_ARRAY_BUFFER, vertexData.length * 4 + fragmentData.length * 4, null, GLES20.GL_STATIC_DRAW);
        GLES20.glBufferSubData(GLES20.GL_ARRAY_BUFFER, 0, vertexData.length * 4, vertexBuffer);
        GLES20.glBufferSubData(GLES20.GL_ARRAY_BUFFER, vertexData.length * 4, fragmentData.length * 4, fragmentBuffer);
        GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, 0);
    }

    public void onChange(int width, int height)
    {
        GLES20.glViewport(0, 0, width, height);
    }

    public void onDraw(int textureId)
    {
        GLES20.glClearColor(1f, 0f, 0f, 1f);
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);

        GLES20.glUseProgram(program);

        // Bind the FBO texture to unit 0 and point the sampler uniform at it
        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId);
        GLES20.glUniform1i(sampler, 0);

        GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, vboId);

        GLES20.glEnableVertexAttribArray(vPosition);
        GLES20.glVertexAttribPointer(vPosition, 2, GLES20.GL_FLOAT, false, 8,
                0);

        GLES20.glEnableVertexAttribArray(fPosition);
        GLES20.glVertexAttribPointer(fPosition, 2, GLES20.GL_FLOAT, false, 8,
                vertexData.length * 4);

        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);

        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
        GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, 0);
    }
}

GCameraView

package com.example.glivepush.camera;

import android.content.Context;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.util.AttributeSet;

import com.example.glivepush.egl.GEGLSurfaceView;

public class GCameraView extends GEGLSurfaceView {

    private GCameraRender gCameraRender;
    private GCamera gCamera;

    private int cameraId = Camera.CameraInfo.CAMERA_FACING_BACK;

    public GCameraView(Context context) {
        this(context,null);
    }

    public GCameraView(Context context, AttributeSet attrs) {
        this(context, attrs,0);
    }

    public GCameraView(Context context, AttributeSet attrs, int defStyleAttr) {
        super(context, attrs, defStyleAttr);
        gCameraRender = new GCameraRender(context);
        gCamera = new GCamera();
        setRender(gCameraRender);

        gCameraRender.setOnSurfaceCreateListener(new GCameraRender.OnSurfaceCreateListener() {
            @Override
            public void onSurfaceCreate(SurfaceTexture surfaceTexture) {
                gCamera.initCamera(surfaceTexture, cameraId);
            }
        });
    }

    public void onDestroy(){
        if(gCamera != null){
            gCamera.stopPreview();
        }
    }
}

CameraActivity

package com.example.glivepush;

import android.os.Bundle;

import androidx.annotation.Nullable;
import androidx.appcompat.app.AppCompatActivity;

import com.example.glivepush.camera.GCameraView;

public class CameraActivity extends AppCompatActivity {

    private GCameraView gCameraView;
    @Override
    protected void onCreate(@Nullable Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_camera);
        gCameraView = findViewById(R.id.cameraView);
    }

    @Override
    protected void onDestroy() {
        super.onDestroy();
        gCameraView.onDestroy();
    }
}
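
Two assumptions worth calling out: the activity_camera layout must contain a GCameraView with id cameraView, and the app needs the CAMERA permission. On Android 6.0+ that permission must also be granted at runtime before the preview starts; a minimal sketch using the androidx.core ActivityCompat/ContextCompat helpers (the request code 1 is arbitrary; also imports android.Manifest and android.content.pm.PackageManager):

    // In onCreate(), before the camera preview is relied upon
    if (ContextCompat.checkSelfPermission(this, Manifest.permission.CAMERA)
            != PackageManager.PERMISSION_GRANTED) {
        ActivityCompat.requestPermissions(this,
                new String[]{Manifest.permission.CAMERA}, 1);
    }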

 
