GLSurfaceView渲染YUV数据


/**
 * Fragment that renders grayscale camera frames via a {@link GLSurfaceView}
 * with a YUV420p -> RGB fragment shader (BT.601 coefficients).
 * The Y plane is refreshed from the camera every frame; the U and V planes are
 * constant 128 (neutral chroma), so the visible output is grayscale.
 *
 * NOTE(review): an Android Fragment with a non-default constructor crashes if
 * the framework re-instantiates it (e.g. after process death / config change);
 * consider a newInstance(...) factory plus arguments, or injecting the camera
 * another way — confirm against this app's lifecycle handling.
 */
public class CameraGLSurfaceFragment extends Fragment {

    private final BaseCamera mCamera;

    private final byte[] grayData; // latest grayscale (Y-plane) frame from the camera
    private final int width;       // frame width in pixels
    private final int height;      // frame height in pixels

    // One texture name per plane: [0]=Y, [1]=U, [2]=V.
    private final int[] textureId_yuv = new int[3];
    private final ByteBuffer y; // direct buffer for the Y plane, re-filled each frame (no per-frame allocation)
    private final ByteBuffer u; // constant U plane, all 128 -> zero chroma
    private final ByteBuffer v; // constant V plane, all 128 -> zero chroma

    // Sampler uniform locations (one per plane).
    private int sampler_y;
    private int sampler_u;
    private int sampler_v;

    private int program_yuv;

    private final FloatBuffer vertexBuffer;
    private final FloatBuffer textureBuffer;

    // Vertex attribute locations.
    private int avPosition_yuv;
    private int afPosition_yuv;

    private static final String VERTEX_SHADER =
            "attribute vec4 av_Position;\n" +
            "attribute vec2 af_Position;\n" +
            "varying vec2 v_texCord;\n" +
            "void main() {\n" +
                "gl_Position = av_Position;\n" +
                "v_texCord = af_Position;\n" +
            "}\n";

    // YUV (video range) -> RGB conversion using BT.601 coefficients.
    private static final String FRAGMENT_SHADER =
            "precision mediump float;\n" +
            "uniform sampler2D sampler_y;\n" +
            "uniform sampler2D sampler_u;\n" +
            "uniform sampler2D sampler_v;\n" +
            "varying vec2 v_texCord;\n" +
            "void main() {\n" +
                "vec4 c = vec4((texture2D(sampler_y, v_texCord).r - 16./255.) * 1.164);\n" +
                "vec4 U = vec4(texture2D(sampler_u, v_texCord).r - 128./255.);\n" +
                "vec4 V = vec4(texture2D(sampler_v, v_texCord).r - 128./255.);\n" +
                "c += V * vec4(1.596, -0.813, 0, 0);\n" +
                "c += U * vec4(0, -0.392, 2.017, 0);\n" +
                "c.a = 1.0;\n" +
                "gl_FragColor = c;\n" +
            "}\n";

    // Periodically triggers requestRender(); created in onViewCreated, cancelled in onDestroy.
    ScheduledFuture<?> scheduledFuture;
    ScheduledExecutorService executor = Executors.newSingleThreadScheduledExecutor();

    public CameraGLSurfaceFragment(BaseCamera camera) {
        mCamera = camera;
        width = camera.getWidth();
        height = camera.getHeight();

        grayData = new byte[width * height];

        // Allocate all plane buffers once; direct buffers are required for
        // reliable glTexImage2D uploads. The Y buffer is reused every frame.
        y = ByteBuffer.allocateDirect(width * height);

        byte[] uData = new byte[width * height / 4];
        Arrays.fill(uData, (byte) 128);
        u = ByteBuffer.allocateDirect(uData.length);
        u.put(uData);
        u.position(0);

        byte[] vData = new byte[width * height / 4];
        Arrays.fill(vData, (byte) 128);
        v = ByteBuffer.allocateDirect(vData.length);
        v.put(vData);
        v.position(0);

        // Full-screen quad in NDC, drawn as a triangle strip.
        float[] vertexData = {
                -1f, -1f,
                1f, -1f,
                -1f, 1f,
                1f, 1f
        };
        vertexBuffer = ByteBuffer.allocateDirect(vertexData.length * 4)
                .order(ByteOrder.nativeOrder())
                .asFloatBuffer()
                .put(vertexData);
        vertexBuffer.position(0);

        // Texture coordinates (V flipped so the image is not upside down).
        float[] textureData = {
                0f, 1f,
                1f, 1f,
                0f, 0f,
                1f, 0f
        };
        textureBuffer = ByteBuffer.allocateDirect(textureData.length * 4)
                .order(ByteOrder.nativeOrder())
                .asFloatBuffer()
                .put(textureData);
        textureBuffer.position(0);
    }

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        mCamera.open();
    }

    @Override
    public void onDestroy() {
        super.onDestroy();
        // Stop triggering renders BEFORE closing the camera, so a pending
        // requestRender cannot lead to getData() on a closed camera.
        if (scheduledFuture != null) scheduledFuture.cancel(false);
        executor.shutdown(); // release the scheduler thread (was leaked before)
        mCamera.close();
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
        return inflater.inflate(R.layout.fragment_camera_gl_surface, container, false);
    }

    @Override
    public void onViewCreated(@NonNull View view, @Nullable Bundle savedInstanceState) {
        super.onViewCreated(view, savedInstanceState);

        GLSurfaceView glSurfaceView = view.findViewById(R.id.preview_gl);
        glSurfaceView.setEGLContextClientVersion(2);
        glSurfaceView.setRenderer(new GLSurfaceView.Renderer() {
            @Override
            public void onSurfaceCreated(GL10 gl, EGLConfig config) {

                // Compile/link the YUV shader program (throws on failure instead
                // of silently rendering nothing).
                program_yuv = createProgram();

                // Look up attribute and uniform locations.
                avPosition_yuv = GLES20.glGetAttribLocation(program_yuv, "av_Position");
                afPosition_yuv = GLES20.glGetAttribLocation(program_yuv, "af_Position");

                sampler_y = GLES20.glGetUniformLocation(program_yuv, "sampler_y");
                sampler_u = GLES20.glGetUniformLocation(program_yuv, "sampler_u");
                sampler_v = GLES20.glGetUniformLocation(program_yuv, "sampler_v");

                // Single-byte rows: plane widths are often not multiples of 4,
                // and the default unpack alignment of 4 would skew the upload.
                GLES20.glPixelStorei(GLES20.GL_UNPACK_ALIGNMENT, 1);

                GLES20.glGenTextures(3, textureId_yuv, 0);

                for (int i = 0; i < 3; i++) {
                    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId_yuv[i]);
                    // GLES 2.0 requires CLAMP_TO_EDGE for non-power-of-two
                    // textures; GL_REPEAT would make them incomplete (black).
                    GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
                    GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
                    GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
                    GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
                }

                // The U and V planes never change: upload them once here
                // instead of every frame.
                u.position(0);
                GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId_yuv[1]);
                GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, width / 2, height / 2, 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, u);
                v.position(0);
                GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId_yuv[2]);
                GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, width / 2, height / 2, 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, v);

                // Sampler -> texture-unit bindings are constant; set them once.
                GLES20.glUseProgram(program_yuv);
                GLES20.glUniform1i(sampler_y, 0);
                GLES20.glUniform1i(sampler_u, 1);
                GLES20.glUniform1i(sampler_v, 2);
            }

            @Override
            public void onSurfaceChanged(GL10 gl, int width, int height) {
                GLES20.glViewport(0, 0, width, height);
            }

            @Override
            public void onDrawFrame(GL10 gl) {
                // Pull the newest frame into the reusable direct buffer
                // (previously allocated a fresh buffer every frame).
                mCamera.getData(grayData);
                y.clear();
                y.put(grayData);
                y.position(0);

                GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);

                GLES20.glUseProgram(program_yuv);

                GLES20.glEnableVertexAttribArray(avPosition_yuv);
                GLES20.glVertexAttribPointer(avPosition_yuv, 2, GLES20.GL_FLOAT, false, 8, vertexBuffer);

                GLES20.glEnableVertexAttribArray(afPosition_yuv);
                GLES20.glVertexAttribPointer(afPosition_yuv, 2, GLES20.GL_FLOAT, false, 8, textureBuffer);

                // Only the Y plane changes per frame; re-bind U/V so the unit
                // state is correct even if something else touched it.
                GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
                GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId_yuv[0]);
                GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, width, height, 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, y);

                GLES20.glActiveTexture(GLES20.GL_TEXTURE1);
                GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId_yuv[1]);

                GLES20.glActiveTexture(GLES20.GL_TEXTURE2);
                GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId_yuv[2]);

                // Draw the full-screen quad.
                GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
            }
        });
        glSurfaceView.setRenderMode(RENDERMODE_WHEN_DIRTY);

        // Render at ~20 fps after a 300 ms warm-up delay.
        scheduledFuture = executor.scheduleAtFixedRate(glSurfaceView::requestRender, 300, 50, TimeUnit.MILLISECONDS);
    }

    /** Compiles both shaders and links them into a program. Must run on the GL thread. */
    private int createProgram() {
        int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, VERTEX_SHADER);
        int fragmentShader = loadShader(GLES20.GL_FRAGMENT_SHADER, FRAGMENT_SHADER);
        return linkProgram(vertexShader, fragmentShader);
    }

    /**
     * Compiles a shader of the given type.
     *
     * @throws RuntimeException if compilation fails (previously failed silently,
     *                          leaving an unusable program and a black screen)
     */
    private static int loadShader(int type, String shaderSource) {
        int shader = GLES20.glCreateShader(type);
        GLES20.glShaderSource(shader, shaderSource);
        GLES20.glCompileShader(shader);
        int[] status = new int[1];
        GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, status, 0);
        if (status[0] == 0) {
            String info = GLES20.glGetShaderInfoLog(shader);
            GLES20.glDeleteShader(shader);
            throw new RuntimeException("Shader compile failed (type=" + type + "): " + info);
        }
        return shader;
    }

    /**
     * Links the two shaders into a program and verifies the link status.
     * Does not call glUseProgram — callers activate the program when drawing.
     *
     * @throws RuntimeException if linking fails
     */
    private static int linkProgram(int verShader, int fragShader) {
        int program = GLES20.glCreateProgram();

        GLES20.glAttachShader(program, verShader);
        GLES20.glAttachShader(program, fragShader);

        GLES20.glLinkProgram(program);
        int[] status = new int[1];
        GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, status, 0);
        if (status[0] == 0) {
            String info = GLES20.glGetProgramInfoLog(program);
            GLES20.glDeleteProgram(program);
            throw new RuntimeException("Program link failed: " + info);
        }

        // Shader objects are no longer needed once the program is linked.
        GLES20.glDeleteShader(verShader);
        GLES20.glDeleteShader(fragShader);

        return program;
    }

}

要在 GLSurfaceView 中显示 YUV 数据,你需要将 YUV 数据转换为 RGB 数据,然后将 RGB 数据传递给 OpenGL ES,最后在 GLSurfaceView 中显示。这个过程可以通过 JNI 来完成。 以下是一个简单的示例代码: 1. Java 代码: ``` public class YuvRenderer implements GLSurfaceView.Renderer { private static final String TAG = "YuvRenderer"; private int mTextureId; private int mProgram; private int mPositionHandle; private int mTexCoordHandle; private int mYuvWidth; private int mYuvHeight; private ByteBuffer mYuvBuffer; public YuvRenderer() { mYuvWidth = 0; mYuvHeight = 0; mYuvBuffer = null; } public void setYuvData(int width, int height, byte[] yuvData) { mYuvWidth = width; mYuvHeight = height; mYuvBuffer = ByteBuffer.wrap(yuvData); } @Override public void onSurfaceCreated(GL10 gl, EGLConfig config) { GLES20.glClearColor(0.0f, 0.0f, 0.0f, 1.0f); GLES20.glDisable(GLES20.GL_DEPTH_TEST); mProgram = createProgram(); mPositionHandle = GLES20.glGetAttribLocation(mProgram, "aPosition"); mTexCoordHandle = GLES20.glGetAttribLocation(mProgram, "aTexCoord"); int textureUniformHandle = GLES20.glGetUniformLocation(mProgram, "uTexture"); int[] textureIds = new int[1]; GLES20.glGenTextures(1, textureIds, 0); mTextureId = textureIds[0]; GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureId); GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR); GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR); GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE); GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE); GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, mYuvWidth / 2, mYuvHeight / 2, 0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, null); GLES20.glUseProgram(mProgram); GLES20.glVertexAttribPointer(mPositionHandle, 2, GLES20.GL_FLOAT, false, 0, createVertexBuffer()); GLES20.glVertexAttribPointer(mTexCoordHandle, 2, GLES20.GL_FLOAT, false, 0, createTexCoordBuffer()); 
GLES20.glEnableVertexAttribArray(mPositionHandle); GLES20.glEnableVertexAttribArray(mTexCoordHandle); GLES20.glUniform1i(textureUniformHandle, 0); } @Override public void onSurfaceChanged(GL10 gl, int width, int height) { GLES20.glViewport(0, 0, width, height); } @Override public void onDrawFrame(GL10 gl) { if (mYuvBuffer == null) { GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT); return; } GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT); byte[] yuvData = mYuvBuffer.array(); int[] rgbData = new int[mYuvWidth * mYuvHeight]; YuvUtils.convertYUV420ToRGB8888(yuvData, rgbData, mYuvWidth, mYuvHeight); GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureId); GLES20.glTexSubImage2D(GLES20.GL_TEXTURE_2D, 0, 0, 0, mYuvWidth / 2, mYuvHeight / 2, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, ByteBuffer.wrap(rgbData)); GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4); } private int createProgram() { int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, VERTEX_SHADER_CODE); int fragmentShader = loadShader(GLES20.GL_FRAGMENT_SHADER, FRAGMENT_SHADER_CODE); int program = GLES20.glCreateProgram(); GLES20.glAttachShader(program, vertexShader); GLES20.glAttachShader(program, fragmentShader); GLES20.glLinkProgram(program); return program; } private int loadShader(int shaderType, String shaderCode) { int shader = GLES20.glCreateShader(shaderType); GLES20.glShaderSource(shader, shaderCode); GLES20.glCompileShader(shader); return shader; } private FloatBuffer createVertexBuffer() { float[] vertexData = new float[] { -1.0f, -1.0f, 1.0f, -1.0f, -1.0f, 1.0f, 1.0f, 1.0f, }; ByteBuffer byteBuffer = ByteBuffer.allocateDirect(vertexData.length * 4); byteBuffer.order(ByteOrder.nativeOrder()); FloatBuffer buffer = byteBuffer.asFloatBuffer(); buffer.put(vertexData); buffer.position(0); return buffer; } private FloatBuffer createTexCoordBuffer() { float[] texCoordData = new float[] { 0.0f, 0.0f, 1.0f, 0.0f, 0.0f, 1.0f, 1.0f, 1.0f, }; ByteBuffer byteBuffer = ByteBuffer.allocateDirect(texCoordData.length * 4); 
byteBuffer.order(ByteOrder.nativeOrder()); FloatBuffer buffer = byteBuffer.asFloatBuffer(); buffer.put(texCoordData); buffer.position(0); return buffer; } private static final String VERTEX_SHADER_CODE = "attribute vec4 aPosition;\n" + "attribute vec2 aTexCoord;\n" + "varying vec2 vTexCoord;\n" + "void main() {\n" + " gl_Position = aPosition;\n" + " vTexCoord = aTexCoord;\n" + "}"; private static final String FRAGMENT_SHADER_CODE = "precision mediump float;\n" + "uniform sampler2D uTexture;\n" + "varying vec2 vTexCoord;\n" + "void main() {\n" + " gl_FragColor = texture2D(uTexture, vTexCoord);\n" + "}"; } ``` 2. JNI 代码: ``` JNIEXPORT void JNICALL Java_com_example_yuvrenderer_YuvRenderer_setYuvData(JNIEnv *env, jobject obj, jint width, jint height, jbyteArray yuvData) { jclass clazz = env->GetObjectClass(obj); jfieldID yuvWidthField = env->GetFieldID(clazz, "mYuvWidth", "I"); jint yuvWidth = env->GetIntField(obj, yuvWidthField); jfieldID yuvHeightField = env->GetFieldID(clazz, "mYuvHeight", "I"); jint yuvHeight = env->GetIntField(obj, yuvHeightField); jbyte* yuvDataPtr = env->GetByteArrayElements(yuvData, NULL); jsize yuvDataSize = env->GetArrayLength(yuvData); if (yuvWidth != width || yuvHeight != height) { env->SetIntField(obj, yuvWidthField, width); env->SetIntField(obj, yuvHeightField, height); jclass byteBufferClazz = env->FindClass("java/nio/ByteBuffer"); jmethodID allocateDirectMethod = env->GetStaticMethodID(byteBufferClazz, "allocateDirect", "(I)Ljava/nio/ByteBuffer;"); jobject yuvBuffer = env->CallStaticObjectMethod(byteBufferClazz, allocateDirectMethod, yuvDataSize); env->SetObjectField(obj, env->GetFieldID(clazz, "mYuvBuffer", "Ljava/nio/ByteBuffer;"), yuvBuffer); } jobject yuvBuffer = env->GetObjectField(obj, env->GetFieldID(clazz, "mYuvBuffer", "Ljava/nio/ByteBuffer;")); env->GetDirectBufferAddress(yuvBuffer); memcpy(yuvBufferPtr, yuvDataPtr, yuvDataSize); env->ReleaseByteArrayElements(yuvData, yuvDataPtr, JNI_ABORT); } ``` 这个示例代码中假设 YUV 数据是 NV21 
格式的,你需要根据你的 YUV 数据格式进行相应的修改。
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值