First on the web: Rendering YUV (NV21) with Android GLSurfaceView

  A few days ago I ran into a problem: after getting the camera's frame data, how can it be displayed directly?

I have solved it. Anyone who needs it can download my open-source project:

GitHub - quantum6/Quantum6-CameraGL-Android

  • The simplest approach is to convert the data to RGB. Ready-made code exists for this, and I have published a version as well:

Java code for converting camera NV21 to RGB, tested and working - nv21torgba - 柳鲲鹏's CSDN blog

  The performance is poor, though; on some phones it is especially slow.

  • Display the data directly, without conversion, via the NATIVE route.

Someone online has published a NATIVE implementation. I meant to try it, but gave up once I saw it required installing CYGWIN.

  • Use GLSurfaceView.

Why this approach? Because it is what doubango does. The concrete steps:

  1. Configuration

Add one line to AndroidManifest.xml:

<uses-feature android:glEsVersion="0x00020000" android:required="false" />
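
Since the feature is declared with android:required="false", it is worth confirming OpenGL ES 2.0 support at runtime before constructing the renderer view. A minimal sketch, assuming any available Context named context:

// Sketch: check for OpenGL ES 2.0 support at runtime (android.app.ActivityManager).
ActivityManager am = (ActivityManager) context.getSystemService(Context.ACTIVITY_SERVICE);
boolean supportsEs2 = am.getDeviceConfigurationInfo().reqGlEsVersion >= 0x20000;
// Only create the GLSurfaceView-based renderer when supportsEs2 is true.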

  2. Format conversion

Convert NV21 to planar YUV420 (I420). The renderer below uploads Y, U, and V as three separate textures, so NV21's interleaved VU plane must be split into separate U and V planes.

https://quantum6.blog.csdn.net/article/details/105744636
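
A minimal sketch of the conversion, assuming even width and height; see the linked post for the full version:

public static void nv21ToI420(final byte[] nv21, final byte[] i420, final int width, final int height) {
    final int frameSize = width * height;
    final int quarter   = frameSize / 4;
    // The Y plane is identical in both layouts.
    System.arraycopy(nv21, 0, i420, 0, frameSize);
    // NV21 stores interleaved V,U pairs after the Y plane;
    // I420 wants a full U plane followed by a full V plane.
    for (int i = 0; i < quarter; i++) {
        i420[frameSize + i]           = nv21[frameSize + 2 * i + 1]; // U
        i420[frameSize + quarter + i] = nv21[frameSize + 2 * i];     // V
    }
}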

  3. Subclass GLSurfaceView

package net.quantum6.kit;

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.nio.ShortBuffer;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;

import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;

import android.content.Context;
import android.graphics.PixelFormat;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
import android.util.Log;
import android.view.SurfaceHolder;


/**
 * OpenGL Surface view
 */
public class VideoRendererView extends GLSurfaceView implements GLSurfaceView.Renderer
{
    private final static String TAG = VideoRendererView.class.getCanonicalName();
    
    int mBufferWidthY, mBufferHeightY,  mBufferWidthUV, mBufferHeightUV;
    ByteBuffer mBuffer;
    int mBufferPositionY, mBufferPositionU, mBufferPositionV;
    
    private static final int FLOAT_SIZE_BYTES = 4;
    private static final int SHORT_SIZE_BYTES = 2;
    private static final int TRIANGLE_VERTICES_DATA_STRIDE_BYTES = 5 * FLOAT_SIZE_BYTES;
    private static final int TRIANGLE_VERTICES_DATA_POS_OFFSET   = 0;
    private static final int TRIANGLE_VERTICES_DATA_UV_OFFSET    = 3;
    private final float[] TRIANGLE_VERTICES_DATA = {
             1, -1, 0, 1, 1,
             1,  1, 0, 1, 0,
            -1,  1, 0, 0, 0,
            -1, -1, 0, 0, 1
         };
    private final short[] INDICES_DATA = {
            0, 1, 2,
            2, 3, 0};

    private FloatBuffer mTriangleVertices;
    private ShortBuffer mIndices;

    private static final String VERTEX_SHADER_SOURCE =
        "attribute vec4 aPosition;\n" +
        "attribute vec2 aTextureCoord;\n" +
        "varying vec2 vTextureCoord;\n" +
        "void main() {\n" +
        "  gl_Position = aPosition;\n" +
        "  vTextureCoord = aTextureCoord;\n" +
        "}\n";

    private static final String FRAGMENT_SHADER_SOURCE = "precision mediump float;" +
    "varying vec2 vTextureCoord;" +
    "" +
    "uniform sampler2D SamplerY; " +
    "uniform sampler2D SamplerU;" +
    "uniform sampler2D SamplerV;" +
    "" +
    "const mat3 yuv2rgb = mat3(1, 0, 1.2802,1, -0.214821, -0.380589,1, 2.127982, 0);" +
    "" +
    "void main() {    " +
    "    vec3 yuv = vec3(1.1643 * (texture2D(SamplerY, vTextureCoord).r - 0.0625)," +
    "                    texture2D(SamplerU, vTextureCoord).r - 0.5," +
    "                    texture2D(SamplerV, vTextureCoord).r - 0.5);" +
    "    vec3 rgb = yuv * yuv2rgb;    " +
    "    gl_FragColor = vec4(rgb, 1.0);" +
    "} ";

    private int mProgram;
    private int maPositionHandle;
    private int maTextureHandle;
    private int muSamplerYHandle;
    private int muSamplerUHandle;
    private int muSamplerVHandle;
    private int[] mTextureY = new int[1];
    private int[] mTextureU = new int[1];
    private int[] mTextureV = new int[1];

    private boolean mSurfaceCreated;
    private boolean mSurfaceDestroyed;
    @SuppressWarnings("unused")
    private Context mContext;
    
    private int mViewWidth, mViewHeight, mViewX, mViewY;
    private boolean mFullScreenRequired;
    
    public VideoRendererView(Context context) {
        super(context);
        setEGLContextClientVersion(2);
        setEGLConfigChooser(8, 8, 8, 8, 16, 0);    
        setRenderer(this);
        getHolder().setFormat(PixelFormat.TRANSLUCENT);
        getHolder().setType(SurfaceHolder.SURFACE_TYPE_GPU);
        setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
  
        mContext = context;
        
        mTriangleVertices = ByteBuffer.allocateDirect(TRIANGLE_VERTICES_DATA.length
                * FLOAT_SIZE_BYTES).order(ByteOrder.nativeOrder()).asFloatBuffer();
        mTriangleVertices.put(TRIANGLE_VERTICES_DATA).position(0);
        
        mIndices = ByteBuffer.allocateDirect(INDICES_DATA.length
                * SHORT_SIZE_BYTES).order(ByteOrder.nativeOrder()).asShortBuffer();
        mIndices.put(INDICES_DATA).position(0);
    }
    
    public void setParams(boolean fullScreenRequired, ByteBuffer buffer, int bufferWidth, int bufferHeight, int fps)
    {
        mFullScreenRequired = fullScreenRequired;

        setBuffer(buffer, bufferWidth, bufferHeight);
    }

    /**
     * The buffer is expected to hold planar YUV420 (I420): a Y plane of
     * bufferWidth*bufferHeight bytes, followed by U and V planes of
     * (bufferWidth/2)*(bufferHeight/2) bytes each.
     */
    public void setBuffer(ByteBuffer buffer, int bufferWidth, int bufferHeight){
        mValidDataList.clear();
        mEmptyDataList.clear();
        
        mBuffer          = buffer;
        mBufferWidthY    = bufferWidth;
        mBufferHeightY   = bufferHeight;
        
        mBufferWidthUV   = (mBufferWidthY >> 1);
        mBufferHeightUV  = (mBufferHeightY >> 1);
        
        mBufferPositionY = 0;
        mBufferPositionU = (mBufferWidthY * mBufferHeightY);
        mBufferPositionV = (mBufferPositionU + (mBufferWidthUV * mBufferHeightUV));
    }

    public boolean isReady(){
        return (mSurfaceCreated && !mSurfaceDestroyed);
    }
    
    public boolean isDestroyed(){
        return mSurfaceDestroyed;
    }
    
    @Override
    public void surfaceDestroyed(SurfaceHolder holder) {
        mSurfaceCreated   = false;
        mSurfaceDestroyed = true;
        super.surfaceDestroyed(holder);
    }

    @Override
    public void onDrawFrame(GL10 glUnused) {
        GLES20.glViewport(mViewX, mViewY, mViewWidth, mViewHeight);
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
        GLES20.glUseProgram(mProgram);
        checkGlError("glUseProgram");

        if (mValidDataList.size() < BUFFER_COUNT_MIN)
        {
            return;
        }
        if (mBuffer == null)
        {
            mBuffer = ByteBuffer.allocateDirect(mBufferWidthY*mBufferHeightY*3/2);
        }
        // Take and remove the oldest queued frame so each frame is rendered once
        // and the pending list cannot grow without bound.
        byte[] newData = mValidDataList.remove(0);
        mBuffer.rewind();
        mBuffer.put(newData);
        
        if(mBuffer != null){
            synchronized(this){                
                GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
                GLES20.glBindTexture(  GLES20.GL_TEXTURE_2D, mTextureY[0]);
                GLES20.glTexImage2D(   GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, mBufferWidthY, mBufferHeightY, 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, mBuffer.position(mBufferPositionY));
                GLES20.glUniform1i(muSamplerYHandle, 0);
                
                GLES20.glActiveTexture(GLES20.GL_TEXTURE1);
                GLES20.glBindTexture(  GLES20.GL_TEXTURE_2D, mTextureU[0]);
                GLES20.glTexImage2D(   GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, mBufferWidthUV, mBufferHeightUV, 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, mBuffer.position(mBufferPositionU));
                GLES20.glUniform1i(muSamplerUHandle, 1);
                
                GLES20.glActiveTexture(GLES20.GL_TEXTURE2);
                GLES20.glBindTexture(  GLES20.GL_TEXTURE_2D, mTextureV[0]);
                GLES20.glTexImage2D(   GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, mBufferWidthUV, mBufferHeightUV, 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, mBuffer.position(mBufferPositionV));
                GLES20.glUniform1i(muSamplerVHandle, 2);
            }
        }
        
        GLES20.glDrawElements(GLES20.GL_TRIANGLES, INDICES_DATA.length, GLES20.GL_UNSIGNED_SHORT, mIndices);
        
        // Recycle the frame's byte[] so newDataArrived() can reuse it.
        this.mEmptyDataList.add(newData);
    }

    @Override
    public void onSurfaceChanged(GL10 glUnused, int width, int height) {
        GLES20.glViewport(0, 0, width, height);
        setViewport(width, height);
        // GLU.gluPerspective(glUnused, 45.0f, (float)width/(float)height, 0.1f, 100.0f);
    }

    @Override
    public void onSurfaceCreated(GL10 glUnused, EGLConfig config) {
        GLES20.glEnable( GLES20.GL_BLEND);
        GLES20.glDisable(GLES20.GL_DEPTH_TEST);
        GLES20.glDisable(GLES20.GL_DITHER);
        GLES20.glDisable(GLES20.GL_STENCIL_TEST);
        
        String extensions = GLES20.glGetString(GL10.GL_EXTENSIONS);
        Log.d(TAG, "OpenGL extensions=" +extensions);

        // Ignore the passed-in GL10 interface, and use the GLES20
        // class static methods instead.
        mProgram = createProgram(VERTEX_SHADER_SOURCE, FRAGMENT_SHADER_SOURCE);
        if (mProgram == 0) {
            return;
        }
        maPositionHandle = GLES20.glGetAttribLocation(mProgram, "aPosition");
        checkGlError("glGetAttribLocation aPosition");
        if (maPositionHandle == -1) {
            throw new RuntimeException("Could not get attrib location for aPosition");
        }
        maTextureHandle = GLES20.glGetAttribLocation(mProgram, "aTextureCoord");
        checkGlError("glGetAttribLocation aTextureCoord");
        if (maTextureHandle == -1) {
            throw new RuntimeException("Could not get attrib location for aTextureCoord");
        }
        
        muSamplerYHandle = GLES20.glGetUniformLocation(mProgram, "SamplerY");
        if (muSamplerYHandle == -1) {
            throw new RuntimeException("Could not get uniform location for SamplerY");
        }
        muSamplerUHandle = GLES20.glGetUniformLocation(mProgram, "SamplerU");
        if (muSamplerUHandle == -1) {
            throw new RuntimeException("Could not get uniform location for SamplerU");
        }
        muSamplerVHandle = GLES20.glGetUniformLocation(mProgram, "SamplerV");
        if (muSamplerVHandle == -1) {
            throw new RuntimeException("Could not get uniform location for SamplerV");
        }
        
        mTriangleVertices.position(TRIANGLE_VERTICES_DATA_POS_OFFSET);
        GLES20.glVertexAttribPointer(maPositionHandle, 3, GLES20.GL_FLOAT, false, TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
        checkGlError("glVertexAttribPointer maPosition");
        
        mTriangleVertices.position(TRIANGLE_VERTICES_DATA_UV_OFFSET);
        GLES20.glEnableVertexAttribArray(maPositionHandle);
        checkGlError("glEnableVertexAttribArray maPositionHandle");
        GLES20.glVertexAttribPointer(maTextureHandle, 2, GLES20.GL_FLOAT, false, TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
        checkGlError("glVertexAttribPointer maTextureHandle");
        GLES20.glEnableVertexAttribArray(maTextureHandle);
        checkGlError("glEnableVertexAttribArray maTextureHandle");

        GLES20.glGenTextures(1, mTextureY, 0);
        GLES20.glBindTexture(  GLES20.GL_TEXTURE_2D, mTextureY[0]);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S,     GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T,     GLES20.GL_CLAMP_TO_EDGE);
        
        GLES20.glGenTextures(1, mTextureU, 0);
        GLES20.glBindTexture(  GLES20.GL_TEXTURE_2D, mTextureU[0]);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S,     GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T,     GLES20.GL_CLAMP_TO_EDGE);
        
        GLES20.glGenTextures(1, mTextureV, 0);
        GLES20.glBindTexture(  GLES20.GL_TEXTURE_2D, mTextureV[0]);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S,     GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T,     GLES20.GL_CLAMP_TO_EDGE);
        
        mSurfaceCreated = true;
        
        setViewport(getWidth(), getHeight());
    }

    private int loadShader(int shaderType, String source) {
        int shader = GLES20.glCreateShader(shaderType);
        if (shader != 0) {
            GLES20.glShaderSource(shader, source);
            GLES20.glCompileShader(shader);
            int[] compiled = new int[1];
            GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
            if (compiled[0] == 0) {
                Log.e(TAG, "Could not compile shader " + shaderType + ":");
                Log.e(TAG, GLES20.glGetShaderInfoLog(shader));
                GLES20.glDeleteShader(shader);
                shader = 0;
            }
        }
        return shader;
    }

    private int createProgram(String vertexSource, String fragmentSource) {
        int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
        if (vertexShader == 0) {
            return 0;
        }

        int pixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
        if (pixelShader == 0) {
            return 0;
        }

        int program = GLES20.glCreateProgram();
        if (program != 0) {
            GLES20.glAttachShader(program, vertexShader);
            checkGlError("glAttachShader");
            GLES20.glAttachShader(program, pixelShader);
            checkGlError("glAttachShader");
            GLES20.glLinkProgram(program);
            int[] linkStatus = new int[1];
            GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
            if (linkStatus[0] != GLES20.GL_TRUE) {
                Log.e(TAG, "Could not link program: ");
                Log.e(TAG, GLES20.glGetProgramInfoLog(program));
                GLES20.glDeleteProgram(program);
                program = 0;
            }
        }
        return program;
    }
    
    private void setViewport(int width, int height){
        if(mFullScreenRequired){
            mViewWidth = width;
            mViewHeight = height;
            mViewX = mViewY = 0;
        }
        else{
            float fRatio = ((float) mBufferWidthY / (float) mBufferHeightY);
            mViewWidth = (int) ((float) width / fRatio) > height ? (int) ((float) height * fRatio) : width;
            mViewHeight = (int) (mViewWidth / fRatio) > height ? height : (int) (mViewWidth / fRatio);
            mViewX = ((width - mViewWidth) >> 1);
            mViewY = ((height - mViewHeight) >> 1);
        }
    }

    private void checkGlError(String op) {
        int error;
        while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
            Log.e(TAG, op + ": glError " + error);
            throw new RuntimeException(op + ": glError " + error);
        }
    }
    
    private final static int BUFFER_COUNT_MIN   = 2;
    
    private List<byte[]>  mValidDataList = Collections.synchronizedList(new LinkedList<byte[]>());
    private List<byte[]>  mEmptyDataList = Collections.synchronizedList(new LinkedList<byte[]>());
    
    public void newDataArrived(final byte[] data)
    {
        byte[] newData;
        if (mEmptyDataList.size() > 0)
        {
            newData = mEmptyDataList.remove(0);
        }
        else
        {
            newData = new byte[data.length];
        }
        System.arraycopy(data, 0, newData, 0, data.length);
        mValidDataList.add(newData);
        this.requestRender();
    }

}
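
Finally, a minimal usage sketch. It assumes the legacy android.hardware.Camera API, an already-opened Camera named camera, an Activity (or other Context) named activity, a parent ViewGroup named layout, and the nv21ToI420() helper from the format-conversion step:

final VideoRendererView rendererView = new VideoRendererView(activity);
layout.addView(rendererView);

camera.setPreviewCallback(new Camera.PreviewCallback() {
    private byte[] i420;
    private int width, height;

    @Override
    public void onPreviewFrame(byte[] nv21, Camera camera) {
        if (i420 == null) {
            Camera.Size size = camera.getParameters().getPreviewSize();
            width  = size.width;
            height = size.height;
            i420   = new byte[width * height * 3 / 2];
            // Passing a null ByteBuffer is fine: onDrawFrame() allocates one on first use.
            rendererView.setParams(false, null, width, height, 30);
        }
        nv21ToI420(nv21, i420, width, height); // helper from the conversion step
        rendererView.newDataArrived(i420);
    }
});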
