Rendering YUV Video Data with OpenGL ES on Android

This post shows how to display YUV video data on Android using OpenGL ES.

1. First, an overview of how the solution works, to give you the general picture:

  • 1.1 Where is the video displayed? -> GLSurfaceView

  • 1.2 What draws the data onto the GLSurfaceView? -> The Renderer

  • 1.3 What converts the YUV data to RGB? -> The GL Program/Shader

In one sentence: the GL Program/Shader converts the YUV data supplied by the caller into RGB, and the Renderer draws the result onto the GLSurfaceView. The sketch right after this paragraph shows the same flow in code.
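In code terms, the caller only ever touches one entry point; everything else happens inside the renderer. A minimal sketch of the flow, using the classes built in section 2 (the onFrameDecoded() helper is hypothetical, standing in for whatever produces your decoded frames):

public class YuvFrameForwarder {
    //hypothetical glue code: hand one decoded planar 4:2:0 frame to the view
    public static void onFrameDecoded(JfGLSurfaceView view, int width, int height,
                                      byte[] y, byte[] u, byte[] v) {
        //JfRender stores the three planes, requestRender() schedules a draw,
        //and the fragment shader converts YUV to RGB on the GPU
        view.setYUVData(width, height, y, u, v);
    }
}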

2. Rendering YUV data with OpenGL ES

2.1 Implementing the OpenGL ES pieces

The OpenGL ES helper class: JfShaderUtil.java

import android.content.Context;
import android.opengl.GLES20;

import java.io.BufferedReader;
import java.io.InputStream;
import java.io.InputStreamReader;

public class JfShaderUtil {
    /** Reads a raw text resource (here: a shader source file) into a String. */
    public static String readRawTxt(Context context, int rawId) {
        InputStream inputStream = context.getResources().openRawResource(rawId);
        BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream));
        StringBuilder sb = new StringBuilder();
        String line;
        try
        {
            while((line = reader.readLine()) != null)
            {
                sb.append(line).append("\n");
            }
            reader.close();
        }
        catch (Exception e)
        {
            e.printStackTrace();
        }
        return sb.toString();
    }

    /** Compiles one shader of the given type and returns its handle, or 0 on failure. */
    public static int loadShader(int shaderType, String source)
    {
        int shader = GLES20.glCreateShader(shaderType);
        if(shader != 0)
        {
            GLES20.glShaderSource(shader, source);
            GLES20.glCompileShader(shader);
            int[] compile = new int[1];
            GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compile, 0);
            if(compile[0] != GLES20.GL_TRUE)
            {
                JfLog.e("","shader compile error");
                GLES20.glDeleteShader(shader);
                shader = 0;
            }
        }
        return shader;
    }

    /** Links a vertex and a fragment shader into a program; returns the program handle, or 0 on failure. */
    public static int createProgram(String vertexSource, String fragmentSource)
    {
        int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
        if(vertexShader == 0)
        {
            return 0;
        }
        int fragmentShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
        if(fragmentShader == 0)
        {
            return 0;
        }
        int program = GLES20.glCreateProgram();
        if(program != 0)
        {
            GLES20.glAttachShader(program, vertexShader);
            GLES20.glAttachShader(program, fragmentShader);
            GLES20.glLinkProgram(program);
            int[] linkStatus = new int[1];
            GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
            if(linkStatus[0] != GLES20.GL_TRUE)
            {
                JfLog.e("", "link program error: " + GLES20.glGetProgramInfoLog(program));
                GLES20.glDeleteProgram(program);
                program = 0;
            }
        }
        return  program;
    }
}
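createProgram() needs a vertex and a fragment shader source. The article loads them from R.raw.vertex_shader and R.raw.fragment_shader but never shows their contents, so here is a sketch of what they plausibly contain, written as Java string constants so they could also be fed straight into JfShaderUtil.createProgram(). The attribute and uniform names (av_Position, af_Position, sampler_y/u/v) match the ones looked up in JfRender below; the varying name and the BT.601 full-range conversion coefficients are assumptions on my part, not taken from the original raw files.

public class JfYuvShaderSources {
    //plausible contents of res/raw/vertex_shader (assumption): pass the vertex
    //position through and forward the texture coordinate to the fragment stage
    public static final String VERTEX_SOURCE =
            "attribute vec4 av_Position;\n" +
            "attribute vec2 af_Position;\n" +
            "varying vec2 v_texPosition;\n" +   //varying name chosen here, not from the article
            "void main() {\n" +
            "    v_texPosition = af_Position;\n" +
            "    gl_Position = av_Position;\n" +
            "}\n";

    //plausible contents of res/raw/fragment_shader (assumption): sample the three
    //GL_LUMINANCE planes and apply a BT.601 full-range YUV -> RGB conversion
    public static final String FRAGMENT_SOURCE =
            "precision mediump float;\n" +
            "varying vec2 v_texPosition;\n" +
            "uniform sampler2D sampler_y;\n" +
            "uniform sampler2D sampler_u;\n" +
            "uniform sampler2D sampler_v;\n" +
            "void main() {\n" +
            "    float y = texture2D(sampler_y, v_texPosition).r;\n" +
            "    float u = texture2D(sampler_u, v_texPosition).r - 0.5;\n" +
            "    float v = texture2D(sampler_v, v_texPosition).r - 0.5;\n" +
            "    float r = y + 1.402 * v;\n" +
            "    float g = y - 0.344 * u - 0.714 * v;\n" +
            "    float b = y + 1.772 * u;\n" +
            "    gl_FragColor = vec4(r, g, b, 1.0);\n" +
            "}\n";
}

If the actual raw files differ, only the names queried in JfRender (the two attributes and the three samplers) have to stay the same.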

Implementing JfRender

import android.content.Context;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;

import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;

public class JfRender implements GLSurfaceView.Renderer {
    private Context context;

    //vertex coordinates: a full-screen quad drawn as a triangle strip
    private final float[] vertexData = {
            -1f, -1f,
            1f, -1f,
            -1f, 1f,
            1f, 1f
    };

    //texture coordinates, matching the vertex order above
    private final float[] textureData = {
            0f, 1f,
            1f, 1f,
            0f, 0f,
            1f, 0f
    };

    private FloatBuffer vertexBuffer;
    private FloatBuffer textureBuffer;
    private int program_yuv;
    private int avPosition_yuv;
    private int afPosition_yuv;

    private int sampler_y;
    private int sampler_u;
    private int sampler_v;
    private int[] textureId_yuv;

    //current frame to render
    private int width_yuv;
    private int height_yuv;
    private ByteBuffer y;
    private ByteBuffer u;
    private ByteBuffer v;

    public JfRender(Context context){
        this.context = context;
        //store the vertex coordinates in a native-order direct buffer
        vertexBuffer = ByteBuffer.allocateDirect(vertexData.length * 4)
                .order(ByteOrder.nativeOrder())
                .asFloatBuffer()
                .put(vertexData);
        vertexBuffer.position(0);

        //store the texture coordinates
        textureBuffer = ByteBuffer.allocateDirect(textureData.length * 4)
                .order(ByteOrder.nativeOrder())
                .asFloatBuffer()
                .put(textureData);
        textureBuffer.position(0);
    }
    @Override
    public void onSurfaceCreated(GL10 gl, EGLConfig config) {
        initRenderYUV();
    }

    @Override
    public void onSurfaceChanged(GL10 gl, int width, int height) {
        GLES20.glViewport(0, 0, width, height);
    }

    @Override
    public void onDrawFrame(GL10 gl) {
        //clear the screen to black (the clear color must be set before clearing)
        GLES20.glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
        //renderYUV() uploads the current frame and issues the draw call itself
        renderYUV();
    }
    /**
     * Create the GL program, look up its attributes/uniforms, and create the three plane textures.
     */
    private void initRenderYUV(){
        String vertexSource = JfShaderUtil.readRawTxt(context, R.raw.vertex_shader);
        String fragmentSource = JfShaderUtil.readRawTxt(context,R.raw.fragment_shader);
        //create the rendering program from the two shader sources
        program_yuv = JfShaderUtil.createProgram(vertexSource,fragmentSource);

        //look up the attribute locations in the shader program
        avPosition_yuv = GLES20.glGetAttribLocation(program_yuv, "av_Position");
        afPosition_yuv = GLES20.glGetAttribLocation(program_yuv, "af_Position");

        //look up the three sampler uniforms, one per YUV plane
        sampler_y = GLES20.glGetUniformLocation(program_yuv, "sampler_y");
        sampler_u = GLES20.glGetUniformLocation(program_yuv, "sampler_u");
        sampler_v = GLES20.glGetUniformLocation(program_yuv, "sampler_v");

        //create the three plane textures
        textureId_yuv = new int[3];
        GLES20.glGenTextures(3, textureId_yuv, 0);

        for(int i = 0; i < 3; i++)
        {
            //bind the texture
            GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId_yuv[i]);
            //set wrap and filter modes
            GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_REPEAT);
            GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_REPEAT);
            GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
            GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
        }
        JfLog.d("initRenderYUV");
    }

    //called from JfGLSurfaceView.setYUVData(): store one planar 4:2:0 frame for the next draw
    public void setYUVRenderData(int width, int height, byte[] y, byte[] u, byte[] v)
    {
        this.width_yuv = width;
        this.height_yuv = height;
        this.y = ByteBuffer.wrap(y);
        this.u = ByteBuffer.wrap(u);
        this.v = ByteBuffer.wrap(v);
    }

    /**
     * Upload the three planes as GL_LUMINANCE textures and draw the full-screen quad.
     */
    private void renderYUV() {
        JfLog.d("rendering");
        if(width_yuv > 0 && height_yuv > 0 && y != null && u != null && v != null){
            GLES20.glUseProgram(program_yuv);//use the shader program

            GLES20.glEnableVertexAttribArray(avPosition_yuv);//enable the vertex position attribute
            GLES20.glVertexAttribPointer(avPosition_yuv, 2, GLES20.GL_FLOAT, false, 8, vertexBuffer);//2 floats per vertex, 8-byte stride

            GLES20.glEnableVertexAttribArray(afPosition_yuv);
            GLES20.glVertexAttribPointer(afPosition_yuv, 2, GLES20.GL_FLOAT, false, 8, textureBuffer);

            //plane rows are tightly packed 1-byte samples, so use 1-byte unpack alignment
            GLES20.glPixelStorei(GLES20.GL_UNPACK_ALIGNMENT, 1);

            GLES20.glActiveTexture(GLES20.GL_TEXTURE0);//activate texture unit 0
            GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId_yuv[0]);//bind the Y-plane texture
            GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, width_yuv, height_yuv, 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, y);//upload the Y plane at full resolution

            GLES20.glActiveTexture(GLES20.GL_TEXTURE1);
            GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId_yuv[1]);
            //U plane: half width and half height for 4:2:0 data
            GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, width_yuv / 2, height_yuv / 2, 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, u);

            GLES20.glActiveTexture(GLES20.GL_TEXTURE2);
            GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId_yuv[2]);
            //V plane: half width and half height for 4:2:0 data
            GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, width_yuv / 2, height_yuv / 2, 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, v);

            //bind each sampler uniform to its texture unit
            GLES20.glUniform1i(sampler_y, 0);
            GLES20.glUniform1i(sampler_u, 1);
            GLES20.glUniform1i(sampler_v, 2);

            //drop the references: a new frame must arrive via setYUVRenderData() before the next draw
            y.clear();
            u.clear();
            v.clear();
            y = null;
            u = null;
            v = null;

            GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
        }
    }
}
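A note on the width_yuv / 2 and height_yuv / 2 in renderYUV(): the frame is assumed to be planar 4:2:0 (e.g. I420), where each chroma plane holds a quarter as many samples as the luma plane. A quick size check for a 1280x720 frame:

public class YuvPlaneSizes {
    public static void main(String[] args) {
        int width = 1280, height = 720;           //example frame size
        int ySize = width * height;               //921600 bytes: full-resolution Y plane
        int uSize = (width / 2) * (height / 2);   //230400 bytes: half width, half height
        int vSize = uSize;                        //230400 bytes
        System.out.println(ySize + " / " + uSize + " / " + vSize);
    }
}

These sizes match the JNI code further down, which allocates w * h bytes for Y and w * h / 4 bytes for each of U and V.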

Implementing JfGLSurfaceView

import android.content.Context;
import android.opengl.GLSurfaceView;
import android.util.AttributeSet;

public class JfGLSurfaceView extends GLSurfaceView {

    private JfRender jfRender;

    public JfGLSurfaceView(Context context) {
        this(context, null);
    }

    public JfGLSurfaceView(Context context, AttributeSet attrs) {
        super(context, attrs);
        setEGLContextClientVersion(2);
        jfRender = new JfRender(context);
        setRenderer(jfRender);
        setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);//render only when requestRender() is called, not continuously
    }
    /**
     * Hands one planar 4:2:0 frame to the renderer and schedules a redraw.
     *
     * @param width  frame width in pixels
     * @param height frame height in pixels
     * @param y      Y plane, width * height bytes
     * @param u      U plane, width * height / 4 bytes
     * @param v      V plane, width * height / 4 bytes
     */
    public void setYUVData(int width, int height, byte[] y, byte[] u, byte[] v) {
        if (jfRender != null) {
            jfRender.setYUVRenderData(width, height, y, u, v);
            requestRender();
        }
    }
    //native method that pulls one YUV frame from the decoder SDK and calls back setYUVData()
    public native int getYuvMediaData();
}
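Putting the view on screen can be as simple as making it the Activity's content view. A minimal sketch; the Activity itself and the native library name are assumptions, since the original article does not show this part:

import android.app.Activity;
import android.os.Bundle;

public class PlayerActivity extends Activity {
    static {
        System.loadLibrary("native-lib");   //assumed name of the .so that implements getYuvMediaData()
    }

    private JfGLSurfaceView jfGLSurfaceView;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        jfGLSurfaceView = new JfGLSurfaceView(this);
        setContentView(jfGLSurfaceView);
    }

    @Override
    protected void onResume() {
        super.onResume();
        jfGLSurfaceView.onResume();   //resume the GL rendering thread
    }

    @Override
    protected void onPause() {
        jfGLSurfaceView.onPause();    //pause the GL rendering thread
        super.onPause();
    }
}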

The native method getYuvMediaData() in JfGLSurfaceView is what fetches the YUV data; here is how the JNI side is written:

extern "C" JNIEXPORT jint
JNICALL
Java_com_afscope_sloptoelectronic_JfGLSurfaceView_getYuvMediaData(
        JNIEnv *env,
        jobject jobject1/* this */) {
    jclass jclazz = env->FindClass("com/afscope/sloptoelectronic/JfGLSurfaceView");

    jmethodID jmethod = env->GetMethodID(jclazz, "setYUVData", "(II[B[B[B)V");

//    jobject jobject1 = env->AllocObject(jclazz);
    l_md_data_t *p_res = NULL;
    int i = l_sdk_dec_get_md_data(100, &p_res);
    __android_log_print(ANDROID_LOG_INFO, "native", "l_sdk_dec_get_md_data=%d", i);
    if (i == 0) {
        // copy the three planes into Java byte arrays:
        // Y is w*h bytes, U and V are w*h/4 bytes each (4:2:0)
        jbyteArray y = env->NewByteArray(p_res->w * p_res->h);
        env->SetByteArrayRegion(y, 0, p_res->w * p_res->h, reinterpret_cast<const jbyte *>(p_res->p_y));
        jbyteArray u = env->NewByteArray(p_res->w * p_res->h / 4);
        env->SetByteArrayRegion(u, 0, p_res->w * p_res->h / 4, reinterpret_cast<const jbyte *>(p_res->p_u));
        jbyteArray v = env->NewByteArray(p_res->w * p_res->h / 4);
        env->SetByteArrayRegion(v, 0, p_res->w * p_res->h / 4, reinterpret_cast<const jbyte *>(p_res->p_v));

        env->CallVoidMethod(jobject1, jmethod, p_res->w, p_res->h, y, u, v);
        env->DeleteLocalRef(y);
        env->DeleteLocalRef(u);
        env->DeleteLocalRef(v);

    }
    if (p_res != NULL) {
        l_sdk_dec_free_md_data(p_res);   // hand the frame buffer back to the SDK
    }
    return i;
}

After fetching the YUV data, the native code calls back into the Java setYUVData() method; once the planes are handed to the renderer, the next render pass displays the live video frame.
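Since getYuvMediaData() both pulls a frame from the SDK and, through the JNI callback, pushes it into setYUVData(), the Java side only has to call it periodically. A sketch of such a pump, assuming it may be polled from a background thread at roughly 25 fps (both assumptions, not from the original article):

public class YuvDataPump {
    private volatile boolean running;

    //repeatedly ask the native side for a frame; every successful call ends in
    //JfGLSurfaceView.setYUVData() -> requestRender()
    public void start(final JfGLSurfaceView view) {
        running = true;
        new Thread(new Runnable() {
            @Override
            public void run() {
                while (running) {
                    view.getYuvMediaData();   //returns 0 on success (see the JNI code above)
                    try {
                        Thread.sleep(40);     //~25 fps polling interval (assumption)
                    } catch (InterruptedException e) {
                        Thread.currentThread().interrupt();
                        return;
                    }
                }
            }
        }).start();
    }

    public void stop() {
        running = false;
    }
}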

 
