android glSurfaceview 底层创建EGL渲染环境

之前 android studio搭建简单jni层的opengl开发框架讲到的是在上层render的三个回调函数中写jni函数,从而在jni层调用opengl的绘制,但是在做播放器的时候,需要用opengl来渲染 每一帧,而底层解码后,再渲染,将渲染放在底层,这样播放器的整个框架都在底层,上层只是UI的一些显示操作,而且 如果在render的onFrame中直接来显示帧,在暂停或者seek的时候由于 glsurfaceview的双缓冲机制,容易导致画面2帧之间来回跳动,不好控制。

上层 GLSurfaceView 部分

上层继承glSurfaceView,实现callback接口 和 renderer接口

package com.example.opengltest;

import android.content.Context;
import android.opengl.GLSurfaceView;
import android.util.Log;
import android.view.Surface;
import android.view.SurfaceHolder;

import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;

/**
 * GLSurfaceView subclass whose EGL environment is created and torn down by
 * native code (initEgl/closeEgl) instead of by GLSurfaceView's own GL thread.
 * The Renderer callbacks are intentionally empty: all drawing happens in the
 * native layer.
 */
public class LGlsurfaceView extends GLSurfaceView implements SurfaceHolder.Callback, GLSurfaceView.Renderer {//

    static {
        // Native library that implements initEgl()/closeEgl().
        System.loadLibrary("lammyVideoPlayer");
    }

    public LGlsurfaceView(Context context) {
        super(context);
        // GLSurfaceView requires a renderer to be set before the surface appears.
        setRenderer(this);
    }

    @Override
    public void surfaceCreated(SurfaceHolder holder){
        // NOTE(review): super.surfaceCreated(holder) is deliberately NOT called,
        // presumably to keep GLSurfaceView's built-in GL thread away from the
        // surface so the native code owns EGL exclusively — confirm this is the
        // intended behavior.
        initEgl(holder.getSurface());
    }

    @Override
    public void surfaceChanged(SurfaceHolder holder, int format, int width,
                               int height){
        // Size changes are ignored; the native layer queries the surface itself.
    }

    @Override
    public void surfaceDestroyed(SurfaceHolder holder){
        // Release the native EGL display/surface/context.
        closeEgl();
    }


    @Override
    public void onSurfaceCreated(GL10 gl, EGLConfig config) {
    }

    @Override
    public void onSurfaceChanged(GL10 gl, int width, int height) {
    }

    @Override
    public void onDrawFrame(GL10 gl) {
    }


    /** Native: build the EGL environment on the given window surface. */
    native private  void initEgl(Surface surface);

    /** Native: destroy the EGL environment created by initEgl(). */
    native private  void closeEgl();


}

initEgl是底层实现 EGL环境的搭建

closeEgl是关闭底层EGL渲染环境

EGL创建和关闭的类和源文件

//
// Created by lammy on 2019/4/20.
//

#ifndef LAMMYOPENGLFFMPEGVIDEOPLAYER_2_LEGL_H
#define LAMMYOPENGLFFMPEGVIDEOPLAYER_2_LEGL_H


#include <mutex>
#include <EGL/egl.h>


// Singleton holder for the EGL rendering environment: display connection,
// on-screen window surface and GLES context.  Obtain the single instance via
// Get().  Init()/Draw()/Close() serialize access to the three handles with
// `mux`, so they may be invoked from different threads; note, however, that
// GL drawing itself must happen on the thread the context is current on
// (eglMakeCurrent) — see the callers.
class LEGL {
public:
    EGLDisplay display = EGL_NO_DISPLAY;  // display connection; EGL_NO_DISPLAY when closed
    EGLSurface surface = EGL_NO_SURFACE;  // window surface created from an ANativeWindow
    EGLContext context = EGL_NO_CONTEXT;  // GLES rendering context
    std::mutex mux;                       // guards the three handles above

public:
     bool Init(void *win);   // build the environment; win is an ANativeWindow*
     void Close();           // destroy surface/context, terminate the display
     void Draw();            // present the back buffer via eglSwapBuffers
     static LEGL *Get();     // process-wide singleton accessor

};


#endif //LAMMYOPENGLFFMPEGVIDEOPLAYER_2_LEGL_H
//
// Created by lammy on 2019/4/20.
//

#include <Log.h>
#include <android/native_window_jni.h>
#include <ggl.h>
#include "LEGL.h"

void LEGL::Draw()
{
    mux.lock();
    if(display == EGL_NO_DISPLAY || surface == EGL_NO_SURFACE)
    {
        LOGE("draw..EGL_NO_DISPLAY.....................................................1111111111111111.");
        mux.unlock();
        return;
    }
    glFinish();
    bool re = eglSwapBuffers(display,surface);

   if(re == EGL_FALSE)
   {
       LOGE("draw failed ......");
   }
    LOGE("draw success ......");

    mux.unlock();
}
 void LEGL::Close()
{
    mux.lock();
    if(display == EGL_NO_DISPLAY)
    {
        mux.unlock();
        return;
    }
    eglMakeCurrent(display,EGL_NO_SURFACE,EGL_NO_SURFACE,EGL_NO_CONTEXT);

    if(surface != EGL_NO_SURFACE)
        eglDestroySurface(display,surface);
    if(context != EGL_NO_CONTEXT)
        eglDestroyContext(display,context);

    eglTerminate(display);

    display = EGL_NO_DISPLAY;
    surface = EGL_NO_SURFACE;
    context = EGL_NO_CONTEXT;
    mux.unlock();
}



bool LEGL::Init(void *win)
{

    Close();
    //初始化EGL
    mux.lock();
    /*************************EGL环境创建*********************/
    //1、 display 获得显示的对象
    display = eglGetDisplay(EGL_DEFAULT_DISPLAY);
    if(display == EGL_NO_DISPLAY)
    {
        LOGE("display failed");
        mux.unlock();
        return false;
    }
    //1.2初始化,第二 和 三个参数,是主版本号,次版本号 EGL版本号
    if(EGL_TRUE != eglInitialize(display, 0 , 0))
    {
        LOGE("eglInitialize failed");
        mux.unlock();
        return false;

    }
    //2、surface
    //2.1surface窗口配置
    EGLConfig  eglConfig = 0;// 输出config项
    EGLint configNum = 0;//输出config数量
    EGLint configSpec[]={
            EGL_RED_SIZE,8,
            EGL_GREEN_SIZE,8,
            EGL_BLUE_SIZE,8,
            EGL_SURFACE_TYPE, EGL_WINDOW_BIT,
            EGL_NONE// 结尾符
    };
    if(EGL_TRUE != eglChooseConfig(display, configSpec, &eglConfig,1,&configNum))
    {
        LOGE("eglChooseConfig failed");
        mux.unlock();
        return false;
    }
  // 创建 surface
    surface = eglCreateWindowSurface(display, eglConfig, ( ANativeWindow *)win,0 );// 最后一个是版本信息,这里是默认的
    if(surface == EGL_NO_SURFACE){
        LOGE("eglCreateWindowSurface failed");
        mux.unlock();
        return false;
    }

    // 创建关联上下文 EGL_NO_CONTEXT 这个参数是 多个设备共享 上下文时候,传入context,这里用不到,传EGL_NO_CONTEXT
    const EGLint ctxATTR[]{
            EGL_CONTEXT_CLIENT_VERSION, 2 , EGL_NONE
    };
     context = eglCreateContext(display, eglConfig, EGL_NO_CONTEXT,ctxATTR);

    if(context == EGL_NO_CONTEXT)
    {
        LOGE("eglCreateContext failed");
        mux.unlock();
        return false;
    }
    // 将上下文,和 surface 和 display 关联。 并且在当前线程中启动当前的渲染环境
    if(EGL_TRUE != eglMakeCurrent(display, surface, surface,context))// 2、3 2个surface 一个读 一个写,双缓冲
    {
        LOGE("eglMakeCurrent failed");
        mux.unlock();
        return false;
    }
    LOGW("RGL init success");
    mux.unlock();
    return true;
}


// Singleton accessor (Meyers singleton): the instance is constructed lazily
// on first use and its initialization is thread-safe per C++11.
LEGL *LEGL::Get()
{
    static LEGL instance;
    return &instance;
}

根据上层传入的surfaceview 创建nativeWindow,然后利用window创建:

 EGLDisplay display = EGL_NO_DISPLAY;
 EGLSurface surface = EGL_NO_SURFACE;
 EGLContext context = EGL_NO_CONTEXT;

最后启用创建好的环境,即调用 eglMakeCurrent 将 context、surface 和 display 结合起来。

调用EGL

在surfaceview Surface创建后,初始化 opengl的shader 创建program 绘制,绘制完毕后,调用eglSwapBuffers(display,surface); 来将opengl绘制的纹理渲染到 窗口。在上面的EGL中已经封装在Draw函数内部了。

note:渲染的线程 必须 与 EGL的创建,即 Init 必须在同一个线程,否则就会绘制失效。这里主要是因为 Init 内部调用了 eglMakeCurrent(display, surface, surface, context);当然也可以在别的线程先初始化好 display、surface 和 context,然后在渲染线程中、绘制前再调用一次 eglMakeCurrent。

JNI层调用例子:

// Stop flag for the render loop, set from another thread to end rendering.
// NOTE(review): read and written from different threads without
// synchronization — this is a data race and should be std::atomic<bool>;
// left as a plain bool here because code outside this snippet also touches it.
bool isExit = false;

// Render-thread entry point: binds the EGL context to this thread, then
// draws in a loop until isExit is raised.
void test(ANativeWindow *win)
{
//    LEGL::Get()->Init(win);
    // The context must be current on THIS thread before any GL call works
    // (it was made current on the JNI thread inside Init()).
    if(EGL_TRUE != eglMakeCurrent(LEGL::Get()->display, LEGL::Get()->surface,
                                  LEGL::Get()->surface, LEGL::Get()->context))// draw/read surfaces — double buffering
    {
        LOGE("eglMakeCurrent failed");
        return;
    }
    glProgram = new GLProgram();
    glProgram->init();
    while(!isExit){
        LOGE("draw ing.............");

        glProgram->Draw();   // issue the GL draw calls
        LEGL::Get()->Draw(); // present via eglSwapBuffers
    }
    // BUG FIX: the program object was leaked when the loop exited.
    delete glProgram;
    glProgram = nullptr;
    LOGE("draw close.............");
}
// JNI entry: builds the EGL environment for the Java Surface and starts the
// render thread.  Called from LGlsurfaceView.surfaceCreated().
extern "C"
JNIEXPORT void JNICALL
Java_com_example_opengltest_LGlsurfaceView_initEgl(JNIEnv *env, jobject instance, jobject surface)
{
    LOGE("initEgl.............");
    isExit = false;
    // NOTE(review): the window acquired here is never released with
    // ANativeWindow_release() — confirm where its lifetime should end.
    ANativeWindow *win = ANativeWindow_fromSurface(env,surface);
    LEGL::Get()->Init(win);
    // BUG FIX: Init() leaves the context current on this JNI thread, and an
    // EGL context can be current on only one thread at a time; unbind it here
    // so the render thread's eglMakeCurrent() does not fail with EGL_BAD_ACCESS.
    eglMakeCurrent(LEGL::Get()->display, EGL_NO_SURFACE, EGL_NO_SURFACE, EGL_NO_CONTEXT);
    std::thread a(test,win);
    a.detach();  // detached: the loop is stopped cooperatively via isExit
}

如果不在渲染前 调用eglMakeCurrent,是无法绘制出来的。

下面给出我的demo,欢迎各位老铁下载。

要在 GLSurfaceView 中显示 YUV 数据,你需要将 YUV 数据转换为 RGB 数据,然后将 RGB 数据传递给 OpenGL ES,最后在 GLSurfaceView 中显示。这个过程可以通过 JNI 来完成。 以下是一个简单的示例代码: 1. Java 代码: ``` public class YuvRenderer implements GLSurfaceView.Renderer { private static final String TAG = "YuvRenderer"; private int mTextureId; private int mProgram; private int mPositionHandle; private int mTexCoordHandle; private int mYuvWidth; private int mYuvHeight; private ByteBuffer mYuvBuffer; public YuvRenderer() { mYuvWidth = 0; mYuvHeight = 0; mYuvBuffer = null; } public void setYuvData(int width, int height, byte[] yuvData) { mYuvWidth = width; mYuvHeight = height; mYuvBuffer = ByteBuffer.wrap(yuvData); } @Override public void onSurfaceCreated(GL10 gl, EGLConfig config) { GLES20.glClearColor(0.0f, 0.0f, 0.0f, 1.0f); GLES20.glDisable(GLES20.GL_DEPTH_TEST); mProgram = createProgram(); mPositionHandle = GLES20.glGetAttribLocation(mProgram, "aPosition"); mTexCoordHandle = GLES20.glGetAttribLocation(mProgram, "aTexCoord"); int textureUniformHandle = GLES20.glGetUniformLocation(mProgram, "uTexture"); int[] textureIds = new int[1]; GLES20.glGenTextures(1, textureIds, 0); mTextureId = textureIds[0]; GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureId); GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR); GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR); GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE); GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE); GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, mYuvWidth / 2, mYuvHeight / 2, 0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, null); GLES20.glUseProgram(mProgram); GLES20.glVertexAttribPointer(mPositionHandle, 2, GLES20.GL_FLOAT, false, 0, createVertexBuffer()); GLES20.glVertexAttribPointer(mTexCoordHandle, 2, GLES20.GL_FLOAT, false, 0, createTexCoordBuffer()); 
GLES20.glEnableVertexAttribArray(mPositionHandle); GLES20.glEnableVertexAttribArray(mTexCoordHandle); GLES20.glUniform1i(textureUniformHandle, 0); } @Override public void onSurfaceChanged(GL10 gl, int width, int height) { GLES20.glViewport(0, 0, width, height); } @Override public void onDrawFrame(GL10 gl) { if (mYuvBuffer == null) { GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT); return; } GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT); byte[] yuvData = mYuvBuffer.array(); int[] rgbData = new int[mYuvWidth * mYuvHeight]; YuvUtils.convertYUV420ToRGB8888(yuvData, rgbData, mYuvWidth, mYuvHeight); GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureId); GLES20.glTexSubImage2D(GLES20.GL_TEXTURE_2D, 0, 0, 0, mYuvWidth / 2, mYuvHeight / 2, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, ByteBuffer.wrap(rgbData)); GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4); } private int createProgram() { int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, VERTEX_SHADER_CODE); int fragmentShader = loadShader(GLES20.GL_FRAGMENT_SHADER, FRAGMENT_SHADER_CODE); int program = GLES20.glCreateProgram(); GLES20.glAttachShader(program, vertexShader); GLES20.glAttachShader(program, fragmentShader); GLES20.glLinkProgram(program); return program; } private int loadShader(int shaderType, String shaderCode) { int shader = GLES20.glCreateShader(shaderType); GLES20.glShaderSource(shader, shaderCode); GLES20.glCompileShader(shader); return shader; } private FloatBuffer createVertexBuffer() { float[] vertexData = new float[] { -1.0f, -1.0f, 1.0f, -1.0f, -1.0f, 1.0f, 1.0f, 1.0f, }; ByteBuffer byteBuffer = ByteBuffer.allocateDirect(vertexData.length * 4); byteBuffer.order(ByteOrder.nativeOrder()); FloatBuffer buffer = byteBuffer.asFloatBuffer(); buffer.put(vertexData); buffer.position(0); return buffer; } private FloatBuffer createTexCoordBuffer() { float[] texCoordData = new float[] { 0.0f, 0.0f, 1.0f, 0.0f, 0.0f, 1.0f, 1.0f, 1.0f, }; ByteBuffer byteBuffer = ByteBuffer.allocateDirect(texCoordData.length * 4); 
byteBuffer.order(ByteOrder.nativeOrder()); FloatBuffer buffer = byteBuffer.asFloatBuffer(); buffer.put(texCoordData); buffer.position(0); return buffer; } private static final String VERTEX_SHADER_CODE = "attribute vec4 aPosition;\n" + "attribute vec2 aTexCoord;\n" + "varying vec2 vTexCoord;\n" + "void main() {\n" + " gl_Position = aPosition;\n" + " vTexCoord = aTexCoord;\n" + "}"; private static final String FRAGMENT_SHADER_CODE = "precision mediump float;\n" + "uniform sampler2D uTexture;\n" + "varying vec2 vTexCoord;\n" + "void main() {\n" + " gl_FragColor = texture2D(uTexture, vTexCoord);\n" + "}"; } ``` 2. JNI 代码: ``` JNIEXPORT void JNICALL Java_com_example_yuvrenderer_YuvRenderer_setYuvData(JNIEnv *env, jobject obj, jint width, jint height, jbyteArray yuvData) { jclass clazz = env->GetObjectClass(obj); jfieldID yuvWidthField = env->GetFieldID(clazz, "mYuvWidth", "I"); jint yuvWidth = env->GetIntField(obj, yuvWidthField); jfieldID yuvHeightField = env->GetFieldID(clazz, "mYuvHeight", "I"); jint yuvHeight = env->GetIntField(obj, yuvHeightField); jbyte* yuvDataPtr = env->GetByteArrayElements(yuvData, NULL); jsize yuvDataSize = env->GetArrayLength(yuvData); if (yuvWidth != width || yuvHeight != height) { env->SetIntField(obj, yuvWidthField, width); env->SetIntField(obj, yuvHeightField, height); jclass byteBufferClazz = env->FindClass("java/nio/ByteBuffer"); jmethodID allocateDirectMethod = env->GetStaticMethodID(byteBufferClazz, "allocateDirect", "(I)Ljava/nio/ByteBuffer;"); jobject yuvBuffer = env->CallStaticObjectMethod(byteBufferClazz, allocateDirectMethod, yuvDataSize); env->SetObjectField(obj, env->GetFieldID(clazz, "mYuvBuffer", "Ljava/nio/ByteBuffer;"), yuvBuffer); } jobject yuvBuffer = env->GetObjectField(obj, env->GetFieldID(clazz, "mYuvBuffer", "Ljava/nio/ByteBuffer;")); env->GetDirectBufferAddress(yuvBuffer); memcpy(yuvBufferPtr, yuvDataPtr, yuvDataSize); env->ReleaseByteArrayElements(yuvData, yuvDataPtr, JNI_ABORT); } ``` 这个示例代码中假设 YUV 数据是 NV21 
格式的,你需要根据你的 YUV 数据格式进行相应的修改。
评论 3
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值