Android 系统下,通过 C++ 方式调用 GPU 实现硬件解码的纹理数据转为 RGB 格式是可行的。具体实现步骤如下:
- 获取 EGLDisplay、EGLSurface 和 EGLContext。
- 创建一个 OpenGL ES 2.0 的上下文,并将其设置为当前上下文。
- 创建一个 OpenGL ES 2.0 着色器程序对象,编译和链接着色器程序。
- 创建 OpenGL ES 2.0 纹理对象,并将解码后的数据绑定到纹理对象上。
- 在着色器程序中,使用 Vertex Shader 将顶点坐标、纹理坐标传递给 Fragment Shader。
- 在着色器程序中,使用 Fragment Shader 从纹理对象中读取像素值,进行颜色转换,输出 RGB 值。
- 将渲染结果保存到帧缓冲区中。
- 从帧缓冲区中读取渲染结果,存储为 RGB 数据。
下面是一个简单的示例代码,实现了将 YUV420P 格式的视频解码为 RGB 格式并显示在屏幕上:
```cpp
include <GLES2/gl2.h>
include <EGL/egl.h>
define LOG_TAG "OpenGL"
define LOGI(...) ((void)androidlogprint(ANDROID_LOGINFO, LOGTAG, _VAARGS))
define LOGE(...) ((void)androidlogprint(ANDROID_LOGERROR, LOGTAG, _VAARGS))
static const char* VERTEXSHADERSRC = "attribute vec4 aposition;\n" "attribute vec2 atexCoord;\n" "varying vec2 vtexCoord;\n" "void main() {\n" " glPosition = aposition;\n" " vtexCoord = a_texCoord;\n" "}\n";
static const char* FRAGMENTSHADERSRC = "precision mediump float;\n" "varying vec2 vtexCoord;\n" "uniform sampler2D stexture;\n" "const mat3 yuv2rgb = mat3(1.0, 1.0, 1.0,\n" " 0.0, -0.39465, 2.03211,\n" " 1.13983, -0.58060, 0.0);\n" "void main() {\n" " vec3 yuv = vec3(texture2D(stexture, vtexCoord).r,\n" " texture2D(stexture, vec2(vtexCoord.x + 0.5 / 128.0, v_texCoord.y)).r,\n" " texture2D(stexture, vec2(vtexCoord.x, vtexCoord.y + 0.5 / 128.0)).r);\n" " vec3 rgb = yuv2rgb * yuv;\n" " glFragColor = vec4(rgb, 1.0);\n" "}\n";
GLuint program; GLuint texture;
void initGL() { const GLfloat vertexData[] = { // x y u v -1.0f, -1.0f, 0.0f, 0.0f, -1.0f, 1.0f, 0.0f, 1.0f, 1.0f, -1.0f, 1.0f, 0.0f, 1.0f, 1.0f, 1.0f, 1.0f };
glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
glEnable(GL_TEXTURE_2D);
glGenTextures(1, &texture);
glBindTexture(GL_TEXTURE_2D, texture);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
program = glCreateProgram();
GLuint vertexShader = loadShader(GL_VERTEX_SHADER, VERTEX_SHADER_SRC);
glBindAttribLocation(program, 0, "a_position");
glBindAttribLocation(program, 1, "a_texCoord");
glAttachShader(program, vertexShader);
GLuint fragmentShader = loadShader(GL_FRAGMENT_SHADER, FRAGMENT_SHADER_SRC);
glAttachShader(program, fragmentShader);
glLinkProgram(program);
GLint linkStatus;
glGetProgramiv(program, GL_LINK_STATUS, &linkStatus);
if (linkStatus == GL_FALSE) {
GLint infoLogLength;
glGetProgramiv(program, GL_INFO_LOG_LENGTH, &infoLogLength);
char* infoLog = new char[infoLogLength];
glGetProgramInfoLog(program, infoLogLength, NULL, infoLog);
LOGE("Error: Could not link program:\n%s\n", infoLog);
delete[] infoLog;
}
GLint positionHandle = glGetAttribLocation(program, "a_position");
glVertexAttribPointer(positionHandle, 2, GL_FLOAT, GL_FALSE, 16, vertexData);
glEnableVertexAttribArray(positionHandle);
GLint texCoordHandle = glGetAttribLocation(program, "a_texCoord");
glVertexAttribPointer(texCoordHandle, 2, GL_FLOAT, GL_FALSE, 16, vertexData + 2);
glEnableVertexAttribArray(texCoordHandle);
}
void renderFrame(uint8t* yuvData, int width, int height) { glBindTexture(GLTEXTURE2D, texture); glTexImage2D(GLTEXTURE2D, 0, GLLUMINANCE, width, height, 0, GLLUMINANCE, GLUNSIGNED_BYTE, yuvData);
glViewport(0, 0, width, height);
glClear(GL_COLOR_BUFFER_BIT);
glUseProgram(program);
GLint textureHandle = glGetUniformLocation(program, "s_texture");
glUniform1i(textureHandle, 0);
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
}
// Release the GL objects created by initGL(). Call once at shutdown,
// while the EGL context is still current.
void destroyGL() {
    glDeleteTextures(1, &texture);
    glDeleteProgram(program);
}
// Compile a single shader of the given type (GL_VERTEX_SHADER or
// GL_FRAGMENT_SHADER) from GLSL source.
// Returns the shader object handle, or 0 on failure (details logged via LOGE).
GLuint loadShader(GLenum shaderType, const char* shaderSource) {
    GLuint shader = glCreateShader(shaderType);
    // glCreateShader returns 0 on error; the original passed that 0 straight
    // into glShaderSource, silently producing GL errors downstream.
    if (shader == 0) {
        LOGE("Error: glCreateShader(%u) failed\n", (unsigned)shaderType);
        return 0;
    }
    glShaderSource(shader, 1, &shaderSource, NULL);
    glCompileShader(shader);

    GLint compileStatus = GL_FALSE;
    glGetShaderiv(shader, GL_COMPILE_STATUS, &compileStatus);
    if (compileStatus == GL_FALSE) {
        GLint infoLogLength = 0;
        glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &infoLogLength);
        // Guard against a zero-length info log.
        char* infoLog = new char[infoLogLength > 0 ? infoLogLength : 1];
        infoLog[0] = '\0';
        glGetShaderInfoLog(shader, infoLogLength, NULL, infoLog);
        LOGE("Error: Could not compile shader:\n%s\n", infoLog);
        delete[] infoLog;
        glDeleteShader(shader);
        return 0;
    }
    return shader;
}
```
在调用 initGL() 完成 OpenGL 初始化之后,即可在每一帧调用 renderFrame() 将解码后的 YUV 数据上传并渲染到屏幕上;程序退出时调用 destroyGL() 释放纹理和着色器程序等资源。
该代码使用了纹理和着色器,在 Fragment Shader 中进行了颜色转换,将 YUV 数据转为 RGB 数据,并输出到屏幕上。
★文末名片可以免费领取音视频开发学习资料,内容包括(FFmpeg ,webRTC ,rtmp ,hls ,rtsp ,ffplay ,srs)以及音视频学习路线图等等。
见下方!↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓↓