项目中在android平台上,需要对nv12的yuv数据支持,其实在cpu中利用libyuv可以很快速的转换,但是为了性能,能直接从渲染器中支持是再好不过了。
Fragment Shader的代码
texture2D()返回的vec4有r、g、b、a四个分量。对于GL_LUMINANCE格式的纹理,采样时亮度值会被复制到r、g、b三个分量,所以取.r或.g结果相同(这不是巧合,是规范规定的行为)。这里需要非常注意的是:在glTexImage2D函数(即创建texture的函数)中,NV12的UV是交错存放在同一个平面里的,所以该纹理的格式应该用GL_LUMINANCE_ALPHA(U进r/g/b,V进a),而不是GL_LUMINANCE。
后面的mat3矩阵负责把YUV转换成RGB(注意GLSL的mat3构造函数是按列主序填充的,每一行参数对应矩阵的一列),之后再补上alpha分量组成vec4交给显卡渲染。
/*
 * Stringize the (unexpanded) argument list into a C string literal.
 *
 * Variadic (C99) so that shader source containing a top-level comma
 * still stringizes correctly; with the old single-parameter form such
 * a comma would split the argument and fail to compile. Existing
 * single-argument uses are unaffected.
 *
 * Note: the # operator collapses each run of whitespace to a single
 * space, and comments are removed (translation phase 3) before macro
 * expansion, so comments written inside GET_STR(...) never reach the
 * GLSL compiler.
 */
#define GET_STR(...) #__VA_ARGS__
// Invoked once per fragment the primitive is rasterized into.
// Fragment shader for planar YUV420P: Y, U and V arrive as three
// separate single-channel textures (presumably GL_LUMINANCE — confirm
// against the glTexImage2D calls, which are not visible in this file).
static const char *fragYUV420P = GET_STR(
precision
mediump float;
varying
vec2 vTextCoord;
// The three input Y/U/V plane textures
uniform
sampler2D yTexture;// sampler for the Y plane
uniform
sampler2D uTexture;// sampler for the U plane
uniform
sampler2D vTexture;// sampler for the V plane
void main() {
vec3 yuv;
vec3 rgb;
// GL_LUMINANCE replicates the texel value into r, g and b, so
// reading .g here is equivalent to reading .r.
yuv.x = texture2D(yTexture, vTextCoord).g;
// U and V are stored biased by 0.5 (128 in byte form); recenter
// them around zero before the matrix multiply.
yuv.y = texture2D(uTexture, vTextCoord).g - 0.5;
yuv.z = texture2D(vTexture, vTextCoord).g - 0.5;
// GLSL mat3 is column-major: each source line below is one COLUMN,
// i.e. the RGB contribution of Y, U and V respectively. These look
// like the classic BT.601 YUV->RGB coefficients — confirm against
// the decoder's color space.
rgb = mat3(
1.0, 1.0, 1.0,
0.0, -0.39465, 2.03211,
1.13983, -0.5806, 0.0
) * yuv;
// gl_FragColor is the GLSL built-in fragment color output
gl_FragColor = vec4(rgb, 1.0);
}
);
// Fragment shader for NV12: the Y plane is uploaded as a GL_LUMINANCE
// texture and the interleaved UV plane as a single GL_LUMINANCE_ALPHA
// texture (U lands in r/g/b, V in a).
static const char *fragNV12 = GET_STR(
precision
highp float;
varying
vec2 vTextCoord;
// Input textures: yTexture holds the Y plane, uTexture the interleaved
// UV plane. vTexture is declared but never sampled for NV12 — presumably
// kept so the host code can bind the same uniform names for all pixel
// formats; verify against the caller.
uniform
sampler2D yTexture;// sampler
uniform
sampler2D uTexture;// sampler
uniform
sampler2D vTexture;// sampler (unused for NV12)
void main() {
vec3 yuv;
vec3 rgb;
// We had put the Y values of each pixel to the R,G,B components by GL_LUMINANCE,
// that's why we're pulling it from the R component, we could also use G or B
yuv.x = texture2D(yTexture, vTextCoord).r;
// We had put the U and V values of each pixel to the R,G,B and A components of
// the texture respectively using GL_LUMINANCE_ALPHA. Since U,V bytes are
// interleaved in memory, one GL_LUMINANCE_ALPHA fetch yields both; the 0.5
// bias recenters chroma around zero.
yuv.y = texture2D(uTexture, vTextCoord).r - 0.5;
yuv.z = texture2D(uTexture, vTextCoord).a - 0.5;
// Column-major mat3: each source line is one column (the RGB contribution
// of Y, U, V respectively); apparently BT.601 YUV->RGB constants.
rgb = mat3(
1.0, 1.0, 1.0,
0.0, -0.39465, 2.03211,
1.13983, -0.5806, 0.0
) * yuv;
gl_FragColor = vec4(rgb, 1.0);
}
);