Uploading FFmpeg-Decoded Frames to OpenGL ES and Converting YUV to RGB on the GPU

https://www.jianshu.com/p/eed347f56d76

 

This article describes how to upload FFmpeg-decoded frame data in several YUV and RGB formats to OpenGL ES, and how to convert YUV to RGB on the GPU. For background on YUV-to-RGB conversion, see my other article 音视频开发:RGB与YUV相互转换问题 (on converting between RGB and YUV).

1. YUV

1.1 YUV420p

In YUV420p the three planes (Y, Cb, Cr) are stored separately, so each plane is uploaded as its own single-channel texture; the chroma planes are half the luma resolution in both dimensions.

const uint8_t *pixels[]  = { luma, cb, cr };
const int      widths[]  = { frameWidth,  frameWidth  / 2, frameWidth  / 2 };
const int      heights[] = { frameHeight, frameHeight / 2, frameHeight / 2 };

for (int i = 0; i < 3; ++i)
{
    // Upload each plane as a single-channel (luminance) texture.
    glBindTexture(GL_TEXTURE_2D, textures[i]);

    glTexImage2D(GL_TEXTURE_2D,
                 0,
                 GL_LUMINANCE,
                 widths[i],
                 heights[i],
                 0,
                 GL_LUMINANCE,
                 GL_UNSIGNED_BYTE,
                 pixels[i]);

    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
}

Here internalFormat is GL_LUMINANCE, the texture format is also GL_LUMINANCE, and the pixel data type is GL_UNSIGNED_BYTE, so each texel carries one 8-bit sample.
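For completeness, here is one way the three textures and sampler uniforms might be created and wired to texture units before the per-frame upload above; this is a sketch, and names such as program and textures are illustrative rather than taken from the original code:

// One-time setup: three textures, and each sampler uniform bound to its texture unit.
GLuint textures[3];
glGenTextures(3, textures);

glUseProgram(program);
glUniform1i(glGetUniformLocation(program, "us2_SamplerX"), 0);   // Y  -> unit 0
glUniform1i(glGetUniformLocation(program, "us2_SamplerY"), 1);   // Cb -> unit 1
glUniform1i(glGetUniformLocation(program, "us2_SamplerZ"), 2);   // Cr -> unit 2

// Per frame: select the matching unit before binding and uploading each plane.
for (int i = 0; i < 3; ++i)
{
    glActiveTexture(GL_TEXTURE0 + i);
    glBindTexture(GL_TEXTURE_2D, textures[i]);
    // ... glTexImage2D upload as in the loop above ...
}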

An example fragment shader that converts YUV to RGB follows.

Video range (luma limited to [16, 235]):

precision highp float;
varying   highp vec2 vv2_Texcoord;
uniform         mat3 um3_ColorConversion;
uniform   lowp  sampler2D us2_SamplerX;
uniform   lowp  sampler2D us2_SamplerY;
uniform   lowp  sampler2D us2_SamplerZ;

void main()
{
    mediump vec3 yuv;
    lowp    vec3 rgb;

    // Subtract the offsets so video-range YUV starts at zero
    yuv.x = (texture2D(us2_SamplerX, vv2_Texcoord).r - (16.0 / 255.0));
    yuv.y = (texture2D(us2_SamplerY, vv2_Texcoord).r - 0.5);
    yuv.z = (texture2D(us2_SamplerZ, vv2_Texcoord).r - 0.5);
    rgb = um3_ColorConversion * yuv;
    gl_FragColor = vec4(rgb, 1);
}

For video-range content, the luma channel is shifted down by 16.0/255.0 so that it starts at zero, and the conversion matrix then scales it back up to full range; full-range content needs no such offset.
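The um3_ColorConversion uniform used by the shader is the YUV-to-RGB matrix itself. As a sketch, the widely used BT.601 and BT.709 video-range matrices (column-major, as glUniformMatrix3fv expects) might be supplied like this; the uniform lookup and the program name are illustrative:

// BT.601 video range, column-major: the columns multiply Y, U and V respectively.
static const GLfloat kColorConversion601VideoRange[] = {
    1.164f,  1.164f,  1.164f,
    0.0f,   -0.392f,  2.017f,
    1.596f, -0.813f,  0.0f,
};

// BT.709 video range.
static const GLfloat kColorConversion709VideoRange[] = {
    1.164f,  1.164f,  1.164f,
    0.0f,   -0.213f,  2.112f,
    1.793f, -0.533f,  0.0f,
};

glUniformMatrix3fv(glGetUniformLocation(program, "um3_ColorConversion"),
                   1, GL_FALSE, kColorConversion601VideoRange);

For full-range content the 1.164 luma scale becomes 1.0, the chroma coefficients change slightly, and the 16.0/255.0 offset in the shader is dropped.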

1.2 YUV420sp

In YUV420sp (NV12/NV21) the U and V samples are interleaved in a single plane, so the whole UV plane is uploaded to the GPU as one two-channel texture.

const uint8_t *pixels[]  = { luma, cbcr };
const int      widths[]  = { frameWidth,  frameWidth  / 2 };
const int      heights[] = { frameHeight, frameHeight / 2 };

// Y: single-channel texture (GL_RED_EXT requires GL_EXT_texture_rg on OpenGL ES 2.0).
glBindTexture(GL_TEXTURE_2D, plane[0]);
glTexImage2D(GL_TEXTURE_2D,
             0,
             GL_RED_EXT,
             widths[0],
             heights[0],
             0,
             GL_RED_EXT,
             GL_UNSIGNED_BYTE,
             pixels[0]);

// UV: interleaved CbCr uploaded as a two-channel texture at half resolution.
glBindTexture(GL_TEXTURE_2D, plane[1]);
glTexImage2D(GL_TEXTURE_2D,
             0,
             GL_RG_EXT,
             widths[1],
             heights[1],
             0,
             GL_RG_EXT,
             GL_UNSIGNED_BYTE,
             pixels[1]);

Fragment shader (video range), written against OpenGL ES 3.0 (GLSL ES 3.00):

#version 300 es
precision highp float;

in highp vec2 vv2_Texcoord;

uniform       mat3 um3_ColorConversion;
uniform lowp  sampler2D us2_SamplerX;
uniform lowp  sampler2D us2_SamplerY;

out vec4 fragColor;

void main()
{
    mediump vec3 yuv;
    lowp    vec3 rgb;

    // Subtract the offsets so video-range YUV starts at zero
    yuv.x  = texture(us2_SamplerX, vv2_Texcoord).r - (16.0 / 255.0);
    // GL_RG texture: chroma is in the r/g channels (use .ra with GL_LUMINANCE_ALPHA)
    yuv.yz = texture(us2_SamplerY, vv2_Texcoord).rg - vec2(0.5, 0.5);
    rgb = um3_ColorConversion * yuv;
    fragColor = vec4(rgb, 1.0);
}

On iOS, when rendering VideoToolbox output with OpenGL ES 2.0, the luma and chroma textures can be created from the decoded CVPixelBuffer via CVOpenGLESTextureCacheCreateTextureFromImage using GL_RED_EXT and GL_RG_EXT; the Y plane is then sampled from the red channel and the interleaved UV plane from the red/green channels, as illustrated below.

[Figures: the Y plane rendered as a RED texture, the UV plane as an RG texture, and the final converted YUV420SP image]

With OpenGL ES 3.0, the textures can instead be created with GL_LUMINANCE and GL_LUMINANCE_ALPHA (in which case the chroma is sampled as .ra).
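Below is a minimal sketch of the ES 2.0 texture-cache path described above. It assumes a CVOpenGLESTextureCacheRef named textureCache has already been created with CVOpenGLESTextureCacheCreate, and that pixelBuffer, width and height come from the decoded frame; these names are illustrative:

#include <CoreVideo/CoreVideo.h>
#include <OpenGLES/ES2/gl.h>
#include <OpenGLES/ES2/glext.h>

// Y plane: single-channel (red) texture, plane index 0.
CVOpenGLESTextureRef lumaTexture = NULL;
CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, textureCache,
                                             pixelBuffer, NULL, GL_TEXTURE_2D,
                                             GL_RED_EXT, width, height,
                                             GL_RED_EXT, GL_UNSIGNED_BYTE,
                                             0, &lumaTexture);

// UV plane: two-channel (red/green) texture at half resolution, plane index 1.
CVOpenGLESTextureRef chromaTexture = NULL;
CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, textureCache,
                                             pixelBuffer, NULL, GL_TEXTURE_2D,
                                             GL_RG_EXT, width / 2, height / 2,
                                             GL_RG_EXT, GL_UNSIGNED_BYTE,
                                             1, &chromaTexture);

// Bind like any other texture; no glTexImage2D upload is needed.
glBindTexture(CVOpenGLESTextureGetTarget(lumaTexture), CVOpenGLESTextureGetName(lumaTexture));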

1.3 YUV444p10LE

In yuv444p10le every plane is full resolution and each sample occupies two bytes (10 bits stored little-endian in 16 bits). Each 16-bit sample is uploaded as one GL_LUMINANCE_ALPHA texel (low byte in luminance, high byte in alpha) and reassembled in the shader, so the texture width is the plane width in samples:

const uint8_t *pixels[]   = { luma, cb, cr };
const GLsizei  widths[]   = { frameWidth,  frameWidth,  frameWidth };
const GLsizei  heights[]  = { frameHeight, frameHeight, frameHeight };

for (int i = 0; i < 3; ++i)
{
    glBindTexture(GL_TEXTURE_2D, renderer->plane_textures[i]);

    // Two bytes per texel (assuming tightly packed rows, i.e. linesize == 2 * frameWidth):
    // GL_LUMINANCE carries the low byte, GL_ALPHA the high byte.
    glTexImage2D(GL_TEXTURE_2D,
                 0,
                 GL_LUMINANCE_ALPHA,
                 widths[i],
                 heights[i],
                 0,
                 GL_LUMINANCE_ALPHA,
                 GL_UNSIGNED_BYTE,
                 pixels[i]);
}

Fragment shader (video range):

precision highp float;
varying   highp vec2 vv2_Texcoord;
uniform         mat3 um3_ColorConversion;
uniform   lowp  sampler2D us2_SamplerX;
uniform   lowp  sampler2D us2_SamplerY;
uniform   lowp  sampler2D us2_SamplerZ;

void main()
{
    mediump vec3 yuv_l;
    mediump vec3 yuv_h;
    mediump vec3 yuv;
    lowp    vec3 rgb;
    
    // .r holds the low byte and .a the high byte of each 16-bit little-endian sample.
    yuv_l.x = texture2D(us2_SamplerX, vv2_Texcoord).r;
    yuv_h.x = texture2D(us2_SamplerX, vv2_Texcoord).a;
    yuv_l.y = texture2D(us2_SamplerY, vv2_Texcoord).r;
    yuv_h.y = texture2D(us2_SamplerY, vv2_Texcoord).a;
    yuv_l.z = texture2D(us2_SamplerZ, vv2_Texcoord).r;
    yuv_h.z = texture2D(us2_SamplerZ, vv2_Texcoord).a;

    // Recombine (low + high*256), normalize by 1023, then subtract the video-range offsets.
    yuv = (yuv_l * 255.0 + yuv_h * 255.0 * 256.0) / 1023.0 - vec3(16.0 / 255.0, 0.5, 0.5);
    
    rgb = um3_ColorConversion * yuv;
    gl_FragColor = vec4(rgb, 1);
}
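For reference, this is the CPU-side view of what the shader reconstructs; the function is purely illustrative and assumes little-endian 10-in-16-bit samples with tightly packed planes:

#include <stdint.h>

// One yuv444p10le sample occupies two bytes and holds a value in [0, 1023].
static float sample_to_normalized(const uint8_t *plane, int i)
{
    uint8_t  low  = plane[2 * i];      /* uploaded as texel .r (luminance) */
    uint8_t  high = plane[2 * i + 1];  /* uploaded as texel .a (alpha)     */
    uint16_t v    = (uint16_t)(low | (high << 8));

    /* Shader: (yuv_l*255 + yuv_h*255*256) / 1023 == (low + high*256) / 1023 */
    return (float)v / 1023.0f;
}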

2. RGB

2.1 RGB888

// RGB888 rows are tightly packed (3 bytes per pixel), so relax the default 4-byte unpack alignment.
glPixelStorei(GL_UNPACK_ALIGNMENT, 1);

glTexImage2D(GL_TEXTURE_2D,
             0,
             GL_RGB,
             width,
             height,
             0,
             GL_RGB,
             GL_UNSIGNED_BYTE,
             pixels);

Fragment Shader:

precision highp float;
varying   highp vec2 vv2_Texcoord;
uniform   lowp  sampler2D us2_SamplerX;

void main()
{
    gl_FragColor = vec4(texture2D(us2_SamplerX, vv2_Texcoord).rgb, 1);
}

RGB888, RGB565, and RGBA can share the same fragment shader. For RGBA, the texture's own alpha channel can be read directly:

gl_FragColor = texture2D(us2_SamplerX, vv2_Texcoord);

2.2 RGBA

glTexImage2D(GL_TEXTURE_2D,
             0,
             GL_RGBA,
             width,
             height,
             0,
             GL_RGBA,
             GL_UNSIGNED_BYTE,
             pixels);

2.3 RGB565

// Each pixel is a single 16-bit value: 5 bits red, 6 bits green, 5 bits blue.
glTexImage2D(GL_TEXTURE_2D,
             0,
             GL_RGB,
             width,
             height,
             0,
             GL_RGB,
             GL_UNSIGNED_SHORT_5_6_5,
             pixels);

Question:

After FFmpeg decodes H.264, how do we know whether the content is full range or video range, i.e. which conversion matrix to apply?
The H.264 SPS carries VUI fields (such as video_full_range_flag and matrix_coefficients) that identify the color range and matrix; for details see my other article FFmpeg源码调试:解析H.264 SPS(Sequence Parameter Set)中视频的颜色空间及其范围.
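In practice the decoder also exposes this SPS information on each decoded frame, so the renderer can pick the matrix and offsets without parsing the bitstream itself. A minimal sketch using the public FFmpeg AVFrame fields; the helper names are illustrative:

#include <libavutil/frame.h>
#include <libavutil/pixfmt.h>

// 1 = full range (AVCOL_RANGE_JPEG), 0 = video range (AVCOL_RANGE_MPEG) or unspecified.
static int frame_is_full_range(const AVFrame *frame)
{
    return frame->color_range == AVCOL_RANGE_JPEG;
}

// 1 = use the BT.709 matrix, otherwise fall back to BT.601.
static int frame_is_bt709(const AVFrame *frame)
{
    return frame->colorspace == AVCOL_SPC_BT709;
}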


 
