OpenGL播放yuv视频

http://www.cnblogs.com/weinyzhou/archive/2012/07/07/2592453.html

// data points to one frame of planar YUV420p pixels:
// a full-size Y plane followed by the half-size U and V planes.
- (void)playVideoData:(void *)data
{
    [EAGLContext setCurrentContext:_glContext];

    // Lazily create the three single-channel plane textures on the first frame.
    if (!_textureY)
    {
        glGenTextures(1, &_textureY);
        glGenTextures(1, &_textureU);
        glGenTextures(1, &_textureV);
    }

    // YUV420p planes are tightly packed, one byte per sample. The default
    // 4-byte unpack alignment corrupts the upload whenever a plane's row
    // width is not a multiple of 4 (the U/V planes are _videoW/2 wide).
    glPixelStorei(GL_UNPACK_ALIGNMENT, 1);

    // Arithmetic on void* is a GNU extension; use a byte pointer so the
    // plane offsets below are well-defined standard C.
    const GLubyte *base = (const GLubyte *)data;
    const GLsizei lumaSize = _videoW * _videoH;

    // Y plane at full resolution; U and V at quarter resolution each.
    [self uploadPlane:base
              texture:_textureY
                 unit:GL_TEXTURE0
                width:_videoW
               height:_videoH];
    [self uploadPlane:base + lumaSize
              texture:_textureU
                 unit:GL_TEXTURE1
                width:_videoW / 2
               height:_videoH / 2];
    [self uploadPlane:base + lumaSize + lumaSize / 4
              texture:_textureV
                 unit:GL_TEXTURE2
                width:_videoW / 2
               height:_videoH / 2];

    [self render];
}

// Uploads one tightly-packed 8-bit plane as a single-channel (GL_RED_EXT)
// texture on the given texture unit, with linear filtering and edge clamping.
- (void)uploadPlane:(const GLubyte *)pixels
            texture:(GLuint)texture
               unit:(GLenum)unit
              width:(GLsizei)width
             height:(GLsizei)height
{
    glActiveTexture(unit);
    glBindTexture(GL_TEXTURE_2D, texture);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RED_EXT, width, height, 0,
                 GL_RED_EXT, GL_UNSIGNED_BYTE, pixels);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
}

// Draws the current Y/U/V plane textures as a full-screen quad and
// presents the render buffer.
- (void)render
{
    [EAGLContext setCurrentContext:_glContext];
    // NOTE(review): viewport is hard-coded to 320x320 — presumably the
    // backing render buffer size; confirm against the layer's dimensions.
    glViewport(0, 0, 320, 320);
    glClearColor(0.0, 0.6, 0.0, 1.0);

    // Full-screen quad in normalized device coordinates, ordered for
    // GL_TRIANGLE_STRIP. (The original source had `const` mangled to
    // `c*****t` by a profanity filter; restored here.)
    static const GLfloat squareVertices[] = {
        -1.0f, -1.0f,
         1.0f, -1.0f,
        -1.0f,  1.0f,
         1.0f,  1.0f,
    };

    // Matching texture coordinates, flipped vertically so the first row
    // of the decoded frame (image top) maps to the top of the quad.
    static const GLfloat coordVertices[] = {
        0.0f, 1.0f,
        1.0f, 1.0f,
        0.0f, 0.0f,
        1.0f, 0.0f,
    };

    glClear(GL_COLOR_BUFFER_BIT);

    glUseProgram(_program);

    // glGetUniformLocation returns GLint (-1 when the name is not an
    // active uniform), so the locations must not be stored as GLuint.
    // NOTE(review): these lookups could be cached once after program
    // link instead of being repeated every frame.
    GLint textureUniformY = glGetUniformLocation(_program, "SamplerY");
    GLint textureUniformU = glGetUniformLocation(_program, "SamplerU");
    GLint textureUniformV = glGetUniformLocation(_program, "SamplerV");

    // Feed the quad's positions and texture coordinates from client memory.
    glVertexAttribPointer(ATTRIB_VERTEX, 2, GL_FLOAT, 0, 0, squareVertices);
    glEnableVertexAttribArray(ATTRIB_VERTEX);

    glVertexAttribPointer(ATTRIB_TEXTURE, 2, GL_FLOAT, 0, 0, coordVertices);
    glEnableVertexAttribArray(ATTRIB_TEXTURE);

    // Bind the Y/U/V plane textures to units 0/1/2 and point the
    // corresponding shader samplers at those units.
    glActiveTexture(GL_TEXTURE0);
    glBindTexture(GL_TEXTURE_2D, _textureY);
    glUniform1i(textureUniformY, 0);

    glActiveTexture(GL_TEXTURE1);
    glBindTexture(GL_TEXTURE_2D, _textureU);
    glUniform1i(textureUniformU, 1);

    glActiveTexture(GL_TEXTURE2);
    glBindTexture(GL_TEXTURE_2D, _textureV);
    glUniform1i(textureUniformV, 2);

    // Two-triangle strip covering the whole viewport.
    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);

    [_glContext presentRenderbuffer:GL_RENDERBUFFER];
}

//Shader.vsh — pass-through vertex shader for the full-screen video quad.
attribute vec4 position; // quad corner, already in clip/NDC space
//uniform float translate;
attribute vec2 TexCoordIn; // per-vertex texture coordinate
varying vec2 TexCoordOut; // interpolated and handed to the fragment shader

void main(void)
{
    gl_Position = position; // no transform needed: vertices are pre-positioned
    TexCoordOut = TexCoordIn;
}

//Shader.fsh — samples the three YUV420p planes and converts to RGB.
varying lowp vec2 TexCoordOut;

// One single-channel texture per plane, bound to texture units 0/1/2.
uniform sampler2D SamplerY;
uniform sampler2D SamplerU;
uniform sampler2D SamplerV;

void main(void)
{
    mediump vec3 yuv;
    lowp vec3 rgb;
    
    // Chroma samples are stored biased by +0.5; recenter U and V around 0.
    yuv.x = texture2D(SamplerY, TexCoordOut).r;
    yuv.y = texture2D(SamplerU, TexCoordOut).r - 0.5;
    yuv.z = texture2D(SamplerV, TexCoordOut).r - 0.5;

    // YUV -> RGB. GLSL mat3 constructors are COLUMN-major, so despite the
    // row-wise layout below this computes:
    //   R = Y               + 1.13983 * V
    //   G = Y - 0.39465 * U - 0.58060 * V
    //   B = Y + 2.03211 * U
    // (classic analog-YUV coefficients; assumes full-range Y — TODO confirm
    // against the decoder's output range)
    rgb = mat3( 1,       1,         1,
                0,       -0.39465,  2.03211,
                1.13983, -0.58060,  0) * yuv;
    
    gl_FragColor = vec4(rgb, 1);

}

PyQt的OpenGL Widget也可以用来播放视频。通过将QVideoWidget嵌入到自定义的QOpenGLWidget中,可以实现视频的加载、播放、转换和保存。然而,有时会出现视频无法覆盖Widget的问题。为了解决这个问题,可以使用OpenGL来绘制视频,这样可以大大降低YUV转RGB的转换开销。在使用OpenGL时,需要考虑三个问题:与界面如何交互(使用QOpenGLWidget)、材质初始化(initializeGL函数)和窗口大小变化时的处理(resizeGL函数)。此外,还可以使用GLSL编写顶点和片元程序来与显卡进行交互。

参考:
1. [PyQt5学习笔记--基于Qt Designer加载、播放和保存视频](https://blog.csdn.net/weixin_43863869/article/details/128026022)
2. [【QT项目:视频播放器——Qt opengl编程】通过shader完成显示yuv](https://blog.csdn.net/weixin_42427696/article/details/127055332)
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值