qt 使用opengl显示yuv实时视频流

30 篇文章 10 订阅
1 篇文章 3 订阅

    我使用的是ffmpeg拉取实时流,并解码为yuv420p。然后用opengl转成rgb32渲染到qt中显示。

用ffmpeg解码实时流和网上一样。参考:https://blog.csdn.net/su_vast/article/details/52214642 进行修改和增加。

    // Initialise FFmpeg (pre-4.0 API) and the network layer needed for RTSP.
    av_register_all();
    avformat_network_init();

    AVFormatContext *pAVFomatContext = avformat_alloc_context();

    int result = avformat_open_input(&pAVFomatContext,"rtsp://127.0.0.1:8554/1",nullptr,nullptr);
    if(result < 0){
        qDebug() << QStringLiteral("打开视频失败");
        return; // on failure avformat_open_input frees the context itself
    }
    result = avformat_find_stream_info(pAVFomatContext,nullptr);
    if(result < 0){
        qDebug() << QStringLiteral("获取视频流信息失败");
        avformat_close_input(&pAVFomatContext); // was leaked on this path
        return;
    }

    // Find the first video stream in the container.
    int videoStreamIndex = -1;
    for(uint i = 0; i < pAVFomatContext->nb_streams; i++){
        if(pAVFomatContext->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO){
            videoStreamIndex = i;
            break;
        }
    }
    if(videoStreamIndex == -1){
        qDebug() << QStringLiteral("获取视频流索引失败");
        avformat_close_input(&pAVFomatContext); // was leaked on this path
        return;
    }

    AVCodecContext *pAVCodecContext = pAVFomatContext->streams[videoStreamIndex]->codec;
    int videoWidth = pAVCodecContext->width;
    int videoHeight = pAVCodecContext->height;

    AVCodec *pAVCodec = avcodec_find_decoder(pAVCodecContext->codec_id);
    if(pAVCodec == nullptr){ // a missing decoder was previously undetected
        qDebug() << QStringLiteral("打开解码器失败");
        avformat_close_input(&pAVFomatContext);
        return;
    }

    // Scaler that guarantees YUV420P output whatever the decoder produces.
    SwsContext* pSwsContext = sws_getContext(videoWidth,videoHeight,pAVCodecContext->pix_fmt,videoWidth,videoHeight,AV_PIX_FMT_YUV420P,SWS_BICUBIC,nullptr,nullptr,nullptr);

    AVFrame *pAVFrame = av_frame_alloc();    // raw decoded frame
    AVFrame* pAVFrameYUV = av_frame_alloc(); // YUV420P frame backed by out_buffer
    int numBytes = avpicture_get_size(AV_PIX_FMT_YUV420P,videoWidth,videoHeight);
    uint8_t *out_buffer = (uint8_t*)av_malloc(numBytes * sizeof(uint8_t));
    avpicture_fill((AVPicture*)pAVFrameYUV,out_buffer,AV_PIX_FMT_YUV420P,videoWidth,videoHeight);

    int y_size = pAVCodecContext->width * pAVCodecContext->height;
    AVPacket* pAVPacket = (AVPacket*) malloc(sizeof(AVPacket));
    av_new_packet(pAVPacket,y_size);
    av_dump_format(pAVFomatContext,0,"rtsp://127.0.0.1:8554/1",0);

    result = avcodec_open2(pAVCodecContext,pAVCodec,nullptr);
    if(result < 0){
        qDebug() << QStringLiteral("打开解码器失败");
        free(pAVPacket);
        av_free(out_buffer);
        av_frame_free(&pAVFrameYUV);
        av_frame_free(&pAVFrame);
        sws_freeContext(pSwsContext);
        avformat_close_input(&pAVFomatContext);
        return;
    }

    qDebug() << QStringLiteral("视频流初始化成功");
    int got_picture = 0;
    while (!isInterruptionRequested()) {
        if(av_read_frame(pAVFomatContext,pAVPacket) < 0){
            break; // stream ended or read error
        }
        // Only feed packets of the selected video stream to the video
        // decoder; the container may interleave audio/data packets too.
        if(pAVPacket->stream_index == videoStreamIndex){
            avcodec_decode_video2(pAVCodecContext,pAVFrame,&got_picture,pAVPacket);
            if(got_picture){
                sws_scale(pSwsContext,(const uint8_t *const*)pAVFrame->data,pAVFrame->linesize,0,videoHeight,pAVFrameYUV->data,pAVFrameYUV->linesize);
                // NOTE(review): the receiver only gets a pointer into
                // out_buffer, which this loop overwrites on the next frame —
                // the slot must consume (or copy) the data before then.
                emit sigYuv((uchar*)out_buffer,videoWidth,videoHeight);
            }
        }
        av_free_packet(pAVPacket);
    }

    // Release everything acquired above (frames, buffer, packet and the
    // scaler were previously leaked).
    free(pAVPacket);
    av_free(out_buffer);
    av_frame_free(&pAVFrameYUV);
    av_frame_free(&pAVFrame);
    sws_freeContext(pSwsContext);
    avcodec_close(pAVCodecContext);
    avformat_close_input(&pAVFomatContext);

 

这样就得到yuv数据了。下面是用opengl渲染到qt的widgets。我使用的是QOpenGLWidget和QOpenGLFunctions

头文件声明如下

#ifndef GLYUVWIDGET_H
#define GLYUVWIDGET_H

#include <QOpenGLWidget>
#include <QOpenGLFunctions>
#include <QOpenGLBuffer>
#include <QTimer>

QT_FORWARD_DECLARE_CLASS(QOpenGLShaderProgram)
QT_FORWARD_DECLARE_CLASS(QOpenGLTexture)

// Widget that renders YUV420P frames via OpenGL: the three planes are
// uploaded as single-channel textures and converted to RGB in a fragment
// shader, so the YUV->RGB work runs on the GPU.
class GLYuvWidget : public QOpenGLWidget, protected QOpenGLFunctions
{
    Q_OBJECT
public:
    GLYuvWidget(QWidget *parent =0);
    ~GLYuvWidget();

public slots:
    void slotShowYuv(uchar *ptr,uint width,uint height); // display one YUV420P frame; ptr must stay valid until the repaint has run
protected:
    void initializeGL() Q_DECL_OVERRIDE;
    void paintGL() Q_DECL_OVERRIDE;

private:
    QOpenGLShaderProgram *program;
    QOpenGLBuffer vbo;
    GLuint textureUniformY,textureUniformU,textureUniformV; // uniform locations of the Y/U/V samplers in the fragment shader
    QOpenGLTexture *textureY = nullptr,*textureU = nullptr,*textureV = nullptr;
    GLuint idY,idU,idV; // texture object IDs created in initializeGL(); 0 on creation failure
    uint videoW,videoH; // frame dimensions; set by slotShowYuv(), unset before the first frame
    uchar *yuvPtr = nullptr; // borrowed pointer to the latest YUV420P frame (not owned)
};

#endif // GLYUVWIDGET_H

下面是cpp中的实现,主要利用了QOpenGLFunctions中集成的OpenGL函数

#include "glyuvwidget.h"
#include <QOpenGLShaderProgram>
#include <QOpenGLTexture>
#include <QDebug>
#define VERTEXIN 0
#define TEXTUREIN 1

// All GL resource creation is deferred to initializeGL(), which Qt calls
// once a valid OpenGL context exists; the constructor only forwards parent.
GLYuvWidget::GLYuvWidget(QWidget *parent):
    QOpenGLWidget(parent)
{
}

// Release GL resources with this widget's context made current.
// Fixes two defects of the original: it dereferenced textureY/U/V, which are
// still nullptr when initializeGL() never ran (widget never shown), and it
// only called destroy() on the heap QOpenGLTexture objects, leaking them
// (they have no QObject parent). ~QOpenGLTexture() calls destroy() itself.
GLYuvWidget::~GLYuvWidget()
{
    makeCurrent();
    vbo.destroy();
    delete textureY;
    textureY = nullptr;
    delete textureU;
    textureU = nullptr;
    delete textureV;
    textureV = nullptr;
    doneCurrent();
}

// Receives one YUV420P frame and schedules a repaint (paintGL uploads it).
// NOTE(review): only the raw pointer is stored — the sender's buffer must
// remain valid and unmodified until the repaint has uploaded it. With the
// decoder running in another thread and reusing its buffer per frame, this
// is a potential data race / tearing source — confirm and consider copying.
void GLYuvWidget::slotShowYuv(uchar *ptr, uint width, uint height)
{
    yuvPtr = ptr;
    videoW = width;
    videoH = height;
    update();
}

// One-time GL setup: vertex buffer for a full-screen quad, the YUV->RGB
// shader program, and the three plane textures. Called by Qt once the
// context is current.
void GLYuvWidget::initializeGL()
{
    initializeOpenGLFunctions();
    glEnable(GL_DEPTH_TEST);

    // Interleaved-by-section layout: first 8 floats are the quad's vertex
    // positions (triangle fan order), next 8 are the matching texture
    // coordinates (flipped vertically so the image is not upside down).
    static const GLfloat vertices[]{
        // vertex coordinates
        -1.0f,-1.0f,
        -1.0f,+1.0f,
        +1.0f,+1.0f,
        +1.0f,-1.0f,
        // texture coordinates
        0.0f,1.0f,
        0.0f,0.0f,
        1.0f,0.0f,
        1.0f,1.0f,
    };


    vbo.create();
    vbo.bind();
    vbo.allocate(vertices,sizeof(vertices));

    // Pass-through vertex shader: forwards position and texture coordinate.
    QOpenGLShader *vshader = new QOpenGLShader(QOpenGLShader::Vertex,this);
    const char *vsrc =
   "attribute vec4 vertexIn; \
    attribute vec2 textureIn; \
    varying vec2 textureOut;  \
    void main(void)           \
    {                         \
        gl_Position = vertexIn; \
        textureOut = textureIn; \
    }";
    vshader->compileSourceCode(vsrc);

    // Fragment shader: samples the three single-channel plane textures and
    // converts YUV (BT.601-style coefficients) to RGB on the GPU.
    QOpenGLShader *fshader = new QOpenGLShader(QOpenGLShader::Fragment,this);
    const char *fsrc = "varying vec2 textureOut; \
    uniform sampler2D tex_y; \
    uniform sampler2D tex_u; \
    uniform sampler2D tex_v; \
    void main(void) \
    { \
        vec3 yuv; \
        vec3 rgb; \
        yuv.x = texture2D(tex_y, textureOut).r; \
        yuv.y = texture2D(tex_u, textureOut).r - 0.5; \
        yuv.z = texture2D(tex_v, textureOut).r - 0.5; \
        rgb = mat3( 1,       1,         1, \
                    0,       -0.39465,  2.03211, \
                    1.13983, -0.58060,  0) * yuv; \
        gl_FragColor = vec4(rgb, 1); \
    }";
    fshader->compileSourceCode(fsrc);

    // NOTE(review): compileSourceCode()/link() return values are ignored;
    // a shader error here would only show up as a black widget — consider
    // checking and logging them.
    program = new QOpenGLShaderProgram(this);
    program->addShader(vshader);
    program->addShader(fshader);
    program->bindAttributeLocation("vertexIn",VERTEXIN);
    program->bindAttributeLocation("textureIn",TEXTUREIN);
    program->link();
    program->bind();
    program->enableAttributeArray(VERTEXIN);
    program->enableAttributeArray(TEXTUREIN);
    // Positions start at byte offset 0, texture coords after the 8 position
    // floats; both are vec2 with a tight 2-float stride.
    program->setAttributeBuffer(VERTEXIN,GL_FLOAT,0,2,2*sizeof(GLfloat));
    program->setAttributeBuffer(TEXTUREIN,GL_FLOAT,8*sizeof(GLfloat),2,2*sizeof(GLfloat));

    // Cache sampler uniform locations and create one texture per plane.
    textureUniformY = program->uniformLocation("tex_y");
    textureUniformU = program->uniformLocation("tex_u");
    textureUniformV = program->uniformLocation("tex_v");
    textureY = new QOpenGLTexture(QOpenGLTexture::Target2D);
    textureU = new QOpenGLTexture(QOpenGLTexture::Target2D);
    textureV = new QOpenGLTexture(QOpenGLTexture::Target2D);
    textureY->create();
    textureU->create();
    textureV->create();
    // Raw GL ids are kept so paintGL() can use glBindTexture directly.
    idY = textureY->textureId();
    idU = textureU->textureId();
    idV = textureV->textureId();
    glClearColor(0.0,0.0,0.0,0.0);
}

void GLYuvWidget::paintGL()
{
//    QMatrix4x4 m;
//    m.perspective(60.0f, 4.0f/3.0f, 0.1f, 100.0f );//透视矩阵随距离的变化,图形跟着变化。屏幕平面中心就是视点(摄像头),需要将图形移向屏幕里面一定距离。
//    m.ortho(-2,+2,-2,+2,-10,10);//近裁剪平面是一个矩形,矩形左下角点三维空间坐标是(left,bottom,-near),右上角点是(right,top,-near)所以此处为负,表示z轴最大为10;
                                //远裁剪平面也是一个矩形,左下角点空间坐标是(left,bottom,-far),右上角点是(right,top,-far)所以此处为正,表示z轴最小为-10;
                                //此时坐标中心还是在屏幕水平面中间,只是前后左右的距离已限制。
    glActiveTexture(GL_TEXTURE0);  //激活纹理单元GL_TEXTURE0,系统里面的
    glBindTexture(GL_TEXTURE_2D,idY); //绑定y分量纹理对象id到激活的纹理单元
    //使用内存中的数据创建真正的y分量纹理数据
    glTexImage2D(GL_TEXTURE_2D,0,GL_RED,videoW,videoH,0,GL_RED,GL_UNSIGNED_BYTE,yuvPtr);
    //https://blog.csdn.net/xipiaoyouzi/article/details/53584798 纹理参数解析
    glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

    glActiveTexture(GL_TEXTURE1); //激活纹理单元GL_TEXTURE1
    glBindTexture(GL_TEXTURE_2D,idU);
    //使用内存中的数据创建真正的u分量纹理数据
    glTexImage2D(GL_TEXTURE_2D,0,GL_RED,videoW >> 1, videoH >> 1,0,GL_RED,GL_UNSIGNED_BYTE,yuvPtr + videoW * videoH);
    glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

    glActiveTexture(GL_TEXTURE2); //激活纹理单元GL_TEXTURE2
    glBindTexture(GL_TEXTURE_2D,idV);
    //使用内存中的数据创建真正的v分量纹理数据
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RED, videoW >> 1, videoH >> 1, 0, GL_RED, GL_UNSIGNED_BYTE, yuvPtr+videoW*videoH*5/4);
    glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
    //指定y纹理要使用新值
    glUniform1i(textureUniformY, 0);
    //指定u纹理要使用新值
    glUniform1i(textureUniformU, 1);
    //指定v纹理要使用新值
    glUniform1i(textureUniformV, 2);
    //使用顶点数组方式绘制图形
    glDrawArrays(GL_TRIANGLE_FAN, 0, 4);
}

本程序中yuv图像到rgb32的转换通过opengl在gpu中完成,利用gpu处理图像,使用widgets界面线程有大量的空闲时间处理其它事情,所以widgets界面相当流畅。

以下是主程序中的使用,我在这里画了一个框在上面

#include <QApplication>
#include "GLYuvWidget/glyuvwidget.h"
#include "Rtsp/rtsp.h"
#include <QLabel>
#include <QHBoxLayout>
#include <QPixmap>
#include <QPainter>

// Wires the RTSP decoder thread to the GL widget and overlays a rounded
// yellow frame (drawn into a transparent pixmap shown in a child label).
int main(int argc, char *argv[])
{
    QApplication a(argc, argv);

    Rtsp rtsp;
    GLYuvWidget w;

    // Transparent label stretched over the video widget for the overlay.
    QLabel label;
    label.setScaledContents(true);
    QHBoxLayout hLay;
    hLay.addWidget(&label);
    w.setLayout(&hLay);

    QPixmap pix(w.size());
    pix.fill(Qt::transparent);
    QPainter p(&pix);
    p.setRenderHint(QPainter::Antialiasing);
    p.setPen(QPen(Qt::yellow,4));
    // drawRoundRect() is deprecated; drawRoundedRect() with 25% relative
    // radii reproduces its default rounding.
    p.drawRoundedRect(w.rect().adjusted(20,20,-20,-20), 25, 25, Qt::RelativeSize);
    p.end(); // finish painting before handing the pixmap to the label
    label.setPixmap(pix);

    // Queued cross-thread connection: decoder emits, GUI thread repaints.
    QObject::connect(&rtsp,SIGNAL(sigYuv(uchar*,uint,uint)),&w,SLOT(slotShowYuv(uchar*,uint,uint)));
    rtsp.start();

    // Stop the decoder thread cleanly when the last window closes.
    QObject::connect(&a,&QApplication::lastWindowClosed,[&]{
        rtsp.requestInterruption();
        rtsp.quit();
        rtsp.wait();
    });

    w.show();

    return a.exec();
}

以下为视频效果

 

 

  • 15
    点赞
  • 137
    收藏
    觉得还不错? 一键收藏
  • 44
    评论
要在Qt使用OpenGL显示YUV图像,你需要将YUV数据转换为RGB格式,并将其作为纹理上传到OpenGL中。以下是一个简单的实现步骤: 1. 加载YUV数据并将其转换为RGB格式。 可以使用libyuv等库来完成YUV到RGB的转换。具体实现可以参考以下代码: ```c++ #include "libyuv.h" void yuv2rgb(unsigned char* src_y, unsigned char* src_u, unsigned char* src_v, unsigned char* dst_rgb, int width, int height) { int u_offset = width * height; int v_offset = u_offset + u_offset / 4; libyuv::I420ToRGB24(src_y, width, src_u, width / 2, src_v, width / 2, dst_rgb, width * 3, width, height); } ``` 2. 创建OpenGL纹理并上传RGB数据。 在Qt中,你可以使用QOpenGLTexture类来创建和绑定纹理。以下是一个简单的实现步骤: ```c++ QOpenGLTexture* texture = new QOpenGLTexture(QOpenGLTexture::Target2D); texture->setSize(width, height); texture->setFormat(QOpenGLTexture::RGBFormat); texture->allocateStorage(); texture->setData(QOpenGLTexture::RGB, QOpenGLTexture::UInt8, rgb_data); ``` 3. 在OpenGL中绘制纹理。 在OpenGL中,你可以使用glTexCoord2f和glVertex2f函数将纹理映射到一个四边形上。以下是一个简单的实现步骤: ```c++ glEnable(GL_TEXTURE_2D); texture->bind(); glBegin(GL_QUADS); glTexCoord2f(0.0, 0.0); glVertex2f(-1.0, -1.0); glTexCoord2f(1.0, 0.0); glVertex2f(1.0, -1.0); glTexCoord2f(1.0, 1.0); glVertex2f(1.0, 1.0); glTexCoord2f(0.0, 1.0); glVertex2f(-1.0, 1.0); glEnd(); texture->release(); glDisable(GL_TEXTURE_2D); ``` 以上是一个简单的实现步骤。当然,实际的实现可能更加复杂,需要根据你的具体需求进行修改和优化。

“相关推荐”对你有帮助么?

  • 非常没帮助
  • 没帮助
  • 一般
  • 有帮助
  • 非常有帮助
提交
评论 44
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值