Qt:使用FFmpeg解码视频流

Debug和Release載入不同動態庫:

//將lib文件放到對應工程目錄下,在.pro添加存放lib的路徑
LIBPATH += $$PWD/Library/Lib
//debug為active時處理LIBS += -L$$LIBPATH -lOpengl32...
CONFIG(debug, debug|release){ LIBS += -L$$LIBPATH -lOpengl32...}
else{ LIBS += -L$$LIBPATH -lOpengl32 -lavcodec...}
//也可寫成兩個單行條件:
CONFIG(debug, debug|release):LIBS += -L$$LIBPATH -lOpengl32...
CONFIG(release, debug|release):LIBS += -L$$LIBPATH -lOpengl32 -lavcodec...
//在對應的debug或release目錄下添加使用的.dll文件

//本例中用到的库文件:
INCLUDEPATH += $$PWD/Dev/include
LIBPATH += $$PWD/Dev/lib
LIBS += -L$$LIBPATH -lavcodec -lavdevice -lavfilter -lavformat -lavutil -lpostproc -lswresample -lswscale

屏蔽QDebug()提示信息:在pro文件中添加:DEFINES+=QT_NO_DEBUG_OUTPUT

FFmpeg:
靜態庫版本:Static,只包含三個應用程式,不依賴動態庫可單獨運行;
動態庫版本:Shared,除應用程式外還包含dll動態庫;
開發者版本:Dev,包含lib文件和.h文件,但不包含dll文件。
使用ffmpeg要注意對應版本(如編譯器32位則使用32位的FFmpeg版本),ffmpeg庫為C文件,導入需在頭文件添加關鍵詞extern "C"。

可將dll拷貝進lib,發佈時再拷貝dll進運行目錄或將dll拷貝進運行目錄。
檢查環境配置是否搭建成功:

unsigned version = avcodec_version();
QString ch = QString::number(version,10);
qDebug()<<"version: "<<version<<"\n配置信息:"<<avcodec_configuration();

注册FFmpeg组件:注册和初始化FFmpeg封装器和网络设备

av_register_all();
avformat_network_init();

QAudioOutput
是Qt中播放音频的类,要使用它,需要在pro中加入 QT += multimedia

程序代码:
(1)定义基础线程类,继承QThread

basicthread.h

#ifndef BASICTHREAD_H
#define BASICTHREAD_H
#include <QThread>
#include <QDebug>
extern "C"
{
    #include <libavcodec/avcodec.h>
    #include <libavformat/avformat.h>
    #include <libswscale/swscale.h>
    #include <libavdevice/avdevice.h>
    #include <libavformat/version.h>
    #include <libavutil/time.h>
    #include <libavutil/mathematics.h>
    #include "libswresample/swresample.h"
}
// Thin QThread wrapper shared by the init and decode worker threads; its
// destructor makes sure the thread has finished before the object dies.
class basicthread:public QThread
{
    Q_OBJECT
public:
    basicthread(QObject * parent = nullptr);
    ~basicthread();
};

#endif // BASICTHREAD_H

basicthread.cpp

#include "basicthread.h"
// Forwards the parent to QThread; no extra state to initialise.
basicthread::basicthread(QObject * parent):QThread(parent)
{
}
basicthread::~basicthread(){
    // Ask the event loop (if any) to stop, then block until run() returns.
    // NOTE(review): the subclasses in this file loop inside run() without
    // calling exec(), so quit() has no effect on them; wait() will hang
    // unless their stop flag was cleared first — confirm shutdown order.
    quit();
    wait();
    qDebug()<<"~BasicThread";
}

(2)初始化视频流线程:

initthread.h

#ifndef INITTHREAD_H
#define INITTHREAD_H
#include "basicthread.h"

// Worker thread that opens the media source, sets up the video/audio
// decoders and the audio resampler, and publishes all contexts through
// the sendcontext() signal.
class initthread : public basicthread
{
    Q_OBJECT
public:
    initthread(QObject*parent=nullptr);
    // NOTE(review): declared but no definition is visible in this file.
    void closeinitthread();
    // Created in run(); also handed out via sendcontext().
    SwrContext*swrContext;
    AVCodecContext*audioCodecContext;
signals:
    // Emitted when initialisation is done: format ctx, video codec ctx,
    // audio codec ctx, resampler ctx.
    void sendcontext(AVFormatContext*,AVCodecContext*,AVCodecContext*,SwrContext*);
protected:
    // Overridden virtual run(): only this function executes in the new
    // thread; the thread's lifetime ends when it returns.
    virtual void run();
};
#endif // INITTHREAD_H

initthread.cpp

#include "initthread.h"

// Forwards the parent to the base thread class; all work happens in run().
initthread::initthread(QObject*parent):basicthread(parent)
{
}

void initthread::run(){
    int videoStreamIndex=-1;int audioStreamIndex=-1;uint i;
      //注册库中所有可用的文件格式和解码器
      av_register_all();
      //初始化网络流格式,使用RTSP网络流时必须先执行
      avformat_network_init();
      //申請一個AVFormatContext結構的内存,並進行簡單初始化
      AVFormatContext* avFormatContext = avformat_alloc_context();

      //打開視頻流
      int ret=avformat_open_input(&avFormatContext,"C:\\Users\\shelly\\Desktop\\guitar.mp4",NULL,NULL);
      if(ret!=0){
          qDebug()<<"打開視頻流失敗";
           avformat_free_context(avFormatContext);
      }
      //讀取流數據包並獲取相關信息
      if(avformat_find_stream_info(avFormatContext,NULL)<0){
           qDebug()<<"獲取視頻流信息失敗";
           avformat_close_input(&avFormatContext);
          }

      for(i=0;i<avFormatContext->nb_streams;i++){
          //確定流格式是否為視頻
          if(avFormatContext->streams[i]->codecpar->codec_type==AVMEDIA_TYPE_VIDEO){
              videoStreamIndex=i; 
          }
          if(videoStreamIndex==-1){
              avformat_close_input(&avFormatContext);
              qDebug()<<"獲取視頻流索引失敗";
          }
          //確定流格式是否為音頻
          if(avFormatContext->streams[i]->codecpar->codec_type==AVMEDIA_TYPE_AUDIO){
              audioStreamIndex=i;
          }
      }
      //视频部分处理
      //根據編碼器ID獲取視頻劉解碼器
      AVCodec*videoCodec = avcodec_find_decoder(avFormatContext->streams[videoStreamIndex]->codecpar->codec_id);
      if(videoCodec==NULL){
          qDebug()<<"尋找视频解碼器失敗";
      }

      //获取视频编解码器上下文信息
      AVCodecContext*videoCodecContext = avcodec_alloc_context3(videoCodec);
      if(videoCodecContext==NULL){
          avformat_close_input(&avFormatContext);
          qDebug()<<"获取上下文信息失败";
      }
      //拷贝视频上下文信息
      int ret1=avcodec_parameters_to_context(videoCodecContext,avFormatContext->streams[videoStreamIndex]->codecpar);
      if(ret1==0){
          qDebug()<<"拷贝视频流成功";
      }
      if(ret1<0){
          qDebug()<<"拷贝视频流失败!";
          avformat_close_input(&avFormatContext);
      }

      //打開對應视频解碼器
      if(avcodec_open2(videoCodecContext,videoCodec,NULL)<0){
         qDebug()<<"打開视频解碼器失敗";
         avformat_close_input(&avFormatContext);
         avcodec_free_context(&videoCodecContext);
      }

      //音频部分处理
      //根據編碼器ID獲取音频劉解碼器
      if(audioStreamIndex==1){
          AVCodec*audioCodec;
          audioCodec= avcodec_find_decoder(avFormatContext->streams[audioStreamIndex]->codecpar->codec_id);
          if(audioCodec!=NULL){
              //获取音频编解码器上下文信息
              /*AVCodecContext**/audioCodecContext = avcodec_alloc_context3(audioCodec);
              if(audioCodecContext==NULL){
                  avformat_close_input(&avFormatContext);
                  qDebug()<<"获取上下文信息失败";
              }

              int ret2=avcodec_parameters_to_context(audioCodecContext,avFormatContext->streams[audioStreamIndex]->codecpar);
              if(ret2==0){
                  qDebug()<<"拷贝音频流成功";
              }
              if(ret2<0){
                  qDebug()<<"拷贝音频流失败!";
                  avformat_close_input(&avFormatContext);
              }

              //打開對應音频解碼器
              if(avcodec_open2(audioCodecContext,audioCodec,NULL)<0){
                 qDebug()<<"打開音频解碼器失敗";
                 avformat_close_input(&avFormatContext);
                 avcodec_free_context(&audioCodecContext);
              }

              //音频流特殊处理部分
              //音频转码配置
              //ffmpeg中刚刚解码出的数据因为排列方式的原因,不能直接播放,必须要转换,首先根据音频解码上下文设置并初始化转换上下文:
              /*SwrContext**/swrContext = swr_alloc_set_opts(
                          nullptr,
                          AV_CH_LAYOUT_STEREO,
                          AV_SAMPLE_FMT_S16,
                          44100,
                          audioCodecContext->channel_layout,
                          audioCodecContext->sample_fmt,
                          audioCodecContext->sample_rate,
                          0, nullptr);
              if(swr_init(swrContext) < 0){
                  avformat_close_input(&avFormatContext);
                  avcodec_free_context(&videoCodecContext);
                  avcodec_free_context(&audioCodecContext);
              }
          }}else{
          qDebug()<<"尋找音频解碼器失敗";}
          
      qDebug()<<"視頻流初始化成功";
      emit sendcontext(avFormatContext,videoCodecContext,audioCodecContext,swrContext);
}

(3)解码线程:

decodethread.h

#ifndef DECODETHREAD_H
#define DECODETHREAD_H
#include "basicthread.h"

// Worker thread that reads packets from the format context, decodes video
// and audio frames, and emits raw buffers for the renderer/audio output.
class DecodeThread : public basicthread
{
    Q_OBJECT
public:
    DecodeThread(QObject*parent=nullptr);
    // Requests run() to leave its loop (cooperative stop).
    void closeThread();
    // Sends one packet to the decoder and returns a caller-owned frame,
    // or nullptr when no frame is available.
    AVFrame * senddecode(AVCodecContext *avCodecContext,AVPacket *avpacket);
    // Repacks a decoded video frame into one contiguous YUV buffer and emits it.
    void decodeframe(AVFrame * avFrame);
    // Resamples a decoded audio frame to packed S16 and emits it.
    void decodeaudio(AVFrame * avFrame);
    // Caches the contexts produced by initthread; call before start().
    void getcontext(AVFormatContext*,AVCodecContext*,AVCodecContext*,SwrContext*);
signals:
    void senddecodemsg(uint8_t*,int,int); // yuv buffer, width, height
    void sendaudiomsg(uint8_t *,int);     // pcm buffer, byte count
private:
    volatile bool isRun;
    // Fix: the .cpp uses `videobuffer` and `audiobuffer`; the original header
    // only declared a single member named `buffer`, which does not compile.
    uint8_t * videobuffer = nullptr;
    uint8_t * audiobuffer = nullptr;
    AVFormatContext*getavformatContext;
    // Fix: renamed from `getavcodecContext` — the .cpp refers to
    // `getvideocontext` throughout.
    AVCodecContext*getvideocontext;
    AVCodecContext*getaudiocontext;
    SwrContext*getswrcontext;
protected:
    virtual void run();
};
#endif // DECODETHREAD_H

decodethread.cpp

#include "decodethread.h"

// Starts in the "running" state; closeThread() flips the flag to stop run().
DecodeThread::DecodeThread(QObject*parent):basicthread(parent)
{
   isRun=true;
}
// Cooperative stop: run()'s loop checks this flag on every iteration.
void DecodeThread::closeThread()
{
    isRun=false;
}

// Caches the contexts created by initthread. Must be called before start();
// run() dereferences these pointers without null checks.
void DecodeThread::getcontext(AVFormatContext*avformatContext,AVCodecContext*videocontext,AVCodecContext*audiocontext,SwrContext*swrcontext){
    getavformatContext=avformatContext;
    getvideocontext=videocontext;
    getaudiocontext=audiocontext;
    getswrcontext=swrcontext;
}

void DecodeThread::run(){
    while(isRun){
        msleep(12);
        AVPacket * avpacket=av_packet_alloc();
        int a=av_read_frame(getavformatContext,avpacket);
        if(a<0){
            av_packet_free(&avpacket);
            continue;
        }
        AVFrame* pavFrame=av_frame_alloc();
        if(avpacket->stream_index==1){
            pavFrame=senddecode(getaudiocontext,avpacket);
            if(pavFrame==0){continue;}
            decodeaudio(pavFrame);
            av_frame_free(&pavFrame);
            av_packet_free(&avpacket);

        }else if(avpacket->stream_index==0){
            pavFrame=senddecode(getvideocontext,avpacket);
            if(pavFrame==0){continue;}
            decodeframe(pavFrame);
            av_frame_free(&pavFrame);
            av_packet_free(&avpacket);
        }
        }
    avcodec_close(getvideocontext);
    avformat_close_input(&getavformatContext);
    }

// Pushes one packet into the decoder and tries to pull one frame back.
// Returns a caller-owned AVFrame, or nullptr when no frame is available
// (send failure, EAGAIN, EOF or a decode error).
AVFrame * DecodeThread::senddecode(AVCodecContext * avCodecContext,AVPacket * avpacket){
    if(avcodec_send_packet(avCodecContext,avpacket)<0){
        // The packet is consumed on this failure path (callers skip freeing it).
        av_packet_free(&avpacket);
        return nullptr;
    }
    AVFrame* avFrame=av_frame_alloc();
    int nret=avcodec_receive_frame(avCodecContext,avFrame);
    if(nret<0){
        // EAGAIN / AVERROR_EOF / decode error: no frame this time.
        // Fix: the original freed avFrame here and then still returned the
        // dangling pointer, which the caller went on to dereference.
        av_frame_free(&avFrame);
        return nullptr;
    }
    return avFrame;
}

// Repacks a decoded video frame into one tightly-packed buffer laid out as
// [Y plane][U plane][V plane] and emits it for rendering.
// Assumes the frame is YUV420P (three planes, chroma at half resolution) —
// TODO confirm; other pixel formats would need conversion first.
void DecodeThread::decodeframe(AVFrame * avFrame){
    int nwidth=avFrame->width;
    int nheight=avFrame->height;
    // YUV420P needs width*height*3/2 bytes in total.
    // NOTE(review): a fresh buffer is new[]'d per frame and ownership passes
    // to whatever slot is connected to senddecodemsg; the receiver must
    // delete[] it or every frame leaks — confirm buffer ownership.
    videobuffer = new uint8_t[static_cast<long>(1.5 * nwidth * nheight)];
    long long nlen=0,i;
    // Copy row by row because linesize[0] may include alignment padding.
    for(i=0;i<nheight;i++){
        memcpy(videobuffer+nlen,avFrame->data[0]+i*avFrame->linesize[0],static_cast<size_t>(nwidth));
        nlen+=nwidth;
    }
    // U and V planes: half width, half height.
    for(i=0;i<nheight/2;i++){
        memcpy(videobuffer+nlen,avFrame->data[1]+i*avFrame->linesize[1],static_cast<size_t>(nwidth/2));
        nlen+=nwidth/2;
    }
    for(i=0;i<nheight/2;i++){
        memcpy(videobuffer+nlen,avFrame->data[2]+i*avFrame->linesize[2],static_cast<size_t>(nwidth/2));
        nlen+=nwidth/2;
    }
    emit senddecodemsg(videobuffer,nwidth,nheight);
}

// Resamples one decoded audio frame to packed 16-bit samples at 44.1 kHz
// (the format initthread configured on the SwrContext) and emits the PCM.
void DecodeThread::decodeaudio(AVFrame * avFrame){

    // Output sample count after rate conversion, including samples still
    // buffered inside the resampler; rounded up so the buffer never overflows.
    int64_t out_nb_samples = av_rescale_rnd(
                swr_get_delay(getswrcontext, avFrame->sample_rate) + avFrame->nb_samples,
                44100, avFrame->sample_rate, AV_ROUND_UP);
    // Resample.
    // NOTE(review): the size is computed from the *source* channel count while
    // the resampler outputs stereo — verify this for mono sources (the article
    // itself reports mono playback problems).
    int size = av_samples_get_buffer_size(0, avFrame->channels, static_cast<int>(out_nb_samples), AV_SAMPLE_FMT_S16, 1);
    // NOTE(review): a fresh buffer is new[]'d per frame; ownership passes to
    // the slot connected to sendaudiomsg, which must delete[] it.
    audiobuffer=new uint8_t[size];
    int len = swr_convert(
                getswrcontext,&audiobuffer,
                static_cast<int>(out_nb_samples),
                reinterpret_cast<const uint8_t **>(&avFrame->data),
                static_cast<int>(avFrame->nb_samples));
    if (len <0){
        delete [] audiobuffer;
        return;
    }
    // len may be smaller than out_nb_samples; recompute the actual byte count.
    int size1 = av_samples_get_buffer_size(0, avFrame->channels, len, AV_SAMPLE_FMT_S16, 1);
    emit sendaudiomsg(audiobuffer,size1);
}

(4)显示窗口:

openglwidget.h

#ifndef OPENGLWIDGET_H
#define OPENGLWIDGET_H
#include "decodethread.h"
#include "initthread.h"
#include <QOpenGLWidget>
#include <QOpenGLFunctions>
#include <QOpenGLBuffer>
#include <QOpenGLShader>
#include <QOpenGLTexture>
#include <QAudioFormat>
#include <QAudioDeviceInfo>
#include <QAudioOutput>
#include <QFileInfo>
#include <QTimer>
class openglwidget : public QOpenGLWidget,protected QOpenGLFunctions
{
    Q_OBJECT
public:
    openglwidget(QWidget *parent = nullptr);
    ~openglwidget();
    void startdecode();
    void startinit();
    void initaudioplayer();
protected:
    void initializeGL() Q_DECL_OVERRIDE;
    void paintGL() Q_DECL_OVERRIDE;
private:
    QOpenGLShaderProgram *program;
    QOpenGLBuffer vbo;
    GLuint textureUniformY,textureUniformU,textureUniformV; //opengl中y、u、v分量位置
    QOpenGLTexture *textureY = nullptr,*textureU = nullptr,*textureV = nullptr;
    GLuint idY,idU,idV; //自己创建的纹理对象ID,创建错误返回0
    uint videoW,videoH;
    uint8_t *yuvPtr;
    initthread*init;
    DecodeThread *decode;
    AVFormatContext*formatc;
    AVCodecContext*codecc;
    SwrContext*swrc;
    QIODevice *outputDevice = nullptr;
    QAudioOutput *qOutput = nullptr;

private slots:
    void updateFrame(uint8_t *pSrc,int,int); //显示一帧Yuv图像
    void get(AVFormatContext*,AVCodecContext*);
    void getaudiomsg(uint8_t*,int);
};

#endif // OPENGLWIDGET_H

openglwidget.cpp

#include "openglwidget.h"
#define VERTEXIN 0
#define TEXTUREIN 1
// Kicks off stream initialisation (worker thread) and prepares the PCM
// output device; decoding starts once the init thread reports back.
openglwidget::openglwidget(QWidget *parent): QOpenGLWidget(parent)
{
    startinit();
    initaudioplayer();
}
openglwidget::~openglwidget()
{
    // Frees the last frame buffer received from the decode thread.
    // NOTE(review): as declared in the header, yuvPtr is never initialised to
    // nullptr, so this delete[] is undefined behaviour if no frame ever
    // arrived — confirm the member is value-initialised.
    delete [] yuvPtr;
}
void openglwidget::startinit()
{
    init=new initthread;
    connect(init,SIGNAL(sendcontext(AVFormatContext*,AVCodecContext*)),this,SLOT(get(AVFormatContext*,AVCodecContext*)));
    init->start();qDebug()<<"初始化开始";
}
// Slot for initthread::sendcontext(): caches the demuxer/decoder/resampler
// contexts and starts the decode thread.
void openglwidget::get(AVFormatContext*avf,AVCodecContext*video,AVCodecContext*audio,SwrContext*swr)
{
    formatc=avf;
    videocodecc=video;
    audiocodecc=audio;
    swrc=swr;
    startdecode();
}
// Creates the decode thread, connects its video/audio output signals to the
// rendering and audio slots, hands over the cached contexts and starts it.
void openglwidget::startdecode()
{
    decode=new DecodeThread;
    connect(decode,SIGNAL(senddecodemsg(uint8_t*,int,int)),this,SLOT(updateFrame(uint8_t*,int,int)));
    connect(decode,SIGNAL(sendaudiomsg(uint8_t*,int)),this,SLOT(getaudiomsg(uint8_t*,int)));
    decode->getcontext(formatc,videocodecc,audiocodecc,swrc);
    decode->start();qDebug()<<"解码开始";
}
// Opens a PCM audio output matching what the decode pipeline produces:
// 44.1 kHz, 16-bit signed little-endian, stereo. On success, outputDevice
// is the QIODevice that getaudiomsg() writes samples into.
void openglwidget::initaudioplayer()
{
    QAudioFormat outformat;
    outformat.setSampleRate(44100);
    outformat.setChannelCount(2);
    outformat.setSampleSize(16);
    outformat.setCodec("audio/pcm");
    outformat.setSampleType(QAudioFormat::SignedInt);
    outformat.setByteOrder(QAudioFormat::LittleEndian);

    // Bail out silently when there is no output device at all.
    QAudioDeviceInfo outDeviceInfo(QAudioDeviceInfo::defaultOutputDevice());
    if(outDeviceInfo.isNull()){
        return;
    }
    // Fall back to the closest format the device supports.
    if (!outDeviceInfo.isFormatSupported(outformat)){
        outformat = outDeviceInfo.nearestFormat(outformat);
    }
    if (qOutput == nullptr){
        qOutput = new QAudioOutput(outformat);
    }
    // start() returns the push-mode QIODevice to write PCM into.
    outputDevice = qOutput->start();
    if (outputDevice == nullptr){
        return;
    }
}
// Slot for DecodeThread::sendaudiomsg(): feeds one resampled PCM chunk into
// the audio output device and releases the buffer.
void openglwidget::getaudiomsg(uint8_t*audiomsg,int size)
{
    // Fix: the original `qDebug()<<audiomsg;` treated the raw PCM buffer as a
    // NUL-terminated C string, reading past its end and printing garbage.
    if(outputDevice){
        // write() copies the data, so the buffer can be freed right after.
        outputDevice->write(reinterpret_cast<const char *>(audiomsg), size);
    }
    // The buffer was new[]'d in DecodeThread::decodeaudio and ownership is
    // transferred through the signal; free it here to stop the per-chunk leak.
    delete [] audiomsg;
}
// Slot for DecodeThread::senddecodemsg(): stores the latest YUV buffer and
// schedules a repaint (paintGL uploads it to the textures).
void openglwidget::updateFrame(uint8_t*pSrc,int width,int height)
{
    // NOTE(review): the previously stored buffer is never freed here, so one
    // frame-sized allocation leaks per frame; only the last buffer is freed
    // in the destructor — confirm intended ownership.
    yuvPtr=pSrc;
    videoW = width;
    videoH = height;
    update();
}
// One-time OpenGL setup: a full-screen quad VBO, a YUV->RGB shader program,
// and three 2D textures that paintGL fills with the Y/U/V planes.
void openglwidget::initializeGL(){
    initializeOpenGLFunctions();
    glEnable(GL_DEPTH_TEST);
    static const GLfloat vertices[]{
            //vertex coordinates (quad in triangle-fan order)
            -1.0f,-1.0f,
            -1.0f,+1.0f,
            +1.0f,+1.0f,
            +1.0f,-1.0f,
            //texture coordinates
            0.0f,1.0f,
            0.0f,0.0f,
            1.0f,0.0f,
            1.0f,1.0f,
        };
    vbo.create();
    vbo.bind();
    vbo.allocate(vertices,sizeof(vertices));
    // Pass-through vertex shader: forwards position and texture coordinate.
    QOpenGLShader *vshader = new QOpenGLShader(QOpenGLShader::Vertex,this);
    const char *vsrc =
    "attribute vec4 vertexIn; \
    attribute vec2 textureIn; \
    varying vec2 textureOut;  \
    void main(void)           \
    {                         \
        gl_Position = vertexIn; \
        textureOut = textureIn; \
    }";
    vshader->compileSourceCode(vsrc);
    // Fragment shader: samples the three planes and converts YUV to RGB.
    QOpenGLShader *fshader = new QOpenGLShader(QOpenGLShader::Fragment,this);
    const char *fsrc =
    "varying vec2 textureOut; \
    uniform sampler2D tex_y; \
    uniform sampler2D tex_u; \
    uniform sampler2D tex_v; \
    void main(void) \
    { \
        vec3 yuv; \
        vec3 rgb; \
        yuv.x = texture2D(tex_y, textureOut).r; \
        yuv.y = texture2D(tex_u, textureOut).r - 0.5; \
        yuv.z = texture2D(tex_v, textureOut).r - 0.5; \
        rgb = mat3( 1,       1,         1, \
                    0,       -0.39465,  2.03211, \
                    1.13983, -0.58060,  0) * yuv; \
        gl_FragColor = vec4(rgb, 1); \
    }";
    fshader->compileSourceCode(fsrc);

    program = new QOpenGLShaderProgram(this);
    program->addShader(vshader);
    program->addShader(fshader);
    // Bind attribute slots before linking so the buffer layout below matches.
    program->bindAttributeLocation("vertexIn",VERTEXIN);
    program->bindAttributeLocation("textureIn",TEXTUREIN);
    program->link();
    program->bind();
    program->enableAttributeArray(VERTEXIN);
    program->enableAttributeArray(TEXTUREIN);
    // Positions occupy the first 8 floats of the VBO, texcoords the next 8.
    program->setAttributeBuffer(VERTEXIN,GL_FLOAT,0,2,2*sizeof(GLfloat));
    program->setAttributeBuffer(TEXTUREIN,GL_FLOAT,8*sizeof(GLfloat),2,2*sizeof(GLfloat));

    // Sampler uniform locations for the three planes.
    textureUniformY = program->uniformLocation("tex_y");
    textureUniformU = program->uniformLocation("tex_u");
    textureUniformV = program->uniformLocation("tex_v");
    textureY = new QOpenGLTexture(QOpenGLTexture::Target2D);
    textureU = new QOpenGLTexture(QOpenGLTexture::Target2D);
    textureV = new QOpenGLTexture(QOpenGLTexture::Target2D);
    textureY->create();
    textureU->create();
    textureV->create();
    // Texture object ids used by paintGL (0 would mean creation failed).
    idY = textureY->textureId();
    idU = textureU->textureId();
    idV = textureV->textureId();
    glClearColor(0.99f,0.99f,0.99f,0.0f);
}

// Uploads the three planes of the current YUV frame into the textures and
// draws the full-screen quad through the YUV->RGB shader.
// NOTE(review): yuvPtr may still be null on the very first paint; confirm
// the member is null-initialised (glTexImage2D with a null pointer merely
// allocates storage, which is harmless).
void openglwidget::paintGL(){

    glViewport(0, 0, width(), height());

    // --- Y plane -> texture unit 0 ---
    glActiveTexture(GL_TEXTURE0);
    glBindTexture(GL_TEXTURE_2D,idY);
    // Upload the Y plane (single-channel, full resolution).
    glTexImage2D(GL_TEXTURE_2D,0,GL_RED,videoW,videoH,0,GL_RED,GL_UNSIGNED_BYTE,yuvPtr);
    //https://blog.csdn.net/xipiaoyouzi/article/details/53584798 texture parameter explanation
    glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

    // --- U plane -> texture unit 1 ---
    glActiveTexture(GL_TEXTURE1);
    // Fix: the original called glBindTexture(GL_TEXTURE1, idU). GL_TEXTURE1
    // is a texture *unit*, not a bind target, so the call raised
    // GL_INVALID_ENUM and the U texture was never bound to unit 1.
    glBindTexture(GL_TEXTURE_2D,idU);
    // Upload the U plane (half width, half height).
    glTexImage2D(GL_TEXTURE_2D,0,GL_RED,videoW >> 1, videoH >> 1,0,GL_RED,GL_UNSIGNED_BYTE,yuvPtr + videoW * videoH);
    glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

    // --- V plane -> texture unit 2 ---
    glActiveTexture(GL_TEXTURE2);
    glBindTexture(GL_TEXTURE_2D,idV);
    // Upload the V plane; it starts 1.25*W*H bytes into the packed buffer.
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RED, videoW >> 1, videoH >> 1, 0, GL_RED, GL_UNSIGNED_BYTE, yuvPtr+videoW*videoH*5/4);
    glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

    // Point each sampler uniform at its texture unit.
    glUniform1i(textureUniformY, 0);
    glUniform1i(textureUniformU, 1);
    glUniform1i(textureUniformV, 2);
    // Draw the quad as a triangle fan.
    glDrawArrays(GL_TRIANGLE_FAN, 0, 4);
}

main.cpp

#include "openglwidget.h"
#include <QApplication>
#include <QPixmap>
#include <QLabel>
#include <QHBoxLayout>
int main(int argc, char *argv[])
{
    QApplication a(argc, argv);
    openglwidget openglw;

    QLabel label;
    label.setScaledContents(true);
    QHBoxLayout hlay;
    hlay.addWidget(&label);
    openglw.setLayout(&hlay);

    QPixmap pix(openglw.size());
    pix.fill(Qt::transparent);
    label.setPixmap(pix);

    openglw.show();
    return a.exec();
}

**结果显示:**使用电脑录屏的无音频/有音频视频均可播放,但会失真,播放单通道视频需要调播放器及转换参数。(暂时还不会结束线程的操作t-t)
在这里插入图片描述

  • 0
    点赞
  • 17
    收藏
    觉得还不错? 一键收藏
  • 3
    评论

“相关推荐”对你有帮助么?

  • 非常没帮助
  • 没帮助
  • 一般
  • 有帮助
  • 非常有帮助
提交
评论 3
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值