Qt tips: a simple video player with FFmpeg + OpenGL

References: mahuifa - QMDemo, yundiantech - VideoPlayer, ChatGPT 3.5
Special thanks: mahuifa, yundiantech

Preface

This post implements a simple video player: the UI is built with Qt, video demuxing and decoding are handled by FFmpeg, and rendering uses Qt's OpenGL module.
My experience here is limited. I had never touched audio/video or OpenGL before, so most of this code follows the work of mahuifa and yundiantech, with the rest pieced together from around the web and patched up until it worked.
Unfortunately, time was also limited, so only the video module is implemented; there is no audio module yet.

Note:
The code in this post assumes the data FFmpeg decodes is YUV420p in the BT.709 color space. Other data types may render incorrectly and would require changes to the relevant parts; the exact conversion used is shown below.
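
For reference, the fragment shader later in this post applies the following (full-range) BT.709 conversion, with the chroma samples shifted by 0.5 first:

    R = Y + 1.5748 * (V - 0.5)
    G = Y - 0.187324 * (U - 0.5) - 0.468124 * (V - 0.5)
    B = Y + 1.8556 * (U - 0.5)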

Configuration

  • Qt 5.12.12
  • FFmpeg 6.1.1
  • Visual Studio 2017 Release x64
  • Windows SDK 10.17763.0

Implementation

Approach
  • Threading: two worker threads are used. One reads (demuxes) video packets and the other decodes them, while the main thread renders the frames.
  • Playback timing: FFmpeg keeps its own clock, and every frame carries a corresponding timestamp. Record the playback start time, and render each frame once its timestamp is due (see the pacing sketch below).
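
    A minimal sketch of the pacing idea (the locals videoFrameRate, framesRemain and renderFrame are hypothetical placeholders; av_gettime and Sleep are the same calls the full source uses):

    // Pacing sketch: render each frame when its presentation time arrives.
    // For a constant-frame-rate video, frameIndex * frameTimeMs approximates
    // the frame's presentation time relative to the start of playback.
    int64_t startMs = av_gettime() / 1000;            // playback start, in ms
    double frameTimeMs = 1000.0 / videoFrameRate;     // duration of one frame

    for (int frameIndex = 1; framesRemain(); ++frameIndex)
    {
        double presentMs = frameIndex * frameTimeMs;  // when this frame is due
        while (av_gettime() / 1000 - startMs < presentMs)
            Sleep(1);                                 // not due yet, keep waiting
        renderFrame(frameIndex);                      // hypothetical render call
    }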
Classes
  • MCVideoPlayer: reads and decodes video frames; all FFmpeg-related work lives in this class
  • MCVideoFrame: holds the decoded YUV data
  • MCVideoWidget: the OpenGL rendering widget
  • MCVideoPlayTest: the video player test application
Parsing video with FFmpeg
  • Opening the video
    From opening the input, to finding the video stream, to finally opening the decoder, this sets up video parsing step by step.

    bool MCVideoPlayer::openVideo()
    {
        if (m_videoFilePath.isEmpty())
        {
            printLog("the video file path is empty", true);
            return false;
        }
    
        // Allocate the format I/O context
        m_pFormatContext = avformat_alloc_context();
        if (nullptr == m_pFormatContext)
        {
            printLog("open video failed to avformat_alloc_context", true);
            closeVideo();
            return false;
        }
    
        // Convert the path to UTF-8 before taking a const char*; otherwise avformat_open_input can fail on non-ASCII (e.g. Chinese) paths
        QByteArray videoFilePathUtf8 = m_videoFilePath.toUtf8();
        const char* videoFilePath = videoFilePathUtf8.constData();
        printLog(QString("video file path %1").arg(m_videoFilePath));
    
        // Open the video file
        int result = avformat_open_input(&m_pFormatContext, videoFilePath, nullptr, nullptr);
        if (result < 0)
        {
            printLog(QString("open video failed to avformat_open_input, error code %1").arg(result), true);
            closeVideo();
            return false;
        }
    
        // Read the stream information
        result = avformat_find_stream_info(m_pFormatContext, nullptr);
        if (result < 0)
        {
            printLog(QString("open video failed to avformat_find_stream_info, error code %1").arg(result), true);
            closeVideo();
            return false;
        }
    
        // Find the index of the video stream
        m_videoIndex = -1;
        for (unsigned int index = 0; index < m_pFormatContext->nb_streams; ++index)
        {
            if (AVMEDIA_TYPE_VIDEO == m_pFormatContext->streams[index]->codecpar->codec_type)
            {
                m_videoIndex = index;
                break;
            }
        }
        if (m_videoIndex < 0)
        {
            printLog(QString("open video failed to find video index"), true);
            closeVideo();
            return false;
        }
    
        // Total duration: AVFormatContext::duration is in AV_TIME_BASE (microsecond) units, so dividing by 1000 yields milliseconds
        emit sigDurationChanged(m_pFormatContext->duration / m_msecondTimeBase);
        printLog(QString("video duration %1").arg(m_pFormatContext->duration / m_msecondTimeBase));
    
        // Fetch the video stream by index
        m_pVideoStream = m_pFormatContext->streams[m_videoIndex];
    
        // Video frame rate
        m_videoFrameRate = 0.0;
        if (0 != m_pVideoStream->avg_frame_rate.den)
        {
            m_videoFrameRate = m_pVideoStream->avg_frame_rate.num * 1.0 / m_pVideoStream->avg_frame_rate.den;
        }
        printLog(QString("video frame rate %1").arg(m_videoFrameRate));
    
        m_oneFrameTime = 0;
        if (0 < m_videoFrameRate)
        {
            m_oneFrameTime = m_msecondTimeBase / m_videoFrameRate;
        }
    
        // Total number of frames
        int totalFrames = m_pVideoStream->nb_frames;
        printLog(QString("video frame number %1").arg(totalFrames));
    
        // Find the video decoder
        const AVCodec* pCodec = avcodec_find_decoder(m_pVideoStream->codecpar->codec_id);
        if (nullptr == pCodec)
        {
            printLog(QString("open video failed to avcodec_find_decoder, error code %1").arg(result), true);
            closeVideo();
            return false;
        }
    
        // Allocate the decoder context and fill it with default values
        m_pCodecContext = avcodec_alloc_context3(pCodec);
        if (nullptr == m_pCodecContext)
        {
            printLog(QString("open video failed to avcodec_alloc_context3, error code %1").arg(result), true);
            closeVideo();
            return false;
        }
    
        // Copy the stream's codecpar into the decoder context
        result = avcodec_parameters_to_context(m_pCodecContext, m_pVideoStream->codecpar);
        if (result < 0)
        {
            printLog(QString("open video failed to avcodec_parameters_to_context, error code %1").arg(result), true);
            closeVideo();
            return false;
        }
    
        // Use 8 decoding threads; thread_count must be set before avcodec_open2 to take effect
        m_pCodecContext->thread_count = 8;

        // Open the decoder
        result = avcodec_open2(m_pCodecContext, nullptr, nullptr);
        if (result < 0)
        {
            printLog(QString("open video failed to avcodec_open2, error code %1").arg(result), true);
            closeVideo();
            return false;
        }
    
        printLog(QString("video open finished"));
    
        return true;
    }
    
  • Reading video frames
    Allocate an AVPacket, read one frame with av_read_frame, and append the packet to the packet list.

    AVPacket* pPacket = av_packet_alloc();
    if (av_read_frame(m_pFormatContext, pPacket) < 0)
    {
        if (!m_isReadFinished)
        {
            // Reading finished
            m_isReadFinished = true;
            printLog(QString("video read finished %1").arg(m_readFrameCount));
        }
    
        if (m_isStopped)
        {
            // Reading finished and playback stopped: exit the read loop
            av_packet_free(&pPacket);
            break;
        }
    
        // avcodec_receive_frame returns EAGAIN for the first few packets because the decoder needs enough data before it can emit frames
        // So once reading is done but decoding is not, send empty (flush) packets to the decoder, otherwise the last few frames are never produced
        addPacket(pPacket);
    }
    else
    {
        // Append packets from the video stream to the packet list
        if (m_videoIndex == pPacket->stream_index)
        {
            addPacket(pPacket);
            ++m_readFrameCount;
        }
        else
        {
            av_packet_free(&pPacket);
        }
    }
    
  • Decoding video frames
    Take a packet from the list, send it to the decoder with avcodec_send_packet, then receive the decoded frame with avcodec_receive_frame. (A fuller sketch with return-code handling follows the snippet.)

    // Take a packet from the list
    AVPacket* pPacket = m_listVideoPackets.first();
    m_listVideoPackets.removeFirst();
    
    // Send the packet to the decoder
    avcodec_send_packet(m_pCodecContext, pPacket);
    
    // Receive a decoded frame from the decoder
    avcodec_receive_frame(m_pCodecContext, pFrame);
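
    The snippet above drops the return values for brevity. A fuller sketch (using the same member names as the full source below) that handles EAGAIN and EOF explicitly:

    // Send one packet, then drain every frame the decoder can produce from it.
    int ret = avcodec_send_packet(m_pCodecContext, pPacket);
    if (ret < 0 && ret != AVERROR(EAGAIN))
    {
        av_packet_free(&pPacket);   // fatal send error, drop the packet
        return;
    }

    while (true)
    {
        ret = avcodec_receive_frame(m_pCodecContext, pFrame);
        if (ret == AVERROR(EAGAIN))
            break;                  // decoder needs more input, read the next packet
        if (ret == AVERROR_EOF)
            break;                  // decoder fully drained (after a flush packet)
        if (ret < 0)
            break;                  // decoding error
        // ... hand pFrame to the conversion / rendering stage here ...
    }
    av_packet_free(&pPacket);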
    
  • Processing decoded data
    Convert the pixel format and scale the image with libswscale.

    // Decoded frame and YUV output frame
    AVFrame* pFrame = nullptr;
    AVFrame* pFrameYUV = nullptr;
    
    // Buffer for the converted YUV data
    uint8_t* pYUVBuffer = nullptr;
    // Pixel format conversion context
    SwsContext* pSwsContext = nullptr;
    
    // Allocate the output frame
    pFrameYUV = av_frame_alloc();

    // 1-byte alignment gives a buffer size closest to the actual image size
    int yuvSize = av_image_get_buffer_size(AV_PIX_FMT_YUV420P, videoWidth, videoHeight, 1);
    unsigned int byteCount = static_cast<unsigned int>(yuvSize);
    pYUVBuffer = static_cast<uint8_t*>(av_malloc(byteCount * sizeof(uint8_t)));
    av_image_fill_arrays(pFrameYUV->data, pFrameYUV->linesize, pYUVBuffer, AV_PIX_FMT_YUV420P, videoWidth, videoHeight, 1);
    
    // Convert the decoded frame to AV_PIX_FMT_YUV420P
    pSwsContext = sws_getContext(videoWidth, videoHeight, (AVPixelFormat)pFrame->format,
                                 videoWidth, videoHeight, AV_PIX_FMT_YUV420P,
                                 SWS_BICUBIC, nullptr, nullptr, nullptr);
    
  • Seeking
    With milliseconds as the unit, pass in the target time, seek with av_seek_frame, then resume reading and decoding from the new position.

    // Rescale the target time (ms) into the stream's time_base; seekTarget is the seek timestamp in stream units
    AVRational rational = { 1, m_msecondTimeBase };
    int seekTarget = av_rescale_q(m_seekTime, rational, m_pVideoStream->time_base);
    av_seek_frame(m_pFormatContext, m_videoIndex, seekTarget, AVSEEK_FLAG_BACKWARD);
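
    As a quick sanity check of the rescale: for a stream with time_base = 1/90000 (a common 90 kHz clock), seeking to m_seekTime = 5000 ms gives

    seekTarget = av_rescale_q(5000, {1, 1000}, {1, 90000})
               = 5000 * 90000 / 1000
               = 450000   // 5 seconds expressed in 90 kHz ticks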
    

Full Source

Shared library project MCWidget
  • MCWidget_Global.h

    #pragma once
    
    #include <QtCore/qglobal.h>
    
    #ifndef BUILD_STATIC
    # if defined(MCWIDGET_LIB)
    #  define MCWIDGET_EXPORT Q_DECL_EXPORT
    # else
    #  define MCWIDGET_EXPORT Q_DECL_IMPORT
    # endif
    #else
    # define MCWIDGET_EXPORT
    #endif
    
    #define MCWIDGET_BEGIN_NAMESPACE namespace MCWidget {
    #define MCWIDGET_END_NAMESPACE };
    
    #define USE_MCWIDGET_NAMESPACE using namespace MCWidget;
    
    
  • MCVideoPlayer.h

    #pragma once
    
    #include "MCWidget_Global.h"
    #include "MCVideoFrame.h"
    
    #include <QObject>
    #include <QMutex>
    
    extern "C"
    {
        #include "libavcodec/avcodec.h"
        #include "libavformat/avformat.h"
        #include "libavutil/avutil.h"
        #include <libavutil/time.h>
        #include "libswscale/swscale.h"
        #include "libavutil/imgutils.h"
    }
    
    MCWIDGET_BEGIN_NAMESPACE
    
    class MCWIDGET_EXPORT MCVideoPlayer : public QObject
    {
        Q_OBJECT
    
    public:
        enum VideoState
        {
            StoppedState,
            PlayingState
        };
    
        MCVideoPlayer(QObject* parent);
        ~MCVideoPlayer();
    
    // Set the video file path
        void setVideoFilePath(const QString& filePath);
    
    // Start playback
    void playVideo();
    // Decode and render a single frame (used while stopped)
    void playOneFrame(int frameTime);
    // Stop playback
    void stopVideo();
    // Seek
        void seekVideo(int seekTime);
    
    // Get the playback state
        VideoState getState();
    
        int getCurrentTime();
    
    // Enable or disable logging
        void setLogEnabled(bool enabled);
    
    signals:
    // Emitted when the total duration changes
    void sigDurationChanged(int msecond);
    // Emitted when a new frame is ready
    void sigFrameChanged(std::shared_ptr<MCVideoFrame> frame);
    // Emitted when the playback state changes
    void sigStateChanged(VideoState state);
    
    private:
    // Open the video
    bool openVideo();
    // Close the video
    void closeVideo();
    // Read (demux) the video
    bool readVideo();
    // Decode the video
    void decodeVideo();

    // Append a packet to the packet list
    void addPacket(AVPacket* pPacket);
    // Clear the packet list
    void clearPacketList();

    // Print a log message
    void printLog(const QString& log, bool isError = false);
    
    private:
    QString             m_videoFilePath             { "" };             // Video file path

    VideoState          m_state                     { StoppedState };   // Playback state

    AVFormatContext*    m_pFormatContext            { nullptr };        // Format I/O context
    AVStream*           m_pVideoStream              { nullptr };        // Video stream
    AVCodecContext*     m_pCodecContext             { nullptr };        // Decoder context

    bool                m_isStopped                 { false };          // Stop flag

    bool                m_isReadFinished            { false };          // Reading finished
    bool                m_isReadThreadFinished      { true };           // Read thread finished

    bool                m_isDecodeFinished          { false };          // Decoding finished
    bool                m_isDecodeThreadFinished    { true };           // Decode thread finished

    bool                m_seekRequestFlag           { false };          // Seek requested
    bool                m_seekFrameFlag             { false };          // Seek in progress

    bool                m_decodeOneFrameRequsetFlag { false };          // Decode-one-frame requested
    bool                m_decodeOneFrameFlag        { false };          // Decode-one-frame finished

    bool                m_isLogEnabled              { false };          // Logging enabled

    int                 m_videoIndex                { -1 };             // Video stream index
    int                 m_oneFrameTime              { 0 };              // Duration of one frame (ms)
    double              m_videoFrameRate            { 0.0 };            // Video frame rate

    int                 m_videoStartTime            { 0 };              // Playback start time

    int                 m_currentFrameIndex         { 0 };              // Current frame index

    int                 m_currentTime               { 0 };              // Current playback time

    int                 m_seekTime                  { 0 };              // Seek target time (ms)
    int                 m_seekVideoStartTime        { 0 };              // Playback start time after a seek

    int                 m_readFrameCount            { 0 };              // Number of packets read

    QList<AVPacket*>    m_listVideoPackets;                             // Video packet list

    QMutex              m_mutex;                                        // Mutex protecting the packet list

    const int           m_msecondTimeBase           { 1000 };           // Unit conversion factor, milliseconds

    const char*         m_flushFlagChar             { "FLUSH_FLAG" };   // Marker for decoder buffer flush packets

    const int           m_videoReadLimitNumber      { 500 };            // Maximum queued packets before reading pauses
    };
    
    MCWIDGET_END_NAMESPACE
    
    
  • MCVideoPlayer.cpp

    #include "MCVideoPlayer.h"
    USE_MCWIDGET_NAMESPACE
    
    #include <QDebug>
    #include <QByteArray>
    
    #include <thread>
    
    #include <Windows.h>
    
    MCVideoPlayer::MCVideoPlayer(QObject* parent)
       : QObject(parent)
    {
       qRegisterMetaType<std::shared_ptr<MCVideoFrame>>("std::shared_ptr<MCVideoFrame>");
       qRegisterMetaType<MCWidget::MCVideoPlayer::VideoState>("VideoState");
    
       setLogEnabled(true);
    }
    
    MCVideoPlayer::~MCVideoPlayer()
    {
       stopVideo();
    }
    
    void MCVideoPlayer::setVideoFilePath(const QString& filePath)
    {
       m_videoFilePath = filePath;
    }
    
    void MCVideoPlayer::playVideo()
    {
       if (m_isReadThreadFinished)
       {
           std::thread(&MCVideoPlayer::readVideo, this).detach();
       }
       else
       {
           QString state = QString("video play failed, state %1, read thread state %2").arg(m_state).arg(m_isReadThreadFinished);
           printLog(state, true);
       }
    }
    
    void MCVideoPlayer::playOneFrame(int frameTime)
    {
       // When seeking while stopped, decode only a single frame
       // Make sure the read thread has exited first, otherwise the race can crash
       if (m_isReadThreadFinished && !m_seekRequestFlag)
       {
           m_seekRequestFlag = true;
           m_seekTime = frameTime;
    
           m_decodeOneFrameRequsetFlag = true;
           playVideo();
       }
    }
    
    void MCVideoPlayer::stopVideo()
    {
       m_isStopped = true;
    
       while (!m_isReadThreadFinished)
       {
           Sleep(1);
       }
    }
    
    void MCVideoPlayer::seekVideo(int seekTime)
    {
       if (!m_seekRequestFlag)
       {
           m_seekRequestFlag = true;
           m_seekTime = seekTime;
       }
    }
    
    MCVideoPlayer::VideoState MCVideoPlayer::getState()
    {
       return m_state;
    }
    
    int MCVideoPlayer::getCurrentTime()
    {
       // While a seek is pending, report the seek target time
       if (m_seekRequestFlag || m_seekFrameFlag)
       {
           return m_seekTime;
       }
       return m_currentTime;
    }
    
    void MCVideoPlayer::setLogEnabled(bool enabled)
    {
       m_isLogEnabled = enabled;
    }
    
    bool MCVideoPlayer::openVideo()
    {
       if (m_videoFilePath.isEmpty())
       {
           printLog("the video file path is empty", true);
           return false;
       }
    
       // Allocate the format I/O context
       m_pFormatContext = avformat_alloc_context();
       if (nullptr == m_pFormatContext)
       {
           printLog("open video failed to avformat_alloc_context", true);
           closeVideo();
           return false;
       }
    
       // Convert the path to UTF-8 before taking a const char*; otherwise avformat_open_input can fail on non-ASCII (e.g. Chinese) paths
       QByteArray videoFilePathUtf8 = m_videoFilePath.toUtf8();
       const char* videoFilePath = videoFilePathUtf8.constData();
       printLog(QString("video file path %1").arg(m_videoFilePath));
    
       // Open the video file
       int result = avformat_open_input(&m_pFormatContext, videoFilePath, nullptr, nullptr);
       if (result < 0)
       {
           printLog(QString("open video failed to avformat_open_input, error code %1").arg(result), true);
           closeVideo();
           return false;
       }
    
       // Read the stream information
       result = avformat_find_stream_info(m_pFormatContext, nullptr);
       if (result < 0)
       {
           printLog(QString("open video failed to avformat_find_stream_info, error code %1").arg(result), true);
           closeVideo();
           return false;
       }
    
       // Find the index of the video stream
       m_videoIndex = -1;
       for (unsigned int index = 0; index < m_pFormatContext->nb_streams; ++index)
       {
           if (AVMEDIA_TYPE_VIDEO == m_pFormatContext->streams[index]->codecpar->codec_type)
           {
               m_videoIndex = index;
               break;
           }
       }
       if (m_videoIndex < 0)
       {
           printLog(QString("open video failed to find video index"), true);
           closeVideo();
           return false;
       }
    
       // Total duration: AVFormatContext::duration is in AV_TIME_BASE (microsecond) units, so dividing by 1000 yields milliseconds
       emit sigDurationChanged(m_pFormatContext->duration / m_msecondTimeBase);
       printLog(QString("video duration %1").arg(m_pFormatContext->duration / m_msecondTimeBase));
    
       // Fetch the video stream by index
       m_pVideoStream = m_pFormatContext->streams[m_videoIndex];
    
       // Video frame rate
       m_videoFrameRate = 0.0;
       if (0 != m_pVideoStream->avg_frame_rate.den)
       {
           m_videoFrameRate = m_pVideoStream->avg_frame_rate.num * 1.0 / m_pVideoStream->avg_frame_rate.den;
       }
       printLog(QString("video frame rate %1").arg(m_videoFrameRate));
    
       m_oneFrameTime = 0;
       if (0 < m_videoFrameRate)
       {
           m_oneFrameTime = m_msecondTimeBase / m_videoFrameRate;
       }
    
       // Total number of frames
       int totalFrames = m_pVideoStream->nb_frames;
       printLog(QString("video frame number %1").arg(totalFrames));
    
       // Find the video decoder
       const AVCodec* pCodec = avcodec_find_decoder(m_pVideoStream->codecpar->codec_id);
       if (nullptr == pCodec)
       {
           printLog(QString("open video failed to avcodec_find_decoder, error code %1").arg(result), true);
           closeVideo();
           return false;
       }
    
       // Allocate the decoder context and fill it with default values
       m_pCodecContext = avcodec_alloc_context3(pCodec);
       if (nullptr == m_pCodecContext)
       {
           printLog(QString("open video failed to avcodec_alloc_context3, error code %1").arg(result), true);
           closeVideo();
           return false;
       }
    
       // Copy the stream's codecpar into the decoder context
       result = avcodec_parameters_to_context(m_pCodecContext, m_pVideoStream->codecpar);
       if (result < 0)
       {
           printLog(QString("open video failed to avcodec_parameters_to_context, error code %1").arg(result), true);
           closeVideo();
           return false;
       }
    
       // Use 8 decoding threads; thread_count must be set before avcodec_open2 to take effect
       m_pCodecContext->thread_count = 8;

       // Open the decoder
       result = avcodec_open2(m_pCodecContext, nullptr, nullptr);
       if (result < 0)
       {
           printLog(QString("open video failed to avcodec_open2, error code %1").arg(result), true);
           closeVideo();
           return false;
       }
    
       printLog(QString("video open finished"));
    
       return true;
    }
    
    void MCVideoPlayer::closeVideo()
    {
       if (nullptr != m_pCodecContext)
       {
           // avcodec_free_context closes the codec, frees the context, and nulls the pointer
           avcodec_free_context(&m_pCodecContext);
       }

       // avformat_close_input frees the format context (including its streams) and nulls the pointer
       avformat_close_input(&m_pFormatContext);

       m_pVideoStream = nullptr;
    }
    
    bool MCVideoPlayer::readVideo()
    {
       if (!openVideo())
       {
           return false;
       }
    
       // Reset the stop flag
       m_isStopped = false;

       // Reset reading state
       m_isReadFinished = false;
       m_isReadThreadFinished = false;
       m_readFrameCount = 0;
    
       // With the decoder open, spawn a new thread to decode the packets
       std::thread(&MCVideoPlayer::decodeVideo, this).detach();
    
       if (!m_decodeOneFrameRequsetFlag)
       {
           // Switch to the playing state
           m_state = VideoState::PlayingState;
           emit sigStateChanged(m_state);
       }
    
       // Record the playback start time
       m_videoStartTime = av_gettime() / m_msecondTimeBase;
       m_seekVideoStartTime = m_videoStartTime;
    
       // Packet reading loop
       while (1)
       {
           if (m_isStopped)
           {
               // Playback stopped, exit the read loop
               break;
           }
    
           if (m_seekRequestFlag)
           {
               // Rescale the target time (ms) into the stream's time_base; seekTarget is the seek timestamp in stream units
               AVRational rational = { 1, m_msecondTimeBase };
               int seekTarget = av_rescale_q(m_seekTime, rational, m_pVideoStream->time_base);
               if (av_seek_frame(m_pFormatContext, m_videoIndex, seekTarget, AVSEEK_FLAG_BACKWARD) < 0)
               {
                   printLog("read video failed to av_seek_frame", true);
               }
               else
               {
                   printLog(QString("read video seek %1").arg(m_seekTime));
    
                   // On a seek, enqueue a sentinel AVPacket whose data is set to "FLUSH_FLAG" as a marker
                   AVPacket* pPacket = av_packet_alloc();
                   av_new_packet(pPacket, strlen(m_flushFlagChar) + 1);
                   strcpy((char*)pPacket->data, m_flushFlagChar);
                   clearPacketList();
                   addPacket(pPacket);
    
                   // Record the adjusted start time for the seek
                   m_seekVideoStartTime = av_gettime() / m_msecondTimeBase - m_seekTime;
    
                   // Reset the read packet counter
                   m_readFrameCount = 0;
    
                   // Mark the seek as in progress
                   m_seekFrameFlag = true;
    
                   // Mark reading as not finished again
                   m_isReadFinished = false;
               }
               m_seekRequestFlag = false;
           }
    
           // If too many packets are queued, pause reading and let the decoder catch up, to bound memory use
           if (m_videoReadLimitNumber < m_listVideoPackets.size())
           {
               Sleep(1);
               continue;
           }
    
           AVPacket* pPacket = av_packet_alloc();
           if (av_read_frame(m_pFormatContext, pPacket) < 0)
           {
               if (!m_isReadFinished)
               {
                   // Reading finished
                   m_isReadFinished = true;
                   printLog(QString("video read finished %1").arg(m_readFrameCount));
               }
    
               if (m_isStopped)
               {
                   // Reading finished and playback stopped: exit the read loop
                   av_packet_free(&pPacket);
                   break;
               }
    
               // avcodec_receive_frame returns EAGAIN for the first few packets because the decoder needs enough data before it can emit frames
               // So once reading is done but decoding is not, send empty (flush) packets to the decoder, otherwise the last few frames are never produced
               addPacket(pPacket);
           }
           else
           {
               // Append packets from the video stream to the packet list
               if (m_videoIndex == pPacket->stream_index)
               {
                   addPacket(pPacket);
                   ++m_readFrameCount;
               }
               else
               {
                   av_packet_free(&pPacket);
               }
           }
       }
    
       // Clear remaining packets
       clearPacketList();

       // Wait for the decode thread to finish before tearing down
       while (!m_isDecodeThreadFinished)
       {
           Sleep(1);
       }
    
       // Stop playback
       if (VideoState::StoppedState != m_state)
       {
           // Switch to the stopped state
           m_state = VideoState::StoppedState;
           emit sigStateChanged(m_state);
       }
    
       // Close the video and free the contexts
       closeVideo();

       printLog("video read thread exit");

       // Read thread is done
       m_isReadFinished = false;
       m_isReadThreadFinished = true;
       m_readFrameCount = 0;
    
       return true;
    }
    
    void MCVideoPlayer::decodeVideo()
    {
       // Reset decode thread state
       m_isDecodeFinished = false;
       m_isDecodeThreadFinished = false;
       m_currentFrameIndex = 0;
    
       // Video width and height
       int videoWidth = 0;
       int videoHeight = 0;

       // Current frame index
       int currentFrameIndex = 0;
       // Current frame time
       double currentFrameTime = 0;
    
       // Decoded frame and YUV output frame
       AVFrame* pFrame = nullptr;
       AVFrame* pFrameYUV = nullptr;

       // Buffer for the converted YUV data
       uint8_t* pYUVBuffer = nullptr;
       // Pixel format conversion context
       SwsContext* pSwsContext = nullptr;
    
       // Allocate an AVFrame with default field values
       pFrame = av_frame_alloc();
    
       // Packet decoding loop
       while (1)
       {
           if (m_isStopped)
           {
               // Playback stopped, exit the decode loop
               break;
           }
    
           if (m_decodeOneFrameFlag)
           {
               // The requested single frame has been decoded, exit the loop
               printLog("video decode one frame finished");
               m_decodeOneFrameFlag = false;
               break;
           }
    
           // Take a packet from the list
           m_mutex.lock();
           if (m_listVideoPackets.isEmpty())
           {
               m_mutex.unlock();
    
               // Packet list is empty but reading is not finished: wait for more data
               Sleep(1);
               continue;
           }
           AVPacket* pPacket = m_listVideoPackets.first();
           m_listVideoPackets.removeFirst();
           m_mutex.unlock();
    
           // On the seek sentinel packet, flush the decoder buffers and continue with the next packet
           if (nullptr != pPacket->data && 0 == strcmp((char*)pPacket->data, m_flushFlagChar))
           {
               printLog("decode video flush buffer");
               avcodec_flush_buffers(m_pCodecContext);
               av_packet_free(&pPacket);

               // Update the playback start time
               m_videoStartTime = m_seekVideoStartTime;

               // After the seek, av_read_frame restarts from the first frame here, so reset m_currentFrameIndex to 0
               m_currentFrameIndex = 0;
    
               continue;
           }
    
           // Send the packet to the decoder
           avcodec_send_packet(m_pCodecContext, pPacket);
    
           if (avcodec_receive_frame(m_pCodecContext, pFrame) < 0)
           {
               if (m_isReadFinished && nullptr == pPacket->data)
               {
                   // Packet list drained and reading finished: decoding is complete, exit the loop
                   printLog("video decode finished");
                   av_packet_free(&pPacket);
                   break;
               }

               // No frame available yet (e.g. EAGAIN): release the packet and continue
               av_packet_free(&pPacket);
               continue;
           }
           ++m_currentFrameIndex;
    
           // Compute the presentation time of this frame
           currentFrameTime = m_currentFrameIndex * m_oneFrameTime;
    
           if (m_seekFrameFlag)
           {
               // While seeking, m_seekVideoStartTime != m_videoStartTime means frames from before the seek are still arriving
               // Discard those pre-seek frames and continue
               if (m_seekVideoStartTime != m_videoStartTime)
               {
                   av_packet_free(&pPacket);
                   continue;
               }
    
               // These are frames decoded after the seek flushed the decoder buffers
               // Keep skipping until the seek target time is reached
               if (currentFrameTime < m_seekTime)
               {
                   // Seek not complete yet, target time not reached
                   av_packet_free(&pPacket);
                   continue;
               }
               else
               {
                   // Seek complete, clear the flag
                   m_seekFrameFlag = false;
               }
           }
    
           // Wait loop: hold this frame until its presentation time arrives
           while (1)
           {
               // If a seek fires while waiting, bail out of the wait loop
               if (m_seekFrameFlag)
               {
                   break;
               }
    
               // Current playback time relative to the start time
               m_currentTime = av_gettime() / m_msecondTimeBase - m_videoStartTime;
               if (currentFrameTime <= m_currentTime)
               {
                   // Single-frame mode: this is the requested frame, update the flags
                   if (m_decodeOneFrameRequsetFlag)
                   {
                       m_decodeOneFrameRequsetFlag = false;
                       m_decodeOneFrameFlag = true;
                   }
    
                   break;
               }
    
               // Not due yet, keep waiting
               Sleep(1);
               continue;
           }
    
           // If a seek fired during the wait, drop this frame and continue decoding
           if (m_seekFrameFlag)
           {
               av_packet_free(&pPacket);
               continue;
           }
    
           if (pFrame->width != videoWidth || pFrame->height != videoHeight)
           {
               videoWidth = pFrame->width;
               videoHeight = pFrame->height;
    
               if (nullptr != pFrameYUV)
               {
                   av_frame_free(&pFrameYUV);
               }
    
               if (nullptr != pYUVBuffer)
               {
                   av_free(pYUVBuffer);
               }
    
               if (nullptr != pSwsContext)
               {
                   sws_freeContext(pSwsContext);
               }
    
               pFrameYUV = av_frame_alloc();
    
               // 1-byte alignment gives a buffer size closest to the actual image size
               int yuvSize = av_image_get_buffer_size(AV_PIX_FMT_YUV420P, videoWidth, videoHeight, 1);
               unsigned int byteCount = static_cast<unsigned int>(yuvSize);
               pYUVBuffer = static_cast<uint8_t*>(av_malloc(byteCount * sizeof(uint8_t)));
               av_image_fill_arrays(pFrameYUV->data, pFrameYUV->linesize, pYUVBuffer, AV_PIX_FMT_YUV420P, videoWidth, videoHeight, 1);
    
               // Convert decoded frames to AV_PIX_FMT_YUV420P
               pSwsContext = sws_getContext(videoWidth, videoHeight, (AVPixelFormat)pFrame->format,
                                            videoWidth, videoHeight, AV_PIX_FMT_YUV420P,
                                            SWS_BICUBIC, nullptr, nullptr, nullptr);
           }
    
           sws_scale(pSwsContext, pFrame->data, pFrame->linesize, 0, videoHeight, pFrameYUV->data, pFrameYUV->linesize);
    
           std::shared_ptr<MCVideoFrame> pVideoFrame = std::make_shared<MCVideoFrame>();
           pVideoFrame.get()->setYUVData(pYUVBuffer, videoWidth, videoHeight);
    
           // If a seek fired just before emitting the frame, drop it and continue decoding
           if (m_seekFrameFlag)
           {
               av_packet_free(&pPacket);
               continue;
           }
    
           emit sigFrameChanged(pVideoFrame);
    
           av_packet_free(&pPacket);
       }
       av_frame_free(&pFrame);
    
       if (nullptr != pFrameYUV)
       {
           av_frame_free(&pFrameYUV);
       }
    
       if (nullptr != pYUVBuffer)
       {
           av_free(pYUVBuffer);
       }
    
       if (nullptr != pSwsContext)
       {
           sws_freeContext(pSwsContext);
       }
    
       if (!m_isStopped)
       {
           m_isStopped = true;
       }
       printLog("video decode thread exit");
    
       // Decode thread is done
       m_isDecodeFinished = true;
       m_isDecodeThreadFinished = true;
    }
    
    void MCVideoPlayer::addPacket(AVPacket* pPacket)
    {
       m_mutex.lock();
       m_listVideoPackets << pPacket;
       m_mutex.unlock();
       return;
    }
    
    void MCVideoPlayer::clearPacketList()
    {
       m_mutex.lock();
       for (AVPacket* pPacket : m_listVideoPackets)
       {
           // av_packet_free releases the packet data and the AVPacket struct itself
           av_packet_free(&pPacket);
       }
       m_listVideoPackets.clear();
       m_mutex.unlock();
    }
    
    void MCVideoPlayer::printLog(const QString& log, bool isError)
    {
       if (m_isLogEnabled)
       {
           if (isError)
           {
               qCritical() << QString("The video player error: %1").arg(log);
           }
           else
           {
               qInfo() << QString("The video player: %1").arg(log);
           }
       }
    }
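
    Note that the packet list above is drained by polling with Sleep(1). As a design alternative, a blocking queue built on QWaitCondition avoids the busy-wait entirely; a minimal sketch (a hypothetical class, not part of this project):

    #include <QList>
    #include <QMutex>
    #include <QWaitCondition>

    struct AVPacket;  // only pointers are stored here

    class PacketQueue
    {
    public:
        void push(AVPacket* pPacket)
        {
            QMutexLocker locker(&m_mutex);
            m_packets << pPacket;
            m_notEmpty.wakeOne();              // wake one waiting consumer
        }

        AVPacket* pop()
        {
            QMutexLocker locker(&m_mutex);
            while (m_packets.isEmpty())
                m_notEmpty.wait(&m_mutex);     // atomically unlocks, sleeps, relocks
            return m_packets.takeFirst();
        }

    private:
        QList<AVPacket*> m_packets;
        QMutex           m_mutex;
        QWaitCondition   m_notEmpty;
    };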
    
  • MCVideoWidget.h

    #pragma once
    
    #include "MCWidget_Global.h"
    #include "MCVideoFrame.h"
    
    #include <QOpenGLWidget>
    #include <QOpenGLShaderProgram>
    #include <QOpenGLFunctions>
    #include <QOpenGLTexture>
    
    MCWIDGET_BEGIN_NAMESPACE
    
    class MCWIDGET_EXPORT MCVideoWidget : public QOpenGLWidget, public QOpenGLFunctions
    {
        Q_OBJECT
    
    public:
        explicit MCVideoWidget(QWidget* parent = nullptr, Qt::WindowFlags f = Qt::WindowFlags());
        ~MCVideoWidget();
    
        void updateFrame(std::shared_ptr<MCWidget::MCVideoFrame> frame);
    
    private:
        void updateGLVertex(int windowWidth, int windowHeight);
    
    protected:
        void initializeGL() override;
        void resizeGL(int width, int height) override;
        void paintGL() override;
    
    private:
        QOpenGLShader*          m_pVertexShader;        // Vertex shader
        QOpenGLShader*          m_pFragmentShader;      // Fragment shader
        QOpenGLShaderProgram*   m_pShaderProgram;       // Shader program
    
        GLfloat*                m_pVertexVertices;      // Vertex coordinates
    
        GLuint                  m_textureIdY;           // Y texture object id
        GLuint                  m_textureIdU;           // U texture object id
        GLuint                  m_textureIdV;           // V texture object id
        GLuint                  m_textureUniformY;      // Y sampler uniform location
        GLuint                  m_textureUniformU;      // U sampler uniform location
        GLuint                  m_textureUniformV;      // V sampler uniform location
    
        int                     m_videoWidth;           // Video width
        int                     m_videoHeight;          // Video height
    
        std::shared_ptr<MCWidget::MCVideoFrame> m_pVideoFrame;
    };
    
    MCWIDGET_END_NAMESPACE
    
    
  • MCVideoWidget.cpp

    #include "MCVideoWidget.h"
    USE_MCWIDGET_NAMESPACE
    
    // Vertex coordinates
    static const GLfloat s_vertexVertices[] =
    {
       -1.0f, -1.0f,
        1.0f, -1.0f,
       -1.0f,  1.0f,
        1.0f,  1.0f,
    };
    
    // Texture coordinates
    static const GLfloat s_textureVertices[] =
    {
        0.0f, 1.0f,
        1.0f, 1.0f,
        0.0f, 0.0f,
        1.0f, 0.0f,
    };
    
    MCVideoWidget::MCVideoWidget(QWidget* parent, Qt::WindowFlags f)
        : QOpenGLWidget(parent, f)
    {
        m_pVertexShader = nullptr;
        m_pFragmentShader = nullptr;
        m_pShaderProgram = nullptr;
    
        m_pVertexVertices = new GLfloat[8];
    
        m_textureIdY = 0;
        m_textureIdU = 0;
        m_textureIdV = 0;
    
        m_textureUniformY = 0;
        m_textureUniformU = 0;
        m_textureUniformV = 0;
    
        m_videoHeight = 0;
        m_videoWidth = 0;
    
        m_pVideoFrame = nullptr;
    }
    
    MCVideoWidget::~MCVideoWidget()
    {
        // Make the OpenGL context current before releasing GL resources
        makeCurrent();
    
        delete m_pShaderProgram;
    
        delete[] m_pVertexVertices;
    
        // Done cleaning up, release the context
        doneCurrent();
    
        m_pVideoFrame = nullptr;
    }
    
    void MCVideoWidget::updateFrame(std::shared_ptr<MCWidget::MCVideoFrame> frame)
    {
        int width = frame.get()->getWidth();
        int height = frame.get()->getHeight();
        if (width <= 0 || height <= 0)
        {
            return;
        }
    
        m_pVideoFrame = frame;
    
        if (m_videoWidth != width || m_videoHeight != height)
        {
            m_videoWidth = width;
            m_videoHeight = height;
    
            updateGLVertex(this->width(), this->height());
        }
    
        update();
    }
    
    void MCVideoWidget::initializeGL()
    {
        initializeOpenGLFunctions();
    
        glEnable(GL_DEPTH_TEST);
    
        // Vertex shader
        m_pVertexShader = new QOpenGLShader(QOpenGLShader::Vertex, this);
        const char* vsrc =
            "attribute vec4 vertexIn;           \n"
            "attribute vec2 textureIn;          \n"
            "varying vec2 textureOut;           \n"
            "void main(void)                    \n"
            "{                                  \n"
            "   gl_Position = vertexIn;         \n"
            "   textureOut = textureIn;         \n"
            "}";
        m_pVertexShader->compileSourceCode(vsrc);
    
        // Fragment shader (applies the BT.709 YUV -> RGB matrix)
        m_pFragmentShader = new QOpenGLShader(QOpenGLShader::Fragment, this);
        const char* fsrc =
            "#ifdef GL_ES                                           \n"
            "precision mediump float;                               \n"
            "#endif                                                 \n"
            "varying vec2 textureOut;                               \n"
            "uniform sampler2D tex_y;                               \n"
            "uniform sampler2D tex_u;                               \n"
            "uniform sampler2D tex_v;                               \n"
            "void main(void)                                        \n"
            "{                                                      \n"
            "   vec3 yuv;                                           \n"
            "   vec3 rgb;                                           \n"
            "   yuv.x = texture2D(tex_y, textureOut).r;             \n"
            "   yuv.y = texture2D(tex_u, textureOut).r - 0.5;       \n"
            "   yuv.z = texture2D(tex_v, textureOut).r - 0.5;       \n"
            "   rgb = mat3( 1,      1,         1,                   \
                            0,      -0.187324, 1.8556,              \
                            1.5748, -0.468124, 0 ) * yuv;           \n"
            "   gl_FragColor = vec4(rgb, 1);                        \n"
            "}";
        m_pFragmentShader->compileSourceCode(fsrc);
    
        // Create the shader program container
        m_pShaderProgram = new QOpenGLShaderProgram();
        // Add the vertex shader to the program
        m_pShaderProgram->addShader(m_pVertexShader);
        // Add the fragment shader to the program
        m_pShaderProgram->addShader(m_pFragmentShader);
        // Bind the vertex attribute location
        m_pShaderProgram->bindAttributeLocation("vertexIn", 3);
        // Bind the texture-coordinate attribute location
        m_pShaderProgram->bindAttributeLocation("textureIn", 4);
        // Link the shader program
        m_pShaderProgram->link();
        // Bind (activate) the program
        m_pShaderProgram->bind();
    
        // Look up the locations of the tex_y, tex_u, tex_v uniforms
        m_textureUniformY = m_pShaderProgram->uniformLocation("tex_y");
        m_textureUniformU = m_pShaderProgram->uniformLocation("tex_u");
        m_textureUniformV = m_pShaderProgram->uniformLocation("tex_v");
    
        // Generate the Y texture
        glGenTextures(1, &m_textureIdY);
        // Generate the U texture
        glGenTextures(1, &m_textureIdU);
        // Generate the V texture
        glGenTextures(1, &m_textureIdV);
    
        // Y texture parameters
        glBindTexture(GL_TEXTURE_2D, m_textureIdY);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
    
        // U texture parameters
        glBindTexture(GL_TEXTURE_2D, m_textureIdU);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
    
        // V texture parameters
        glBindTexture(GL_TEXTURE_2D, m_textureIdV);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
    
        // Unpack the YUV data with 1-byte alignment
        glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
    
        // Clear color
        glClearColor(0.0, 0.0, 0.0, 0.0);
    
        updateGLVertex(this->width(), this->height());
    }
    
    void MCVideoWidget::updateGLVertex(int windowWidth, int windowHeight)
    {
        if (m_videoWidth <= 0 || m_videoHeight <= 0)
        {
            memcpy(m_pVertexVertices, s_vertexVertices, sizeof(s_vertexVertices));
    
            // Set the vertex array pointer and format
            glVertexAttribPointer(3, 2, GL_FLOAT, 0, 0, m_pVertexVertices);
            // Set the texture-coordinate array pointer and format
            glVertexAttribPointer(4, 2, GL_FLOAT, 0, 0, s_textureVertices);
            // Enable the vertex attribute
            glEnableVertexAttribArray(3);
            // Enable the texture-coordinate attribute
            glEnableVertexAttribArray(4);
        }
        else
        {
            // Scale the video to the window width first
            int width = windowWidth;
            int height = m_videoHeight * width / m_videoWidth;
            int x = this->width() - width;
            int y = this->height() - height;
            x /= 2;
            y /= 2;
    
            // If it does not fit vertically, scale to the window height instead
            if (y < 0)
            {
                height = windowHeight;
                width = m_videoWidth * height / m_videoHeight;
                x = this->width() - width;
                y = this->height() - height;
                x /= 2;
                y /= 2;
            }
    
            float index_x = x * 1.0 / windowWidth * 2.0 - 1.0;
            float index_x_1 = index_x * -1.0;
            float index_x_2 = index_x;
    
            float index_y = y * 1.0 / windowHeight * 2.0 - 1.0;
            float index_y_1 = index_y * -1.0;
            float index_y_2 = index_y;
    
            const GLfloat vertexVertices[] =
            {
                index_x_2, index_y_2,
                index_x_1, index_y_2,
                index_x_2, index_y_1,
                index_x_1, index_y_1,
            };
            memcpy(m_pVertexVertices, vertexVertices, sizeof(vertexVertices));
    
            // Set the vertex array pointer and format
            glVertexAttribPointer(3, 2, GL_FLOAT, 0, 0, m_pVertexVertices);
            // Set the texture-coordinate array pointer and format
            glVertexAttribPointer(4, 2, GL_FLOAT, 0, 0, s_textureVertices);
            // Enable the vertex attribute
            glEnableVertexAttribArray(3);
            // Enable the texture-coordinate attribute
            glEnableVertexAttribArray(4);
        }
    }
    
    void MCVideoWidget::resizeGL(int width, int height)
    {
        if (height == 0)
        {
            height = 1;
        }
    
        // Set the viewport
        glViewport(0, 0, width, height);
    
        updateGLVertex(width, height);
    }
    
    void MCVideoWidget::paintGL()
    {
        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    
        if (nullptr != m_pVideoFrame.get())
        {
            uint8_t* pYUVData = m_pVideoFrame.get()->getYUVData();
    
            if (nullptr != pYUVData)
            {
                m_pShaderProgram->bind();
    
                // Upload the Y plane
                glActiveTexture(GL_TEXTURE0);
                glBindTexture(GL_TEXTURE_2D, m_textureIdY);
                glTexImage2D(GL_TEXTURE_2D, 0, GL_RED, m_videoWidth, m_videoHeight, 0, GL_RED, GL_UNSIGNED_BYTE, pYUVData);
    
                // Upload the U plane
                glActiveTexture(GL_TEXTURE1);
                glBindTexture(GL_TEXTURE_2D, m_textureIdU);
                glTexImage2D(GL_TEXTURE_2D, 0, GL_RED, m_videoWidth / 2, m_videoHeight / 2, 0, GL_RED, GL_UNSIGNED_BYTE, 
                             (char*)pYUVData + m_videoWidth * m_videoHeight);
                
                // Upload the V plane
                glActiveTexture(GL_TEXTURE2);
                glBindTexture(GL_TEXTURE_2D, m_textureIdV);
                glTexImage2D(GL_TEXTURE_2D, 0, GL_RED, m_videoWidth / 2, m_videoHeight / 2, 0, GL_RED, GL_UNSIGNED_BYTE, 
                             (char*)pYUVData + m_videoWidth * m_videoHeight * 5 / 4);
    
                // Bind the Y texture to texture unit 0
                glUniform1i(m_textureUniformY, 0);
                // Bind the U texture to texture unit 1
                glUniform1i(m_textureUniformU, 1);
                // Bind the V texture to texture unit 2
                glUniform1i(m_textureUniformV, 2);
    
                // Draw using the vertex arrays
                glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
    
                m_pShaderProgram->release();
            }
        }
    }
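
    For reference, the three glTexImage2D calls in paintGL walk a planar YUV420p buffer; the offsets in the code above correspond to this layout (illustrative pointer math, not project code):

    // YUV420p layout for a width x height frame, width * height * 3 / 2 bytes total:
    // [ Y: width*height ][ U: (width/2)*(height/2) ][ V: (width/2)*(height/2) ]
    uint8_t* yPlane = pYUVData;                           // offset 0
    uint8_t* uPlane = pYUVData + width * height;          // offset w*h
    uint8_t* vPlane = pYUVData + width * height * 5 / 4;  // offset w*h + (w*h)/4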
    
  • MCVideoFrame.h

    #pragma once
    
    #include "MCWidget_Global.h"
    #include <QObject>
    
    MCWIDGET_BEGIN_NAMESPACE
    
    class MCWIDGET_EXPORT MCVideoFrame : public QObject
    {
        Q_OBJECT
    
    public:
        MCVideoFrame(QObject* parent = nullptr);
        ~MCVideoFrame();
    
        void setYUVData(const uint8_t* data, int width, int height);
    
        uint8_t* getYUVData();
    
        int getWidth();
        int getHeight();
    
    private:
        void freeData();
    
    private:
        uint8_t*        m_YUVData       { nullptr };
    
        int             m_width         { 0 };
        int             m_height        { 0 };
    };
    
    MCWIDGET_END_NAMESPACE
    
    
  • MCVideoFrame.cpp

    #include "MCVideoFrame.h"
    USE_MCWIDGET_NAMESPACE
    
    MCVideoFrame::MCVideoFrame(QObject* parent)
        : QObject(parent)
    {}
    
    MCVideoFrame::~MCVideoFrame()
    {
        freeData();
    }
    
    void MCVideoFrame::setYUVData(const uint8_t* data, int width, int height)
    {
        freeData();
    
        m_width = width;
        m_height = height;
    
        int dataSize = width * height * 3 / 2;
        m_YUVData = (uint8_t*)malloc(dataSize);
        memcpy(m_YUVData, data, dataSize);
    }
    
    uint8_t* MCVideoFrame::getYUVData()
    {
        return m_YUVData;
    }
    
    int MCVideoFrame::getWidth()
    {
        return m_width;
    }
    
    int MCVideoFrame::getHeight()
    {
        return m_height;
    }
    
    void MCVideoFrame::freeData()
    {
        if (nullptr != m_YUVData)
        {
            free(m_YUVData);
            m_YUVData = nullptr;
        }
    }
    
    
Main application project MCVideoPlayTest
  • main.cpp

    #include "MCVideoPlayTest.h"
    #include <QtWidgets/QApplication>
    
    int main(int argc, char *argv[])
    {
        QApplication a(argc, argv);
        MCVideoPlayTest w;
        w.show();
        return a.exec();
    }
    
    
  • MCVideoPlayTest.h

    #pragma once
    
    #include <QtWidgets/QMainWindow>
    #include <QTimer>
    
    #include "MCWidget/MCVideoPlayer.h"
    #include "MCWidget/MCVideoWidget.h"
    #include "MCWidget/MCVideoFrame.h"
    
    QT_BEGIN_NAMESPACE
    namespace Ui { class MCVideoPlayTest; };
    QT_END_NAMESPACE
    
    class MCVideoPlayTest : public QMainWindow
    {
        Q_OBJECT
    
    public:
        MCVideoPlayTest(QWidget* parent = nullptr);
        ~MCVideoPlayTest();
    
    private slots:
        void timerTimeOut();
        void videoPlayOrStop();
        void videoSeek(int seekTime);
        void videoDurationChanged(int msecond);
        void videoFrameChanged(std::shared_ptr<MCWidget::MCVideoFrame> frame);
        void videoStateChanged(MCWidget::MCVideoPlayer::VideoState state);
    
    private:
        QString getTimeString(int msecond);
    
    private:
        Ui::MCVideoPlayTest*        ui                  { nullptr };
        MCWidget::MCVideoPlayer*    m_pVideoPlayer      { nullptr };
        MCWidget::MCVideoWidget*    m_pVideoWidget      { nullptr };
        QTimer*                     m_pTimer            { nullptr };
    };
    
    
  • MCVideoPlayTest.cpp

    #include "MCVideoPlayTest.h"
    #include "ui_MCVideoPlayTest.h"
    
    MCVideoPlayTest::MCVideoPlayTest(QWidget* parent)
        : QMainWindow(parent)
        , ui(new Ui::MCVideoPlayTest())
    {
        ui->setupUi(this);
    
        m_pVideoWidget = new MCWidget::MCVideoWidget(ui->wgtVideo);
        ui->hLayoutVideo->addWidget(m_pVideoWidget);
    
        ui->sliderTime->setValueByClicked(true);
    
        m_pVideoPlayer = new MCWidget::MCVideoPlayer(this);
        m_pVideoPlayer->setVideoFilePath(QString::fromLocal8Bit(""));
    
        m_pTimer = new QTimer(this);
        connect(m_pTimer, &QTimer::timeout, this, &MCVideoPlayTest::timerTimeOut);
        m_pTimer->setInterval(500);
        
        // Play / stop
        connect(ui->btnPlayOrStop, &QPushButton::clicked, this, &MCVideoPlayTest::videoPlayOrStop);
        // Seek
        connect(ui->sliderTime, &QSlider::valueChanged, this, &MCVideoPlayTest::videoSeek);

        // Total duration updates
        connect(m_pVideoPlayer, &MCWidget::MCVideoPlayer::sigDurationChanged, 
                this, &MCVideoPlayTest::videoDurationChanged);
        // Frame updates
        connect(m_pVideoPlayer, &MCWidget::MCVideoPlayer::sigFrameChanged, 
                this, &MCVideoPlayTest::videoFrameChanged);
        // Playback state updates
        connect(m_pVideoPlayer, &MCWidget::MCVideoPlayer::sigStateChanged, 
                this, &MCVideoPlayTest::videoStateChanged);
    }
    
    MCVideoPlayTest::~MCVideoPlayTest()
    {
        delete ui;
    }
    
    void MCVideoPlayTest::timerTimeOut()
    {
        int currentTime = m_pVideoPlayer->getCurrentTime();
        ui->sliderTime->blockSignals(true);
        ui->sliderTime->setValue(currentTime);
        ui->sliderTime->blockSignals(false);
        ui->labelCurrentTime->setText(getTimeString(currentTime));
    }
    
    void MCVideoPlayTest::videoPlayOrStop()
    {
        MCWidget::MCVideoPlayer::VideoState state = m_pVideoPlayer->getState();
        if (MCWidget::MCVideoPlayer::VideoState::StoppedState == state)
        {
            int min = ui->sliderTime->minimum();
            int max = ui->sliderTime->maximum();
            int value = ui->sliderTime->value();
            if (min != value && max != value)
            {
                m_pVideoPlayer->seekVideo(value);
            }
            m_pVideoPlayer->playVideo();
        }
        else
        {
            m_pVideoPlayer->stopVideo();
        }
    }
    
    void MCVideoPlayTest::videoSeek(int seekTime)
    {
        if (MCWidget::MCVideoPlayer::VideoState::StoppedState == m_pVideoPlayer->getState())
        {
            m_pVideoPlayer->playOneFrame(seekTime);
        }
        else
        {
            m_pVideoPlayer->seekVideo(seekTime);
        }
        ui->labelCurrentTime->setText(getTimeString(seekTime));
    }
    
    void MCVideoPlayTest::videoDurationChanged(int msecond)
    {
        ui->sliderTime->setRange(0, msecond);
        ui->labelTotalTime->setText(getTimeString(msecond));
    }
    
    void MCVideoPlayTest::videoFrameChanged(std::shared_ptr<MCWidget::MCVideoFrame> frame)
    {
        m_pVideoWidget->updateFrame(frame);
    }
    
    void MCVideoPlayTest::videoStateChanged(MCWidget::MCVideoPlayer::VideoState state)
    {
        if (MCWidget::MCVideoPlayer::VideoState::PlayingState == state)
        {
            ui->btnPlayOrStop->setText("stop");
            m_pTimer->start();
        }
        else
        {
            ui->btnPlayOrStop->setText("play");
            m_pTimer->stop();
            timerTimeOut();
        }
    }
    
    QString MCVideoPlayTest::getTimeString(int msecond)
    {
        int hours = msecond / 3600000;
        int mseconds = msecond % 3600000;
        int minutes = mseconds / 60000;
        mseconds = mseconds % 60000;
        int seconds = mseconds / 1000;
        return QString("%1:%2:%3").arg(hours, 2, 10, QChar('0')).arg(minutes, 2, 10, QChar('0')).arg(seconds, 2, 10, QChar('0'));
    }
    
    
  • MCVideoPlayTest.ui

    <?xml version="1.0" encoding="UTF-8"?>
    <ui version="4.0">
     <class>MCVideoPlayTest</class>
     <widget class="QMainWindow" name="MCVideoPlayTest">
      <property name="geometry">
       <rect>
        <x>0</x>
        <y>0</y>
        <width>1422</width>
        <height>600</height>
       </rect>
      </property>
      <property name="windowTitle">
       <string>MCVideoPlayer</string>
      </property>
      <widget class="QWidget" name="centralWidget">
       <layout class="QVBoxLayout" name="verticalLayout" stretch="1,0">
        <property name="spacing">
         <number>0</number>
        </property>
        <property name="leftMargin">
         <number>0</number>
        </property>
        <property name="topMargin">
         <number>0</number>
        </property>
        <property name="rightMargin">
         <number>0</number>
        </property>
        <property name="bottomMargin">
         <number>0</number>
        </property>
        <item>
         <widget class="QWidget" name="wgtVideo" native="true">
          <layout class="QHBoxLayout" name="hLayoutVideo">
           <property name="spacing">
            <number>0</number>
           </property>
           <property name="leftMargin">
            <number>0</number>
           </property>
           <property name="rightMargin">
            <number>0</number>
           </property>
           <property name="bottomMargin">
            <number>0</number>
           </property>
          </layout>
         </widget>
        </item>
        <item>
         <layout class="QHBoxLayout" name="horizontalLayout" stretch="0,0,0,0">
          <item>
           <widget class="QPushButton" name="btnPlayOrStop">
            <property name="text">
             <string>play</string>
            </property>
           </widget>
          </item>
          <item>
           <widget class="QLabel" name="labelCurrentTime">
            <property name="text">
             <string>00:00:00</string>
            </property>
           </widget>
          </item>
          <item>
           <widget class="QLabel" name="labelTotalTime">
            <property name="text">
             <string>00:00:00</string>
            </property>
           </widget>
          </item>
          <item>
           <widget class="MCWidget::MCSlider" name="sliderTime">
            <property name="orientation">
             <enum>Qt::Horizontal</enum>
            </property>
           </widget>
          </item>
         </layout>
        </item>
       </layout>
      </widget>
     </widget>
     <layoutdefault spacing="6" margin="11"/>
     <customwidgets>
      <customwidget>
       <class>MCWidget::MCSlider</class>
       <extends>QSlider</extends>
       <header>MCWidget/MCSlider.h</header>
      </customwidget>
     </customwidgets>
     <resources/>
     <connections/>
    </ui>
    
    