#ifndef MAINWINDOW_H
#define MAINWINDOW_H
#include <QQuickPaintedItem>
#include <QPixmap>
#include <QPainter>
#include "decodeav.h"
#include "tsav.h"
// QML item that renders decoded video frames via QPainter.
// Exposes play/pause/stop/seek/getDuration to QML as Q_INVOKABLEs.
// NOTE(review): play() drives m_tsAv while pause/stop/seek/getDuration
// drive m_decodeAV — two different backends are mixed; the ctor only
// connects m_tsAv's frames. Confirm which backend is intended.
class TsVideo : public QQuickPaintedItem
{
Q_OBJECT
public:
explicit TsVideo(QQuickItem *parent = nullptr);
~TsVideo() override;
Q_INVOKABLE void play();
Q_INVOKABLE void pause();
Q_INVOKABLE void stop();
Q_INVOKABLE void seek(const qint64 &msec);
Q_INVOKABLE qint64 getDuration();
private slots:
// Receives a decoded frame and schedules a repaint.
void slotDraw(QImage image);
protected:
// QQuickPaintedItem hook: draws the cached frame scaled to the item.
void paint(QPainter *painter) override;
private:
decodeAV m_decodeAV;// FFmpeg-based decoder thread
TsAV m_tsAv;// QMediaPlayer-based backend thread (source of presentframe)
QPixmap mFrame;// last frame received from the backend, painted by paint()
};
#endif // MAINWINDOW_H
TsVideo.cpp — implementation:
#include "TsVideo.h"
#include "ui_mainwindow.h"
#include <QDebug>
#include <QPainter>
#include <QPixmap>
// Wires the TsAV backend's frame signal into slotDraw and enables painting.
// The FFmpeg path (m_decodeAV) is present but left disconnected/not started.
TsVideo::TsVideo(QQuickItem *parent) :
QQuickPaintedItem(parent)
{
setFlag(QQuickItem::ItemHasContents);
// connect(&m_decodeAV, SIGNAL(signalDraw(QImage)),this,SLOT(slotDraw(QImage)));// note: signal and slot parameter lists must match
connect(&m_tsAv, &TsAV::presentframe, this, &TsVideo::slotDraw);
// m_decodeAV.start();
}
// Shuts down the TsAV worker thread (quit its event loop, then join).
// NOTE(review): m_decodeAV (also a QThread) is not stopped here; if it is
// ever start()ed (see commented-out line in the ctor), add a matching
// shutdown — confirm.
TsVideo::~TsVideo()
{
m_tsAv.exit();
m_tsAv.wait();
}
// Starts playback on the TsAV backend.
// NOTE(review): pause/stop/seek below go to m_decodeAV instead — the two
// backends are mixed; verify which one should own playback control.
void TsVideo::play()
{
m_tsAv.play();
}
// Pauses the FFmpeg backend ('puase' spelling comes from decodeAV's API).
// NOTE(review): frames are connected from m_tsAv (see ctor), yet this
// pauses m_decodeAV — backends look mixed; verify.
void TsVideo::pause()
{
m_decodeAV.puase();
}
// Stops playback.
// NOTE(review): identical to pause() — decodeAV exposes no distinct stop;
// confirm whether stop should additionally rewind (e.g. seek(0)).
void TsVideo::stop()
{
m_decodeAV.puase();
}
// Seeks the FFmpeg backend to the given position.
// NOTE(review): the parameter is named msec, but decodeAV::seek rescales it
// with AV_TIME_BASE_Q (i.e. treats it as microseconds) — verify the unit
// contract with the QML callers.
void TsVideo::seek(const qint64 &msec)
{
m_decodeAV.seek(msec);
}
// Returns the media duration from the FFmpeg backend.
// NOTE(review): decodeAV::getDuration as written discards the duration
// expression and always returns -1 — verify/fix the backend.
qint64 TsVideo::getDuration()
{
return m_decodeAV.getDuration();
}
// Slot fed by the backend with each freshly decoded frame: cache it as a
// pixmap and ask the scene graph for a repaint (paint() consumes mFrame).
void TsVideo::slotDraw(QImage image)
{
    QPixmap converted = QPixmap::fromImage(image);
    mFrame = converted;
    update();
}
// Paints the most recent frame, letterboxed into the item's current size.
void TsVideo::paint(QPainter *painter)
{
    // Nothing to draw until the first frame has arrived.
    if (!mFrame.isNull())
    {
        painter->setRenderHint(QPainter::Antialiasing);
        painter->setRenderHint(QPainter::SmoothPixmapTransform);
        const QPixmap scaled = mFrame.scaled(this->width(), this->height(), Qt::KeepAspectRatio);
        painter->drawPixmap(0, 0, scaled);
    }
}
Video decoding (FFmpeg, decodeAV):
#ifndef DECODEAV_H
#define DECODEAV_H
#include <QThread>
#include <QImage>
#include <QMutex>
extern "C"
{
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libavutil/pixfmt.h"
#include "libswscale/swscale.h"
#include "libavdevice/avdevice.h"
#include <libavutil/pixdesc.h>
#include <libavutil/hwcontext.h>
#include <libavutil/opt.h>
#include <libavutil/avassert.h>
#include <libavutil/imgutils.h>
}
// Playback state of the decode loop (see decodeAV::decodec).
// NOTE(review): "ePalyState"/"palying" are misspellings of Play/playing;
// kept as-is because the identifiers are referenced throughout this file.
enum ePalyState{
palying, // actively reading/decoding/emitting frames
stop, // paused: the loop idles without reading packets
finish // terminal: the loop exits
};
// Worker thread that demuxes/decodes a video file with FFmpeg and emits
// every decoded frame as an RGB32 QImage via signalDraw().
class decodeAV : public QThread
{
Q_OBJECT
public:
decodeAV();
~decodeAV();
// QThread entry point: init() then the decodec() loop.
void run();
// NOTE(review): rescaled with AV_TIME_BASE_Q, so effectively microseconds
// despite the parameter name — verify callers.
void seek(const qint64 &msec);
void puase(); // sic: pauses decoding (sets state to 'stop')
void play(); // resumes decoding (sets state to 'palying')
int64_t getDuration();
signals:
// Emitted once per decoded frame with the RGB32 image.
void signalDraw(QImage image);
private:
void init(); // open input, find stream, set up codec + sws contexts
void decodec(); // read/decode/convert/emit loop
void delay(int ms);
private:
QMutex m_mutex; // guards m_videoState and seek operations
ePalyState m_videoState = ePalyState::palying;
AVFormatContext *pAVFormatCtx = nullptr;// demuxer (container) context
AVCodecContext *pAVCodecCtx = nullptr;// decoder context for the selected video stream
SwsContext *pSwsCtx = nullptr; // pixel-format converter (native -> RGB32)
uint8_t *pRgbBuffer = nullptr; // backing store for pAVFrameRGB / emitted QImage
AVPacket packet;// one compressed packet read from the container
AVFrame *pAVFrame = nullptr; // decoded frame in the codec's native pixel format
AVFrame *pAVFrameRGB = nullptr; // frame converted to RGB32 for display
int iVideoIndex = -1; // index of the video stream in pAVFormatCtx
// NOTE(review): incremented once per av_read_frame call in decodec(),
// so it counts reads, not milliseconds, despite the name.
qint64 currmsec = 0;
bool isFinish =false;
};
#endif // DECODEAV_H
#include "decodeav.h"
#include <QDebug>
#include<QCoreApplication>
#include<QTime>
#include<QImage>
// Members are initialized in-class; FFmpeg setup happens in init(), which
// run() executes on the worker thread.
decodeAV::decodeAV()
{
}
// Releases all FFmpeg resources acquired in init().
// NOTE(review): the decode thread may still be running when the destructor
// is invoked; callers should make sure run() has finished (state 'finish'
// plus wait()) before destroying this object.
decodeAV::~decodeAV()
{
    m_videoState = ePalyState::finish;
    // av_frame_free (not av_free) also releases the frames' internal buffers.
    av_frame_free(&pAVFrame);
    av_frame_free(&pAVFrameRGB);
    // Buffer from av_malloc in init(); the old code never freed it (leak).
    av_free(pRgbBuffer);
    sws_freeContext(pSwsCtx);
    // avcodec_free_context frees what avcodec_alloc_context3 allocated;
    // avcodec_close alone left the context itself leaked.
    avcodec_free_context(&pAVCodecCtx);
    // Closes the input and frees the format context, nulling the pointer
    // (the previous extra avformat_free_context call was a no-op on nullptr).
    avformat_close_input(&pAVFormatCtx);
}
// QThread entry point: set up FFmpeg on this thread, then run the decode
// loop. The thread finishes when decodec() returns (no event loop is run).
void decodeAV::run()
{
init();
decodec();
}
void decodeAV::init()
{
std::string file = "E:/MV/zouma.mp4";
//描述多媒体文件的构成及其基本信息
if (avformat_open_input(&pAVFormatCtx, file.data(), nullptr, nullptr) != 0)
{
qDebug() <<"open file fail";
avformat_free_context(pAVFormatCtx);
return;
}
//读取一部分视音频数据并且获得一些相关的信息
if (avformat_find_stream_info(pAVFormatCtx, nullptr) < 0)
{
qDebug() <<"vformat find stream fail";
avformat_close_input(&pAVFormatCtx);
return;
}
//根据枚举获取视频流
AVCodec *pAVCodec;//每种视频(音频)编解码器(例如H.264解码器)对应一个该结构体
int ret = av_find_best_stream(pAVFormatCtx, AVMEDIA_TYPE_VIDEO, -1, -1, &pAVCodec, 0);
if (ret < 0) {
qDebug()<< "av_find_best_stream faliture";
avformat_close_input(&pAVFormatCtx);
return;
}
iVideoIndex = ret;
// 根据解码器枚举类型找到解码器
pAVCodec = avcodec_find_decoder(pAVFormatCtx->streams[iVideoIndex]->codecpar->codec_id);
if (pAVCodec == nullptr)
{
qDebug()<<"not find decoder";
return;
}
qDebug()<<"avcodec_open2 pAVCodec->name:" << QString::fromStdString(pAVCodec->name);
if(pAVFormatCtx->streams[iVideoIndex]->avg_frame_rate.den != 0)
{
float fps_ = pAVFormatCtx->streams[iVideoIndex]->avg_frame_rate.num / pAVFormatCtx->streams[iVideoIndex]->avg_frame_rate.den;
qDebug() <<"fps:" << fps_;
}
int64_t video_length_sec_ = pAVFormatCtx->duration/AV_TIME_BASE;
qDebug() <<"video_length_sec_:" << video_length_sec_;
//创建编解码器的上下文
pAVCodecCtx = avcodec_alloc_context3(pAVCodec);
if (pAVCodecCtx == nullptr)
{
qDebug() <<"get pAVCodecCtx fail";
avformat_close_input(&pAVFormatCtx);
return;
}
// 将视频数据拷贝到解码器的上下文
ret = avcodec_parameters_to_context(pAVCodecCtx, pAVFormatCtx->streams[iVideoIndex]->codecpar);
if (ret < 0)
{
qDebug() <<"avcodec_parameters_to_context fail";
avformat_close_input(&pAVFormatCtx);
return;
}
// 用解码器初始化解码器的上下文,解码器在此初始化完成
if (avcodec_open2(pAVCodecCtx, pAVCodec, nullptr) < 0)
{
qDebug()<<"avcodec_open2 fail";
return;
}
//为解码帧分配内存
//AVFrame 存放从AVPacket中解码出来的原始数据
pAVFrame = av_frame_alloc();
pAVFrameRGB = av_frame_alloc();
//用于视频图像的转换,将源数据转换为RGB32的目标数据
pSwsCtx = sws_getContext(pAVCodecCtx->width, pAVCodecCtx->height, pAVCodecCtx->pix_fmt,
pAVCodecCtx->width, pAVCodecCtx->height, AV_PIX_FMT_RGB32,
SWS_BICUBIC, nullptr, nullptr, nullptr);
int m_size = av_image_get_buffer_size(AVPixelFormat(AV_PIX_FMT_RGB32), pAVCodecCtx->width, pAVCodecCtx->height, 1);
pRgbBuffer = (uint8_t *)(av_malloc(m_size));
//为已经分配的空间的结构体AVPicture挂上一段用于保存数据的空间
avpicture_fill((AVPicture *)pAVFrameRGB, pRgbBuffer, AV_PIX_FMT_BGR32, pAVCodecCtx->width, pAVCodecCtx->height);
//av_image_fill_arrays
//AVpacket 用来存放解码数据
av_new_packet(&packet, pAVCodecCtx->width * pAVCodecCtx->height);
}
void decodeAV::decodec()
{
//读取码流中视频帧
while (true)
{
if(m_videoState != ePalyState::palying)
{
if(m_videoState == ePalyState::finish)
{
break;
}
continue;
}
// 读取一帧数据
int ret = av_read_frame(pAVFormatCtx, &packet);
currmsec++;
if(ret != 0)
{
if(currmsec == pAVFormatCtx->duration)
{
qDebug()<<"file end";
m_videoState = ePalyState::finish;
break;
}
else
{
continue;
}
}
if (packet.stream_index != iVideoIndex)
{
av_packet_unref(&packet);
continue;
}
int iGotPic = AVERROR(EAGAIN);
// 发送数据到后台解码队列
iGotPic = avcodec_send_packet(pAVCodecCtx, &packet);
if(iGotPic!=0)
{
qDebug()<<"avcodec_send_packet error";
continue;
}
// 从解码器读取帧数据
iGotPic = avcodec_receive_frame(pAVCodecCtx, pAVFrame);
if(iGotPic == 0)
{
//转换像素
sws_scale(pSwsCtx, (uint8_t const * const *)pAVFrame->data, pAVFrame->linesize, 0, pAVCodecCtx->height, pAVFrameRGB->data, pAVFrameRGB->linesize);
//构造QImage
QImage img(pRgbBuffer, pAVCodecCtx->width, pAVCodecCtx->height, QImage::Format_RGB32);
emit signalDraw(img);
delay(1);
}
else
{
qDebug()<<"decode error";
}
av_packet_unref(&packet);
std::this_thread::sleep_for(std::chrono::milliseconds(25));
}
}
//延时, 不能直接sleep延时,UI主线程不能直接被阻塞,不然会有问题的
void decodeAV::delay(int ms)
{
QTime stopTime;
stopTime.start();
while(stopTime.elapsed() < ms)//stopTime.elapsed()返回从start开始到现在的毫秒数
{
QCoreApplication::processEvents();
}
}
void decodeAV::seek(const qint64 &time)
{
if(time < 0)
{
return;
}
m_mutex.lock();
qint64 pos = time;
if (time < pAVFormatCtx->start_time)
pos = pAVFormatCtx->start_time;
if (time > pAVFormatCtx->duration)
pos = pAVFormatCtx->duration;
int target = av_rescale_q(pos, AV_TIME_BASE_Q, pAVFormatCtx->streams[iVideoIndex]->time_base);
av_seek_frame(pAVFormatCtx, iVideoIndex, target, AVSEEK_FLAG_BACKWARD | AVSEEK_FLAG_FRAME); //AV_TIME_BASE
avcodec_flush_buffers(pAVCodecCtx);
m_mutex.unlock();
ePalyState videoState = m_videoState;
play();
if(videoState == ePalyState::stop)
{
puase();
}
}
void decodeAV::puase()
{
m_mutex.lock();
m_videoState = ePalyState::stop;
qDebug()<<"pause video";
m_mutex.unlock();
}
void decodeAV::play()
{
m_mutex.lock();
m_videoState = ePalyState::palying;
qDebug()<<"play video";
m_mutex.unlock();
}
// Returns the container duration in AV_TIME_BASE units (microseconds),
// or -1 when no file has been opened yet.
int64_t decodeAV::getDuration()
{
    if(pAVFormatCtx)
    {
        // The old code evaluated this expression but discarded it (missing
        // 'return'), so the function always returned -1.
        return pAVFormatCtx->duration;
    }
    return -1;
}