上一篇讲解了如何用ffmpeg解码视频。
从现在开始,我们用Qt+FFmpeg做一个视频播放器。本篇先实现把视频的图片(帧)显示在Qt的界面上。
1.先用Qt创建一个GUI工程。
配置工程环境,配置.pro文件相关依赖
# Put build products directly in ../win/bin32 / ../win/bin64 (one level above
# the source tree), where the FFmpeg DLLs already live, so the program can
# find the DLLs at run time.  (Previously: DESTDIR=$$PWD/bin/)
contains(QT_ARCH, i386) {
    message("32-bit")
    DESTDIR = $${PWD}/../win/bin32
    message($$DESTDIR)
} else {
    message("64-bit")
    DESTDIR = $${PWD}/../win/bin64
}
win32{
    contains(QT_ARCH, i386) {
        message("32-bit")
        # Fix: the stray "$$PWD/src" line was dangling (a qmake syntax error);
        # it must be a continuation of INCLUDEPATH (note the trailing backslash).
        INCLUDEPATH += $$PWD/../win/bin32/include \
            $$PWD/src
        LIBS += -L$$PWD/../win/bin32/lib -lavcodec -lavdevice -lavfilter -lavformat -lavutil -lpostproc -lswresample -lswscale
    } else {
        message("64-bit")
        INCLUDEPATH += $$PWD/../win/bin64/include \
            $$PWD/src
        LIBS += -L$$PWD/../win/bin64/lib -lavcodec -lavdevice -lavfilter -lavformat -lavutil -lpostproc -lswresample -lswscale
    }
}
视频帧的解码很耗时,为了避免界面卡顿,这里创建一个线程类专门处理解码任务。
#include <QThread>
// Decoder thread: runs the FFmpeg demuxing/decoding work off the GUI thread
// so the interface stays responsive.
class CVideoPlayer: public QThread
{
Q_OBJECT
public:
CVideoPlayer();
protected:
// QThread entry point: reads and decodes the video file.
void run();
};
run函数就是做读取视频和解码视频的任务,读取和解码还是和前面说的方法一样,这里需要改动一点是,图片格式的转换,由于Qt的控件不能显示ppm文件,因此转成RGB32。
// Snippet: set up conversion of decoded frames to RGB32 so Qt can display them.
// Scaler/converter from the stream's native pixel format to RGB32
// (same width/height, bicubic filtering).
img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height, \
pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height, \
AV_PIX_FMT_RGB32, SWS_BICUBIC, nullptr, nullptr, nullptr);
// Byte size of one RGB32 frame, and the buffer that will hold it.
numBytes = avpicture_get_size(AV_PIX_FMT_RGB32, pCodecCtx->width, pCodecCtx->height);
outBuffer = (uint8_t *)av_malloc(numBytes * sizeof(uint8_t));
// Point pFrameRGB's data/linesize members at outBuffer.
avpicture_fill((AVPicture *)pFrameRGB, outBuffer, AV_PIX_FMT_RGB32, pCodecCtx->width, pCodecCtx->height);
// Pixel count; used later to size the demux packet (see av_new_packet below).
int y_size = pCodecCtx->width * pCodecCtx->height;
将RGB32数据存入QImage对象
// Fix: the buffer variable is named 'outBuffer' (see its declaration above);
// 'out_buffer' does not exist and would not compile.
QImage tmpImg((uchar *)outBuffer, pCodecCtx->width, pCodecCtx->height, QImage::Format_RGB32);
获取QImage对象后,通过信号把QImage对象传给主界面,主界面使用paintEvent()函数,把一帧帧图像显示在界面上。
工程源码如下:
//CVideoPlayer.h
#ifndef CVIDEOPLAYER_H
#define CVIDEOPLAYER_H
#include <QThread>
#include <QImage>
extern "C"
{
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libswscale/swscale.h"
#include "libavdevice/avdevice.h"
}
class CVideoPlayer: public QThread
{
Q_OBJECT
public:
CVideoPlayer();
void videoDecode();
protected:
void run();
signals:
void signalGetOneFrame(QImage image);
void signalDecodeError(int error);
};
#endif // CVIDEOPLAYER_H
//CVideoPlayer.cpp
#include "CVideoPlayer.h"
#include <QDebug>
// Default constructor: no setup needed; the thread starts only via start().
CVideoPlayer::CVideoPlayer()
{
}
// Thread body executed by QThread::start(); runs the whole decode loop.
void CVideoPlayer::run()
{
videoDecode();
}
void CVideoPlayer::videoDecode()
{
char *filePath = "F:\\github\\QtPlayLearn\\win\\mp4\\lasa.mp4";
AVFormatContext *pFormatCtx;
AVCodecContext *pCodecCtx;
AVCodec *pCodec;
AVFrame *pFrame, *pFrameRGB;
AVPacket *packet;
uint8_t *outBuffer;
static struct SwsContext *img_convert_ctx;
unsigned int i;
int videoStream, numBytes;
int ret, got_picture;
av_register_all();//初始化ffmpeg 调用了这个才能正常适用编码器和解码器
//Allocate an AVFormatContext.
pFormatCtx = avformat_alloc_context();
if(0 != avformat_open_input(&pFormatCtx, filePath, nullptr, nullptr))
{
emit signalDecodeError(-1);
return;
}
if(avformat_find_stream_info(pFormatCtx, nullptr))
{
emit signalDecodeError(-2);
return;
}
videoStream = -1;
//循环查找视频中包含的流信息,直到找到视频类型的流
//便将其记录下来 保存到videoStream变量中
//这里我们现在只处理视频流 音频流先不管他
for(i = 0; i < pFormatCtx->nb_streams; ++i)
{
qDebug() << "pFormatCtx->streams[" << i << "]->codec->codec_type = " << pFormatCtx->streams[i]->codec->codec_type << endl;
//0:视频类型 1:音频类型
if(pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO)
videoStream = i;
}
qDebug() << "videoStream===========" << videoStream << " pFormatCtx->nb_streams==" << pFormatCtx->nb_streams << endl;
//如果videoStream为-1 说明没有找到视频流
if(videoStream == -1)
{
emit signalDecodeError(-3);
return;
}
//查找解码器
pCodecCtx = pFormatCtx->streams[videoStream]->codec;
qDebug() << "pCodecCtx->codec_id===========" << pCodecCtx->codec_id << endl;
//测试时这个值为27,查到枚举值对应的是AV_CODEC_ID_H264 ,即是H264压缩格式的文件。
pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
if(nullptr == pCodec)
{
emit signalDecodeError(-4);
return;
}
//打开解码器
if(avcodec_open2(pCodecCtx, pCodec, nullptr) < 0)
{
emit signalDecodeError(-5);
return;
}
pFrame = av_frame_alloc();
pFrameRGB = av_frame_alloc();
img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height, \
pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height, \
AV_PIX_FMT_RGB32, SWS_BICUBIC, nullptr, nullptr, nullptr);
numBytes = avpicture_get_size(AV_PIX_FMT_RGB32, pCodecCtx->width, pCodecCtx->height);
outBuffer = (uint8_t *)av_malloc(numBytes * sizeof(uint8_t));
avpicture_fill((AVPicture *)pFrameRGB, outBuffer, AV_PIX_FMT_RGB32, pCodecCtx->width, pCodecCtx->height);
int y_size = pCodecCtx->width * pCodecCtx->height;
packet = (AVPacket *)malloc(sizeof(AVPacket));//分配一个packet
av_new_packet(packet, y_size);//分配packet的数据
av_dump_format(pFormatCtx, 0, filePath, 0);//输出视频信息
int index = 0;
while (1)
{
if(av_read_frame(pFormatCtx, packet) < 0)
{
qDebug() << "index===============" << index;
break;//这里认为视频读取完了
}
if(packet->stream_index == videoStream)
{
ret = avcodec_decode_video2(pCodecCtx, pFrame, &got_picture, packet);
if(ret < 0)
{
emit signalDecodeError(-6);
return;
}
if(got_picture)
{
sws_scale(img_convert_ctx, (uint8_t const * const *)pFrame->data,
pFrame->linesize, 0, pCodecCtx->height, pFrameRGB->data, pFrameRGB->linesize);
++index;
//把这个RGB数据 用QImage加载
QImage tempImage((uchar*)outBuffer, pCodecCtx->width, pCodecCtx->height, QImage::Format_RGB32);
QImage image = tempImage.copy();//把图像复制一份 传递给界面显示
qDebug() << "image.width==" << image.width() << "image.height==" << image.height();
emit signalGetOneFrame(image);
//if (index > 10)
// return; //这里我们就保存10张图片
}
}
av_free_packet(packet);
}
av_free(outBuffer);
av_free(pFrameRGB);
avcodec_close(pCodecCtx);
avformat_close_input(&pFormatCtx);
}
//mainwindow.h
#ifndef MAINWINDOW_H
#define MAINWINDOW_H
#include <QMainWindow>
#include "CVideoPlayer.h"
#include <QImage>
QT_BEGIN_NAMESPACE
namespace Ui { class MainWindow; }
QT_END_NAMESPACE
class MainWindow : public QMainWindow
{
Q_OBJECT
public:
MainWindow(QWidget *parent = nullptr);
~MainWindow();
void initView();
void initData();
public slots:
void slotDecodeError(int error);
void slotGetOneFrame(QImage image);
protected:
void paintEvent(QPaintEvent *event);
private:
Ui::MainWindow *ui;
CVideoPlayer *m_pVideoPlayer;
QImage m_Image;
};
#endif // MAINWINDOW_H
//mainwindow.cpp
#include "mainwindow.h"
#include "ui_mainwindow.h"
#include <QDebug>
#include <QPainter>
// Builds the UI, then starts the decoder thread (via initData()).
MainWindow::MainWindow(QWidget *parent)
: QMainWindow(parent)
, ui(new Ui::MainWindow)
{
ui->setupUi(this);
initView();
initData();
}
// NOTE(review): m_pVideoPlayer is neither stopped nor deleted here, so the
// decoder thread may still be running (and emitting into a dead window) when
// this destructor runs — confirm the intended shutdown/ownership strategy.
MainWindow::~MainWindow()
{
delete ui;
}
// Placeholder for view/widget setup; intentionally empty for now.
void MainWindow::initView()
{
}
void MainWindow::initData()
{
m_pVideoPlayer = new CVideoPlayer();
connect(m_pVideoPlayer, SIGNAL(signalDecodeError(int)), this, SLOT(slotDecodeError(int)));
connect(m_pVideoPlayer, SIGNAL(signalGetOneFrame(QImage)), this, SLOT(slotGetOneFrame(QImage)));
m_pVideoPlayer->start();
}
// Slot: log the negative error code reported by CVideoPlayer::videoDecode().
void MainWindow::slotDecodeError(int error)
{
qDebug() << "slotDecodeError======error====" << error;
}
// Slot: cache the newest decoded frame and ask Qt to repaint the window.
void MainWindow::slotGetOneFrame(QImage image)
{
    m_Image = image;   // keep the frame for the next paintEvent()
    this->update();    // schedules a paintEvent() call
}
// Paint handler: fill the window black, then draw the latest frame scaled to
// fit (aspect ratio preserved) and centred.
void MainWindow::paintEvent(QPaintEvent *)
{
    QPainter painter(this);
    // Clear the whole client area to black first.
    painter.setBrush(Qt::black);
    painter.drawRect(0, 0, this->width(), this->height());
    if(m_Image.size().width() <= 0)
        return; // no frame decoded yet
    // Shrink/grow the frame to fit inside the window without distortion.
    const QImage scaled = m_Image.scaled(this->size(), Qt::KeepAspectRatio);
    // Centre the scaled frame (letterbox/pillarbox offsets).
    const int offsetX = (this->width() - scaled.width()) / 2;
    const int offsetY = (this->height() - scaled.height()) / 2;
    painter.drawImage(QPoint(offsetX, offsetY), scaled);
}
//main.cpp
#include "mainwindow.h"
#include <QApplication>
int main(int argc, char *argv[])
{
QApplication a(argc, argv);
MainWindow w;
w.show();
return a.exec();
}
运行结果:
参考:
https://blog.csdn.net/qq214517703/article/details/52619145
https://blog.csdn.net/yao_hou/article/details/80559161