使用多线程的方式
VideoPlayer类继承QThread类
重写startplay():在函数中调用start()启动线程
把解码视频的过程放在run()函数里,解码每得到一帧视频就发送信号,在MainWindow类的构造函数中连接信号与槽
connect(mplayer,SIGNAL(sig_Get_Frame(QImage)),this,SLOT(slot_Get_Frame(QImage)));
在槽函数中调用update(),自动调用paintEvent绘制图像
转换视频帧格式为RGB32的过程:
pFrame=av_frame_alloc();//给解码前的帧分配一个AVFrame结构体的大小
pFrameRGB = av_frame_alloc();//给解码后的RGB帧分配一个AVFrame结构体的大小
img_convert_ctx=sws_getContext(pCodecCtx->width,pCodecCtx->height,pCodecCtx->pix_fmt,
pCodecCtx->width,pCodecCtx->height,AV_PIX_FMT_RGB32,SWS_BICUBIC, NULL, NULL, NULL);
numBytes=av_image_get_buffer_size(AV_PIX_FMT_RGB32,pCodecCtx->width,pCodecCtx->height,1);//通过指定像素格式、图像宽、图像高来计算所需的内存大小(numBytes为int)
out_buffer=(uint8_t*)av_malloc(sizeof(uint8_t)*numBytes);//out_buffer为uint8_t*
av_image_fill_arrays(pFrameRGB->data,pFrameRGB->linesize,out_buffer,AV_PIX_FMT_RGB32,
pCodecCtx->width, pCodecCtx->height,1);
//函数自身不具备内存申请的功能,此函数类似于格式化已经申请的内存
int y_size = pCodecCtx->width * pCodecCtx->height;
packet = (AVPacket *) malloc(sizeof(AVPacket));
av_new_packet(packet, y_size);//给packet的data分配y_size大小的空间,保存压缩后的一帧视频
将得到的pFrame转换为pFrameRGB:
sws_scale(img_convert_ctx,pFrame->data,pFrame->linesize,
0, pCodecCtx->height,
pFrameRGB->data,pFrameRGB->linesize);
使用QImage加载RGB数据:
QImage tmpImg((uchar *)out_buffer,pCodecCtx->width,pCodecCtx->height,QImage::Format_RGB32);
out_buffer就是av_image_fill_arrays中设置的pFrameRGB内存的首地址
mainwindow:
mainwindow.h:
#ifndef MAINWINDOW_H
#define MAINWINDOW_H
#include <QMainWindow>
#include <QImage>
#include "videoplayer.h"
QT_BEGIN_NAMESPACE
namespace Ui { class MainWindow; }
QT_END_NAMESPACE
// Main application window: owns the decoder worker thread (VideoPlayer) and
// paints the most recently received video frame in paintEvent().
class MainWindow : public QMainWindow
{
Q_OBJECT
public:
MainWindow(QWidget *parent = nullptr);
~MainWindow();
protected:
// Draws a black background, then the latest frame scaled and centred.
void paintEvent(QPaintEvent *event);
private slots:
// Receives one decoded frame from VideoPlayer (emitted on the worker
// thread, delivered on the GUI thread) and schedules a repaint.
void slot_Get_Frame(QImage img);
private:
Ui::MainWindow *ui; // Designer-generated UI; deleted in the destructor
VideoPlayer *mplayer; // decoder thread -- NOTE(review): never deleted; confirm intended lifetime
QImage mImage; // last frame received; drawn by paintEvent()
};
#endif // MAINWINDOW_H
.cpp
#include "mainwindow.h"
#include "ui_mainwindow.h"
#include <QPainter>
// Builds the UI, creates the decoder thread, wires its frame signal to our
// slot, and immediately starts playback of the hard-coded test file.
MainWindow::MainWindow(QWidget *parent)
    : QMainWindow(parent)
    , ui(new Ui::MainWindow)
{
    ui->setupUi(this);

    mplayer = new VideoPlayer;

    // Pointer-to-member connect (compile-time checked). The signal is emitted
    // from the worker thread, so Qt delivers it queued to the GUI thread.
    connect(mplayer, &VideoPlayer::sig_Get_Frame,
            this, &MainWindow::slot_Get_Frame);

    mplayer->setFileName("E:\\in.mp4");
    mplayer->startplay();
}
// Tears down the window.
//
// Fix: the previous version neither stopped nor deleted the decoder thread.
// Destroying a QThread object while its thread is still running is undefined
// behaviour ("QThread: Destroyed while thread is still running"), and
// mplayer (allocated in the constructor without a parent) was leaked.
MainWindow::~MainWindow()
{
    if (mplayer) {
        // Ask the worker loop to stop, then block until run() returns.
        // NOTE: if run() does not poll isInterruptionRequested(), wait()
        // simply blocks until playback finishes — still safe, just slower.
        mplayer->requestInterruption();
        mplayer->wait();
        delete mplayer;
        mplayer = nullptr;
    }
    delete ui;
}
// Paints the latest decoded frame letterboxed and centred on a black
// background. Triggered via update() from slot_Get_Frame().
void MainWindow::paintEvent(QPaintEvent *event)
{
    QPainter painter(this);

    // Black backdrop covering the whole widget.
    painter.setBrush(Qt::black);
    painter.drawRect(0, 0, width(), height());

    if (mImage.size().width() <= 0)
        return; // nothing decoded yet

    // Scale preserving aspect ratio, then centre the result.
    QImage scaled = mImage.scaled(size(), Qt::KeepAspectRatio);
    const int offX = (width() - scaled.width()) / 2;
    const int offY = (height() - scaled.height()) / 2;
    painter.drawImage(QPoint(offX, offY), scaled);
}
// Stores the frame just received from the decoder thread and schedules a
// repaint; paintEvent() does the actual drawing.
void MainWindow::slot_Get_Frame(QImage img)
{
    mImage = img; // QImage is implicitly shared, so this assignment is cheap
    update();
}
videoplayer:
.h
#ifndef VIDEOPLAYER_H
#define VIDEOPLAYER_H
#include <QThread>
#include <QImage>

// Decodes a video file on a worker thread (QThread::run) and emits each
// decoded frame as a deep-copied RGB32 QImage via sig_Get_Frame().
//
// Fix: the include guard's #endif previously appeared BEFORE the class, so
// the class definition sat outside the guard and double inclusion caused
// redefinition errors. The guard now wraps the whole header.
class VideoPlayer : public QThread
{
Q_OBJECT
public:
explicit VideoPlayer();
~VideoPlayer();
// Must be called before startplay(); stores the path of the file to decode.
void setFileName(QString path){mFileName=path;}
// Starts the worker thread, which enters run().
void startplay();
signals:
// Emitted once per decoded frame; img is a deep copy, safe across threads.
void sig_Get_Frame(QImage img);
protected:
void run();
private:
QString mFileName; // path of the media file to play
};
#endif // VIDEOPLAYER_H
.cpp
#include "videoplayer.h"
extern "C"
{
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libavutil/pixfmt.h"
#include "libswscale/swscale.h"
#include "libavutil/imgutils.h"
}
#include <stdio.h>
// Default-constructed; the file to play is supplied later via setFileName().
VideoPlayer::VideoPlayer()
{
}
// NOTE(review): does not stop a still-running thread; the owner should
// requestInterruption()/wait() before destroying this object.
VideoPlayer::~VideoPlayer()
{
}
void VideoPlayer::startplay()
{
this->start();
}
void VideoPlayer::run()
{
char file_path[512] = {0};
strcpy(file_path, mFileName.toUtf8().data());
AVFormatContext *pFormatCtx;
AVCodecContext *pCodecCtx;
AVCodec *pCodec;
AVFrame *pFrame, *pFrameRGB;
AVPacket *packet;
uint8_t *out_buffer;
static struct SwsContext *img_convert_ctx;
int videoStream, i, numBytes;
int ret, got_picture;
av_register_all(); //初始化FFMPEG 调用了这个才能正常适用编码器和解码器
//Allocate an AVFormatContext.
pFormatCtx = avformat_alloc_context();
if (avformat_open_input(&pFormatCtx, file_path, NULL, NULL) != 0) {
printf("can't open the file. \n");
return;
}
if (avformat_find_stream_info(pFormatCtx, NULL) < 0) {
printf("Could't find stream infomation.\n");
return;
}
videoStream = -1;
///循环查找视频中包含的流信息,直到找到视频类型的流
///便将其记录下来 保存到videoStream变量中
///这里我们现在只处理视频流 音频流先不管他
for (i = 0; i < pFormatCtx->nb_streams; i++) {
if (pFormatCtx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
videoStream = i;
}
}
///如果videoStream为-1 说明没有找到视频流
if (videoStream == -1) {
printf("Didn't find a video stream.\n");
return;
}
///查找解码器
pCodecCtx = pFormatCtx->streams[videoStream]->codec;
pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
if (pCodec == NULL) {
printf("Codec not found.\n");
return;
}
///打开解码器
if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
printf("Could not open codec.\n");
return;
}
pFrame=av_frame_alloc();
pFrameRGB = av_frame_alloc();
img_convert_ctx=sws_getContext(pCodecCtx->width,pCodecCtx->height,pCodecCtx->pix_fmt,
pCodecCtx->width,pCodecCtx->height,AV_PIX_FMT_RGB32,SWS_BICUBIC, NULL, NULL, NULL);
numBytes=av_image_get_buffer_size(AV_PIX_FMT_RGB32,pCodecCtx->width,pCodecCtx->height,1);
out_buffer=(uint8_t*)av_malloc(sizeof(uint8_t)*numBytes);
av_image_fill_arrays(pFrameRGB->data,pFrameRGB->linesize,out_buffer,AV_PIX_FMT_RGB32,
pCodecCtx->width, pCodecCtx->height,1);
int y_size = pCodecCtx->width * pCodecCtx->height;
packet = (AVPacket *) malloc(sizeof(AVPacket));
av_new_packet(packet, y_size);
av_dump_format(pFormatCtx, 0, file_path, 0); //输出视频信息
while(1){
if(av_read_frame(pFormatCtx, packet) < 0){
break;
}
if(packet->stream_index==videoStream)
{
ret=avcodec_send_packet(pCodecCtx,packet);
if (ret < 0) {
printf("decode error.\n");
return;
}
while(avcodec_receive_frame(pCodecCtx,pFrame)>=0)
{
sws_scale(img_convert_ctx,pFrame->data,pFrame->linesize,
0, pCodecCtx->height,
pFrameRGB->data,pFrameRGB->linesize);
//把这个RGB数据 用QImage加载
QImage tmpImg((uchar *)out_buffer,pCodecCtx->width,pCodecCtx->height,QImage::Format_RGB32);
QImage img=tmpImg.copy();
emit sig_Get_Frame(img);
}
}
av_free_packet(packet);
}
av_free(out_buffer);
av_free(pFrameRGB);
avcodec_close(pCodecCtx);
avformat_close_input(&pFormatCtx);
}