Qt handles the display, while FFmpeg loops over the H.264/H.265 buffer queues: it pulls data from the queue, decodes it into images, and hands each image to the Qt UI through a signal-slot connection. This is how video playback is implemented.
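The header videoplayer.h is not shown in this post. Below is a minimal sketch of what it is assumed to look like, reconstructed from the calls in videoplayer.cpp (the QThread base class is inferred from start()/msleep()/run(), the sig_GetOneFrame signal from the emit in the decode loop); the ctlPack layout and the ChannelRtpEnum values are assumptions, not the original header.
// videoplayer.h -- assumed sketch, reconstructed from videoplayer.cpp
#pragma once
#include <QThread>
#include <QImage>
#include <mutex>
#include <queue>
using std::queue;

// Assumed layout of one buffered packet: raw bytes plus length.
struct ctlPack
{
    unsigned char *buffer;
    int len;
};

// Assumed channel values; only Center (index 1) is used in the code below.
enum class ChannelRtpEnum { Left = 0, Center = 1, Right = 2 };

class VideoPlayer : public QThread
{
    Q_OBJECT
public:
    explicit VideoPlayer(ChannelRtpEnum rtpType);
    ~VideoPlayer();
    void startPlay();
    void StopPlay();
    void ClearQueue();
    // Producer side: the network receiver pushes packets into the per-camera queue.
    static void PusSendData(unsigned __int8 camerID, ctlPack *onePack);
    // Consumer side: custom AVIO read callback passed to avio_alloc_context().
    static int Read_bufferCenter(void *opaque, uint8_t *buf, int buf_size);
signals:
    void sig_GetOneFrame(QImage image);   // one decoded frame, ready for display
protected:
    void run() override;                  // demux + decode loop (QThread worker)
private:
    ChannelRtpEnum currentRtp;
    static std::mutex mutexRtpData[3];
    static bool IsDecodingImage;
    static bool IsRuning;
    static queue<ctlPack*> receiveQueue[3];
    static bool isReadRtpLeft, isReadRtpCenter, isReadRtpRight;
};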
Decoder implementation: videoplayer.cpp
#include "videoplayer.h"
#include <stdio.h>
extern "C"
{
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libavutil/pixfmt.h"
#include "libswscale/swscale.h"
}
#pragma comment(lib, "avcodec.lib")
#pragma comment(lib, "avformat.lib")
#pragma comment(lib, "avutil.lib")
#pragma comment(lib ,"swscale.lib")
std::mutex VideoPlayer::mutexRtpData[3];
bool VideoPlayer::IsDecodingImage;
bool VideoPlayer::IsRuning;
queue<ctlPack*> VideoPlayer::receiveQueue[3];
bool VideoPlayer::isReadRtpLeft=false;
bool VideoPlayer::isReadRtpCenter = false;
bool VideoPlayer::isReadRtpRight = false;
VideoPlayer::VideoPlayer(ChannelRtpEnum rtpType)
{
    IsRuning = true;
    currentRtp = rtpType;
    isReadRtpLeft = false;
    isReadRtpCenter = false;
    isReadRtpRight = false;
}
VideoPlayer::~VideoPlayer()
{
    IsRuning = false;
}
void VideoPlayer::startPlay()
{
    IsRuning = true;
    this->start();
}
void VideoPlayer::StopPlay()
{
    IsRuning = false;
}
void VideoPlayer::ClearQueue()
{
}
int VideoPlayer::Read_bufferCenter(void *opaque, uint8_t *buf, int buf_size)
{
    // Block until the center queue has data or playback is stopped.
    while (receiveQueue[1].empty() && IsRuning)
    {
        //if (isReadRtpCenter)
        msleep(15);
    }
    if (receiveQueue[1].empty())
    {
        return -1;
    }
    mutexRtpData[1].lock();
    ctlPack *one = receiveQueue[1].front();
    receiveQueue[1].pop();
    mutexRtpData[1].unlock();
    // Skip the 3-byte prefix of each queued packet and hand the payload to FFmpeg.
    int len = one->len - 3;
    if (len > buf_size)          // never write past the AVIO buffer
        len = buf_size;
    memcpy(buf, one->buffer + 3, len);
    delete[] one->buffer;        // assumes the buffer was allocated with new[]
    delete one;
    return len;
}
void VideoPlayer::PusSendData(unsigned __int8 camerID, ctlPack *onePack)
{
    mutexRtpData[camerID].lock();
    receiveQueue[camerID].push(onePack);
    mutexRtpData[camerID].unlock();
}
void VideoPlayer::run()
{
    AVFormatContext *pFormatCtx;
    AVCodecContext *pCodecCtx;
    AVCodec *pCodec;
    AVFrame *pFrame, *pFrameRGB;
    AVPacket *packet;
    uint8_t *out_buffer;
    static struct SwsContext *img_convert_ctx;
    int videoStream, i, numBytes;
    IsDecodingImage = false;
    av_register_all(); // initialize FFmpeg; required before the demuxers and codecs can be used
    pFormatCtx = avformat_alloc_context();
    // Custom AVIO context: FFmpeg pulls its input through Read_bufferCenter instead of a file.
    void *MemInputBuffer = av_malloc(32768);
    isReadRtpCenter = false;
    pFormatCtx->pb = avio_alloc_context((unsigned char *)MemInputBuffer, 32768, 0, NULL, Read_bufferCenter, NULL, NULL);
    if (avformat_open_input(&pFormatCtx, NULL, NULL, NULL) != 0)
    {
        printf("can't open the input stream.\n");
        return;
    }
    if (avformat_find_stream_info(pFormatCtx, NULL) < 0)
    {
        printf("Couldn't find stream information.\n");
        return;
    }
    videoStream = -1;
    for (i = 0; i < (int)pFormatCtx->nb_streams; i++) {
        if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) {
            videoStream = i;
        }
    }
    if (videoStream == -1) {
        printf("Didn't find a video stream.\n");
        return;
    }
    pCodecCtx = pFormatCtx->streams[videoStream]->codec;
    pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
    IsDecodingImage = true;
    if (pCodec == NULL) {
        printf("Codec not found.\n");
        return;
    }
    if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
        printf("Could not open codec.\n");
        return;
    }
    pFrame = av_frame_alloc();
    pFrameRGB = av_frame_alloc();
    // Converter from the decoder's pixel format to RGB32 for QImage.
    img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height,
        pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height,
        AV_PIX_FMT_RGB32, SWS_BICUBIC, NULL, NULL, NULL);
    numBytes = avpicture_get_size(AV_PIX_FMT_RGB32, pCodecCtx->width, pCodecCtx->height);
    out_buffer = (uint8_t *)av_malloc(numBytes * sizeof(uint8_t));
    avpicture_fill((AVPicture *)pFrameRGB, out_buffer, AV_PIX_FMT_RGB32, pCodecCtx->width, pCodecCtx->height);
    packet = av_packet_alloc();
    isReadRtpCenter = true;
    while (IsRuning)
    {
        try
        {
            if (av_read_frame(pFormatCtx, packet) < 0)
                continue;
            if (packet->stream_index != videoStream)
            {
                av_packet_unref(packet);
                continue;
            }
            if (avcodec_send_packet(pCodecCtx, packet) < 0 ||
                avcodec_receive_frame(pCodecCtx, pFrame) < 0)
            {
                av_packet_unref(packet);
                continue;
            }
            sws_scale(img_convert_ctx, (uint8_t const * const *)pFrame->data,
                pFrame->linesize, 0, pCodecCtx->height, pFrameRGB->data, pFrameRGB->linesize);
            QImage tmpImg((uchar *)out_buffer, pCodecCtx->width, pCodecCtx->height, QImage::Format_RGB32);
            QImage image = tmpImg.copy(); // deep-copy the frame before handing it to the UI
            emit sig_GetOneFrame(image);  // notify the UI thread via the signal-slot connection
            av_packet_unref(packet);
        }
        catch (const std::exception &)
        {
        }
    }
    av_packet_free(&packet);
    av_free(out_buffer);
    av_frame_free(&pFrameRGB);
    av_frame_free(&pFrame);
    sws_freeContext(img_convert_ctx);
    avcodec_close(pCodecCtx);
    avformat_close_input(&pFormatCtx);
}
UI playback window: mainwindow.cpp
#include "Mainwindow.h"
#include "ui_mainwindow.h"
#include "qdesktopwidget.h"
#include <QPainter>
#include <QSplitter>
#include <QTextCodec>
#include <QTextEdit>
#include <functional>
MainWindow::MainWindow(QWidget *parent) :
    QMainWindow(parent),
    ui(new Ui::mainWindow)
{
    ui->setupUi(this);
    this->setWindowTitle(u8"视频播放系统");
    this->setWindowState(Qt::WindowMaximized);
    centerDock = new QDockWidget(tr(u8"视频窗口"), this);
    centerDock->setFeatures(QDockWidget::DockWidgetClosable | QDockWidget::DockWidgetFloatable);
    centerLabel = new QLabel();
    centerLabel->setText(u8"视频");
    centerDock->setWidget(centerLabel);
    addDockWidget(Qt::TopDockWidgetArea, centerDock);
    centerLabel->setFrameShape(QFrame::Box);
    centerLabel->setStyleSheet("border-width: 1px;border-style: solid;border-color: rgb(255, 170, 0);");
    centerLabel->setAlignment(Qt::AlignCenter);
    connect(centerDock, SIGNAL(topLevelChanged(bool)), this, SLOT(CenterTopLevelChanged(bool)));
}
MainWindow::~MainWindow()
{
    delete ui;
}
void MainWindow::StartPlay()
{
    centerPlayer = new VideoPlayer(ChannelRtpEnum::Center);
    connect(centerPlayer, SIGNAL(sig_GetOneFrame(QImage)), this,
        SLOT(slotGetOneFrameCenter(QImage)));
    centerPlayer->startPlay();
}
void MainWindow::slotGetOneFrameCenter(QImage img)
{
    centerLabel->setPixmap(QPixmap::fromImage(img));
    centerLabel->show();
}
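For completeness, a minimal sketch of how the pieces might be wired together, assuming StartPlay() is public and that some receiver thread delivers H.264/H.265 packets; main.cpp and the hypothetical feeder are not part of the original post.
// main.cpp -- assumed wiring, not from the original project
#include <QApplication>
#include "Mainwindow.h"
#include "videoplayer.h"

int main(int argc, char *argv[])
{
    QApplication app(argc, argv);
    MainWindow w;
    w.show();
    w.StartPlay();   // creates the center VideoPlayer and starts its decode thread
    // Elsewhere, a receiver thread (hypothetical) keeps feeding the center queue, e.g.:
    //   ctlPack *pack = new ctlPack{ data, len };
    //   VideoPlayer::PusSendData(1, pack);   // 1 == center camera queue
    return app.exec();
}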