下载项目地址:https://download.csdn.net/download/signal___/21559335
videoplayer.h
#pragma once

#include <QThread>
#include <QImage>

extern "C"
{
#include "libavutil/imgutils.h"
}

class VlcInstance;
class VlcMedia;
class VlcMediaPlayer;

// Decodes an RTSP stream with FFmpeg on a worker thread and emits each
// decoded frame as a QImage (see sig_GetOneFrame).
// BUG FIX: in the pasted original the `signals:`/`protected:`/`private:`
// access markers had been glued into trailing comments, so moc would never
// have seen the signals; they are restored here.
class VideoPlayer : public QThread
{
    Q_OBJECT
public:
    explicit VideoPlayer();
    ~VideoPlayer();

    const static uint32_t dwLastFrameRealtime = 0;

    void startPlay();                  // Start the decode thread.
    //int interrupt_cb(void *ctx);

signals:
    void sig_a(int);                   // Connection state (1 = ok, 2 = failed).
    void sig_b(int);                   // Frame state (0 = ok, 1 = read/decode error).
    void sig_GetOneFrame(QImage);      // Emitted once per decoded frame.

protected:
    void run();                        // QThread entry point.

private:
    QString mFileName;
    VlcInstance *_instance;
    VlcMedia *_media;
    VlcMediaPlayer *_player;
};
videoplayer.cpp
#include "videoplayer.h"
#include <stdio.h>
#include<iostream>
#include<QDebug>
#include <QDateTime>
#include <stdlib.h>extern "C"
{
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libavutil/pixfmt.h"
#include "libswscale/swscale.h"
#include "libavutil/time.h"
}
using namespace std;
// Constructor: nothing to initialise — all FFmpeg setup happens in run().
VideoPlayer::VideoPlayer()
{
}
// Destructor: resources are released at the end of run(), nothing to do here.
VideoPlayer::~VideoPlayer()
{
}
/*线程*/
void VideoPlayer::startPlay()
{
this->start();}
/*线程*/
void VideoPlayer::run()
{AVFormatContext *pFormatCtx;
AVCodecContext *pCodecCtx;
AVCodec *pCodec;
AVFrame *pFrame, *pFrameRGB;
AVPacket *packet = NULL;
uint8_t *out_buffer = NULL;/*处理图片像素数据 图片像素格式转换 图片拉伸等 */
static struct SwsContext *img_convert_ctx;int videoStream, i, numBytes;
int ret, got_picture;
int b = 0;
int a = 1;
int num = 0;avformat_network_init(); //初始化FFmpeg网络模块
av_register_all(); //初始化FFMPEG 调用了这个才能正常适用编码器和解码器(弃用函数)//Allocate an AVFormatContext.
pFormatCtx = avformat_alloc_context();//初始化内存//AVDictionary是FFmpeg的键值对存储工具,FFmpeg经常使用AVDictionary设置/读取内部参数
AVDictionary *avdic=NULL;
char option_key[]="rtsp_transport";
char m_bTcp;
//char option_value[]="udp";
av_dict_set(&avdic,option_key,m_bTcp ? "tcp" : "udp",0);
//av_dict_set(&avdic,option_key,option_value,0);
//char option_key2[]="max_delay";
char option_key2[]="stimeout";
char option_value2[]="3000000";
av_dict_set(&avdic, "buffer_size", "425984", 0); //画质优化
av_dict_set(&avdic,option_key2,option_value2,0);
/**
* ultrafast,superfast, veryfast, faster, fast, medium
* slow, slower, veryslow, placebo.
*/
//av_dict_set(&avdic, "preset", "fast", 0); // av_opt_set(pCodecCtx->priv_data,"preset","fast",0);
//av_dict_set(&avdic, "tune", "zerolatency", 0);char url[]="rtsp://admin:123456@192.168.1.168:554/type=0&id=1";
if (avformat_open_input(&pFormatCtx, url, NULL, &avdic) != 0) //打开多媒体并获取信息
{
printf("can't open the file. \n");
a = 2;
emit sig_a(a);
sleep(8);
qDebug(" 111连接异常结束\n");
thread()->terminate();
thread()->wait();
this->terminate();
this->wait();return;
}if(avdic != NULL)
{
av_dict_free(&avdic);
}
//获取视频流信息
if (avformat_find_stream_info(pFormatCtx, NULL) < 0)
{
printf("Could't find stream infomation.\n");
return;
}videoStream = -1;
//循环查找视频中包含的流信息,直到找到视频类型的流for (i = 0; i < pFormatCtx; i++)
{
if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO)//codec弃用函数
{
videoStream = i;
}
}
qDebug("a ======%d\n",a);
if(a == 1)
{
emit sig_a(a);
}
if (videoStream == -1)
{
printf("Didn't find a video stream.\n");
return;
}printf("-----------rtsp流输入信息----------------\n");
av_dump_format(pFormatCtx, 0, url,0);
printf("---------------------------------------\n");
/*************************************************///查找解码器,获取指向视频流的编解码器上下文的指针
pCodecCtx = pFormatCtx->streams[videoStream]->codec;
//通过解封装之后从avstream结构体里获取CodecID(指定格式流)
pCodec = avcodec_find_decoder(pCodecCtx->codec_id);//设置编码器参数(不同参数对视频编质量或大小的影响)
pCodecCtx->bit_rate =0; //目标的码率,即采样的码率;显然,采样码率越大,视频大小越大 比特率
pCodecCtx->time_base.num=1; //下面两行:一秒钟25帧
pCodecCtx->time_base.den=15;
pCodecCtx->frame_number=1; //每包一个视频帧if (pCodec == NULL)
{
printf("Codec not found.\n");
return;
}//打开解码器
if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0)
{
printf("Could not open codec.\n");
return;
}
pFrame = av_frame_alloc(); //创建 存储解码器信息*/
pFrameRGB = av_frame_alloc(); //创建 存储解码器信息*///解码后的h264数据转换成RGB32
img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height,
pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height,
AV_PIX_FMT_RGB32, SWS_BICUBIC, NULL, NULL, NULL);
//图像的像素格式 图像的像素宽度 图像的像素高度(计算这个格式的图片,需要多少字节来存储)
numBytes = avpicture_get_size(AV_PIX_FMT_RGB32, pCodecCtx->width,pCodecCtx->height);//(弃用函数)
qDebug() << numBytes; //需要多少字节来存储
//int flag = 0;
//if(flag == 0)
//{
out_buffer = (uint8_t *) av_malloc(numBytes * sizeof(uint8_t)); //给存放图像数据的缓存区 开辟空间
//瓜分上一步分配到的buffer.
av_image_fill_arrays(pFrameRGB->data, pFrameRGB->linesize, out_buffer, AV_PIX_FMT_RGB32, pCodecCtx->width, pCodecCtx->height, 1);
//根据指定的图像、提供的数组设置数据指针和线条大小参数(弃用函数相当于 av_image_fill_arrays)
avpicture_fill((AVPicture *) pFrameRGB, out_buffer, AV_PIX_FMT_RGB32,pCodecCtx->width, pCodecCtx->height);
/*pFrameRGB和out_buffer都是已经申请到的一段内存, 但是pFrameRGB只是申请了一段结构体内存, 结构体里面的值是空的,
我们需要使用av_image_fill_arrays()函数来使得pFrameRGB和out_buffer关联起来, pFrameRGB里面使用的是out_buffer所指向的内存空间.*/int y_size = pCodecCtx->width * pCodecCtx->height;
packet = (AVPacket *) malloc(sizeof(AVPacket)); //申请一个视频帧包的大小
av_new_packet(packet, y_size); //分配packet的数据,为packet分配一个指定大小的内存//}
//int fa = 1;
while (1)
{if (av_read_frame(pFormatCtx, packet) < 0)//每解码一个视频帧,需要先调用 av_read_frame()获得一帧视频的压缩数据,然后才能对该数据进行解码
{
memset(out_buffer,0,sizeof(out_buffer));
memset(packet,0,sizeof(packet));qDebug("mu == %d\n",++num);
b = 1;
emit sig_b(b);if(num == 4) // read自动有十秒阻塞,每十秒记一次,四次后(40秒)关闭程序
{
a = 2;
emit sig_a(a);
sleep(8);
qDebug(" 连接异常结束\n");
thread()->terminate();
thread()->wait();
this->terminate();
this->wait();
}
continue;
}
// printf("tihs = %d\n",++fa);
if(num != 0)
{
num = 0;
}// avcodec_decode_video2(pCodecCtx, pFrame, &got_picture,packet); //解码一帧视频数据(弃用函数)
if (packet->stream_index == videoStream)
{
//送入解码器
ret = avcodec_send_packet(pCodecCtx, packet); //发送数据到ffmepg,放到解码队列中
got_picture = avcodec_receive_frame(pCodecCtx, pFrame); //将成功的解码队列中取出1个frame
// avcodec_decode_video2(pCodecCtx, pFrame, &got_picture,packet); //解码一帧视频数据(弃用函数)
if (ret < 0)
{
memset(out_buffer,0,sizeof(out_buffer));
memset(packet,0,sizeof(packet));
printf("decode error.\n");
b = 1;
emit sig_b(b);continue;
}
b = 0;
emit sig_b(b);if (!got_picture)
{
//颜色空间转换,最后输出到out_buffer
sws_scale(img_convert_ctx,(uint8_t const * const *) pFrame->data,
pFrame->linesize, 0, pCodecCtx->height, pFrameRGB->data,
pFrameRGB->linesize); //sws_scale库可以在一个函数里面同时实现:1.图像色彩空间转换;2.分辨率缩放;3.前后图像滤波处理。//把这个RGB数据 用QImage加载
QImage tmpImg((uchar *)out_buffer,pCodecCtx->width,pCodecCtx->height,QImage::Format_RGB32);
//QImage image = tmpImg.copy(); //把图像复制一份 传递给界面显示
emit sig_GetOneFrame(image); //发送信号
}
}
//if(flag == 0)
//{
//释放一个包。
av_free_packet(packet); //释放资源,否则内存会一直上升(弃用函数)
av_packet_unref(packet);
// if((fa % 12) == 0)
memset(out_buffer,0,sizeof(out_buffer));// qDebug("11111\n");
// }}
av_free(out_buffer);
av_free(pFrameRGB);
avcodec_close(pCodecCtx);//关闭给定的avcodeContext并释放与之关联的所有数据
//av_dict_free(&pCodecCtx);if(NULL != pCodecCtx){
avcodec_free_context(&pCodecCtx);
avdic = NULL;
}
if(NULL != pFormatCtx){
avformat_close_input(&pFormatCtx);//关闭打开的输入pFormatCtx。释放它和它的所有内容并设置为空。
pFormatCtx = NULL;
}}
MainWindow.h
#pragma once

#include <QMainWindow>
#include <QImage>
#include <QPaintEvent>
#include <QWidget>
#include <QtDebug>
#include <QTimer>
#include <qtconcurrentrun.h>
#include "videoplayer.h"
#include <QVBoxLayout>
#include <QTextCodec>
#include <QMovie>
#include <QDesktopWidget>
#include <QMessageBox>

// FFmpeg is a C library — wrap its headers for C++ linkage.
// BUG FIX: duplicate includes of libavutil/opt.h and libavutil/imgutils.h
// removed; access specifiers un-merged from the pasted original.
extern "C" {
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include <math.h>
#include <libavutil/avassert.h>
#include <libavutil/channel_layout.h>
#include <libavutil/opt.h>
#include <libavutil/mathematics.h>
#include <libavutil/timestamp.h>
#include <libavformat/avformat.h>
#include <libswscale/swscale.h>
#include <libswresample/swresample.h>
#include "libavdevice/avdevice.h"
#include "libavutil/imgutils.h"
#include "libavutil/intreadwrite.h"
#include "libavutil/pixdesc.h"
}

namespace Ui {
class MainWindow;
}

// Main window: paints the frames produced by a VideoPlayer decode thread and
// shows a "connecting"/"error" overlay driven by the player's status signals.
class MainWindow : public QMainWindow
{
    Q_OBJECT

public:
    explicit MainWindow(QWidget *parent = 0);
    ~MainWindow();

protected:
    void paintEvent(QPaintEvent *event);  // draws the latest frame scaled to the window

private:
    Ui::MainWindow *ui;
    VideoPlayer *mPlayer;   // decode thread
    QTimer *timer;          // error-screen timer (created lazily in slotA)
    QMovie *movie1;
    QImage mImage;          // most recently received frame
    QImage R_mImage;
    QString url;
    bool open_red = false;

private slots:
    void slotGetOneFrame(QImage img);  // stores a frame and schedules a repaint
    void slotA(int a);                 // connection-state UI changes
    void slotB(int b);                 // per-frame status
    void handleTimeout();
};
mainwindow.cpp
#include "mainwindow.h"
#include "ui_mainwindow.h"
#include <QThread>
#include <QPainter>
#include <QInputDialog>
#include <QDebug>using namespace std;
// Constructor: sets up UTF-8 codecs, the borderless full-screen window, the
// busy progress bar, and wires the decode thread's signals to the UI slots.
MainWindow::MainWindow(QWidget *parent) :
    QMainWindow(parent),
    ui(new Ui::MainWindow)
{
    // Force UTF-8 so Chinese string literals display correctly (Qt4-only API).
    QTextCodec::setCodecForTr(QTextCodec::codecForLocale());
    QTextCodec::setCodecForLocale(QTextCodec::codecForName("UTF-8"));
    QTextCodec::setCodecForCStrings(QTextCodec::codecForName("UTF-8"));
    setWindowFlags(Qt::FramelessWindowHint | Qt::WindowStaysOnTopHint);  // full-screen borderless
    ui->setupUi(this);

    // min == max == 0 puts the progress bar into "busy" (indeterminate) mode.
    ui->progressBar->setOrientation(Qt::Horizontal);
    ui->progressBar->setMinimum(0);
    ui->progressBar->setMaximum(0);

    mPlayer = new VideoPlayer;
    connect(mPlayer, SIGNAL(sig_a(int)), this, SLOT(slotA(int)));
    connect(mPlayer, SIGNAL(sig_b(int)), this, SLOT(slotB(int)));
    connect(mPlayer, SIGNAL(sig_GetOneFrame(QImage)), this, SLOT(slotGetOneFrame(QImage)));
    // BUG FIX: start the thread only after its signals are connected,
    // otherwise early sig_a / sig_GetOneFrame emissions are lost.
    mPlayer->startPlay();
}

MainWindow::~MainWindow()
{
    delete ui;
    // NOTE(review): mPlayer is never stopped or deleted; acceptable only
    // because this window lives for the whole process — confirm.
}
// Paints the window: white background, then the latest frame scaled to fit
// (aspect ratio preserved) and centred.
void MainWindow::paintEvent(QPaintEvent *event)
{
    Q_UNUSED(event);
    QPainter painter(this);
    painter.setBrush(Qt::white);
    painter.drawRect(0, 0, this->width(), this->height());  // white background first

    if (mImage.size().width() <= 0)  // no frame received yet
        return;

    // Scale the frame to the window size, keeping the aspect ratio.
    QImage img = mImage.scaled(this->size(), Qt::KeepAspectRatio);
    int x = this->width() - img.width();
    int y = this->height() - img.height();
    x /= 2;  // centre horizontally
    y /= 2;  // centre vertically
    // BUG FIX: the draw call had been mangled into a comment ("//ainter...")
    // so no frame was ever painted; restore it.
    painter.drawImage(QPoint(x, y), img);
}

// Slot: receives one decoded frame from the player thread.
void MainWindow::slotGetOneFrame(QImage img)
{
    mImage = img;
    update();  // schedules paintEvent()
    // BUG FIX: the closing brace of this function had been swallowed into the
    // trailing comment in the pasted original.
}
// Slot: reacts to the player's connection state.
//   a == 1  connected  -> hide the "connecting" widgets
//   a == 2  failed     -> show the error picture and arm a 4 s timer
void MainWindow::slotA(int a)
{
    if (a == 1)
    {
        ui->progressBar->setVisible(false);
        ui->label->setVisible(false);
        ui->label_2->setVisible(false);
        ui->label_3->setVisible(false);
    }
    if (a == 2)
    {
        ui->progressBar->setVisible(false);
        ui->label->setVisible(false);
        ui->label_2->setVisible(false);
        ui->label_3->setVisible(false);
        ui->label_4->setVisible(true);
        QPixmap tianqi_pixmap(":/new/prefix1/11.png");
        // Scale the picture to exactly fill label_4.
        tianqi_pixmap = tianqi_pixmap.scaled(ui->label_4->width(), ui->label_4->height(),
                                             Qt::IgnoreAspectRatio, Qt::SmoothTransformation);
        ui->label_4->setPixmap(tianqi_pixmap);
        ui->label_4->show();
        // BUG FIX: the old code allocated a new QTimer and added a new
        // connection on every call, stacking handleTimeout() invocations.
        // Create the timer once (guarded by a local static because the
        // 'timer' member is never initialised in the constructor).
        static bool timerCreated = false;
        if (!timerCreated) {
            timer = new QTimer(this);
            connect(timer, SIGNAL(timeout()), this, SLOT(handleTimeout()));
            timerCreated = true;
        }
        timer->start(4 * 1000);  // fire handleTimeout() after 4 seconds
    }
}
main.cpp
#include "mainwindow.h"
#include <QApplication>
#include <QWSServer>
int main(int argc, char *argv[])
{
QApplication a(argc, argv);
QWSServer::setCursorVisible(false);
MainWindow w;
w.show();return a.exec();
}