#ifndef QFFMPEG_H
#define QFFMPEG_H
//The following is required or compilation fails: compatibility shim for the C/C99 INT64_C macros used by the FFmpeg headers
#ifndef INT64_C
#define INT64_C
#define UINT64_C
#endif
//引入ffmpeg头文件
extern "C"
{
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libavfilter/avfilter.h>
#include <libswscale/swscale.h>
#include <libavutil/frame.h>
}
#include <QObject>
#include <QMutex>
#include <QImage>
// Qt wrapper around FFmpeg: pulls a network (RTSP) video stream, decodes it
// to RGB24 QImages (emitted through GetImage) and can optionally remux the
// raw packets into an MP4 file (RecordFile / StopRecord).
class QFFmpeg : public QObject
{
Q_OBJECT
public:
explicit QFFmpeg(QObject *parent = 0);
~QFFmpeg();
// Opens the stream at `url`, locates the video stream and opens its decoder.
// Returns false on any failure.
bool Init();
// Blocking decode loop: emits GetImage for each decoded frame until playclose().
void Play();
// AVFormatContext interrupt callback; `ctx` is the owning QFFmpeg instance.
// Returning non-zero aborts the blocking libav call in progress.
static int CheckInterrupt(void* ctx);
void SetUrl(QString url){this->url=url;}
QString Url()const{return url;}
int VideoWidth()const{return videoWidth;}
int VideoHeight()const{return videoHeight;}
// Stops the Play() loop and finalizes any recording still in progress.
void playclose();
// Starts remuxing incoming packets into `filename` (MP4 container).
void RecordFile(QString filename);
// Asks Play() to finalize and close the current recording file.
void StopRecord();
time_t m_tStart;               // timestamp taken in Init() (timeout bookkeeping)
int time_out,firsttimeplay;    // counters driving CheckInterrupt()
private:
QMutex mutex;                  // guards pAVPicture while a frame is converted/emitted
AVPicture pAVPicture;          // RGB24 destination buffer for sws_scale
AVFormatContext *pAVFormatContext; // demuxer context of the input stream
AVCodecContext *pAVCodecContext;   // decoder context of the video stream
AVFrame *pAVFrame;             // decoded frame storage
SwsContext * pSwsContext;      // pixel-format conversion context (to RGB24)
AVPacket pAVPacket;            // packet currently being read/decoded
QString url;                   // input stream URL
int videoWidth;
int videoHeight;
int videoStreamIndex;          // index of the video stream, -1 when not found
bool m_bPlay;                  // Play() keeps looping while true
QString FileOutName;           // target file of the current recording
uchar m_bRecordFile;           // 0 = idle, 1 = recording, 2 = stop requested
int last_pts ;                 // pts/dts offsets carried across record sessions
int last_dts ;
int64_t pts, dts;              // pts/dts of the most recent packet written
AVFormatContext *o_fmt_ctx;    // muxer context of the recording output
//AVOutputFormat *o_fmt_ctx;
AVStream *o_video_stream;      // output video stream (recording)
AVStream *i_video_stream;      // input video stream (source of codec params)
bool m_bIsBreak;               // false aborts the read loop inside Play()
signals:
// Emitted for every decoded frame. NOTE(review): the QImage wraps an internal
// buffer (pAVPicture) without copying — consume it before the next frame.
void GetImage(const QImage &image);
void SendRtsppImage(uchar *);
public slots:
};
#endif // QFFMPEG_H
#include "qffmpeg.h"

#include <cstring>
#include <string>

#include <QDateTime>
#include <QDebug>
// Constructor: registers FFmpeg components and puts every member into a
// defined idle state. Decoding does not start until Init()/Play().
QFFmpeg::QFFmpeg(QObject *parent) :
    QObject(parent)
{
    videoStreamIndex = -1;
    av_register_all();        // register all available formats and codecs
    avformat_network_init();  // must run before opening RTSP/network streams
    pAVFormatContext = avformat_alloc_context(); // demuxer context, filled by Init()
    pAVFrame = av_frame_alloc();
    // auto ctx = avformat_alloc_context();
    // pAVFormatContext->interrupt_callback.callback = CheckInterrupt;// timeout callback
    // pAVFormatContext->interrupt_callback.opaque = this;
    //
    // m_tStart = time(NULL);
    o_fmt_ctx = nullptr;
    o_video_stream = nullptr;
    i_video_stream = nullptr;
    last_pts = 0;
    last_dts = 0;
    pts = 0;
    dts = 0;
    pSwsContext = nullptr;
    time_out = firsttimeplay = 0;
    // BUG FIX: the members below were left uninitialized. If Init() was never
    // called (or failed), the destructor read garbage from m_bRecordFile and
    // could run the recording-teardown path on a NULL o_fmt_ctx — undefined
    // behavior / crash.
    m_bRecordFile = 0;
    m_bPlay = false;
    m_bIsBreak = false;
    pAVCodecContext = nullptr;
    videoWidth = 0;
    videoHeight = 0;
    m_tStart = 0;
    memset(&pAVPicture, 0, sizeof(pAVPicture)); // defined state before avpicture_alloc
}
//超时回调函数
int QFFmpeg::CheckInterrupt(void* ctx)
{
auto p = (QFFmpeg*)ctx;
// return (time(NULL) - (p->m_tStart)) >= 3 ? 1 : 0;//3秒超时
// do something
p->time_out++;
if (p->time_out > 1200) {
p->time_out=0;
if (p->firsttimeplay) {
p->firsttimeplay=0;
return 1;//这个就是超时的返回
}
}
return 0;
}
// Destructor: finalizes any recording still in progress, then releases
// decoder/demuxer resources.
QFFmpeg::~QFFmpeg()
{
    if(m_bRecordFile == 1)
    {
        // Same teardown Play() performs when StopRecord() is honored:
        // write the trailer and release the muxer.
        last_dts += dts;
        last_pts += pts;
        av_write_trailer(o_fmt_ctx);
        avcodec_close(o_fmt_ctx->streams[0]->codec);
        av_freep(&o_fmt_ctx->streams[0]->codec);
        av_freep(&o_fmt_ctx->streams[0]);
        avio_close(o_fmt_ctx->pb);
        av_free(o_fmt_ctx);
        m_bRecordFile = 0;
    }
    avformat_free_context(pAVFormatContext);
    av_frame_free(&pAVFrame);
    if(pSwsContext != nullptr)
    {
        sws_freeContext(pSwsContext);
        // BUG FIX: pAVPicture is allocated by avpicture_alloc() in Init(),
        // immediately before pSwsContext is created, but was never released
        // (memory leak). Freeing it under the same pSwsContext guard ensures
        // we only free a buffer that Init() actually allocated.
        avpicture_free(&pAVPicture);
    }
}
bool QFFmpeg::Init()
{
//打开视频流
AVDictionary *opts = NULL;
av_dict_set(&opts,"stimeout","5000000",0);
//av_dict_set(&opts, "buffer_size", "1024000", 0);
pAVFormatContext->flags |= AVFMT_FLAG_NONBLOCK; // 加上既可.
av_dict_set(&opts, "rtsp_transport", "udp", 0); //以udp方式打开,如果以tcp方式打开将udp替换为tcp
av_dict_set(&opts, "max_delay", "200000", 0); //设置最大时延
int result=avformat_open_input(&pAVFormatContext, url.toStdString().c_str(),NULL,&opts);
// pAVFormatContext->interrupt_callback.callback = CheckInterrupt;//超时回调
// pAVFormatContext->interrupt_callback.opaque = this;
m_tStart = time(NULL);
if (result<0){
avformat_free_context(pAVFormatContext);
qDebug()<<"打开视频流失败";
return false;
}
//获取视频流信息
result=avformat_find_stream_info(pAVFormatContext,NULL);
if (result<0){
qDebug()<<"获取视频流信息失败";
return false;
}
//获取视频流索引
videoStreamIndex = -1;
int icount = pAVFormatContext->nb_streams;
for (uint i = 0; i < pAVFormatContext->nb_streams; i++) {
if (pAVFormatContext->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) {
videoStreamIndex = i;
i_video_stream = pAVFormatContext->streams[i];
break;
}
}
if (videoStreamIndex==-1){
qDebug()<<"获取视频流索引失败";
return false;
}
//获取视频流的分辨率大小
pAVCodecContext = pAVFormatContext->streams[videoStreamIndex]->codec;
videoWidth=pAVCodecContext->width;
videoHeight=pAVCodecContext->height;
avpicture_alloc(&pAVPicture,AV_PIX_FMT_RGB24,videoWidth,videoHeight);
AVCodec *pAVCodec;
//获取视频流解码器
pAVCodec = avcodec_find_decoder(pAVCodecContext->codec_id);
pSwsContext = sws_getContext(videoWidth,videoHeight,AV_PIX_FMT_YUV420P,videoWidth,videoHeight,AV_PIX_FMT_RGB24,SWS_BICUBIC,0,0,0);
pAVCodecContext->bit_rate = 0;
pAVCodecContext->time_base.num = 1;
pAVCodecContext->time_base.den = 10;
pAVCodecContext->frame_number = 1;
pAVCodecContext->flags |= AV_CODEC_FLAG_LOW_DELAY;//低延时
pAVCodecContext->err_recognition |= AV_EF_EXPLODE;//错误解码跳过该帧
pAVCodecContext->active_thread_type |= FF_THREAD_FRAME;
//pAVCodecContext->thread_count = 6;
// av_opt_set(pAVCodecContext->priv_data,"preset","ultrafast",0);
// av_opt_set(pAVCodecContext->priv_data,"tune","stillimage,fastcode,zerolatency",0);
// av_opt_set(pAVCodecContext->priv_data,"h264opts","crf=26:vbv-maxrate=728:vbv-bufsize=364:keyint=25",0);
// pAVCodecContext->delay = 0;
//打开对应解码器
result=avcodec_open2(pAVCodecContext,pAVCodec,NULL);
if (result<0){
qDebug()<<"打开解码器失败";
return false;
}
qDebug()<<"初始化视频流成功";
m_bPlay = true;
m_bIsBreak = true;
firsttimeplay = 1;
return true;
}
// Blocking decode loop. Reads packets until m_bPlay/m_bIsBreak is cleared;
// decoded frames are converted to RGB24 and emitted via GetImage. While
// m_bRecordFile == 1 the raw packets are additionally remuxed into the
// recording output; m_bRecordFile == 2 finalizes and closes that output.
void QFFmpeg::Play()
{
    // Read the stream one packet/frame at a time.
    int frameFinished=0;
    // (removed: unused local `uchar itt`)
    while (m_bPlay){
        if(m_bIsBreak)
        {
            if (av_read_frame(pAVFormatContext, &pAVPacket) >= 0){
                if(pAVPacket.stream_index==videoStreamIndex){
                    // qDebug()<<"开始解码"<<QDateTime::currentDateTime().toString("yyyy-MM-dd HH:mm:ss");
                    int ret = avcodec_decode_video2(pAVCodecContext, pAVFrame, &frameFinished, &pAVPacket);
                    if(ret >= 0)
                    {
                        if (frameFinished){
                            mutex.lock();
                            sws_scale(pSwsContext,(const uint8_t* const *)pAVFrame->data,pAVFrame->linesize,0,videoHeight,pAVPicture.data,pAVPicture.linesize);
                            // Emit one decoded frame. NOTE(review): the QImage
                            // wraps pAVPicture's buffer without copying — the
                            // receiver must consume it before the next frame
                            // overwrites the buffer.
                            // emit SendRtsppImage(pAVPicture.data[0]);
                            QImage image(pAVPicture.data[0],videoWidth,videoHeight,QImage::Format_RGB888);
                            emit GetImage(image);
                            mutex.unlock();
                        }
                    }
                    else
                    {
                        // Decode error: reset internal decoder state.
                        avcodec_flush_buffers(pAVCodecContext);
                    }
                    if(m_bRecordFile == 1)
                    {
                        /*
                         * pts and dts should increase monotonically
                         * pts should be >= dts
                         */
                        // Shift timestamps by last_pts/last_dts so they stay
                        // monotonic across consecutive recording sessions.
                        pAVPacket.flags |= AV_PKT_FLAG_KEY;
                        pts = pAVPacket.pts;
                        pAVPacket.pts += last_pts;
                        dts = pAVPacket.dts;
                        pAVPacket.dts += last_dts;
                        pAVPacket.stream_index = 0; // output has a single stream
                        //printf("%lld %lld\n", i_pkt.pts, i_pkt.dts);
                        //static int num = 1;
                        //printf("frame %d\n", num++);
                        av_interleaved_write_frame(o_fmt_ctx, &pAVPacket);
                        // av_write_frame(o_fmt_ctx, &pAVPacket);
                    }
                    else if(m_bRecordFile == 2)
                    {
                        // StopRecord() was requested: write the trailer and
                        // tear the muxer down, then go back to idle (0).
                        last_dts += dts;
                        last_pts += pts;
                        av_write_trailer(o_fmt_ctx);
                        avcodec_close(o_fmt_ctx->streams[0]->codec);
                        av_freep(&o_fmt_ctx->streams[0]->codec);
                        av_freep(&o_fmt_ctx->streams[0]);
                        avio_close(o_fmt_ctx->pb);
                        av_free(o_fmt_ctx);
                        m_bRecordFile = 0;
                    }
                }
            }
            av_free_packet(&pAVPacket); // release the packet, or memory keeps growing
            firsttimeplay = 1;
        }
        else
        {
            break;
        }
    }
}
// Stops the Play() loop; if a recording is active, finalizes the output
// file (trailer + muxer teardown) before returning.
void QFFmpeg::playclose()
{
    m_bIsBreak = false;
    m_bPlay = false;
    if(m_bRecordFile != 1)
        return;
    // Recording still active: flush and release the muxer.
    last_dts += dts;
    last_pts += pts;
    av_write_trailer(o_fmt_ctx);
    avcodec_close(o_fmt_ctx->streams[0]->codec);
    av_freep(&o_fmt_ctx->streams[0]->codec);
    av_freep(&o_fmt_ctx->streams[0]);
    avio_close(o_fmt_ctx->pb);
    av_free(o_fmt_ctx);
    m_bRecordFile = 0;
}
// Starts recording: creates an MP4 muxer for `filename`, copies the codec
// parameters from the input video stream, opens the output file and writes
// the container header. On success Play() begins remuxing packets.
// BUG FIX: every FFmpeg return code was ignored; a failed allocation/open
// previously still set m_bRecordFile = 1, so Play() wrote into a NULL or
// half-initialized muxer and crashed. Each failure now logs and returns
// with the recorder left idle.
void QFFmpeg::RecordFile(QString filename)
{
    FileOutName = filename;
    const std::string outName = filename.toStdString();
    if (avformat_alloc_output_context2(&o_fmt_ctx, NULL, "mp4", outName.c_str()) < 0
            || o_fmt_ctx == nullptr){
        qDebug()<<"RecordFile: avformat_alloc_output_context2 failed";
        o_fmt_ctx = nullptr;
        return;
    }
    /*
     * since all input files are supposed to be identical (framerate, dimension, color format, ...)
     * we can safely set output codec values from first input file
     */
    o_video_stream = avformat_new_stream(o_fmt_ctx, NULL);
    if (o_video_stream == nullptr){
        qDebug()<<"RecordFile: avformat_new_stream failed";
        avformat_free_context(o_fmt_ctx);
        o_fmt_ctx = nullptr;
        return;
    }
    {
        // Mirror the input stream's codec parameters onto the output stream.
        AVCodecContext *c;
        c = o_video_stream->codec;
        c->bit_rate = 200000;
        c->codec_id = i_video_stream->codec->codec_id;
        c->codec_type = i_video_stream->codec->codec_type;
        c->time_base.num = i_video_stream->time_base.num;
        c->time_base.den = i_video_stream->time_base.den;
        // fprintf(stderr, "time_base.num = %d time_base.den = %d\n", c->time_base.num, c->time_base.den);
        c->width = i_video_stream->codec->width;
        c->height = i_video_stream->codec->height;
        c->pix_fmt = i_video_stream->codec->pix_fmt;
        // printf("%d %d %d", c->width, c->height, c->pix_fmt);
        c->flags = i_video_stream->codec->flags;
        c->flags |= AV_CODEC_FLAG_GLOBAL_HEADER; // MP4 wants global extradata
        c->me_range = i_video_stream->codec->me_range;
        c->max_qdiff = i_video_stream->codec->max_qdiff;
        c->qmin = i_video_stream->codec->qmin;
        c->qmax = i_video_stream->codec->qmax;
        c->qcompress = i_video_stream->codec->qcompress;
    }
    av_dump_format(o_fmt_ctx,0,outName.c_str(),1);
    if (avio_open(&o_fmt_ctx->pb, outName.c_str(), AVIO_FLAG_WRITE) < 0){
        qDebug()<<"RecordFile: avio_open failed";
        avformat_free_context(o_fmt_ctx);
        o_fmt_ctx = nullptr;
        return;
    }
    if (avformat_write_header(o_fmt_ctx, NULL) < 0){
        qDebug()<<"RecordFile: avformat_write_header failed";
        avio_close(o_fmt_ctx->pb);
        avformat_free_context(o_fmt_ctx);
        o_fmt_ctx = nullptr;
        return;
    }
    // Reset the cross-session timestamp offsets and arm the recorder.
    last_pts = 0;
    last_dts = 0;
    pts = dts = 0;
    m_bRecordFile = 1;
}
// Requests that the recording be finalized: Play() performs the actual
// trailer write and teardown when it next sees m_bRecordFile == 2.
// No effect unless a recording is currently active.
void QFFmpeg::StopRecord()
{
    if(m_bRecordFile != 1)
        return;
    m_bRecordFile = 2;
}