https://blog.csdn.net/wanghualin033/article/details/82050448
为了解决工程上的一个问题,我研究了nvidia较新的video-sdk-8.1.24,看完之后觉得这个sdk比较符合现代的编程规范,于是摒弃了以前的video-sdk-6.0,解决了工程上的问题。
我提供的封装支持软解和硬解,其中重要部分为做了视频复制,同一个视频源只解码一路;内存的细节管理;视频渲染;视频镜像等。
接口声明如下
#ifndef DECODER_H
#define DECODER_H
#include <string>
#include <functional>
#include <mutex>
#include "klvideodecod_global.h"
extern "C"
{
#include <libavutil/pixfmt.h>
}
// Abstract decoder interface: one implementation per decoding backend
// (FFmpeg software decode, NVIDIA hardware decode). Non-copyable.
class KLVIDEODECODSHARED_EXPORT Decoder
{
public:
explicit Decoder() = default;
Decoder(const Decoder&) = delete;
Decoder& operator=(const Decoder &) = delete;
virtual ~Decoder(){}
virtual bool initsize() = 0; // one-time initialization; false if the backend is unusable
virtual unsigned char* framePtr() = 0; // address of the most recently decoded frame data
// Blocking decode loop: pulls frames from `source` and invokes `frameHandler`
// (pixel format, frame data, width, height) for every decoded picture.
// `mtx`, when non-null, guards the frame buffer shared with the renderer.
// Returns false on error, appending a description to `erroStr`.
virtual bool decode(const char* source, std::string &erroStr, std::function<void(AVPixelFormat,unsigned char*,int,int)> frameHandler, std::mutex *mtx = nullptr) = 0;
virtual void stop() = 0; // request the decode loop to exit at its next iteration
};
#include "NvDecoder/nvidiadecoder.h"
#include "fmgdecoder/ffmpegdecoder.h"
// Factory facade for creating concrete Decoder instances. Non-copyable.
class KLVIDEODECODSHARED_EXPORT Klvideodecod
{
public:
explicit Klvideodecod() = default;
Klvideodecod(const Klvideodecod&) = delete;
Klvideodecod& operator=(const Klvideodecod&) = delete;
virtual ~Klvideodecod(){}
// Create a decoder of concrete type T (e.g. NvidiaDecoder, FFmpegDecoder).
// Caller owns the returned pointer.
template<class T> static Decoder* createDecoder(){return new T;}
};
#endif // DECODER_H
软解采用 FFmpeg 实现,声明如下
#ifndef FFMPEGDECODER_H
#define FFMPEGDECODER_H
#include "../decoder.h"
extern "C"
{
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libavfilter/avfilter.h>
#include <libavutil/frame.h>
#include <libswscale/swscale.h>
#include <libavutil/pixfmt.h>
}
// Software decoder backed by FFmpeg (libavformat/libavcodec/libswscale).
// Every decoded frame is converted to YUV420P before being handed to the callback.
class KLVIDEODECODSHARED_EXPORT FFmpegDecoder : public Decoder
{
public:
~FFmpegDecoder();
bool initsize() override; // no-op for the software path; always succeeds
unsigned char* framePtr() override; // latest YUV420P frame buffer (null before decoding starts)
bool decode(const char* source, std::string &erroStr, std::function<void(AVPixelFormat,unsigned char*,int,int)> frameHandler,std::mutex *mtx=nullptr) override;
void stop() override; // asks the decode loop to exit
private:
bool m_run = true; // loop flag cleared by stop(); NOTE(review): written/read from different threads without atomics — confirm
unsigned char *m_buffer{nullptr}; // av_malloc'ed YUV420P frame buffer, freed in the destructor
std::mutex *m_mtx{nullptr}; // optional externally-owned lock guarding m_buffer
};
#endif // FFMPEGDECODER_H
实现如下,和网上大多数的ffmpeg示例一样。
#include "ffmpegdecoder.h"
#include <iostream>
#include <QDebug>
// Frees the frame buffer. When a shared lock exists, the free happens under
// it so a renderer reading m_buffer concurrently cannot see a dangling pointer.
FFmpegDecoder::~FFmpegDecoder()
{
if(m_mtx){
// RAII lock instead of manual lock()/unlock(): released even if av_free
// were to throw/abort mid-way, and no unlock can be forgotten.
std::lock_guard<std::mutex> guard(*m_mtx);
av_free(m_buffer);
m_buffer = nullptr;
}else{
av_free(m_buffer);
m_buffer = nullptr;
}
}
// One-time init. The software path needs no per-instance setup (global FFmpeg
// registration happens lazily inside decode()), so this always succeeds.
bool FFmpegDecoder::initsize()
{
return true;
}
// Returns the latest YUV420P frame buffer (null before decode() allocated it).
unsigned char *FFmpegDecoder::framePtr()
{
return m_buffer;
}
bool FFmpegDecoder::decode(const char* source, std::string &erroStr, std::function<void(AVPixelFormat,unsigned char*,int,int)> frameHandler, std::mutex *mtx)
{
m_run = true;
m_mtx = mtx;
static bool isInited = false;
if(!isInited){
av_register_all();
avformat_network_init();
isInited = true;
}
AVFrame *pAVFrame{nullptr},*pAVFrameRGB{nullptr};
AVFormatContext *pAVFomatContext{nullptr};
AVCodecContext *pAVCodecContext{nullptr};
SwsContext *pSwsContext{nullptr};
AVPacket pAVPacket;
pAVFomatContext = avformat_alloc_context();
pAVFrame = av_frame_alloc();
pAVFrameRGB = av_frame_alloc();
AVDictionary *opt = nullptr;
// av_dict_set(&opt,"buffer_size","1024000",0);
// av_dict_set(&opt,"max_delay","0",0);
av_dict_set(&opt,"rtsp_transport","tcp",0);
av_dict_set(&opt,"stimeout","5000000",0);
int result = avformat_open_input(&pAVFomatContext,source, nullptr, &opt);
if(result < 0){
erroStr += "open input failed errorCode: ";
erroStr += std::to_string(result);
av_frame_free(&pAVFrame);
av_frame_free(&pAVFrameRGB);
avformat_close_input(&pAVFomatContext);
return false;
}
result = avformat_find_stream_info(pAVFomatContext, nullptr);
if(result < 0){
erroStr += "find video stream failed errorCode: ";
erroStr += std::to_string(result);
av_frame_free(&pAVFrame);
av_frame_free(&pAVFrameRGB);
avformat_close_input(&pAVFomatContext);
return false;
}
int videoStreamIndex = -1;
for(int i = 0; i < pAVFomatContext->nb_streams; i++){
if(pAVFomatContext->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO){
videoStreamIndex = i;
break;
}
}
if(videoStreamIndex == -1){
erroStr += "find video stream index failed errorCode: ";
erroStr += std::to_string(result);
av_frame_free(&pAVFrame);
av_frame_free(&pAVFrameRGB);
avformat_close_input(&pAVFomatContext);
return false;
}
pAVCodecContext = pAVFomatContext->streams[videoStreamIndex]->codec;
int videoWidth = pAVCodecContext->width;
int videoHeight = pAVCodecContext->height;
AVCodec *pAVCodec = nullptr;
pAVCodec = avcodec_find_decoder(pAVCodecContext->codec_id);
// pAVCodec = avcodec_find_decoder_by_name("h264_cuvid");//硬解码264
if(!pAVCodec){
erroStr += "find avcodec failed errorCode: ";
erroStr += std::to_string(result);
av_frame_free(&pAVFrame);
av_frame_free(&pAVFrameRGB);
avformat_close_input(&pAVFomatContext);
return false;
}
pSwsContext = sws_getContext(videoWidth,videoHeight,pAVCodecContext->pix_fmt,videoWidth,videoHeight,AV_PIX_FMT_YUV420P,SWS_FAST_BILINEAR,nullptr,nullptr,nullptr);
int numBytes = avpicture_get_size(AV_PIX_FMT_YUV420P,videoWidth,videoHeight);
m_buffer = (uint8_t*)av_malloc(numBytes * sizeof(uint8_t));
avpicture_fill((AVPicture*)pAVFrameRGB,m_buffer,AV_PIX_FMT_YUV420P,videoWidth,videoHeight);
// int y_size = pAVCodecContext->width * pAVCodecContext->height;
// pAVPacket = av_packet_alloc();
// av_new_packet(pAVPacket,y_size);
av_init_packet(&pAVPacket);
pAVPacket.data = nullptr;
pAVPacket.size = 0;
av_dump_format(pAVFomatContext,0,source,0);
AVStream *stream = pAVFomatContext->streams[videoStreamIndex];
int vden = stream->avg_frame_rate.den,vnum = stream->avg_frame_rate.num;
int video_fps;
if(vden <= 0 || vnum <= 0){
video_fps = 25;
std::cout << "use default " << video_fps << std::endl;
}else{
video_fps = vnum/vden;
std::cout << "video fps:" << video_fps << std::endl;
}
result = avcodec_open2(pAVCodecContext,pAVCodec,nullptr);
if(result < 0){
erroStr += "avcodec open failed errorCode: ";
erroStr += std::to_string(result);
av_frame_free(&pAVFrame);
av_frame_free(&pAVFrameRGB);
av_free(m_buffer);
sws_freeContext(pSwsContext);
if(pAVPacket.data){
av_packet_unref(&pAVPacket);
}
avformat_close_input(&pAVFomatContext);
return false;
}
int resCode = 0;
int got_picture = 0;
while (m_run) {
if((resCode = av_read_frame(pAVFomatContext,&pAVPacket)) <0){
// if(resCode != AVERROR_EOF){
// emit sigError(QString("av_read_frame error: %1").arg(resCode));
// }
av_packet_unref(&pAVPacket);
break;
}
if(pAVPacket.stream_index == videoStreamIndex){
avcodec_decode_video2(pAVCodecContext,pAVFrame,&got_picture,&pAVPacket);
if(got_picture){
if(mtx){
mtx->lock();
sws_scale(pSwsContext,(const uint8_t *const*)pAVFrame->data,pAVFrame->linesize,0,videoHeight,pAVFrameRGB->data,pAVFrameRGB->linesize);
mtx->unlock();
}else{
sws_scale(pSwsContext,(const uint8_t *const*)pAVFrame->data,pAVFrame->linesize,0,videoHeight,pAVFrameRGB->data,pAVFrameRGB->linesize);
}
frameHandler(AV_PIX_FMT_YUV420P,m_buffer,videoWidth,videoHeight);
}
}
av_packet_unref(&pAVPacket);
}
av_frame_free(&pAVFrame);
av_frame_free(&pAVFrameRGB);
sws_freeContext(pSwsContext);
if(pAVPacket.data){
av_packet_unref(&pAVPacket);
}
avformat_close_input(&pAVFomatContext);
if(!m_run){
return true;
}else{
if(!::strncmp(source,"rtsp",4)){
erroStr = "AVERROR_EOF";
return false;
}else{
if(resCode != AVERROR_EOF){
return false;
}else{
return true;
}
}
}
}
// Request the decode loop in decode() to exit at its next iteration.
// NOTE(review): m_run is a plain bool written here and read on the decode
// thread — consider std::atomic<bool> to make the hand-off well-defined.
void FFmpegDecoder::stop()
{
m_run = false;
}
硬解码采用 nvidia-sdk-8.1.24,声明如下
#ifndef NVIDIADECODER_H
#define NVIDIADECODER_H
#include "../decoder.h"
#include "NvDecoder.h"
#include "Utils/FFmpegDemuxer.h"
#include <vector>
// Hardware decoder backed by the NVIDIA Video Codec SDK (NvDecoder).
// Frames are delivered to the callback in NV12 format.
class KLVIDEODECODSHARED_EXPORT NvidiaDecoder : public Decoder
{
public:
~NvidiaDecoder();
bool initsize() override; // enumerates GPUs once; false when no capable GPU exists
unsigned char* framePtr() override; // latest decoded frame (null before decoding starts)
bool decode(const char* source, std::string &erroStr, std::function<void(AVPixelFormat, unsigned char *, int, int)> frameHandler, std::mutex *mtx = nullptr) override;
void stop() override; // asks the decode loop to exit
private:
bool m_isRun = true; // loop flag cleared by stop()
NvDecoder *m_nvdecod{nullptr}; // owned; created inside decode()
static std::vector<std::pair<CUcontext,std::string>> m_ctxV; // one CUDA context per capable GPU, shared by all instances
unsigned char *m_ptr{nullptr}; // points into NvDecoder's frame storage; not owned
int m_curIndex = 0; // round-robin index spreading streams across GPUs
};
#endif // NVIDIADECODER_H
实现如下
#include "nvidiadecoder.h"
// Console logger required by the NVIDIA SDK sample utilities (LOG macro).
simplelogger::Logger *logger = simplelogger::LoggerFactory::CreateConsoleLogger();
// Process-wide list of CUDA contexts (one per capable GPU), filled by initsize().
std::vector<std::pair<CUcontext,std::string>> NvidiaDecoder::m_ctxV;
// Releases the per-instance NvDecoder. The shared CUDA contexts in m_ctxV are
// deliberately kept alive for the whole process: other decoder instances may
// still be using them (the original commented-out teardown acknowledged this).
NvidiaDecoder::~NvidiaDecoder()
{
delete m_nvdecod; // delete on nullptr is a no-op; the explicit check was redundant
}
bool NvidiaDecoder::initsize()
{
static bool isInitsized = false;
if(!isInitsized){ //显卡只初始化一次
ck(cuInit(0));
int nGpu = 0;
ck(cuDeviceGetCount(&nGpu));
for(int i = 0; i < nGpu; i++){
CUdevice cuDevice = 0;
ck(cuDeviceGet(&cuDevice, i));
char szDeviceName[80];
ck(cuDeviceGetName(szDeviceName, sizeof(szDeviceName), cuDevice));
LOG(INFO) << "Find Gpu: " << szDeviceName << std::endl;
CUcontext cuContext = NULL;
ck(cuCtxCreate(&cuContext, CU_CTX_SCHED_BLOCKING_SYNC, cuDevice));
CUVIDDECODECAPS videoDecodeCaps = {};
videoDecodeCaps.eCodecType = cudaVideoCodec_H264;
videoDecodeCaps.eChromaFormat = cudaVideoChromaFormat_420;
videoDecodeCaps.nBitDepthMinus8 = 0;
if (cuvidGetDecoderCaps(&videoDecodeCaps) == CUDA_SUCCESS){ //判断显卡是否支持1080p解码
LOG(INFO) << "cuvid Decoder Caps nMaxWidth " << videoDecodeCaps.nMaxWidth << " nMaxHeigth " << videoDecodeCaps.nMaxHeight << std::endl;
if(videoDecodeCaps.nMaxWidth >= 1920 && videoDecodeCaps.nMaxHeight >= 1080){
m_ctxV.push_back({cuContext,szDeviceName});
}
}
}
isInitsized = true;
}
if(m_ctxV.empty()){
return false;
}
return true;
}
// Returns the most recently delivered NV12 frame (owned by NvDecoder, not us).
unsigned char *NvidiaDecoder::framePtr()
{
return m_ptr;
}
// Blocking hardware decode loop: demuxes `source` with the SDK's FFmpegDemuxer
// and decodes on one of the prepared CUDA contexts (round-robin across GPUs).
// 8-bit frames are handed to `frameHandler` as NV12; >8-bit streams are
// currently skipped. Returns false on failure, with the message in `erroStr`.
bool NvidiaDecoder::decode(const char *source, std::string &erroStr, std::function<void(AVPixelFormat, unsigned char *, int, int)> frameHandler, std::mutex *mtx)
{
if(m_ctxV.empty()){
return false; // initsize() found no capable GPU
}
try{
m_isRun = true;
// Pick the next context round-robin so several streams spread over GPUs.
std::pair<CUcontext,std::string> &v = m_ctxV.at(m_curIndex++ % m_ctxV.size());
std::cout << "Use Contex in " << v.second << std::endl;
// SDK-provided demuxer/decoder pair (nvidia-sdk-8.1.24).
FFmpegDemuxer demuxer(source);
delete m_nvdecod; // BUG FIX: decoder from a previous decode() call was leaked
m_nvdecod = new NvDecoder(v.first, demuxer.GetWidth(), demuxer.GetHeight(), false, FFmpeg2NvCodecId(demuxer.GetVideoCodec()),mtx);
int nVideoBytes = 0, nFrameReturned = 0, nFrame = 0;
uint8_t *pVideo = NULL;
uint8_t **ppFrame;
do {
demuxer.Demux(&pVideo, &nVideoBytes);
m_nvdecod->Decode(pVideo, nVideoBytes, &ppFrame, &nFrameReturned);
if (!nFrame && nFrameReturned)
LOG(INFO) << m_nvdecod->GetVideoInfo(); // log stream info once, on first output
for (int i = 0; i < nFrameReturned; i++) {
if (m_nvdecod->GetBitDepth() == 8){
m_ptr = ppFrame[i];
frameHandler(AV_PIX_FMT_NV12,m_ptr,m_nvdecod->GetWidth(),m_nvdecod->GetHeight());
}
// >8-bit output would need a P016 conversion pass; not supported yet.
}
nFrame += nFrameReturned;
} while (nVideoBytes && m_isRun); // nVideoBytes == 0 signals end of stream
std::cout << "Total frame decoded: " << nFrame << std::endl;
return true;
}catch(std::exception &e){
// FFmpegDemuxer/NvDecoder report fatal errors via exceptions.
erroStr = e.what();
return false;
}
}
// Request the decode loop to exit after the current demux/decode iteration.
// NOTE(review): m_isRun is a plain bool crossing threads — consider std::atomic<bool>.
void NvidiaDecoder::stop()
{
m_isRun = false;
}
在Qt中基于QThread在子线程中解码
#ifndef VIDEODATA_H
#define VIDEODATA_H
#include <QThread>
#include <QMutex>
#include <mutex>
#include <QTimer>
#include "decoder.h"
// Decoding worker thread: owns a Decoder backend, runs its blocking decode
// loop in run(), and publishes frames to the UI through Qt signals.
class VideoData : public QThread
{
Q_OBJECT
public:
int m_width,m_height; // video dimensions; valid once sigVideoStarted() has fired
int m_fmt; // AVPixelFormat of the delivered frames
unsigned char *m_ptr{nullptr}; // latest frame pointer published by the decode callback
std::mutex *m_mtx{nullptr}; // optional lock shared with the renderer (allocation currently disabled)
VideoData(QString decoderName,QObject *parent = nullptr);
~VideoData();
void setVideoSource(QString); // set the URL/file to decode; also flags RTSP sources
bool isDecoding() const; // true while frames are being produced
unsigned char* framePtr(); // forwarded from the underlying Decoder
QString videoSource() const;
QString decoder();
QString errorStr(); // last decode error text (empty on success)
signals:
void sigError(QString); // decoding failed; carries the error text
void sigVideoStarted(); // first frame decoded; dimensions/format are now valid
void sigFrameLoaded(); // a new frame is ready for rendering
protected:
void run() override; // thread entry: runs the blocking decode loop
private:
QString m_url,m_decoderName,m_errorStr;
bool m_isRtsp = false;
bool m_isFirst = true; // true until the first frame of the current session
bool m_isDecoding = false;
Decoder *m_decoder{nullptr}; // owned decoding backend, chosen in the constructor
};
#endif // VIDEODATA_H
#include "videodata.h"
#include <iostream>
#include <QDebug>
// Selects the decoding backend by name: "h264_cuvid" requests NVIDIA hardware
// decode with automatic fallback to FFmpeg software decode when no capable
// GPU is found; any other name selects FFmpeg directly.
VideoData::VideoData(QString decoderName, QObject *parent):
QThread(parent), // base class listed first: initialization happens in this order anyway
m_decoderName(decoderName)
{
if(m_decoderName == "h264_cuvid"){
m_decoder = Klvideodecod::createDecoder<NvidiaDecoder>();
if(!m_decoder->initsize()){
delete m_decoder; // BUG FIX: the failed NvidiaDecoder was leaked on fallback
m_decoder = Klvideodecod::createDecoder<FFmpegDecoder>();
}
}else{
m_decoder = Klvideodecod::createDecoder<FFmpegDecoder>();
}
// m_mtx = new std::mutex; // left disabled: consumers handle the null-mutex case
}
// Releases the optional shared mutex and the decoder backend.
// NOTE(review): assumes the thread has already finished; deleting m_decoder
// while run() is still executing would be a use-after-free — confirm callers
// wait for finished() before destroying this object.
VideoData::~VideoData()
{
delete m_mtx; // delete on nullptr is a no-op; the explicit check was redundant
m_mtx = nullptr;
delete m_decoder;
m_decoder = nullptr;
qDebug() << "video" << m_url << "deleted";
}
// Record the video source and remember whether it is an RTSP (live) stream.
void VideoData::setVideoSource(QString s)
{
m_isRtsp = s.contains("rtsp");
m_url = s;
}
// True while the decode loop is actively producing frames.
bool VideoData::isDecoding() const
{
return m_isDecoding;
}
// Latest decoded frame, forwarded from the backend decoder.
unsigned char *VideoData::framePtr()
{
return m_decoder->framePtr();
}
// URL/file currently being decoded (set via setVideoSource()).
QString VideoData::videoSource() const
{
return m_url;
}
// Name of the decoder backend requested at construction time.
QString VideoData::decoder()
{
return m_decoderName;
}
// Last decode error text; empty when the last run succeeded.
QString VideoData::errorStr()
{
return m_errorStr;
}
void VideoData::run()
{
std::string errorStr;
m_errorStr.clear();
bool isOk = m_decoder->decode(m_url.toStdString().data(),errorStr,[this](AVPixelFormat format,unsigned char*ptr, int width, int height)->void{
if(m_isFirst){
m_width = width;
m_height = height;
m_fmt = format;
m_isFirst = false;
m_isDecoding = true;
emit sigVideoStarted();
}
if(isInterruptionRequested()){ //结束解码
m_isDecoding = false;
m_decoder->stop();
}else{
m_ptr = ptr;
emit sigFrameLoaded(); //触发UI刷新
}
},m_mtx);
m_isFirst = true;
m_isDecoding = false;
if(!isOk){
m_errorStr = QString::fromStdString(errorStr);
emit sigError(m_errorStr);
}
}
支持播放的 QOpenGLWidget 控件声明如下
#ifndef KLVIDEOWIDGET_H
#define KLVIDEOWIDGET_H
#include "klvideodecod_global.h"
#include <QOpenGLWidget>
#include "../utils/klvideorender.h"
#include <mutex>
#include <QTimer>
QT_FORWARD_DECLARE_CLASS(VideoData)
// OpenGL video display widget: attaches to a (possibly shared) VideoData
// decode thread and renders its frames through the render manager.
class KLVIDEODECODSHARED_EXPORT Klvideowidget : public QOpenGLWidget
{
Q_OBJECT
public:
// Playback lifecycle: Stop -> Reading (opening the stream) -> Playing.
enum PlayState{
Stop,
Reading,
Playing
};
Klvideowidget(QWidget *parent = nullptr);
~Klvideowidget();
PlayState playState() const;
void startPlay(QString url,QString decoderName); // begin playback; decoderName selects hw/sw decode
int videoWidth() const; // valid once sigVideoStart() has fired
int videoHeidht() const; // sic: typo kept for source compatibility with existing callers
QString url() const;
QString decoderName() const;
public slots:
void stop(); // stop playback and release this widget's claim on the decode thread
signals:
void sigError(QString);
void sigVideoStart(int,int); // width, height
void sigVideoStoped();
protected:
void initializeGL() override;
void paintGL() override;
private:
PlayState m_state = Stop;
std::mutex *m_mtx{nullptr}; // borrowed from VideoData; guards the frame buffer (may be null)
KLvideoRenderManager *m_renderM{nullptr}; // owned renderer, created in the constructor
uchar* m_ptr{nullptr};
AVPixelFormat m_fmt;
int m_videoW,m_videoH;
VideoData *m_decoThr{nullptr}; // shared decode thread; owned by VideoDataCache, not by this widget
QString m_url,m_decoderName;
private slots:
void slotVideoStarted(); // adopts dimensions when the stream's first frame arrives
};
#endif // KLVIDEOWIDGET_H
实现如下
#include "klvideowidget.h"
#include "../utils/rendermanager.h"
#include "videodatacache.h"
#include <iostream>
#include <QDebug>
// Creates the render manager; everything else is set up lazily in startPlay().
Klvideowidget::Klvideowidget(QWidget *parent):
QOpenGLWidget(parent)
{
m_renderM = new RenderManager; // owned; released in the destructor
}
Klvideowidget::~Klvideowidget()
{
stop(); // detach from the shared decode thread before tearing down the renderer
delete m_renderM;
}
// Current playback state (Stop / Reading / Playing).
Klvideowidget::PlayState Klvideowidget::playState() const
{
return m_state;
}
// Start (or attach to) playback of `url` with the given decoder backend.
// The decode thread comes from VideoDataCache, so several widgets showing the
// same source share one thread and each source is decoded only once.
void Klvideowidget::startPlay(QString url, QString decoderName)
{
m_state = Reading;
m_decoThr = VideoDataCache::createVideoData(url,decoderName);
m_url = url;
m_decoderName = decoderName;
m_mtx = m_decoThr->m_mtx; // share the frame-buffer lock (may be null)
if(m_decoThr->isDecoding()){
// The shared thread is already producing frames: adopt its geometry now.
m_videoW = m_decoThr->m_width;
m_videoH = m_decoThr->m_height;
m_state = Playing;
emit sigVideoStart(m_decoThr->m_width,m_decoThr->m_height);
}else{
// Otherwise wait for the first-frame notification.
// if(!m_decoThr->errorStr().isEmpty()){
// emit sigError(m_decoThr->errorStr());
// }
connect(m_decoThr,SIGNAL(sigVideoStarted()),this,SLOT(slotVideoStarted()));
}
connect(m_decoThr,SIGNAL(sigFrameLoaded()),this,SLOT(update())); // repaint per frame
// When decoding finishes, drop the thread pointer and stop playback.
// NOTE(review): calling startPlay() twice without stop() in between would
// stack these connections on the shared thread — confirm callers stop() first.
connect(m_decoThr,&VideoData::finished,this,[this]{m_decoThr = nullptr;stop();});
connect(m_decoThr,SIGNAL(sigError(QString)),this,SIGNAL(sigError(QString)));
m_decoThr->start();
}
// Video width in pixels; valid once sigVideoStart() has fired.
int Klvideowidget::videoWidth() const
{
return m_videoW;
}
// Video height in pixels; valid once sigVideoStart() has fired.
// (Name contains a typo — "Heidht" — kept for compatibility with callers.)
int Klvideowidget::videoHeidht() const
{
return m_videoH;
}
// Source URL passed to the most recent startPlay() call.
QString Klvideowidget::url() const
{
return m_url;
}
// Decoder backend name passed to the most recent startPlay() call.
QString Klvideowidget::decoderName() const
{
return m_decoderName;
}
// Stop playback: disconnect from the shared decode thread, clear local state,
// repaint (now blank) and release this widget's claim on the cached thread.
void Klvideowidget::stop()
{
m_state = Stop;
if(m_decoThr){
m_decoThr->disconnect(this); // stop receiving frame/error signals
m_decoThr = nullptr; // not owned here; VideoDataCache manages its lifetime
}
m_ptr = nullptr;
m_mtx = nullptr;
update(); // schedule one final repaint with no frame
emit sigVideoStoped();
VideoDataCache::removeVideoData(url()); // drop the cache reference for this source
}
void Klvideowidget::initializeGL()
{
// Register one renderer per pixel format we may receive:
// NV12 from the NVIDIA path, YUV420P from the FFmpeg path.
m_renderM->registerRender(AV_PIX_FMT_NV12);
m_renderM->registerRender(AV_PIX_FMT_YUV420P);
}
void Klvideowidget::paintGL()
{
if(!m_decoThr)return;
//渲染视频
if(m_mtx){
m_mtx->lock();
if(m_state != Playing){
m_renderM->render(AVPixelFormat(m_decoThr->m_fmt),nullptr,m_videoW,m_videoH);
}else{
m_renderM->render(AVPixelFormat(m_decoThr->m_fmt),m_decoThr->framePtr(),m_videoW,m_videoH);
}
m_mtx->unlock();
}else{
if(m_state != Playing){
m_renderM->render(AVPixelFormat(m_decoThr->m_fmt),nullptr,m_videoW,m_videoH);
}else{
m_renderM->render(AVPixelFormat(m_decoThr->m_fmt),m_decoThr->framePtr(),m_videoW,m_videoH);
}
}
}
// Invoked when the decode thread reports its first frame: adopt the stream's
// dimensions, switch to Playing, and re-emit the start signal for listeners.
void Klvideowidget::slotVideoStarted()
{
m_videoW = m_decoThr->m_width;
m_videoH = m_decoThr->m_height;
m_state = Playing;
emit sigVideoStart(m_decoThr->m_width,m_decoThr->m_height);
}
其中的RenderManager为渲染器部分,另外还有qml、视频数据缓存部分未给出;对未给出部分有兴趣的可以加关注并在下方留言。
以下为真实应用效果:硬解 12 路时,CPU(i5)占用约 11%,730 显卡占用约 75%–80%。
---------------------
作者:wangzai6378
来源:CSDN
原文:https://blog.csdn.net/wanghualin033/article/details/82050448
版权声明:本文为博主原创文章,转载请附上博文链接!