QT 中调用FFMpeg实现视频播放功能

QT用来播放显示,FFMpeg循环从H264、H265缓存队列中取数据,解码成图片后通过信号槽函数发给QT界面去显示,这样就实现了视频播放功能。

解码方法:videoplayer.cpp

  


#include "videoplayer.h"
#include <stdio.h>

extern "C"
{
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libavutil/pixfmt.h"
#include "libswscale/swscale.h"
}
#pragma comment(lib, "avcodec.lib")
#pragma comment(lib, "avformat.lib")
#pragma comment(lib, "avutil.lib")	
#pragma comment(lib ,"swscale.lib")


// Shared static state across all VideoPlayer instances.
// One mutex + one packet queue per camera channel (index 1 is the center
// channel, used by Read_bufferCenter; presumably 0/2 are left/right).
std::mutex VideoPlayer::mutexRtpData[3];      // each guards the receiveQueue slot with the same index
bool VideoPlayer::IsDecodingImage;            // becomes true once run() has located a decoder
bool VideoPlayer::IsRuning;                   // cooperative run flag for the decode loop (sic: "Runing")
queue<ctlPack*> VideoPlayer::receiveQueue[3]; // raw H264/H265 packets pushed by PusSendData

 // Per-channel "the AVIO read callback may start consuming" flags.
 bool VideoPlayer::isReadRtpLeft=false;
 bool VideoPlayer::isReadRtpCenter = false;
 bool VideoPlayer::isReadRtpRight = false;

 // Construct a player bound to one RTP channel. Resets the shared
 // read-ready flags so the new decode thread starts from a clean state.
 VideoPlayer::VideoPlayer(ChannelRtpEnum rtpType)
 {
	 currentRtp = rtpType;
	 IsRuning = true;
	 // No channel is ready for the AVIO callback until run() says so.
	 isReadRtpLeft = isReadRtpCenter = isReadRtpRight = false;
 }

// Request cooperative shutdown: run() re-checks IsRuning on every pass
// of its decode loop and exits once it observes the flag cleared.
VideoPlayer::~VideoPlayer()
{
	IsRuning = false;
}

void VideoPlayer::startPlay()
{
	IsRuning = true;
  
    this->start();

}
// Ask the decode loop to finish; this does not block waiting for it.
void VideoPlayer::StopPlay()
{
	IsRuning = false;
}
void VideoPlayer::ClearQueue()
{
	
}




int VideoPlayer::Read_bufferCenter(void *opaque, uint8_t *buf, int buf_size)
{
	while (receiveQueue[1].empty() && IsRuning)
	{
		//if (isReadRtpCenter)
			msleep(15);
	}

	if (receiveQueue[1].empty())
	{
		return -1;
	}
	mutexRtpData[1].lock();
	CtlPack *one = receiveQueue[1].front();
	receiveQueue[1].pop();
	mutexRtpData[1].unlock();
	memcpy(buf, one->buffer + 3, one->len - 3);
	buf_size = one->len - 3;
	delete one->buffer;
	delete one;
	return buf_size;
}

// Queue one received RTP packet for the given camera channel.
// camerID indexes the 3-element mutexRtpData/receiveQueue arrays
// (index 1 is the center channel, consumed by Read_bufferCenter).
void VideoPlayer::PusSendData(unsigned  __int8 camerID, ctlPack *onePack)
{
	// BUGFIX: guard the array index and the payload pointer — an out-of-range
	// id previously indexed past both static arrays (undefined behavior).
	if (camerID >= 3 || onePack == NULL)
		return;
	mutexRtpData[camerID].lock();
	receiveQueue[camerID].push(onePack);
	mutexRtpData[camerID].unlock();
}

void VideoPlayer::run()
{
	
	AVFormatContext *pFormatCtx;
	AVCodecContext *pCodecCtx;
	AVCodec *pCodec;
	AVFrame *pFrame, *pFrameRGB;
	AVPacket *packet;
	uint8_t *out_buffer;
	//VideoPlayer video;
	static struct SwsContext *img_convert_ctx;

	int videoStream, i, numBytes;
	int ret, got_picture;
	IsDecodingImage = false;
	av_register_all(); //初始化FFMPEG  调用了这个才能正常适用编码器和解码器

	pFormatCtx = avformat_alloc_context();

	///
	unsigned char * iobuffer = (unsigned char *)av_malloc(32768);
	void* MemInputBuffer = av_malloc(32768);
	
	isReadRtpCenter = false;
	pFormatCtx->pb = avio_alloc_context((unsigned char *)MemInputBuffer, 32768, 0, NULL, Read_bufferCenter, NULL, NULL);
	

	/
	if (avformat_open_input(&pFormatCtx, NULL, NULL, NULL) != 0)
	{
		printf("can't open the file. \n");
		return;
	}
	//while (avformat_find_stream_info(pFormatCtx, NULL) != 0);
	if (avformat_find_stream_info(pFormatCtx, NULL) != 0)
	{
		printf("Could't find stream infomation.\n");
		return;
	}

	videoStream = -1;

	
	for (i = 0; i < pFormatCtx->nb_streams; i++) {
		if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) {
			videoStream = i;
		}
	}


	if (videoStream == -1) {
		printf("Didn't find a video stream.\n");
		return;
	}

	
	pCodecCtx = pFormatCtx->streams[videoStream]->codec;
	pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
	IsDecodingImage = true;
	if (pCodec == NULL) {
		printf("Codec not found.\n");
		return;
	}

	
	if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
		printf("Could not open codec.\n");
		return;
	}

	pFrame = av_frame_alloc();
	pFrameRGB = av_frame_alloc();


	img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height,
		pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height,
		AV_PIX_FMT_RGB32, SWS_BICUBIC, NULL, NULL, NULL);

	numBytes = avpicture_get_size(AV_PIX_FMT_RGB32, pCodecCtx->width, pCodecCtx->height);

	out_buffer = (uint8_t *)av_malloc(numBytes * sizeof(uint8_t));
	avpicture_fill((AVPicture *)pFrameRGB, out_buffer, AV_PIX_FMT_RGB32, pCodecCtx->width, pCodecCtx->height);

	int y_size = pCodecCtx->width * pCodecCtx->height;

	packet = (AVPacket *)malloc(sizeof(AVPacket)); 
	av_new_packet(packet, y_size); 


	
	isReadRtpCenter = true;
	

	while (IsRuning)
	{
		try
		{
			if (av_read_frame(pFormatCtx, packet) < 0)
				continue;

			if (packet->stream_index != videoStream)
				continue;

			// ret = avcodec_decode_video2(pCodecCtx, pFrame, &got_picture,packet);
			avcodec_send_packet(pCodecCtx, packet);

			if (avcodec_receive_frame(pCodecCtx, pFrame) < 0)
				continue;

			/* if (ret < 0) {
				 printf("decode error.\n");
				 return;
			 }*/

			if (got_picture)
			{
				sws_scale(img_convert_ctx,(uint8_t const * const *)pFrame->data,
				        	pFrame->linesize, 0, pCodecCtx->height, pFrameRGB->data,pFrameRGB->linesize);

				
				QImage tmpImg((uchar *)out_buffer, pCodecCtx->width, pCodecCtx->height, QImage::Format_RGB32);
				QImage image = tmpImg.copy(); //把图像复制一份后传递给界面显示
				emit sig_GetOneFrame(image);  //发送信号
			}
			av_free_packet(packet);
		}
		catch (exception ex)
		{

		}
		
	}
	av_free(out_buffer);
	av_free(pFrameRGB);
	avcodec_close(pCodecCtx);
	avformat_close_input(&pFormatCtx);
}

界面播放窗口:


#include "Mainwindow.h"
#include "ui_mainwindow.h"
#include "qdesktopwidget.h"

#include <QPainter>
#include <QSplitter>  
#include <QTextCodec>  
#include <QTextEdit> 
#include <functional> 


// Build the main window: a maximized frame hosting one dockable video
// pane whose QLabel receives the decoded frames from the player thread.
MainWindow::MainWindow(QWidget *parent) :
    QMainWindow(parent),
    ui(new Ui::mainWindow)
{
	ui->setupUi(this);
	this->setWindowTitle(u8"视频播放系统");
	this->setWindowState(Qt::WindowMaximized);

	// The label that actually displays the video frames.
	centerLabel = new QLabel();
	centerLabel->setText(u8"视频");
	centerLabel->setAlignment(Qt::AlignCenter);
	centerLabel->setFrameShape(QFrame::Box);
	centerLabel->setStyleSheet("border-width: 1px;border-style: solid;border-color: rgb(255, 170, 0);");

	// Host the label in a closable/floatable dock pinned to the top area.
	centerDock = new QDockWidget(tr(u8"视频窗口"), this);
	centerDock->setFeatures(QDockWidget::DockWidgetClosable | QDockWidget::DockWidgetFloatable);
	centerDock->setWidget(centerLabel);
	addDockWidget(Qt::TopDockWidgetArea, centerDock);

	// React when the dock is floated or re-docked.
	connect(centerDock, SIGNAL(topLevelChanged(bool)), this, SLOT(CenterTopLevelChanged(bool)));
}

// The Ui object is the only resource owned directly; Qt's parent/child
// ownership tears down the widgets created in the constructor.
MainWindow::~MainWindow()
{
    delete ui;
}


// Create the decoder thread for the center channel, wire its frame signal
// to the display slot, then start decoding.
// NOTE(review): calling this twice overwrites centerPlayer without deleting
// or stopping the previous instance — the old thread and its connection
// leak. Consider guarding or tearing down the old player first.
void MainWindow::StartPlay()
{
	

	centerPlayer = new VideoPlayer(ChannelRtpEnum::Center);
	connect(centerPlayer, SIGNAL(sig_GetOneFrame(QImage)), this, 
    SLOT(slotGetOneFrameCenter(QImage)));
	centerPlayer->startPlay();

}




// Display slot: receives one decoded frame from the player thread,
// converts it to a pixmap and shows it on the center label.
void MainWindow::slotGetOneFrameCenter(QImage img)
{
	const QPixmap pix = QPixmap::fromImage(img);
	centerLabel->setPixmap(pix);
	centerLabel->show();
}


 

  • 2
    点赞
  • 23
    收藏
    觉得还不错? 一键收藏
  • 0
    评论
使用FFmpeg播放视频可以分为两个步骤: 1. 解码视频帧 在使用FFmpeg播放视频时,需要首先解码视频帧。FFmpeg提供了许多API函数,可以用来实现解码视频帧的功能。在使用Qt调用FFmpeg时,可以通过使用QOpenGLWidget来实现视频的渲染。 以下是一个简单的解码视频帧的示例代码: ``` AVFrame* pFrame = av_frame_alloc(); AVPacket packet; int frameFinished = 0; while (av_read_frame(pFormatCtx, &packet) >= 0) { if (packet.stream_index == videoStream) { avcodec_send_packet(pCodecCtx, &packet); while (avcodec_receive_frame(pCodecCtx, pFrame) == 0) { // Do something with the video frame } } av_packet_unref(&packet); } ``` 2. 渲染视频帧 在解码视频帧后,需要将其渲染到屏幕上。可以使用QOpenGLWidget来实现视频帧的渲染。以下是一个简单的渲染视频帧的示例代码: ``` QOpenGLContext* pContext = new QOpenGLContext(); pContext->create(); pContext->makeCurrent(this); QOpenGLFramebufferObjectFormat format; format.setAttachment(QOpenGLFramebufferObject::CombinedDepthStencil); format.setSamples(4); QOpenGLFramebufferObject* pFbo = new QOpenGLFramebufferObject(width(), height(), format); QOpenGLShaderProgram* pShader = new QOpenGLShaderProgram(this); pShader->addShaderFromSourceCode(QOpenGLShader::Vertex, vertexShaderSourceCode); pShader->addShaderFromSourceCode(QOpenGLShader::Fragment, fragmentShaderSourceCode); pShader->link(); QOpenGLVertexArrayObject* pVao = new QOpenGLVertexArrayObject(this); pVao->create(); pVao->bind(); QOpenGLBuffer* pVbo = new QOpenGLBuffer(QOpenGLBuffer::VertexBuffer); pVbo->create(); pVbo->bind(); pVbo->allocate(vertices, sizeof(vertices)); pShader->bind(); pShader->setAttributeBuffer("position", GL_FLOAT, 0, 3, sizeof(VertexData)); pShader->enableAttributeArray("position"); pShader->setAttributeBuffer("texCoord", GL_FLOAT, sizeof(QVector3D), 2, sizeof(VertexData)); pShader->enableAttributeArray("texCoord"); glActiveTexture(GL_TEXTURE0); glBindTexture(GL_TEXTURE_2D, textureId); pShader->setUniformValue("texture", 0); pShader->setUniformValue("modelViewProjectionMatrix", projectionMatrix * viewMatrix); glDrawArrays(GL_TRIANGLE_STRIP, 0, 4); pShader->disableAttributeArray("position"); pShader->disableAttributeArray("texCoord"); pVbo->release(); 
pVao->release(); pShader->release(); pFbo->release(); pContext->doneCurrent(); ``` 其中,`VertexData`结构体包含了每个顶点的位置和纹理坐标信息,`vertices`数组包含了四个顶点的信息,`textureId`是视频帧的纹理ID。 需要注意的是,在使用QOpenGLWidget时,需要重写`initializeGL()`、`resizeGL()`和`paintGL()`函数。在`initializeGL()`函数中,需要初始化OpenGL相关的设置;在`resizeGL()`函数中,需要更新OpenGL上下文的视口;在`paintGL()`函数中,需要渲染视频帧。

“相关推荐”对你有帮助么?

  • 非常没帮助
  • 没帮助
  • 一般
  • 有帮助
  • 非常有帮助
提交
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值