FFmpeg Decoding (FFmpeg 1.0)

1. Include the headers

#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>

// Needed only if you convert YUV420P to RGB
#include <libswscale/swscale.h>
#include <libavutil/imgutils.h>
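
If these headers are included from C++ rather than plain C, they need an extern "C" wrapper, since the FFmpeg libraries export C symbols (a side note for C++ users; the rest of this post is plain C):

```cpp
// Only needed when compiling as C++; plain C includes the headers directly.
extern "C" {
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libswscale/swscale.h>
#include <libavutil/imgutils.h>
}
```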

2. Initialization

1) Declarations

const AVCodec *codec;             // decoder
AVCodecContext *c = NULL;         // codec context
AVPacket pkt;                     // packet read from the container
AVFrame *frame;                   // decoded (YUV) frame
AVFrame *frameRGBA;               // converted RGBA frame for rendering
AVFormatContext *avformatcontext; // demuxing context
struct SwsContext *sws_ctx;       // swscale context (set up below)
int framefinished;                // non-zero once a full frame is decoded

2) Decoder setup

av_register_all(); // register all muxers, demuxers and codecs

avformatcontext = avformat_alloc_context();

// Open filename and read its header into avformatcontext;
// returns 0 on success and a negative AVERROR on failure
int input = avformat_open_input(&avformatcontext, filename, NULL, NULL);
if (input < 0)
{
	fprintf(stderr, " open input error ,\n input ------->>%d", input);
	exit(1);
}

// Limit probing so avformat_find_stream_info returns faster
avformatcontext->probesize = 2456000;
avformatcontext->max_analyze_duration = 1500;
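
The same two limits can also be passed as demuxer options when opening the input, via an AVDictionary, instead of assigning the fields afterwards. A minimal sketch ("probesize" and "analyzeduration" are standard libavformat option names; "analyzeduration" is in microseconds):

```cpp
AVDictionary *opts = NULL;
av_dict_set(&opts, "probesize", "2456000", 0);
av_dict_set(&opts, "analyzeduration", "1500", 0);
// The dictionary is consumed by avformat_open_input; unused entries remain in opts
int ret = avformat_open_input(&avformatcontext, filename, NULL, &opts);
av_dict_free(&opts);
```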

// Read packets from the media file to retrieve stream information
int streamInfo = avformat_find_stream_info(avformatcontext, NULL);
if (streamInfo < 0)
{
	fprintf(stderr, "find_stream error, \n streamInfo ------>>%d", streamInfo);
	exit(1);
}

// Find the first video stream
unsigned int i;
int videostream = -1;
for (i = 0; i < avformatcontext->nb_streams; i++)
{
	if (avformatcontext->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO)
	{
		videostream = i;
		break;
	}
}
if (videostream == -1)
{
	fprintf(stderr, "can not find a video stream");
	exit(1);
}
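
As an alternative to the manual loop above, libavformat's av_find_best_stream can pick the video stream (and, unlike a plain "first match", prefers the default/best-quality stream). A minimal sketch:

```cpp
int videostream = av_find_best_stream(avformatcontext, AVMEDIA_TYPE_VIDEO,
                                      -1, -1, NULL, 0);
if (videostream < 0)
{
    fprintf(stderr, "cannot find a video stream\n");
    exit(1);
}
```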

// Get the codec context of the video stream
c = avformatcontext->streams[videostream]->codec;
// Optionally raise the decoding thread count
//c->thread_count = 4;

// Find a decoder for the stream's codec ID
codec = avcodec_find_decoder(c->codec_id);
if (!codec)
{
	fprintf(stderr, "Codec not found\n");
	exit(1);
}

// Open the decoder
if (avcodec_open2(c, codec, NULL) < 0)
{
	fprintf(stderr, "Could not open codec\n");
	exit(1);
}

// Allocate the frame that will receive the decoded video data
frame = avcodec_alloc_frame();
if (!frame)
{
	fprintf(stderr, "Could not allocate video frame\n");
	exit(1);
}

// Allocate frameRGBA, which will receive the converted RGB data for rendering
frameRGBA = avcodec_alloc_frame();
if (!frameRGBA)
{
	fprintf(stderr, "Could not allocate video frameRGBA\n");
	exit(1);
}

// Output size after sws_scale (change these values if scaling is needed;
// here they simply match the source, i.e. c->width and c->height)
int width_out = c->width, height_out = c->height;

// Allocate a buffer for frameRGBA and wire it into the frame's data/linesize
// (for AV_PIX_FMT_RGBA with align = 1 this is width_out * height_out * 4 bytes;
// note that the av_image_* functions postdate FFmpeg 1.0, where the
// equivalents were avpicture_get_size and avpicture_fill)
int numBytes = av_image_get_buffer_size(AV_PIX_FMT_RGBA, width_out, height_out, 1);
uint8_t *buffer = (uint8_t *)av_malloc(numBytes * sizeof(uint8_t));
av_image_fill_arrays(frameRGBA->data, frameRGBA->linesize, buffer,
					AV_PIX_FMT_RGBA, width_out, height_out, 1);

// Set up swscale
// For the trade-offs between the scaling algorithms, see https://blog.csdn.net/leixiaohua1020/article/details/12029505
sws_ctx = sws_getContext(c->width, c->height, 
							 c->pix_fmt, width_out, height_out, AV_PIX_FMT_RGBA,
							 SWS_POINT, NULL, NULL, NULL);
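
sws_getContext returns NULL on failure (for example, when the source pixel format is unsupported), so it is worth checking the result before decoding starts. A minimal sketch:

```cpp
if (!sws_ctx)
{
    fprintf(stderr, "Could not initialize the swscale context\n");
    exit(1);
}
```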

3. Decoding

while (av_read_frame(avformatcontext, &pkt) >= 0)
{
	if (pkt.stream_index == videostream)
	{
		int len;
		// Feed the packet to the decoder; framefinished becomes non-zero once a
		// complete frame has been decoded. The first packets may not produce a
		// frame immediately, so without flushing (see below) the last frames
		// are lost; see https://blog.csdn.net/subfate/article/details/50273955
		len = avcodec_decode_video2(c, frame, &framefinished, &pkt);
		if (len < 0)
		{
			fprintf(stderr, "Error while decoding a frame\n");
			exit(1);
		}
		if (framefinished)
		{
			// Convert the decoded frame to RGBA
			sws_scale(sws_ctx, (uint8_t const *const *)frame->data,
					 frame->linesize, 0, c->height,
					 frameRGBA->data, frameRGBA->linesize);
			// Save the raw RGBA data ("wb": binary mode; 4 bytes per pixel)
			FILE *fd = fopen("RGB.rgb", "wb");
			if (fd)
			{
				fwrite(frameRGBA->data[0], 1, width_out * height_out * 4, fd);
				fclose(fd);
			}
		}
	}
	// Every packet returned by av_read_frame must be freed, including
	// packets that belong to other streams (audio, subtitles, ...)
	av_free_packet(&pkt);
}
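
The missing-frame issue noted in the loop comments can be avoided by flushing the decoder: once av_read_frame runs out of packets, keep feeding empty packets until no more frames come out (decoders with CODEC_CAP_DELAY buffer frames internally). A minimal sketch reusing the variables above:

```cpp
// Flush the decoder to drain the frames it is still buffering
AVPacket flushpkt;
av_init_packet(&flushpkt);
flushpkt.data = NULL;
flushpkt.size = 0;
framefinished = 1;
while (framefinished)
{
	if (avcodec_decode_video2(c, frame, &framefinished, &flushpkt) < 0)
		break;
	if (framefinished)
	{
		// handle the frame exactly as in the main loop (sws_scale, save, ...)
	}
}
```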

4. Releasing resources

avcodec_close(c);
avcodec_free_frame(&frame);
avcodec_free_frame(&frameRGBA);
sws_freeContext(sws_ctx);
av_free(buffer); // allocated with av_malloc, so release with av_free, not free
avformat_close_input(&avformatcontext);
// every pkt was already freed inside the read loop, so no av_free_packet here
