Rendering an AVFrame with SDL

XVideoView.cpp

bool XVideoView::drawAVFrame(AVFrame* frame)
{
	bool ret = (frame != nullptr);

	if (ret)
	{
		// Dispatch on the pixel format and hand the plane pointers
		// and strides to the underlying draw() implementation
		switch (frame->format)
		{
			case AV_PIX_FMT_ARGB:
			case AV_PIX_FMT_RGBA:
				ret = draw(frame->data[0], frame->linesize[0]);
				break;
			case AV_PIX_FMT_YUV420P:
				ret = draw(frame->data[0], frame->linesize[0], frame->data[1], frame->linesize[1], frame->data[2], frame->linesize[2]);
				break;
			default:
				ret = false;
				break;
		}
	}

	return ret;
}
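The ARGB/RGBA branch above calls a single-plane overload of draw() that this post does not list. As a rough sketch (not the original implementation), assuming the same member variables as the YUV overload in XSDL.cpp below and a texture created with a matching packed pixel format such as SDL_PIXELFORMAT_RGBA32, it could look like this:

// Hypothetical sketch: single-plane (packed RGB) overload of XSDL::draw().
// Assumes m_texture was created with a packed format that matches the
// AVFrame (e.g. SDL_PIXELFORMAT_RGBA32); scaling via m_scaleWid/m_scaleHgh
// is omitted for brevity.
bool XSDL::draw(const unsigned char* data, int pitch)
{
	bool ret = (data && (pitch > 0) && m_win && m_render && m_texture);

	if (ret)
	{
		// Copy the packed pixel data from system memory into the texture
		ret = (SDL_UpdateTexture(m_texture, nullptr, data, pitch) == 0);

		if (ret)
		{
			SDL_RenderClear(m_render);

			// Copy the texture to the renderer and present it
			ret = (SDL_RenderCopy(m_render, m_texture, nullptr, nullptr) == 0);

			if (ret)
			{
				SDL_RenderPresent(m_render);
			}
		}

		if (!ret)
		{
			cerr << SDL_GetError() << endl;
		}
	}

	return ret;
}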

 

XSDL.cpp

bool XSDL::draw(const unsigned char* Yplane, int Ypitch,
	const unsigned char* Uplane, int Upitch,
	const unsigned char* Vplane, int Vpitch)
{
	bool ret = (Yplane && (Ypitch > 0) && Uplane && (Upitch > 0) &&
		Vplane && (Vpitch > 0) && (m_width > 0) && (m_height > 0) &&
		m_win && m_render && m_texture);

	if (ret)
	{
		// Copy the YUV planes from system memory into the GPU texture
		ret = (SDL_UpdateYUVTexture(m_texture, nullptr, Yplane, Ypitch, Uplane, Upitch, Vplane, Vpitch) == 0);

		if (ret)
		{
			SDL_Rect rect;
			SDL_Rect* pr = nullptr;

			rect.x = 0;
			rect.y = 0;
			rect.w = m_scaleWid;  // rect.w / rect.h are the on-screen display size
			rect.h = m_scaleHgh;

			/* By default the frame is rendered at the window size; if the user has set a scaled size, render at that size instead */
			if ((m_scaleWid > 0) && (m_scaleHgh > 0))
			{
				pr = ▭
			}

			// Clear the screen
			SDL_RenderClear(m_render);

			// Copy the texture to the renderer
			ret = (SDL_RenderCopy(m_render, m_texture, nullptr, pr) == 0);

			if (ret)
			{
				SDL_RenderPresent(m_render);
			}
			else
			{
				cerr << SDL_GetError() << endl;
			}
		}
		else
		{
			cerr << SDL_GetError() << endl;
		}
	}

	return ret;
}
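draw() relies on m_win, m_render and m_texture having been created beforehand in XSDL::init(), which is not shown in this post. Below is a minimal sketch of what such an init() might look like, assuming the signature used by the caller in SdlQtRGB.cpp (width, height, format, optional native window handle) and the member names above; the Format parameter and error handling are simplified.

// Hypothetical sketch of XSDL::init() — the real class may differ.
// win_id is an optional native window handle (e.g. QLabel::winId());
// when it is null a standalone SDL window is created instead.
bool XSDL::init(int width, int height, Format fmt, void* win_id)
{
	m_width  = width;
	m_height = height;

	if (SDL_Init(SDL_INIT_VIDEO) != 0)
	{
		cerr << SDL_GetError() << endl;
		return false;
	}

	if (win_id)
	{
		// Render into the existing widget
		m_win = SDL_CreateWindowFrom(win_id);
	}
	else
	{
		m_win = SDL_CreateWindow("XSDL", SDL_WINDOWPOS_UNDEFINED, SDL_WINDOWPOS_UNDEFINED,
			width, height, SDL_WINDOW_OPENGL);
	}

	if (!m_win)
	{
		cerr << SDL_GetError() << endl;
		return false;
	}

	m_render = SDL_CreateRenderer(m_win, -1, SDL_RENDERER_ACCELERATED);

	// fmt is assumed to be YUV420P in this sketch; SDL_PIXELFORMAT_IYUV
	// matches FFmpeg's planar AV_PIX_FMT_YUV420P layout
	m_texture = SDL_CreateTexture(m_render, SDL_PIXELFORMAT_IYUV,
		SDL_TEXTUREACCESS_STREAMING, width, height);

	return (m_render && m_texture);
}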

SdlQtRGB.cpp

#include "SdlQtRGB.h"
#include "XVideoView.h"
#include <iostream>
#include <fstream>
#include <QMessageBox>

extern "C"
{
#include "libavcodec/avcodec.h"
}

using namespace std;

static AVFrame* frame = nullptr;
static int sdl_width = 0;
static int sdl_height = 0;
static int pixel_size = 2;
static ifstream yuv_file;
static XVideoView* view = nullptr;

SdlQtRGB::SdlQtRGB(QWidget *parent)
    : QWidget(parent)
{
    int r = 0;

    yuv_file.open("400_300_25.yuv", ios::in | ios::binary);

    if (!yuv_file)
    {
        QMessageBox::information(this, "information", "failed to open 400_300_25.yuv!");

        return;
    }

    ui.setupUi(this);
 
    sdl_width = 400;
    sdl_height = 300;

    ui.label->resize(400, 300);

    view = XVideoView::create();

    if (view)
    {    
        // Bind SDL to the QLabel's native window handle so the video is
        // rendered inside the Qt widget
        view->init(sdl_width, sdl_height, XVideoView::YUV420P, (void*)ui.label->winId());
    }
    
    frame = av_frame_alloc();

    frame->width = 400;
    frame->height = 300;
    frame->format = AV_PIX_FMT_YUV420P;

    
    /* YUV420P: every 2x2 block of Y samples shares one U and one V sample,
       so the U and V planes are half the width (and half the height) of Y.
          Y Y
           UV
          Y Y                                                              */
    frame->linesize[0] = sdl_width;      // Y plane: one byte per pixel
    frame->linesize[1] = sdl_width / 2;  // U plane: half width
    frame->linesize[2] = sdl_width / 2;  // V plane: half width

    /* linesize[] was filled in above, so av_frame_get_buffer() keeps these
       unpadded strides and only allocates the plane buffers; this matches
       the plain width*height reads in timerEvent(). */
    r = av_frame_get_buffer(frame, 0);

    if (r != 0)
    {
        char buf[1024] = {0};

        av_strerror(r, buf, sizeof(buf));

        cerr << buf << endl;
    }

    startTimer(10);  // timerEvent() fires roughly every 10 ms, i.e. about 100 frames per second
}

void SdlQtRGB::timerEvent(QTimerEvent* evt)
{
    /* The Y, U and V planes are stored separately in the AVFrame, so they are read one at a time */
    yuv_file.read((char*)frame->data[0], sdl_width * sdl_height);      // Y
    yuv_file.read((char*)frame->data[1], sdl_width * sdl_height / 4);  // U
    yuv_file.read((char*)frame->data[2], sdl_width * sdl_height / 4);  // V

    if (view)
    {
        view->drawAVFrame(frame);

        if (view->isExit())
        {
            view->close();
            delete view;

            exit(0);
        }
    }
}

void SdlQtRGB::resizeEvent(QResizeEvent* evt)
{
    ui.label->resize(size());
    ui.label->move(0, 0);
    // view->scale(width(), height());
}

SdlQtRGB::~SdlQtRGB()
{
    av_frame_free(&frame);
}
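Note that once the end of 400_300_25.yuv is reached, the reads in timerEvent() fail silently and the last frame simply stays on screen. If you want the clip to loop, one possible addition (not part of the original code) is to rewind the stream at the top of timerEvent():

    // Possible addition (not in the original): loop the clip by rewinding
    // the stream once the end of the file has been reached.
    if (yuv_file.peek() == EOF)
    {
        yuv_file.clear();             // reset the eof/fail bits
        yuv_file.seekg(0, ios::beg);  // back to the first frame
    }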

Test result

Running the program plays the 400×300 raw YUV clip back inside the Qt window, with timerEvent() pushing a new frame to SDL roughly every 10 ms.
