#include <SDL.h>
#ifdef __cplusplus
extern "C" {
#endif
#include <libavformat/avformat.h>
#include <libavcodec/avcodec.h>
#include <libswscale/swscale.h>
#include <libavutil/mem.h>
#include <libavutil/imgutils.h>
#include <SDL.h>
#include <SDL_thread.h>
#ifdef __cplusplus
}
#endif
#pragma comment(lib,"avcodec.lib")
#pragma comment(lib,"avformat.lib")
#pragma comment(lib,"avutil.lib")
#pragma comment(lib,"avdevice.lib")
#pragma comment(lib,"avfilter.lib")
#pragma comment(lib,"postproc.lib")
#pragma comment(lib,"swresample.lib")
#pragma comment(lib,"swscale.lib")
#pragma comment(lib,"SDL2.lib")
int display_video(const char* inputfile_name) {
AVFormatContext *pFormatCtx = NULL;
int i, videoStream;
AVCodecContext *pCodecCtx = NULL;
AVCodec *pCodec = NULL;
AVFrame *pFrame = NULL;
AVPacket packet;
int frameFinished;
//float aspect_ratio;
AVDictionary *optionsDict = NULL;
struct SwsContext *sws_ctx = NULL;
SDL_Surface *screen = NULL;
SDL_Rect rect;
SDL_Event event;
// 窗口
SDL_Window *window = nullptr;
// 渲染上下文
SDL_Renderer *renderer = nullptr;
// 纹理(直接跟特定驱动程序相关的像素数据)
SDL_Texture *texture = nullptr;
// Register all formats and codecs.
av_register_all();
// Open video file.
if (avformat_open_input(&pFormatCtx, inputfile_name, NULL, NULL) != 0) {
return -1; // Couldn't open file.
}
// Retrieve stream information.
if (avformat_find_stream_info(pFormatCtx, NULL) < 0) {
return -1; // Couldn't find stream information.
}
// Dump information about file onto standard error.
av_dump_format(pFormatCtx, 0, inputfile_name, 0);
// Find the first video stream.
videoStream = -1;
for (i = 0; i < pFormatCtx->nb_streams; i++) {
if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) {
videoStream = i;
break;
}
}
if (videoStream == -1) {
return -1; // Didn't find a video stream.
}
// Get a pointer to the codec context for the video stream.
pCodecCtx = pFormatCtx->streams[videoStream]->codec;
// Find the decoder for the video stream.
pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
if (pCodec == NULL) {
fprintf(stderr, "Unsupported codec!\n");
return -1; // Codec not found.
}
// Open codec
if (avcodec_open2(pCodecCtx, pCodec, &optionsDict) < 0) {
return -1; // Could not open codec.
}
// Allocate video frame.
pFrame = av_frame_alloc();
// Make a screen to put our video.
if (SDL_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER)) {
fprintf(stderr, "Could not initialize SDL - %s\n", SDL_GetError());
exit(1);
}
// 创建窗口
window = SDL_CreateWindow("Display BMP", SDL_WINDOWPOS_UNDEFINED, SDL_WINDOWPOS_UNDEFINED, pCodecCtx->width, pCodecCtx->height, SDL_WINDOW_SHOWN);
if (!window) {
return -1;
}
// 创建渲染上下文
renderer = SDL_CreateRenderer(window, -1, SDL_RENDERER_ACCELERATED | SDL_RENDERER_PRESENTVSYNC);
if (!renderer) {
return -1;
}
//创建纹理
texture = SDL_CreateTexture(renderer, SDL_PIXELFORMAT_IYUV, SDL_TEXTUREACCESS_STREAMING, pCodecCtx->width, pCodecCtx->height);
if (!texture) {
SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "Couldn't set create texture: %s\n", SDL_GetError());
SDL_free(texture);
return -1;
}
// Allocate a place to put our YUV image on that screen.
//bmp = SDL_CreateYUVOverlay(pCodecCtx->width, pCodecCtx->height, SDL_YV12_OVERLAY, screen);
AVFrame *yuvFrame = av_frame_alloc();
int numBytes = av_image_get_buffer_size(AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height, 1);
uint8_t *buffer = (uint8_t *)av_malloc(numBytes * sizeof(uint8_t));
av_image_fill_arrays(yuvFrame->data, yuvFrame->linesize, buffer, AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height, 1);
sws_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height, AV_PIX_FMT_YUV420P, SWS_BILINEAR, NULL, NULL, NULL);
// Read frames and save first five frames to disk.
i = 0;
while (av_read_frame(pFormatCtx, &packet) >= 0) {
// Is this a packet from the video stream?.
if (packet.stream_index == videoStream) {
// Decode video frame.
avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet);
// Did we get a video frame?
if (frameFinished) {
//AVFrame pict;
//pict.data[0] = bmp->pixels[0];
//pict.data[1] = bmp->pixels[2];
//pict.data[2] = bmp->pixels[1];
//pict.linesize[0] = bmp->pitches[0];
//pict.linesize[1] = bmp->pitches[2];
//pict.linesize[2] = bmp->pitches[1];
// Convert the image into YUV format that SDL uses.
//sws_scale(sws_ctx, (uint8_t const * const *)pFrame->data, pFrame->linesize, 0, pCodecCtx->height, pict.data, pict.linesize);
sws_scale(sws_ctx, (uint8_t const * const *)pFrame->data, pFrame->linesize, 0, pCodecCtx->height, yuvFrame->data, yuvFrame->linesize);
rect.x = 0;
rect.y = 0;
rect.w = pCodecCtx->width;
rect.h = pCodecCtx->height;
//SDL_UpdateYUVTexture();
int iPitch = pCodecCtx->width * SDL_BYTESPERPIXEL(SDL_PIXELFORMAT_YV12);
SDL_UpdateTexture(texture, NULL, yuvFrame->data[0], *yuvFrame->linesize);
// 用绘制颜色清除渲染目标
SDL_RenderClear(renderer);
// 复制纹理到渲染目标(渲染目标默认是 window)
SDL_RenderCopy(renderer, texture, nullptr, nullptr);
// 更新所有的渲染操作到屏幕上
SDL_RenderPresent(renderer);
}
}
// Free the packet that was allocated by av_read_frame.
av_packet_unref(&packet);
SDL_PollEvent(&event);
switch (event.type) {
case SDL_QUIT:
printf("SDL_QUIT\n");
SDL_Quit();
exit(0);
break;
default:
break;
}
}
// Free the YUV frame.
av_free(pFrame);
// Close the codec.
avcodec_close(pCodecCtx);
// Close the video file.
avformat_close_input(&pFormatCtx);
SDL_DestroyRenderer(renderer);
return 0;
}
/*
 * Development environment: Visual Studio 2017
 * SDL2-2.0.20
 * ffmpeg-n4.4-latest-win64-gpl-shared-4.4
 */