// This example demonstrates opening the local camera with FFmpeg 4.0.2, decoding its video and playing it back through SDL.
// (本代码实例演示通过FFmpeg 4.0.2打开本地摄像头,解码本地摄像头视频并通过SDL播放。)
/**
* 在Windows下可以使用2种方式读取摄像头数据:
* 1.VFW: Video for Windows 屏幕捕捉设备。注意输入URL是设备的序号,从0至9。
* 2.dshow: 使用Directshow,使用的时候需要改成自己电脑上摄像头设备的名称。
* 在Linux下则可以使用video4linux2读取摄像头设备。
*/
#include <iostream>
extern "C"
{
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libswscale/swscale.h>
#include <libavdevice/avdevice.h>
#include <libavfilter/avfilter.h>
#include <libavutil/imgutils.h>
#include <SDL.h>
#include <SDL_main.h>
};
#pragma comment(lib ,"SDL2.lib")
#pragma comment(lib ,"SDL2main.lib")
using namespace std;
//Refresh Event
// Custom SDL user events: SFM_REFRESH_EVENT asks the main loop to read,
// decode and render one frame; SFM_BREAK_EVENT tells it to leave the loop
// after the refresh thread has finished.
#define SFM_REFRESH_EVENT (SDL_USEREVENT + 1)
#define SFM_BREAK_EVENT (SDL_USEREVENT + 2)
// Flags shared between the refresh thread and the main (event) thread.
// NOTE(review): plain ints written from one thread and read from another are
// formally a data race — SDL_atomic_t or std::atomic<int> would be safer.
int thread_exit = 0;
int thread_pause = 0;
int sfp_refresh_thread(void *opaque)
{
thread_exit = 0;
thread_pause = 0;
while (!thread_exit)
{
if (!thread_pause)
{
SDL_Event event;
event.type = SFM_REFRESH_EVENT;
SDL_PushEvent(&event);
}
SDL_Delay(5);
}
thread_exit = 0;
thread_pause = 0;
//Break
SDL_Event event;
event.type = SFM_BREAK_EVENT;
SDL_PushEvent(&event);
return 0;
}
/***********************
 * Open the local camera and attach it to pFormatCtx.
 *
 * pFormatCtx  - pre-allocated context from avformat_alloc_context().
 *               NOTE(review): on failure avformat_open_input() frees the
 *               context and NULLs only its *local* pointer argument, so on a
 *               -1 return the caller's pointer is dangling and must not be
 *               reused.
 * isUseDshow  - Windows only: true = DirectShow (edit the device name below
 *               to match your camera), false = VFW (device index URL "0"-"9").
 * Returns 0 on success, -1 on failure.
 ***********************/
int OpenLocalCamera(AVFormatContext *pFormatCtx, bool isUseDshow = false)
{
	avdevice_register_all();
#ifdef _WIN32
	if (isUseDshow)
	{
		AVInputFormat *ifmt = av_find_input_format("dshow");
		//Set own video device's name
		if (avformat_open_input(&pFormatCtx, "video=Integrated Camera", ifmt, NULL) != 0)
		{
			printf("Couldn't open input stream.(无法打开输入流)\n");
			return -1;
		}
	}
	else
	{
		AVInputFormat *ifmt = av_find_input_format("vfwcap");
		if (avformat_open_input(&pFormatCtx, "0", ifmt, NULL) != 0)
		{
			printf("Couldn't open input stream.(无法打开输入流)\n");
			return -1;
		}
	}
#elif defined(__linux__) || defined(linux)
	// FIX: the original tested only "#ifdef linux"; the plain "linux" macro is
	// NOT predefined in strict ISO modes (e.g. -std=c++11), which silently
	// disabled this whole branch.  __linux__ is the portable spelling.
	(void)isUseDshow; // dshow/vfw selection has no meaning on Linux
	AVInputFormat *ifmt = av_find_input_format("video4linux2");
	if (avformat_open_input(&pFormatCtx, "/dev/video0", ifmt, NULL) != 0) {
		printf("Couldn't open input stream.(无法打开输入流)\n");
		return -1;
	}
#endif
	return 0;
}
/*
 * Feed one compressed packet to the decoder and drain every frame it yields,
 * converting each decoded picture into yuvFrame (YUV420P) through imgCtx.
 * On a decoder error the process exits — matching this demo's simple
 * error-handling style.
 */
static void decode(AVCodecContext *dec_ctx, AVPacket *pkt, AVFrame *pFrame, AVFrame *yuvFrame, struct SwsContext *imgCtx/*, FILE *outfile*/)
{
	// Submit the compressed data.
	int err = avcodec_send_packet(dec_ctx, pkt);
	if (err < 0)
	{
		fprintf(stderr, "Error submitting the packet to the decoder\n");
		exit(1);
	}
	// One packet may produce zero, one or several frames — drain them all.
	for (;;)
	{
		err = avcodec_receive_frame(dec_ctx, pFrame);
		if (err == AVERROR(EAGAIN) || err == AVERROR_EOF)
			return; // decoder needs more input, or has been fully flushed
		if (err < 0)
		{
			fprintf(stderr, "Error during decoding\n");
			exit(1);
		}
		// Convert the camera's native pixel format to YUV420P for SDL.
		sws_scale(imgCtx, pFrame->data, pFrame->linesize, 0, dec_ctx->height, yuvFrame->data, yuvFrame->linesize);
	}
}
int main(int argc, char* argv[])
{
avformat_network_init();
AVFormatContext *pFormatCtx = avformat_alloc_context();
pFormatCtx->probesize = 10000 * 1024;
pFormatCtx->duration = 10 * AV_TIME_BASE;
// 打开本地摄像头
OpenLocalCamera(pFormatCtx);
printf("---------------- File Information ---------------\n");
av_dump_format(pFormatCtx, 0, NULL, 0);
printf("-------------------------------------------------\n");
// 寻找视频流信息
if (avformat_find_stream_info(pFormatCtx, NULL) < 0)
{
printf("Couldn't find stream information.\n");
return -1;
}
// 打开视频获取视频流,设置视频默认索引值
int videoindex = -1;
for (int i = 0; i < pFormatCtx->nb_streams; i++)
{
if (pFormatCtx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO)
{
videoindex = i;
//break;
}
}
// 如果没有找到视频的索引说明没有视频流
if (videoindex == -1)
{
printf("Didn't find a video stream.\n");
return -1;
}
// 分配解码器上下文
AVCodecContext *pCodecCtx = avcodec_alloc_context3(NULL);
// 获取解码器上下文信息
if (avcodec_parameters_to_context(pCodecCtx, pFormatCtx->streams[videoindex]->codecpar) < 0)
{
cout << "Copy stream failed!" << endl;
return -1;
}
// 查找解码器
AVCodec *pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
if (pCodec == NULL)
{
printf("Codec not found.\n");
return -1;
}
// 打开解码器
if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0)
{
printf("Could not open codec.\n");
return -1;
}
// 对图形进行裁剪以便于显示得更好
struct SwsContext *img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height, AV_PIX_FMT_YUV420P, SWS_BICUBIC, NULL, NULL, NULL);
if (NULL == img_convert_ctx)
{
cout << "Get swscale context failed!" << endl;
return -1;
}
//SDL Init--------------------------------------------------------------
if (SDL_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER))
{
printf("Could not initialize SDL - %s\n", SDL_GetError());
return -1;
}
//SDL 2.0 Support for multiple windows
int screen_w = pCodecCtx->width;
int screen_h = pCodecCtx->height;
SDL_Window* screen = SDL_CreateWindow("FFmpegPlayer", SDL_WINDOWPOS_UNDEFINED, SDL_WINDOWPOS_UNDEFINED, screen_w, screen_h, SDL_WINDOW_OPENGL);
if (!screen)
{
printf("SDL: could not create window - exiting:%s\n", SDL_GetError());
return -1;
}
SDL_Renderer* sdlRenderer = SDL_CreateRenderer(screen, -1, 0);
//IYUV: Y + U + V (3 planes)
//YV12: Y + V + U (3 planes)
SDL_Texture* sdlTexture = SDL_CreateTexture(sdlRenderer, SDL_PIXELFORMAT_IYUV, SDL_TEXTUREACCESS_STREAMING, pCodecCtx->width, pCodecCtx->height);
SDL_Rect sdlRect;
sdlRect.x = 0;
sdlRect.y = 0;
sdlRect.w = screen_w;
sdlRect.h = screen_h;
AVPacket *packet = (AVPacket *)av_malloc(sizeof(AVPacket));
SDL_Thread *video_tid = SDL_CreateThread(sfp_refresh_thread, NULL, NULL);
AVFrame *pFrame = av_frame_alloc();
AVFrame *pFrameYUV = av_frame_alloc();
uint8_t *out_buffer = (uint8_t *)av_malloc(av_image_get_buffer_size(AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height, 1));
av_image_fill_arrays(pFrameYUV->data, pFrameYUV->linesize, out_buffer, AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height, 1);
//Event Loop
SDL_Event event;
for (;;)
{
//Wait
SDL_WaitEvent(&event);
if (event.type == SFM_REFRESH_EVENT)
{
//------------------------------
if (av_read_frame(pFormatCtx, packet) >= 0)
{
if (packet->stream_index == videoindex)
{
decode(pCodecCtx, packet, pFrame, pFrameYUV, img_convert_ctx/*, fp_yu*/);
//SDL---------------------------
SDL_UpdateTexture(sdlTexture, NULL, pFrameYUV->data[0], pFrameYUV->linesize[0]);
SDL_RenderClear(sdlRenderer);
//SDL_RenderCopy(sdlRenderer, sdlTexture, &sdlRect, &sdlRect);
SDL_RenderCopy(sdlRenderer, sdlTexture, NULL, NULL);
SDL_RenderPresent(sdlRenderer);
//SDL End-----------------------
}
av_packet_unref(packet);
}
else
{
//Exit Thread
thread_exit = 1;
}
}
else if (event.type == SDL_KEYDOWN)
{
//Pause
if (event.key.keysym.sym == SDLK_SPACE)
thread_pause = !thread_pause;
}
else if (event.type == SDL_QUIT)
{
thread_exit = 1;
}
else if (event.type == SFM_BREAK_EVENT)
{
break;
}
}
sws_freeContext(img_convert_ctx);
SDL_Quit();
//--------------
av_frame_free(&pFrameYUV);
av_frame_free(&pFrame);
avcodec_close(pCodecCtx);
avformat_close_input(&pFormatCtx);
return getchar();
}
// *【注】如果你觉得此文不错,可以考虑打赏我哦,您的打赏将是我更新的最大动力,非常感谢。(打赏也是基于自愿原则的哦( ̄︶ ̄))