实时视频流通常是由安防设备负责推流,服务器端进行解析和转发,手机app播放视频流。
在开发服务器端和手机app时,为了方便调试码流和解决相关bug,我们在windows平台下利用
ffmpeg采集实时视频流,并对视频流进行H264编码后推流到视频流服务器。
在详细分析代码之前,先看运行效果:
下面先看main的代码:
int _tmain(int argc, char* argv[])
{
init_ffmpeg();
CCameraVideo* cVideo = new CCameraVideo(true);
//rtmp://192.168.3.94:1935/live/home
int frame_index = 0;
int64_t start_time = 0;
//SDL----------------------------
SDL_Window *screen = NULL;
SDL_Renderer* sdlRenderer=NULL;
SDL_Texture* sdlTexture = NULL;
SDL_Thread *video_tid = NULL;
SDL_Event event;
unsigned int sdlFlag = SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER;
if (SDL_Init(sdlFlag))
{
printf("Could not initialize SDL - %s\n", SDL_GetError());
return -1;
}
int screen_w = 1280;
int screen_h = 780;
//SDL 2.0 Support for multiple windows
AVCodecContext* pCodecCtx = cVideo->getInputCodecContext();
if (pCodecCtx)
{
screen_w = pCodecCtx->width / VIDEO_SCALE;
screen_h = pCodecCtx->height / VIDEO_SCALE;
}
screen = SDL_CreateWindow("Simplest ffmpeg player's Window", SDL_WINDOWPOS_UNDEFINED,
SDL_WINDOWPOS_UNDEFINED, screen_w, screen_h, SDL_WINDOW_OPENGL);
if (screen != NULL)
{
sdlRenderer = SDL_CreateRenderer(screen, -1, 0);
//IYUV: Y + U + V (3 planes)
//YV12: Y + V + U (3 planes)
sdlTexture = SDL_CreateTexture(sdlRenderer, SDL_PIXELFORMAT_IYUV, SDL_TEXTUREACCESS_STREAMING,
screen_w, screen_h);
SDL_SetWindowSize(screen, 1280, 1280.0*(float)screen_h / (float)screen_w);
SDL_SetWindowPosition(screen, 300, 100);
}
SDL_Rect sdlRectS1;
SDL_Rect rect;
rect.x = 0;
rect.y = 0;
rect.w = screen_w;
rect.h = screen_h;
SDL_Rect sdlRectS2;
SDL_Rect sdlRectD1;
SDL_Rect sdlRectD2;
SDL_Texture *myTexture;
thread_exit = 0;
thread_pause = 0;
unsigned int threadID = 0;
HANDLE aT