在Linux下,捕获摄像头视频流有2种方案:一种是使用ffmpeg捕获流,另一种是使用v4l2接口捕获视频流。前者使用方法简单,但是有后遗症——并不是所有型号的摄像头都能够获取成功,这就是我写这篇文章的目的。
先介绍第一种方法。老规矩,直接粘贴代码(这里虽然是使用Qt代码格式,稍加变换,即可支持其他平台)。
头文件声明(PushVideoThread.h):
#ifndef PUSHVIDEOTHREAD_H
#define PUSHVIDEOTHREAD_H
#include <QThread>

/* Forward declarations so this header does not need the FFmpeg headers;
 * the members below are pointers, so incomplete types are sufficient.
 * The .cpp includes the real headers inside extern "C". */
struct AVOutputFormat;
struct AVFormatContext;
struct AVCodecContext;
struct AVStream;
struct AVCodec;

/**
 * Worker thread that captures frames from a V4L2 camera via FFmpeg,
 * re-encodes them to H.264 and pushes the result to an RTSP server.
 *
 * Usage: construct, then start() (QThread) — run() performs the whole
 * open/encode/push loop and releases its resources before returning.
 */
class PushVideoThread : public QThread
{
Q_OBJECT
public:
// BUG FIX: the original declared VideoPlayer()/~VideoPlayer(), which is
// ill-formed inside class PushVideoThread — renamed to match the class.
explicit PushVideoThread();
~PushVideoThread();
// QThread entry point: capture -> scale -> encode -> RTSP push loop.
void run();
// Opens /dev/video0 and sets up the decoder; returns true on success.
bool openCamera();
// Releases the input context and decoder opened by openCamera().
void closeCamera();
private:
int image_width = 0, image_height = 0;   // camera frame size (set by openCamera)
AVOutputFormat *ofmt = NULL;             // output muxer format (used for cleanup check)
AVFormatContext *ifmt_ctx = NULL;        // camera input context
AVFormatContext *ofmt_ctx = NULL;        // RTSP output context
AVCodecContext *iCodecCtx = NULL;        // decoder for the camera stream
AVCodecContext *oCodecCtx = NULL;        // H.264 encoder context
AVStream *video_st = NULL;               // output video stream
AVCodec *iCodec = NULL;                  // input decoder
AVCodec *oCodec = NULL;                  // output encoder
};
#endif // PUSHVIDEOTHREAD_H
源文件定义(PushVideoThread.cpp)
#include "PushVideoThread.h"
#include <QDebug>
extern "C"
{
#include "libavcodec/avdct.h"
#include "libavutil/opt.h"
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libswscale/swscale.h"
#include "libavdevice/avdevice.h"
#include "libavutil/pixfmt.h"
#include "libavutil/imgutils.h"
#include "libavutil/time.h"
}
/* Default constructor. Intentionally empty: all FFmpeg setup is deferred
 * to run()/openCamera() so construction itself cannot fail. */
PushVideoThread::PushVideoThread()
{
}
/* Destructor. Intentionally empty: resources are released at the end of
 * run() and inside closeCamera(), not here.
 * NOTE(review): nothing here stops or joins a still-running thread —
 * confirm callers wait for run() to finish before destroying the object. */
PushVideoThread::~PushVideoThread()
{
}
void PushVideoThread::run()
{
/*记录发送帧数*/
int frame_index = 0;
/*打开摄像头*/
if (!openCamera())
return;
/*注册ffmpeg库*/
//av_register_all();//该接口新版ffmpeg已经废弃不再使用了
avdevice_register_all();
avformat_network_init();
/*流媒体服务器推送字符串*/
const char *out_filename = "rtsp://192.168.137.213:8554/aaa";
/*分配并初始化一个输出媒体格式上下文*/
AVFormatContext *ofmt_ctx = NULL;
avformat_alloc_output_context2(&ofmt_ctx, NULL, "rtsp", out_filename);
if (!ofmt_ctx) {
qDebug() << "Could not create output context";
return ;
}
/*根据ID查找h264解码器*/
const AVCodec *oCodec = avcodec_find_encoder(AV_CODEC_ID_H264);
if (!oCodec) {
qDebug() << "Can not find encoder!";
return ;
}
/*定义输出图像格式*/
AVPixelFormat dstFormat = AV_PIX_FMT_YUV422P;
//AVPixelFormat dstFormat = AV_PIX_FMT_YUV420P;
/*分配并初始化一个结构体*/
AVCodecContext *oCodecCtx = avcodec_alloc_context3(oCodec);
oCodecCtx->pix_fmt = dstFormat;//输出图像格式
oCodecCtx->width = 1920;// 宽度
oCodecCtx->height = 1080;// 高度
oCodecCtx->time_base.num = 1; //
oCodecCtx->time_base.den = 25;// 时间基准,表示一帧或多个样本的持续时间。这里是25帧/秒
oCodecCtx->bit_rate = 8000000;// 比特率(以比特/秒为单位),这里改为8M
oCodecCtx->gop_size = 250;// 图像组(GOP)的大小
/*
* @AV_CODEC_FLAG_GLOBAL_HEADER 将全局头部信息放在extradata指针中,而不是每一个关键帧中
* @AV_CODEC_FLAG_LOW_DELAY 较低延迟
*/
if (ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER)
oCodecCtx->flags |= AV_CODEC_FLAG_GLOBAL_HEADER | AV_CODEC_FLAG_LOW_DELAY;
oCodecCtx->max_b_frames = 0; // B帧为0
/*编码器参数*/
AVDictionary *param = 0;
av_dict_set(¶m, "preset", "ultrafast", 0);/*编码速度*/
av_dict_set(¶m, "tune", "zerolatency", 0);/*减少编码延迟*/
/*打开编码器*/
if (avcodec_open2(oCodecCtx, oCodec, ¶m) < 0) {
qDebug() << "Failed to open encoder! (±àÂëÆ÷´ò¿ªÊ§°Ü£¡)";
return ;
}
/*创建一个流,这里指视频流*/
AVStream *video_st = avformat_new_stream(ofmt_ctx, oCodec);
if (video_st == NULL) {
return ;
}
/*打印输出流相详细信息*/
av_dump_format(ofmt_ctx, 0, out_filename, 1);
/*输出流时间基准*/
video_st->time_base.num = 1;
video_st->time_base.den = 25;
/*将编码器编码参数信息复制给视频流*/
avcodec_parameters_from_context(video_st->codecpar, oCodecCtx);
/*写入媒体头部信息*/
int ret = avformat_write_header(ofmt_ctx, NULL);
if (ret < 0) {
qDebug() << "Error occurred when opening output URL";
return ;
}
/*申请一个编码数据包*/
AVPacket *dec_packet = av_packet_alloc();
AVPacket *enc_packet = av_packet_alloc();
/*创建图像格式转换上下文*/
struct SwsContext *img_convert_ctx;
img_convert_ctx = sws_getContext(1920, 1080, AV_PIX_FMT_NV12, oCodecCtx->width, oCodecCtx->height, dstFormat, SWS_BICUBIC/*SWS_FAST_BILINEAR*/, NULL, NULL, NULL);
/*申请一个YUV图像帧*/
AVFrame *pFrameYUV = av_frame_alloc();
pFrameYUV->format = oCodecCtx->pix_fmt;// 图像格式
pFrameYUV->width = oCodecCtx->width;// 宽度
pFrameYUV->height = oCodecCtx->height;// 高度
/*给YUV图像帧分配内存*/
uint8_t *out_buffer;
out_buffer = (uint8_t *)av_malloc(av_image_get_buffer_size(dstFormat, oCodecCtx->width, oCodecCtx->height, 1));
av_image_fill_arrays(pFrameYUV->data, pFrameYUV->linesize, out_buffer, dstFormat, oCodecCtx->width, oCodecCtx->height, 1);
/*申请内存,存放摄像头图像数据*/
struct v4l2_plane *tmp_plane;
tmp_plane = (struct v4l2_plane *)calloc(num_planes, sizeof(*tmp_plane));
uint32_t pts = 0;
start_time = av_gettime();
for (;;)
{
// 获取图像帧数据
if (av_read_frame(ifmt_ctx, dec_packet) >= 0)
{
if (dec_packet->stream_index == videoindex)
{
pFrame = av_frame_alloc();
if (!pFrame) {
printf("alloc pFrame Failed.\n");
break;
}
// 将图像送给解码器
ret = avcodec_send_packet(iCodecCtx, dec_packet);
// 从解码器获取解码后的图像
int got_decpicture = avcodec_receive_frame(iCodecCtx, pFrame);
if (ret < 0)
{
av_frame_free(&pFrame);
printf("Decode Error.\n");
break;
}
// 这里得到一帧图像了
if (got_decpicture == 0)
{
// 转码成YUV格式(h264编码一般都是用的yuv图像格式)
ret = sws_scale(img_convert_ctx, (const unsigned char* const*)pFrame->data, pFrame->linesize, 0, out_picture_height/*iCodecCtx->height*/, pFrameYUV->data, pFrameYUV->linesize);
if ( ret < 0) {
av_frame_free(&pFrame);
char error_string[AV_ERROR_MAX_STRING_SIZE] = {0};
qDebug() << "sws_scale Error. >> " << av_make_error_string(error_string, AV_ERROR_MAX_STRING_SIZE, ret);
break ;
}
// 将YUV图像进行编码
ret = avcodec_send_frame(oCodecCtx, pFrameYUV);
if ( ret < 0) {
av_frame_free(&pFrame);
char error_string[AV_ERROR_MAX_STRING_SIZE] = {0};
qDebug() << "Encode Error. >> " << av_make_error_string(error_string, AV_ERROR_MAX_STRING_SIZE, ret);
break ;
}
AVRational time_base_in = ifmt_ctx->streams[videoindex]->time_base;//{ 1, 1000000 };
time_base_in = iCodecCtx->time_base;//{ 1, 1000 };
AVRational time_base_conert = { 1, AV_TIME_BASE };
pFrameYUV->pts = av_rescale_q(dec_packet->pts, time_base_in, time_base_conert);
// 获取编码后的图像
int got_encpicture = avcodec_receive_packet(oCodecCtx, enc_packet);
if (got_encpicture == 0)
{
if (enc_packet->pts == AV_NOPTS_VALUE)
{
/*时间戳信息*/
AVRational time_base1 = ifmt_ctx->streams[videoindex]->time_base;
int64_t calc_duration = (double)AV_TIME_BASE / av_q2d(ifmt_ctx->streams[videoindex]->r_frame_rate);
enc_packet->pts = (double)(frame_index*calc_duration) / (double)(av_q2d(time_base1)*AV_TIME_BASE);
enc_packet->dts = enc_packet->pts;
enc_packet->duration = (double)calc_duration / (double)(av_q2d(time_base1)*AV_TIME_BASE);
}
/*计算PTS*/
AVRational time_base = ofmt_ctx->streams[video_st->index]->time_base;//{ 1, 1000 };
AVRational r_framerate1 = ifmt_ctx->streams[videoindex]->r_frame_rate;// { 50, 2 };
AVRational time_base_q = { 1, AV_TIME_BASE };
int64_t calc_duration = (double)(AV_TIME_BASE)*(1 / av_q2d(r_framerate1)); //内部时间戳
enc_packet->pts = av_rescale_q(frame_index*calc_duration, time_base_q, time_base);
enc_packet->dts = enc_packet->pts;
enc_packet->duration = av_rescale_q(calc_duration, time_base_q, time_base); //(double)(calc_duration)*(double)(av_q2d(time_base_q)) / (double)(av_q2d(time_base));
enc_packet->pos = -1;
int64_t pts_time = av_rescale_q(enc_packet->dts, time_base, time_base_q);
int64_t now_time = av_gettime() - start_time;
if (pts_time > now_time)
av_usleep(pts_time - now_time);
/*数据包流索引*/
enc_packet->stream_index = video_st->index;
/*日志打印发送帧索引*/
qDebug() << "Send video frames index=" << frame_index++;
/*将编码好的数据发送出去*/
ret = av_interleaved_write_frame(ofmt_ctx, enc_packet);
// 显示帧索引信息
if (enc_packet->stream_index == videoindex) {
qDebug() << "Send %8d video frames index=" << frame_index;
}
}
av_packet_unref(enc_packet);
}
av_frame_free(&pFrame);
}
av_packet_unref(dec_packet);
}
}
/*关闭摄像头*/
closeCamera();
/*写入媒体尾部信息*/
av_write_trailer(ofmt_ctx);
/*释放资源*/
av_free(out_buffer);
av_free(enc_packet);
av_free(dec_packet);
if (video_st)
avcodec_close(oCodecCtx);
if (ofmt_ctx && !(ofmt->flags & AVFMT_NOFILE))
avio_close(ofmt_ctx->pb);
avformat_free_context(ofmt_ctx);
}
bool PushVideoThread::openCamera()
{
//char src_path[128]="/home/sky/Desktop/123/video-02.mp4";//推送本地文件
char src_path[128]="/dev/video0";//推送本地摄像头
// 查找输入设备格式
AVInputFormat *ifmt = av_find_input_format("v4l2");
// 打开摄像头
ifmt_ctx = avformat_alloc_context();
if (avformat_open_input(&ifmt_ctx, src_path, NULL, NULL) != 0)
{
qDebug() << "Couldn't open input stream.";
//char error_string[AV_ERROR_MAX_STRING_SIZE] = {0};
//printf("Failed to open encoder! (解码器打开失败) >> %s\n", av_make_error_string(error_string, AV_ERROR_MAX_STRING_SIZE, errnum));
return false;
}
// 查找设备流信息
if (avformat_find_stream_info(ifmt_ctx, NULL)<0)
{
qDebug() << "Couldn't find stream information.";
return false;
}
// 查找视频流索引
int videoindex = -1;
for (unsigned int i = 0; i<ifmt_ctx->nb_streams; i++)
{
if (ifmt_ctx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO)
{
videoindex = i;
break;
}
}
// 判断视频流是否存在
if (videoindex == -1)
{
qDebug() << "Couldn't find a video stream.";
return false;
}
// 申请解码器
iCodecCtx = avcodec_alloc_context3(NULL);
avcodec_parameters_to_context(iCodecCtx, ifmt_ctx->streams[videoindex]->codecpar);
// 根据输入设备视频流个是查找解码器
iCodec = avcodec_find_decoder(ifmt_ctx->streams[videoindex]->codecpar->codec_id);
if (iCodec == NULL)
{
qDebug() << "Codec not found.";
return false;
}
// 打开解码器
if (avcodec_open2(iCodecCtx, iCodec, NULL)<0)
{
qDebug() << "Could not open codec.";
return false;
}
// 打印输入设备信息
av_dump_format(ifmt_ctx, 0, src_path, 0);
image_width = ifmt_ctx->streams[videoindex]->codecpar->width;
image_heitht = ifmt_ctx->streams[videoindex]->codecpar->height;
return true;
}
void PushVideoThread::closeCamera()
{
avformat_close_input(&ifmt_ctx);
avformat_free_context(ifmt_ctx);
}