本篇文章介绍使用v4l2接口捕获摄像头图像数据进行ffmpeg转发。
头文件声明(PushVideoThread.h):
#ifndef PUSHVIDEOTHREAD_H
#define PUSHVIDEOTHREAD_H
#include <QThread>
#include <fcntl.h>
#include <unistd.h>
#include <sys/stat.h>
#include <sys/types.h>
#include <sys/ioctl.h>
#include <asm/types.h>
#include <linux/videodev2.h>
#include <sys/mman.h>
#include <errno.h>
/* Holds the userspace address returned by mmap() for one V4L2 plane. */
struct plane_start {
void * start;
};
/* Per-buffer bookkeeping: one entry per VIDIOC_REQBUFS buffer.
 * Both arrays have num_planes elements (allocated in openCamera). */
struct buffer {
struct plane_start* plane_start;   /* mmap'ed start address of each plane */
struct v4l2_plane* planes_buffer;  /* driver-side plane descriptors (length, offset) */
};
class PushVideoThread : public QThread
{
Q_OBJECT
public:
explicit VideoPlayer();
~VideoPlayer();
void run();
bool openCamera();
void closeCamera();
private:
int fd;
FILE *file_fd;
FILE *yuvfile_fd;
int num_planes;
struct v4l2_requestbuffers req;
struct buffer *buffers;
enum v4l2_buf_type type;
int image_width, image_height;
};
#endif // PUSHVIDEOTHREAD_H
源文件定义(PushVideoThread.cpp):
#include "PushVideoThread.h"
#include <QDebug>
extern "C"
{
#include "libavcodec/avdct.h"
#include "libavutil/opt.h"
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libswscale/swscale.h"
#include "libavdevice/avdevice.h"
#include "libavutil/pixfmt.h"
#include "libavutil/imgutils.h"
#include "libavutil/time.h"
}
/*
 * Constructor: initialise all members to safe defaults.
 * The original left every member indeterminate, so closeCamera()'s
 * fclose(file_fd) and free(buffers) operated on garbage pointers.
 */
PushVideoThread::PushVideoThread()
{
    fd = -1;
    file_fd = NULL;
    yuvfile_fd = NULL;
    num_planes = 0;
    buffers = NULL;
    image_width = 0;
    image_height = 0;
    memset(&req, 0, sizeof(req));
}
/* Destructor: nothing to release here — resources are torn down by
 * closeCamera(), called at the end of run(). */
PushVideoThread::~PushVideoThread()
{
}
void PushVideoThread::run()
{
/*记录发送帧数*/
int frame_index = 0;
/*打开摄像头*/
if (!openCamera())
return;
/*注册ffmpeg库*/
//av_register_all();//该接口新版ffmpeg已经废弃不再使用了
avdevice_register_all();
avformat_network_init();
/*流媒体服务器推送字符串*/
const char *out_filename = "rtsp://192.168.137.213:8554/aaa";
/*分配并初始化一个输出媒体格式上下文*/
AVFormatContext *ofmt_ctx = NULL;
avformat_alloc_output_context2(&ofmt_ctx, NULL, "rtsp", out_filename);
if (!ofmt_ctx) {
qDebug() << "Could not create output context";
return ;
}
/*根据ID查找h264解码器*/
const AVCodec *oCodec = avcodec_find_encoder(AV_CODEC_ID_H264);
if (!oCodec) {
qDebug() << "Can not find encoder!";
return ;
}
/*定义输出图像格式*/
AVPixelFormat dstFormat = AV_PIX_FMT_YUV422P;
//AVPixelFormat dstFormat = AV_PIX_FMT_YUV420P;
/*分配并初始化一个结构体*/
AVCodecContext *oCodecCtx = avcodec_alloc_context3(oCodec);
oCodecCtx->pix_fmt = dstFormat;//输出图像格式
oCodecCtx->width = image_width;// 宽度
oCodecCtx->height = image_height;// 高度
oCodecCtx->time_base.num = 1; //
oCodecCtx->time_base.den = 25;// 时间基准,表示一帧或多个样本的持续时间。这里是25帧/秒
oCodecCtx->bit_rate = 8000000;// 比特率(以比特/秒为单位),这里改为8M
oCodecCtx->gop_size = 250;// 图像组(GOP)的大小
/*
* @AV_CODEC_FLAG_GLOBAL_HEADER 将全局头部信息放在extradata指针中,而不是每一个关键帧中
* @AV_CODEC_FLAG_LOW_DELAY 较低延迟
*/
if (ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER)
oCodecCtx->flags |= AV_CODEC_FLAG_GLOBAL_HEADER | AV_CODEC_FLAG_LOW_DELAY;
oCodecCtx->max_b_frames = 0; // B帧为0
/*编码器参数*/
AVDictionary *param = 0;
av_dict_set(¶m, "preset", "ultrafast", 0);/*编码速度*/
av_dict_set(¶m, "tune", "zerolatency", 0);/*减少编码延迟*/
/*打开编码器*/
if (avcodec_open2(oCodecCtx, oCodec, ¶m) < 0) {
qDebug() << "Failed to open encoder! (±àÂëÆ÷´ò¿ªÊ§°Ü£¡)";
return ;
}
/*创建一个流,这里指视频流*/
AVStream *video_st = avformat_new_stream(ofmt_ctx, oCodec);
if (video_st == NULL) {
return ;
}
/*打印输出流相详细信息*/
av_dump_format(ofmt_ctx, 0, out_filename, 1);
/*输出流时间基准*/
video_st->time_base.num = 1;
video_st->time_base.den = 25;
/*将编码器编码参数信息复制给视频流*/
avcodec_parameters_from_context(video_st->codecpar, oCodecCtx);
/*写入媒体头部信息*/
int ret = avformat_write_header(ofmt_ctx, NULL);
if (ret < 0) {
qDebug() << "Error occurred when opening output URL";
return ;
}
/*申请一个编码数据包*/
AVPacket *enc_packet = av_packet_alloc();
/*创建图像格式转换上下文*/
struct SwsContext *img_convert_ctx;
img_convert_ctx = sws_getContext(image_width, image_height, AV_PIX_FMT_NV12, oCodecCtx->width, oCodecCtx->height, dstFormat, SWS_BICUBIC/*SWS_FAST_BILINEAR*/, NULL, NULL, NULL);
/*申请一个YUV图像帧*/
AVFrame *pFrameYUV = av_frame_alloc();
pFrameYUV->format = oCodecCtx->pix_fmt;// 图像格式
pFrameYUV->width = oCodecCtx->width;// 宽度
pFrameYUV->height = oCodecCtx->height;// 高度
/*给YUV图像帧分配内存*/
uint8_t *out_buffer;
out_buffer = (uint8_t *)av_malloc(av_image_get_buffer_size(dstFormat, oCodecCtx->width, oCodecCtx->height, 1));
av_image_fill_arrays(pFrameYUV->data, pFrameYUV->linesize, out_buffer, dstFormat, oCodecCtx->width, oCodecCtx->height, 1);
/*申请内存,存放摄像头图像数据*/
struct v4l2_plane *tmp_plane;
tmp_plane = (struct v4l2_plane *)calloc(num_planes, sizeof(*tmp_plane));
uint32_t pts = 0;
for (;;) {
fd_set fds;
FD_ZERO(&fds);
FD_SET(fd, &fds);
struct timeval tv;
tv.tv_sec = 0;
tv.tv_usec = 40000;/*20毫秒检查一次*/
/*监视摄像机是否准备好读取操作*/
r = select (fd + 1, &fds, NULL, NULL, &tv);
if (0 > r) {
/*发生错误*/
if (EINTR == errno)/*连接是正常的,继续接收吧*/
continue
break;
}
/*发生超时*/
if (0 == r)
continue;
/*初始化缓存信息*/
memset(&buf, 0, sizeof(buf));
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
buf.memory = V4L2_MEMORY_MMAP;
buf.m.planes = tmp_plane;
buf.length = num_planes;
/*读取摄像头缓冲区图像数据*/
if (ioctl (fd, VIDIOC_DQBUF, &buf) < 0)
printf("dqbuf fail\n");
for (int i = 0; i < num_planes; i++) {
/* 录制nv12数据,主要是检查原始图像数据是否正确
* fwrite(((buffers + buf.index)->plane_start + j)->start, (tmp_plane + j)->bytesused, 1, file_fd);
*/
/*
* 图像格式转换
* 返回值:图像高度
*/
uint8_t *data[AV_NUM_DATA_POINTERS] = {0};
data[0] = (uint8_t*)((buffers + buf.index)->plane_start + i)->start;
data[1] = (uint8_t*)((buffers + buf.index)->plane_start + i)->start + image_width * image_height;
int insize[AV_NUM_DATA_POINTERS] = {0};
insize[0] = image_width;
insize[1] = image_width;
ret = sws_scale(img_convert_ctx, data, insize, 0, image_height, pFrameYUV->data, pFrameYUV->linesize);
if (ret != image_height)
continue;
/* 录制转换后的图像数据,主要是检查转换后图像数据是否正确
* fwrite(pFrameYUV->data[0], y_size, 1, yuvfile_fd);
* fwrite(pFrameYUV->data[1], uv_size, 1, yuvfile_fd);
* fwrite(pFrameYUV->data[2], uv_size, 1, yuvfile_fd);
*/
pFrameYUV->pts = pts++;
/*将YUV数据发给编码器*/
ret = avcodec_send_frame(oCodecCtx, pFrameYUV);
if (ret < 0)
continue;
/*从编码器获取数据*/
int got_encpicture = avcodec_receive_packet(oCodecCtx, enc_packet);
if (got_encpicture == 0)
{
/*数据包流索引*/
enc_packet->stream_index = video_st->index;
/*日志打印发送帧索引*/
qDebug() << "Send video frames index=" << frame_index++;
/*将编码好的数据发送出去*/
ret = av_interleaved_write_frame(ofmt_ctx, enc_packet);
}
av_packet_unref(enc_packet);
}
/*重置缓冲区,方便下次使用*/
if (ioctl (fd, VIDIOC_QBUF, &buf) < 0)
printf("failture VIDIOC_QBUF\n");
}
free(tmp_plane);
/*关闭摄像头*/
closeCamera();
/*写入媒体尾部信息*/
av_write_trailer(ofmt_ctx);
/*释放资源*/
av_free(out_buffer);
av_free(enc_packet);
if (video_st)
avcodec_close(oCodecCtx);
if (ofmt_ctx && !(ofmt->flags & AVFMT_NOFILE))
avio_close(ofmt_ctx->pb);
avformat_free_context(ofmt_ctx);
}
bool PushVideoThread::openCamera()
{
/*打开摄像头*/
fd = open("/dev/video0", O_RDWR | O_NONBLOCK, 0);
if (fd < 0)
return false;
/*创建nv12文件*/
/*
file_fd = fopen("./nv12.data", "wb+");
if (!file_fd) {
printf("open save_file: %s fail\n", "./nv12.data");
close(fd);
return ret;
}
*/
/*创建yuv文件*/
/*
yuvfile_fd = fopen("./yuv.data", "wb+");
if (!file_fd) {
printf("open save_file: %s fail\n", "./yuv.data");
//¹Ø±ÕÉãÏñÍ·
close(fd);
return ret;
}
*/
/*查询摄像头获取能力*/
struct v4l2_capability cap;
if (ioctl(fd, VIDIOC_QUERYCAP, &cap) < 0) {
printf("Get video capability error!\n");
close(fd);
return false;
}
/*数据流类型*/
if (!(cap.device_caps & V4L2_CAP_VIDEO_CAPTURE_MPLANE) && !(cap.device_caps & V4L2_CAP_VIDEO_CAPTURE)) {
printf("Video device not support capture!\n");
close(fd);
return false;
}
if (cap.device_caps & V4L2_CAP_VIDEO_CAPTURE_MPLANE)
type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
else
type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
/*设置摄像头图像参数*/
struct v4l2_format fmt;
memset(&fmt, 0, sizeof(struct v4l2_format));
fmt.type = type;
fmt.fmt.pix_mp.width = 1920;
fmt.fmt.pix_mp.height = 1080;
fmt.fmt.pix_mp.pixelformat = V4L2_PIX_FMT_NV12;//视频捕获格式
fmt.fmt.pix_mp.field = V4L2_FIELD_ANY;
if (ioctl(fd, VIDIOC_S_FMT, &fmt) < 0) {
printf("Set format fail\n");
close(fd);
return false;
}
/*读取摄像头图像参数*/
/*
memset(&fmt, 0, sizeof(struct v4l2_format));
fmt.type = type;
if (ioctl(fd, VIDIOC_G_FMT, &fmt) < 0) {
printf("Set format fail\n");
goto err;
}
*/
image_width = fmt.fmt.pix_mp.width;
image_heitht = fmt.fmt.pix_mp.height;
/*请求视频缓冲区,这里定义了5个缓冲区*/
req.count = 5;
req.type = type;
req.memory = V4L2_MEMORY_MMAP;
if (ioctl(fd, VIDIOC_REQBUFS, &req) < 0) {
printf("Reqbufs fail\n");
close(fd);
return false;
}
/*图像数据内存映射*/
num_planes = fmt.fmt.pix_mp.num_planes;//视频捕获平面数
buffers = (struct buffer*)malloc(req.count * sizeof(*buffers));
for(int i = 0; i < req.count; i++) {
struct v4l2_buffer buf;
memset(&buf, 0, sizeof(buf));
struct v4l2_plane* planes_buffer = (struct v4l2_plane*)calloc(num_planes, sizeof(*planes_buffer));
struct plane_start* plane_start = (struct plane_start*)calloc(num_planes, sizeof(*plane_start));
memset(planes_buffer, 0, sizeof(*planes_buffer));
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;//缓冲区的类型
buf.memory = V4L2_MEMORY_MMAP;//缓冲区位置(内存映射、用户指针等)
buf.m.planes = planes_buffer;//对于多平面格式(如 YUV420),此数组描述了每个平面的详细信息
buf.length = num_planes;
buf.index = i;//缓冲区的索引号
/*查询视频设备上一个已请求(通过 VIDIOC_REQBUFS)的缓冲区的属性,主要用于查询上次申请是否成功*/
if (-1 == ioctl (fd, VIDIOC_QUERYBUF, &buf)) {
printf("Querybuf fail\n");
req.count = i;
closeCamera();
return false;
}
(buffers + i)->planes_buffer = planes_buffer;
(buffers + i)->plane_start = plane_start;
for(int j = 0; j < num_planes; j++) {
(plane_start + j)->start = mmap (NULL,
(planes_buffer + j)->length,
PROT_READ | PROT_WRITE,
MAP_SHARED,
fd,
(planes_buffer + j)->m.mem_offset);
if (MAP_FAILED == (plane_start +j)->start) {
printf ("mmap failed\n");
req.count = i;
closeCamera();
return false;
}
}
}
/*将一个空的视频缓冲区“入队”到视频捕获设备的输入队列中*/
for (int i = 0; i < req.count; ++i) {
memset(&buf, 0, sizeof(buf));
buf.type = type;
buf.memory = V4L2_MEMORY_MMAP;
buf.length = num_planes;
buf.index = i;
buf.m.planes = (buffers + i)->planes_buffer;
if (ioctl (fd, VIDIOC_QBUF, &buf) < 0)
printf ("VIDIOC_QBUF failed\n");
}
/*开始捕获*/
if (ioctl(fd, VIDIOC_STREAMON, &type) < 0)
printf ("VIDIOC_STREAMON failed\n");
return true;
}
void PushVideoThread::closeCamera()
{
/*停止捕获*/
if (ioctl(fd, VIDIOC_STREAMOFF, &type) < 0)
printf("VIDIOC_STREAMOFF fail\n");
/*取消内存映射*/
for (int i = 0; i < req.count; i++) {
for (int j = 0; j < num_planes; j++) {
if (MAP_FAILED != ((buffers + i)->plane_start + j)->start) {
if (-1 == munmap(((buffers + i)->plane_start + j)->start, ((buffers + i)->planes_buffer + j)->length))
printf ("munmap error\n");
}
}
}
/*是否缓冲区*/
for (int i = 0; i < req.count; i++) {
free((buffers + i)->planes_buffer);
free((buffers + i)->plane_start);
}
free(buffers);
fclose(file_fd);
/*关闭摄像头*/
close(fd);
}