V4L2 设备驱动框架向应用层提供了一套统一、标准的接口规范,应用程序按照该接口规范来进行应用 编程,从而使用摄像头。对于摄像头设备来说,其编程模式如下所示:
- 1. 首先是打开摄像头设备;
- 2. 查询设备的属性或功能;
- 3. 设置设备的参数,譬如像素格式、帧大小、帧率;
- 4. 申请帧缓冲、内存映射;
- 5. 帧缓冲入队;
- 6. 开启视频采集;
- 7. 帧缓冲出队、对采集的数据进行处理;
- 8. 处理完后,再次将帧缓冲入队,往复;
- 9. 结束采集
流程图如下所示:
从流程图中可以看到,几乎对摄像头的所有操作都是通过 ioctl() 来完成的,搭配不同的 V4L2 指令(request 参数)请求不同的操作。这些指令定义在头文件 linux/videodev2.h 中;在摄像头应用程序代码中需要包含该头文件,其中声明了很多与摄像头应用编程相关的数据结构以及宏定义,大家可以打开这个头文件看看。videodev2.h 中定义了很多 ioctl() 的指令,以宏定义的形式提供(VIDIOC_XXX),如下所示:
/*
* I O C T L C O D E S F O R V I D E O D E V I C E S
*
*/
#define VIDIOC_QUERYCAP _IOR('V', 0, struct v4l2_capability)
#define VIDIOC_ENUM_FMT _IOWR('V', 2, struct v4l2_fmtdesc)
#define VIDIOC_G_FMT _IOWR('V', 4, struct v4l2_format)
#define VIDIOC_S_FMT _IOWR('V', 5, struct v4l2_format)
#define VIDIOC_REQBUFS _IOWR('V', 8, struct v4l2_requestbuffers)
#define VIDIOC_QUERYBUF _IOWR('V', 9, struct v4l2_buffer)
#define VIDIOC_G_FBUF _IOR('V', 10, struct v4l2_framebuffer)
#define VIDIOC_S_FBUF _IOW('V', 11, struct v4l2_framebuffer)
#define VIDIOC_OVERLAY _IOW('V', 14, int)
#define VIDIOC_QBUF _IOWR('V', 15, struct v4l2_buffer)
#define VIDIOC_EXPBUF _IOWR('V', 16, struct v4l2_exportbuffer)
#define VIDIOC_DQBUF _IOWR('V', 17, struct v4l2_buffer)
#define VIDIOC_STREAMON _IOW('V', 18, int)
#define VIDIOC_STREAMOFF _IOW('V', 19, int)
#define VIDIOC_G_PARM _IOWR('V', 21, struct v4l2_streamparm)
#define VIDIOC_S_PARM _IOWR('V', 22, struct v4l2_streamparm)
#define VIDIOC_G_STD _IOR('V', 23, v4l2_std_id)
#define VIDIOC_S_STD _IOW('V', 24, v4l2_std_id)
#define VIDIOC_ENUMSTD _IOWR('V', 25, struct v4l2_standard)
#define VIDIOC_ENUMINPUT _IOWR('V', 26, struct v4l2_input)
#define VIDIOC_G_CTRL _IOWR('V', 27, struct v4l2_control)
#define VIDIOC_S_CTRL _IOWR('V', 28, struct v4l2_control)
#define VIDIOC_G_TUNER _IOWR('V', 29, struct v4l2_tuner)
#define VIDIOC_S_TUNER _IOW('V', 30, struct v4l2_tuner)
#define VIDIOC_G_AUDIO _IOR('V', 33, struct v4l2_audio)
#define VIDIOC_S_AUDIO _IOW('V', 34, struct v4l2_audio)
#define VIDIOC_QUERYCTRL _IOWR('V', 36, struct v4l2_queryctrl)
#define VIDIOC_QUERYMENU _IOWR('V', 37, struct v4l2_querymenu)
#define VIDIOC_G_INPUT _IOR('V', 38, int)
#define VIDIOC_S_INPUT _IOWR('V', 39, int)
#define VIDIOC_G_EDID _IOWR('V', 40, struct v4l2_edid)
#define VIDIOC_S_EDID _IOWR('V', 41, struct v4l2_edid)
#define VIDIOC_G_OUTPUT _IOR('V', 46, int)
#define VIDIOC_S_OUTPUT _IOWR('V', 47, int)
#define VIDIOC_ENUMOUTPUT _IOWR('V', 48, struct v4l2_output)
#define VIDIOC_G_AUDOUT _IOR('V', 49, struct v4l2_audioout)
#define VIDIOC_S_AUDOUT _IOW('V', 50, struct v4l2_audioout)
#define VIDIOC_G_MODULATOR _IOWR('V', 54, struct v4l2_modulator)
#define VIDIOC_S_MODULATOR _IOW('V', 55, struct v4l2_modulator)
#define VIDIOC_G_FREQUENCY _IOWR('V', 56, struct v4l2_frequency)
#define VIDIOC_S_FREQUENCY _IOW('V', 57, struct v4l2_frequency)
#define VIDIOC_CROPCAP _IOWR('V', 58, struct v4l2_cropcap)
#define VIDIOC_G_CROP _IOWR('V', 59, struct v4l2_crop)
#define VIDIOC_S_CROP _IOW('V', 60, struct v4l2_crop)
#define VIDIOC_G_JPEGCOMP _IOR('V', 61, struct v4l2_jpegcompression)
#define VIDIOC_S_JPEGCOMP _IOW('V', 62, struct v4l2_jpegcompression)
#define VIDIOC_QUERYSTD _IOR('V', 63, v4l2_std_id)
#define VIDIOC_TRY_FMT _IOWR('V', 64, struct v4l2_format)
#define VIDIOC_ENUMAUDIO _IOWR('V', 65, struct v4l2_audio)
#define VIDIOC_ENUMAUDOUT _IOWR('V', 66, struct v4l2_audioout)
#define VIDIOC_G_PRIORITY _IOR('V', 67, __u32) /* enum v4l2_priority */
#define VIDIOC_S_PRIORITY _IOW('V', 68, __u32) /* enum v4l2_priority */
#define VIDIOC_G_SLICED_VBI_CAP _IOWR('V', 69, struct v4l2_sliced_vbi_cap)
#define VIDIOC_LOG_STATUS _IO('V', 70)
#define VIDIOC_G_EXT_CTRLS _IOWR('V', 71, struct v4l2_ext_controls)
#define VIDIOC_S_EXT_CTRLS _IOWR('V', 72, struct v4l2_ext_controls)
#define VIDIOC_TRY_EXT_CTRLS _IOWR('V', 73, struct v4l2_ext_controls)
#define VIDIOC_ENUM_FRAMESIZES _IOWR('V', 74, struct v4l2_frmsizeenum)
#define VIDIOC_ENUM_FRAMEINTERVALS _IOWR('V', 75, struct v4l2_frmivalenum)
#define VIDIOC_G_ENC_INDEX _IOR('V', 76, struct v4l2_enc_idx)
#define VIDIOC_ENCODER_CMD _IOWR('V', 77, struct v4l2_encoder_cmd)
#define VIDIOC_TRY_ENCODER_CMD _IOWR('V', 78, struct v4l2_encoder_cmd)
/*
* Experimental, meant for debugging, testing and internal use.
* Only implemented if CONFIG_VIDEO_ADV_DEBUG is defined.
* You must be root to use these ioctls. Never use these in applications!
*/
#define VIDIOC_DBG_S_REGISTER _IOW('V', 79, struct v4l2_dbg_register)
#define VIDIOC_DBG_G_REGISTER _IOWR('V', 80, struct v4l2_dbg_register)
#define VIDIOC_S_HW_FREQ_SEEK _IOW('V', 82, struct v4l2_hw_freq_seek)
#define VIDIOC_S_DV_TIMINGS _IOWR('V', 87, struct v4l2_dv_timings)
#define VIDIOC_G_DV_TIMINGS _IOWR('V', 88, struct v4l2_dv_timings)
#define VIDIOC_DQEVENT _IOR('V', 89, struct v4l2_event)
#define VIDIOC_SUBSCRIBE_EVENT _IOW('V', 90, struct v4l2_event_subscription)
#define VIDIOC_UNSUBSCRIBE_EVENT _IOW('V', 91, struct v4l2_event_subscription)
#define VIDIOC_CREATE_BUFS _IOWR('V', 92, struct v4l2_create_buffers)
#define VIDIOC_PREPARE_BUF _IOWR('V', 93, struct v4l2_buffer)
#define VIDIOC_G_SELECTION _IOWR('V', 94, struct v4l2_selection)
#define VIDIOC_S_SELECTION _IOWR('V', 95, struct v4l2_selection)
#define VIDIOC_DECODER_CMD _IOWR('V', 96, struct v4l2_decoder_cmd)
#define VIDIOC_TRY_DECODER_CMD _IOWR('V', 97, struct v4l2_decoder_cmd)
#define VIDIOC_ENUM_DV_TIMINGS _IOWR('V', 98, struct v4l2_enum_dv_timings)
#define VIDIOC_QUERY_DV_TIMINGS _IOR('V', 99, struct v4l2_dv_timings)
#define VIDIOC_DV_TIMINGS_CAP _IOWR('V', 100, struct v4l2_dv_timings_cap)
#define VIDIOC_ENUM_FREQ_BANDS _IOWR('V', 101, struct v4l2_frequency_band)
/*
* Experimental, meant for debugging, testing and internal use.
* Never use this in applications!
*/
#define VIDIOC_DBG_G_CHIP_INFO _IOWR('V', 102, struct v4l2_dbg_chip_info)
#define VIDIOC_QUERY_EXT_CTRL _IOWR('V', 103, struct v4l2_query_ext_ctrl)
每一个不同的指令宏就表示向设备请求不同的操作。从上面可以看到,每一个宏后面(_IOWR/_IOR/_IOW)还携带了一个 struct 数据结构体,譬如 struct v4l2_capability、struct v4l2_fmtdesc,这就是调用 ioctl() 时需要传入的第三个参数的类型;调用 ioctl() 前,先定义一个该类型的变量,调用 ioctl() 时将该变量的指针作为 ioctl() 的第三个参数传入,譬如:
struct v4l2_capability cap;
……
ioctl(fd, VIDIOC_QUERYCAP, &cap);
在实际的应用编程中,并不是所有的指令都会用到,针对视频采集类设备,以下笔者列出了一些常用的 指令:
上面说完来点实际代码:
一.打开摄像头
/*
 * Open the camera device and verify it supports video capture (step 1).
 * Returns 0 on success, -1 on failure. Stores the open fd in the
 * file-scope `fd`.
 */
static int vidioc_init(const char *path)
{
    // 1. Open the camera device
    fd = open(path, O_RDWR);
    if (fd < 0)
    {
        perror("open error");
        return -1;
    }
    // Check that this is actually a video capture device
    struct v4l2_capability vcap;
    if (ioctl(fd, VIDIOC_QUERYCAP, &vcap) < 0)
    {
        perror("VIDIOC_QUERYCAP error"); /* ioctl failed: errno is meaningful here */
        close(fd);
        return -1;
    }
    if (!(V4L2_CAP_VIDEO_CAPTURE & vcap.capabilities))
    {
        /* Not an errno failure, so report with fprintf rather than perror
         * (perror would append an unrelated errno message). */
        fprintf(stderr, "Error: No capture video device!\n");
        close(fd);
        return -1;
    }
    return 0;
}
这里我封装成了函数,方便后期修改。
二.查询设备的属性/能力/功能
/*
 * Enumerate and print every pixel format the capture device supports (step 2).
 */
void v4l2_print_formats(void)
{
    struct v4l2_fmtdesc v4fmt;
    memset(&v4fmt, 0, sizeof(v4fmt)); /* reserved fields must be zero before the ioctl */
    v4fmt.index = 0;
    v4fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; // enumerate the capture stream
    /* VIDIOC_ENUM_FMT fails with EINVAL once index goes past the last format. */
    while (ioctl(fd, VIDIOC_ENUM_FMT, &v4fmt) == 0)
    {
        printf("v4l2 index:%d\r\n", v4fmt.index);
        printf("v4l2 description:%s\r\n", v4fmt.description);
        /* pixelformat is a fourcc code: print it as four ASCII characters */
        unsigned char *p = (unsigned char *)&v4fmt.pixelformat;
        printf("pixelformat:%c%c%c%c\r\n", p[0], p[1], p[2], p[3]);
        /* reserved is a __u32 array; the original passed the whole array to %d,
         * which is undefined behavior -- print the first element instead. */
        printf("reserved:%u\r\n", v4fmt.reserved[0]);
        v4fmt.index++;
        printf("\r\n");
    }
}
通过第二点就可以知道购买的摄像头支持什么格式,例如:
从上面可知摄像头支持RGB565,JPEG,YUYV格式
三.配置摄像头参数
/*
 * Configure the capture format (YUYV at width x height) and, if the driver
 * supports it, a 30 fps frame rate (step 3). Returns 0 on success, -1 on error.
 */
static int v4l2_set_format(void)
{
    struct v4l2_format fmt;
    struct v4l2_streamparm streamparm = {0};
    int val = 0;
    memset(&fmt, 0, sizeof(fmt));                /* do not hand stack garbage to the driver */
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;      // capture stream
    fmt.fmt.pix.width = width;                   // frame width
    fmt.fmt.pix.height = height;                 // frame height
    fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV; // request YUYV
    fmt.fmt.pix.field = V4L2_FIELD_ANY;          /* let the driver pick the field order */
    val = ioctl(fd, VIDIOC_S_FMT, &fmt);
    if (val < 0)
    {
        perror("fmt error");
        close(fd);
        return -1;
    }
    /* S_FMT may silently substitute another format: verify YUYV was accepted.
     * (The original error message wrongly mentioned JPEG.) */
    if (V4L2_PIX_FMT_YUYV != fmt.fmt.pix.pixelformat)
    {
        fprintf(stderr, "error: the device does not support V4L2_PIX_FMT_YUYV\n");
        close(fd);
        return -1;
    }
    // Read the format back to confirm what was actually applied
    memset(&fmt, 0, sizeof(fmt));
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    val = ioctl(fd, VIDIOC_G_FMT, &fmt);
    if (val < 0)
    {
        fprintf(stderr, "get vidioc get fmt error");
        close(fd);
        return -1;
    }
    printf("########################################\r\n");
    printf("type:%d\r\n", fmt.type);
    printf("width:%d\r\n", fmt.fmt.pix.width);
    printf("height:%d\r\n", fmt.fmt.pix.height);
    /* Query the stream parameters to learn whether the frame rate is settable */
    streamparm.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (ioctl(fd, VIDIOC_G_PARM, &streamparm) < 0)
    {
        fprintf(stderr, "ioctl error: VIDIOC_G_PARM: %s\n", strerror(errno));
        return -1;
    }
    /* Only set the frame rate if the driver advertises that capability */
    if (V4L2_CAP_TIMEPERFRAME & streamparm.parm.capture.capability)
    {
        streamparm.parm.capture.timeperframe.numerator = 1;
        streamparm.parm.capture.timeperframe.denominator = 30; // 30 fps
        if (0 > ioctl(fd, VIDIOC_S_PARM, &streamparm))
        {
            fprintf(stderr, "ioctl error: VIDIOC_S_PARM: %s\n", strerror(errno));
            return -1;
        }
    }
    return 0;
}
四.申请队列缓存区
/*
 * Request 3 mmap-able frame buffers from the driver (step 4).
 * Returns 0 on success, -1 on error.
 */
static int v4l2_init_buffer(void)
{
    struct v4l2_requestbuffers reqbuf;
    int val = 0;
    memset(&reqbuf, 0, sizeof(reqbuf)); /* reserved fields must be zeroed */
    reqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    reqbuf.count = 3;                 // ask for 3 buffers
    reqbuf.memory = V4L2_MEMORY_MMAP; // memory-mapped I/O
    val = ioctl(fd, VIDIOC_REQBUFS, &reqbuf);
    if (val < 0)
    {
        fprintf(stderr, "VIDIOC_REQBUFS error");
        close(fd);
        return -1;
    }
    return 0;
}
五.入队,将帧缓冲映射到进程地址空间
/*
 * mmap each of the 3 driver buffers into this process and enqueue it (step 5).
 * Fills buf_infos[] with the mapped address and length of each buffer.
 * Returns 0 on success, -1 on the first failure (the original kept going
 * with an unmapped buffer, which would crash later).
 */
static int v4l2_init_buf(void)
{
    struct v4l2_buffer mapbuf;
    int i = 0;
    int val = 0;
    memset(&mapbuf, 0, sizeof(mapbuf)); /* clear reserved fields before use */
    mapbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    mapbuf.memory = V4L2_MEMORY_MMAP;
    for (i = 0; i < 3; i++)
    {
        mapbuf.index = i;
        /* Query buffer i: the driver fills in its length and mmap offset */
        val = ioctl(fd, VIDIOC_QUERYBUF, &mapbuf);
        if (val < 0)
        {
            perror("VIDIOC_QUERYBUF error");
            return -1;
        }
        /* Map the kernel buffer into user space */
        buf_infos[mapbuf.index].start = (unsigned short *)mmap(NULL, mapbuf.length, PROT_READ | PROT_WRITE,
                                                               MAP_SHARED, fd, mapbuf.m.offset);
        if (MAP_FAILED == (void *)buf_infos[mapbuf.index].start)
        {
            perror("mmap error");
            return -1;
        }
        buf_infos[mapbuf.index].length = mapbuf.length;
        /* Enqueue the buffer so the driver can fill it once streaming starts */
        val = ioctl(fd, VIDIOC_QBUF, &mapbuf);
        if (val < 0)
        {
            perror("VIDIOC_QBUF error");
            return -1;
        }
    }
    return 0;
}
六.开始采集
/*
 * Start streaming I/O on the capture queue (step 6).
 * Returns 0 on success, -1 on failure.
 */
static int v4l2_stream_on(void)
{
    enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (ioctl(fd, VIDIOC_STREAMON, &type) < 0)
    {
        perror("get VIDIOC_STREAMON error");
        return -1;
    }
    return 0;
}
七.读取数据:出队
/*
 * Capture loop (steps 7-8): dequeue each filled buffer, display it on the
 * LCD, then re-queue it. Runs forever; returns -1 only if an ioctl fails.
 * The original declared an unused ~4.9 MB VLA (width*height*4 shorts) on
 * the stack -- a stack-overflow hazard -- which has been removed.
 */
static int v4l2_read_data(void)
{
    buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    buf.memory = V4L2_MEMORY_MMAP;
    while (1)
    {
        for (buf.index = 0; buf.index < 3; buf.index++)
        {
            // 7. Dequeue a filled frame
            if (ioctl(fd, VIDIOC_DQBUF, &buf) < 0)
            {
                perror("VIDIOC_DQBUF error");
                return -1;
            }
            /* Frames are displayed raw; a YUYV->RGB conversion step could be
             * inserted here if the capture format is not RGB565. */
            lcd_show_rgb(buf_infos[buf.index].start, width, height);
            // 8. Processing done: hand the buffer back to the driver
            if (ioctl(fd, VIDIOC_QBUF, &buf) < 0)
            {
                perror("VIDIOC_QBUF error");
                return -1;
            }
        }
    }
    return 0;
}
九.停止采集
/*
 * Stop streaming on the capture queue (step 9).
 * Returns 0 on success, -1 on failure.
 */
static int v4l2_stream_off(void)
{
    enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (ioctl(fd, VIDIOC_STREAMOFF, &type) < 0)
    {
        perror("VIDIOC_STREAMOFF error");
        return -1;
    }
    return 0;
}
最后,解除内存映射并关闭文件:
// Unmap the three camera frame buffers
int i = 0;
for (i = 0; i < 3; i++)
{
munmap(buf_infos[i].start, buf_infos[i].length);
}
// munmap(screen_base, screen_size);
close(fd);
close(lcd_fd);
完整代码如下:
#include <stdio.h>
#include <stdlib.h>
#include <sys/types.h>
#include <sys/stat.h>
#include <fcntl.h>
#include <unistd.h>
#include <sys/ioctl.h>
#include <string.h>
#include <errno.h>
#include <sys/mman.h>
#include <linux/videodev2.h>
#include <linux/fb.h>
int lcd_fd = -1;
unsigned int width = 1024, height = 600;
static unsigned int *screen_base = NULL; // LCD framebuffer base address (set by lcd_init via mmap)
//
/*** Describes one mmap'd camera frame buffer ***/
typedef struct cam_buf_info
{
unsigned short *start; // start address of the mapped frame buffer
unsigned long length; // length of the frame buffer in bytes
} cam_buf_info;
int fd = -1;
struct v4l2_buffer buf;
unsigned int map_lenght[3]; // NOTE(review): unused (and misspelled) -- candidate for removal
static cam_buf_info buf_infos[3];
/*
 * Open /dev/fb0, read its geometry, mmap the framebuffer and clear it to
 * white. Also overwrites the global `width`/`height` with the LCD
 * resolution so capture matches the display. Returns 0 on success, -1 on error.
 */
static int lcd_init(void)
{
    struct fb_var_screeninfo fb_var = {0};
    struct fb_fix_screeninfo fb_fix = {0};
    unsigned long screen_size;
    // Open the framebuffer device
    lcd_fd = open("/dev/fb0", O_RDWR);
    if (lcd_fd < 0)
    {
        perror("lcd open error");
        return -1;
    }
    /* Query variable (resolution) and fixed (line length) framebuffer info;
     * the original ignored these return values. */
    if (ioctl(lcd_fd, FBIOGET_VSCREENINFO, &fb_var) < 0 ||
        ioctl(lcd_fd, FBIOGET_FSCREENINFO, &fb_fix) < 0)
    {
        perror("FBIOGET screeninfo error");
        close(lcd_fd);
        return -1;
    }
    screen_size = fb_fix.line_length * fb_var.yres;
    width = fb_var.xres;  /* capture size follows the LCD resolution */
    height = fb_var.yres;
    /* Map the framebuffer into this process */
    screen_base = (unsigned int *)mmap(NULL, screen_size, PROT_READ | PROT_WRITE, MAP_SHARED, lcd_fd, 0);
    if (MAP_FAILED == (void *)screen_base)
    {
        perror("mmap error");
        close(lcd_fd);
        return -1;
    }
    /* Paint the whole screen white */
    memset(screen_base, 0xff, screen_size);
    return 0;
}
/*
 * Copy an RGB565 frame of w x h pixels into the LCD framebuffer, one row
 * at a time (the LCD line may be wider than the frame).
 */
void lcd_show_rgb(unsigned short *buf, int w, int h)
{
    unsigned short *src = buf;
    unsigned short *dst = (unsigned short *)screen_base;
    int row;
    for (row = 0; row < h; row++)
    {
        memcpy(dst, src, w * 2); // one RGB565 pixel occupies two bytes
        dst += width;            // advance one full LCD line
        src += w;                // advance one source line
    }
}
/*
 * Convert packed YUYV to RGB: each 4-byte YUYV macropixel [Y0 U0 Y1 V1]
 * yields two RGB pixels, written as six 16-bit slots of rgbdata
 * (r1,g1,b1,r2,g2,b2 -- each component holds an 8-bit value).
 * Uses BT.601-style coefficients with clamping to [0,255].
 */
void yuyv_to_rgb(unsigned short *yuyvdata, unsigned short *rgbdata, int w, int h)
{
    int r1, g1, b1;
    int r2, g2, b2;
    int i;
    for (i = 0; i < w * h / 2; i++)
    {
        unsigned char data[4]; /* unsigned: raw bytes, avoids signed-char surprises */
        /* One macropixel is 4 bytes == 2 shorts, so advance i*2 shorts.
         * The original used i*4 (8 bytes), which skipped every other
         * macropixel and read past the end of the input buffer. */
        memcpy(data, yuyvdata + i * 2, 4);
        // Y0 U0 Y1 V1 --> [Y0 U0 V1] [Y1 U0 V1]
        unsigned char Y0 = data[0];
        unsigned char U0 = data[1];
        unsigned char Y1 = data[2];
        unsigned char V1 = data[3];
        r1 = Y0 + 1.4075 * (V1 - 128);
        if (r1 > 255)
            r1 = 255;
        if (r1 < 0)
            r1 = 0;
        g1 = Y0 - 0.3455 * (U0 - 128) - 0.7169 * (V1 - 128);
        if (g1 > 255)
            g1 = 255;
        if (g1 < 0)
            g1 = 0;
        b1 = Y0 + 1.779 * (U0 - 128);
        if (b1 > 255)
            b1 = 255;
        if (b1 < 0)
            b1 = 0;
        r2 = Y1 + 1.4075 * (V1 - 128);
        if (r2 > 255)
            r2 = 255;
        if (r2 < 0)
            r2 = 0;
        g2 = Y1 - 0.3455 * (U0 - 128) - 0.7169 * (V1 - 128);
        if (g2 > 255)
            g2 = 255;
        if (g2 < 0)
            g2 = 0;
        b2 = Y1 + 1.779 * (U0 - 128);
        if (b2 > 255)
            b2 = 255;
        if (b2 < 0)
            b2 = 0;
        /* Two output pixels per macropixel: 6 component slots */
        rgbdata[i * 6 + 0] = r1;
        rgbdata[i * 6 + 1] = g1;
        rgbdata[i * 6 + 2] = b1;
        rgbdata[i * 6 + 3] = r2;
        rgbdata[i * 6 + 4] = g2;
        rgbdata[i * 6 + 5] = b2;
    }
}
/*
 * Open the camera device and verify it supports video capture (step 1).
 * Returns 0 on success, -1 on failure. Stores the open fd in the
 * file-scope `fd`.
 */
static int vidioc_init(const char *path)
{
    // 1. Open the camera device
    fd = open(path, O_RDWR);
    if (fd < 0)
    {
        perror("open error");
        return -1;
    }
    // Check that this is actually a video capture device
    struct v4l2_capability vcap;
    if (ioctl(fd, VIDIOC_QUERYCAP, &vcap) < 0)
    {
        perror("VIDIOC_QUERYCAP error"); /* ioctl failed: errno is meaningful here */
        close(fd);
        return -1;
    }
    if (!(V4L2_CAP_VIDEO_CAPTURE & vcap.capabilities))
    {
        /* Not an errno failure, so report with fprintf rather than perror
         * (perror would append an unrelated errno message). */
        fprintf(stderr, "Error: No capture video device!\n");
        close(fd);
        return -1;
    }
    return 0;
}
/*
 * Enumerate and print every pixel format the capture device supports (step 2).
 */
void v4l2_print_formats(void)
{
    struct v4l2_fmtdesc v4fmt;
    memset(&v4fmt, 0, sizeof(v4fmt)); /* reserved fields must be zero before the ioctl */
    v4fmt.index = 0;
    v4fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; // enumerate the capture stream
    /* VIDIOC_ENUM_FMT fails with EINVAL once index goes past the last format. */
    while (ioctl(fd, VIDIOC_ENUM_FMT, &v4fmt) == 0)
    {
        printf("v4l2 index:%d\r\n", v4fmt.index);
        printf("v4l2 description:%s\r\n", v4fmt.description);
        /* pixelformat is a fourcc code: print it as four ASCII characters */
        unsigned char *p = (unsigned char *)&v4fmt.pixelformat;
        printf("pixelformat:%c%c%c%c\r\n", p[0], p[1], p[2], p[3]);
        /* reserved is a __u32 array; the original passed the whole array to %d,
         * which is undefined behavior -- print the first element instead. */
        printf("reserved:%u\r\n", v4fmt.reserved[0]);
        v4fmt.index++;
        printf("\r\n");
    }
}
/*
 * Configure the capture format (YUYV at width x height) and, if the driver
 * supports it, a 30 fps frame rate (step 3). Returns 0 on success, -1 on error.
 */
static int v4l2_set_format(void)
{
    struct v4l2_format fmt;
    struct v4l2_streamparm streamparm = {0};
    int val = 0;
    memset(&fmt, 0, sizeof(fmt));                /* do not hand stack garbage to the driver */
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;      // capture stream
    fmt.fmt.pix.width = width;                   // frame width
    fmt.fmt.pix.height = height;                 // frame height
    fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV; // request YUYV
    fmt.fmt.pix.field = V4L2_FIELD_ANY;          /* let the driver pick the field order */
    val = ioctl(fd, VIDIOC_S_FMT, &fmt);
    if (val < 0)
    {
        perror("fmt error");
        close(fd);
        return -1;
    }
    /* S_FMT may silently substitute another format: verify YUYV was accepted.
     * (The original error message wrongly mentioned JPEG.) */
    if (V4L2_PIX_FMT_YUYV != fmt.fmt.pix.pixelformat)
    {
        fprintf(stderr, "error: the device does not support V4L2_PIX_FMT_YUYV\n");
        close(fd);
        return -1;
    }
    // Read the format back to confirm what was actually applied
    memset(&fmt, 0, sizeof(fmt));
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    val = ioctl(fd, VIDIOC_G_FMT, &fmt);
    if (val < 0)
    {
        fprintf(stderr, "get vidioc get fmt error");
        close(fd);
        return -1;
    }
    printf("########################################\r\n");
    printf("type:%d\r\n", fmt.type);
    printf("width:%d\r\n", fmt.fmt.pix.width);
    printf("height:%d\r\n", fmt.fmt.pix.height);
    /* Query the stream parameters to learn whether the frame rate is settable */
    streamparm.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (ioctl(fd, VIDIOC_G_PARM, &streamparm) < 0)
    {
        fprintf(stderr, "ioctl error: VIDIOC_G_PARM: %s\n", strerror(errno));
        return -1;
    }
    /* Only set the frame rate if the driver advertises that capability */
    if (V4L2_CAP_TIMEPERFRAME & streamparm.parm.capture.capability)
    {
        streamparm.parm.capture.timeperframe.numerator = 1;
        streamparm.parm.capture.timeperframe.denominator = 30; // 30 fps
        if (0 > ioctl(fd, VIDIOC_S_PARM, &streamparm))
        {
            fprintf(stderr, "ioctl error: VIDIOC_S_PARM: %s\n", strerror(errno));
            return -1;
        }
    }
    return 0;
}
/*
 * Request 3 mmap-able frame buffers from the driver (step 4).
 * Returns 0 on success, -1 on error.
 */
static int v4l2_init_buffer(void)
{
    struct v4l2_requestbuffers reqbuf;
    int val = 0;
    memset(&reqbuf, 0, sizeof(reqbuf)); /* reserved fields must be zeroed */
    reqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    reqbuf.count = 3;                 // ask for 3 buffers
    reqbuf.memory = V4L2_MEMORY_MMAP; // memory-mapped I/O
    val = ioctl(fd, VIDIOC_REQBUFS, &reqbuf);
    if (val < 0)
    {
        fprintf(stderr, "VIDIOC_REQBUFS error");
        close(fd);
        return -1;
    }
    return 0;
}
/*
 * mmap each of the 3 driver buffers into this process and enqueue it (step 5).
 * Fills buf_infos[] with the mapped address and length of each buffer.
 * Returns 0 on success, -1 on the first failure (the original kept going
 * with an unmapped buffer, which would crash later).
 */
static int v4l2_init_buf(void)
{
    struct v4l2_buffer mapbuf;
    int i = 0;
    int val = 0;
    memset(&mapbuf, 0, sizeof(mapbuf)); /* clear reserved fields before use */
    mapbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    mapbuf.memory = V4L2_MEMORY_MMAP;
    for (i = 0; i < 3; i++)
    {
        mapbuf.index = i;
        /* Query buffer i: the driver fills in its length and mmap offset */
        val = ioctl(fd, VIDIOC_QUERYBUF, &mapbuf);
        if (val < 0)
        {
            perror("VIDIOC_QUERYBUF error");
            return -1;
        }
        /* Map the kernel buffer into user space */
        buf_infos[mapbuf.index].start = (unsigned short *)mmap(NULL, mapbuf.length, PROT_READ | PROT_WRITE,
                                                               MAP_SHARED, fd, mapbuf.m.offset);
        if (MAP_FAILED == (void *)buf_infos[mapbuf.index].start)
        {
            perror("mmap error");
            return -1;
        }
        buf_infos[mapbuf.index].length = mapbuf.length;
        /* Enqueue the buffer so the driver can fill it once streaming starts */
        val = ioctl(fd, VIDIOC_QBUF, &mapbuf);
        if (val < 0)
        {
            perror("VIDIOC_QBUF error");
            return -1;
        }
    }
    return 0;
}
/*
 * Start streaming I/O on the capture queue (step 6).
 * Returns 0 on success, -1 on failure.
 */
static int v4l2_stream_on(void)
{
    enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (ioctl(fd, VIDIOC_STREAMON, &type) < 0)
    {
        perror("get VIDIOC_STREAMON error");
        return -1;
    }
    return 0;
}
/*
 * Stop streaming on the capture queue (step 9).
 * Returns 0 on success, -1 on failure.
 */
static int v4l2_stream_off(void)
{
    enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (ioctl(fd, VIDIOC_STREAMOFF, &type) < 0)
    {
        perror("VIDIOC_STREAMOFF error");
        return -1;
    }
    return 0;
}
/*
 * Capture loop (steps 7-8): dequeue each filled buffer, display it on the
 * LCD, then re-queue it. Runs forever; returns -1 only if an ioctl fails.
 * The original declared an unused ~4.9 MB VLA (width*height*4 shorts) on
 * the stack -- a stack-overflow hazard -- which has been removed.
 */
static int v4l2_read_data(void)
{
    buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    buf.memory = V4L2_MEMORY_MMAP;
    while (1)
    {
        for (buf.index = 0; buf.index < 3; buf.index++)
        {
            // 7. Dequeue a filled frame
            if (ioctl(fd, VIDIOC_DQBUF, &buf) < 0)
            {
                perror("VIDIOC_DQBUF error");
                return -1;
            }
            /* Frames are displayed raw; a YUYV->RGB conversion step could be
             * inserted here if the capture format is not RGB565. */
            lcd_show_rgb(buf_infos[buf.index].start, width, height);
            // 8. Processing done: hand the buffer back to the driver
            if (ioctl(fd, VIDIOC_QBUF, &buf) < 0)
            {
                perror("VIDIOC_QBUF error");
                return -1;
            }
        }
    }
    return 0;
}
/*
 * Capture-and-display demo: bring up the LCD and camera, configure the
 * stream, map the buffers, then loop forever showing frames on the LCD.
 * Usage: ./app <video-device>
 */
int main(int argc, char const *argv[])
{
    if (argc != 2)
    {
        fprintf(stderr, "usage: %s <device>\r\n", argv[0]);
        exit(-1);
    }
    /* Step 1: framebuffer + camera device */
    if (lcd_init())
        exit(-1);
    if (vidioc_init(argv[1]))
        exit(-1);
    /* Step 2: enumerate supported pixel formats */
    v4l2_print_formats();
    /* Step 3: set format / resolution / frame rate */
    if (v4l2_set_format())
        exit(-1);
    /* Step 4: request driver frame buffers */
    if (v4l2_init_buffer())
        exit(-1);
    /* Step 5: mmap the buffers and enqueue them */
    if (v4l2_init_buf())
        exit(-1);
    /* Step 6: start streaming */
    if (v4l2_stream_on())
        exit(-1);
    /* Steps 7-8: loop forever, dequeue frames and show them on the LCD */
    v4l2_read_data();
    /* Step 9: stop streaming */
    if (v4l2_stream_off())
        exit(-1);
    /* Unmap the camera buffers and close the devices */
    for (int n = 0; n < 3; n++)
    {
        munmap(buf_infos[n].start, buf_infos[n].length);
    }
    // munmap(screen_base, screen_size);
    close(fd);
    close(lcd_fd);
    return 0;
}
通过ov5640拍照,如下代码:
#include <stdio.h>
#include <stdlib.h>
#include <sys/types.h>
#include <sys/stat.h>
#include <fcntl.h>
#include <unistd.h>
#include <sys/ioctl.h>
#include <string.h>
#include <errno.h>
#include <sys/mman.h>
#include <linux/videodev2.h>
#include <linux/fb.h>
#pragma pack(2) // pack on 2-byte boundaries so the 14-byte BMP file header gets no padding
// https://blog.csdn.net/neuq_jtxw007/article/details/87877225
// https://blog.csdn.net/m1751250104/article/details/122971606
// BMP file header
// Holds the file type magic, the total file size, and the offset of the pixel data.
typedef struct
{
unsigned short bfType; // file type, must be "BM" (bytes 1-2)
unsigned int bfSize; // total file size in bytes, little-endian (bytes 3-6)
unsigned short bfReserved1; // reserved, must be 0 (bytes 7-8)
unsigned short bfReserved2; // reserved, must be 0 (bytes 9-10)
unsigned int bfOffBits; /// offset from the start of the file to the pixel data, little-endian (bytes 11-14)
} BMPFileHeader;
// BMP info header
typedef struct
{
unsigned int biSize; // size of this info-header block (BitmapInfoHeader)
int biWidth; // bitmap width in pixels
int biHeight; // bitmap height in pixels (positive means bottom-up row order)
unsigned short biPlanes; // number of color planes; must be 1
unsigned short biBitCount; // bits per pixel
unsigned int biCompression; // compression method; allowed values 0-5, 0 = BI_RGB (uncompressed)
unsigned int biSizeImage; // pixel data size in bytes (including padding of each row to a multiple of 4)
int biXPelsPerMeter; // horizontal resolution, pixels per meter
int biYPelsPerMeter; // vertical resolution, pixels per meter
unsigned int biClrUsed; // number of palette colors actually used
unsigned int biClrImportant; // number of important colors; equals the color count when all matter
} BMPInfoHeader;
// 生成 BMP 文件头
void generate_bmp_header(int width, int height, BMPFileHeader *file_header, BMPInfoHeader *info_header)
{
file_header->bfType = 0x4D42; // "BM"
file_header->bfReserved1 = 0;
file_header->bfReserved2 = 0;
file_header->bfOffBits = sizeof(BMPFileHeader) + sizeof(BMPInfoHeader);
info_header->biSize = sizeof(BMPInfoHeader);
info_header->biWidth = width;
info_header->biHeight = height;
info_header->biPlanes = 1;
info_header->biBitCount = 16; // RGB565 格式,16 位色深
info_header->biCompression = 0;
info_header->biSizeImage = width * height * 2; // 每个像素占两个字节
info_header->biXPelsPerMeter = 0;
info_header->biYPelsPerMeter = 0;
info_header->biClrUsed = 0;
info_header->biClrImportant = 0;
}
int lcd_fd = -1;
unsigned int width = 1024, height = 600;
static unsigned int *screen_base = NULL; // LCD framebuffer base address (set by lcd_init via mmap)
//
/*** Describes one mmap'd camera frame buffer ***/
typedef struct cam_buf_info
{
unsigned short *start; // start address of the mapped frame buffer
unsigned long length; // length of the frame buffer in bytes
} cam_buf_info;
int fd = -1;
struct v4l2_buffer buf;
static cam_buf_info buf_infos[3];
int type; // buffer type shared between v4l2_stream_on() and v4l2_stream_off()
/*
 * Open /dev/fb0, read its geometry, mmap the framebuffer and clear it to
 * white. Also overwrites the global `width`/`height` with the LCD
 * resolution so capture matches the display. Returns 0 on success, -1 on error.
 */
static int lcd_init(void)
{
    struct fb_var_screeninfo fb_var = {0};
    struct fb_fix_screeninfo fb_fix = {0};
    unsigned long screen_size;
    // Open the framebuffer device
    lcd_fd = open("/dev/fb0", O_RDWR);
    if (lcd_fd < 0)
    {
        perror("lcd open error");
        return -1;
    }
    /* Query variable (resolution) and fixed (line length) framebuffer info;
     * the original ignored these return values. */
    if (ioctl(lcd_fd, FBIOGET_VSCREENINFO, &fb_var) < 0 ||
        ioctl(lcd_fd, FBIOGET_FSCREENINFO, &fb_fix) < 0)
    {
        perror("FBIOGET screeninfo error");
        close(lcd_fd);
        return -1;
    }
    screen_size = fb_fix.line_length * fb_var.yres;
    width = fb_var.xres;  /* capture size follows the LCD resolution */
    height = fb_var.yres;
    /* Map the framebuffer into this process */
    screen_base = (unsigned int *)mmap(NULL, screen_size, PROT_READ | PROT_WRITE, MAP_SHARED, lcd_fd, 0);
    if (MAP_FAILED == (void *)screen_base)
    {
        perror("mmap error");
        close(lcd_fd);
        return -1;
    }
    /* Paint the whole screen white */
    memset(screen_base, 0xff, screen_size);
    return 0;
}
/*
 * Copy an RGB565 frame of w x h pixels into the LCD framebuffer, one row
 * at a time (the LCD line may be wider than the frame).
 */
void lcd_show_rgb(unsigned short *buf, int w, int h)
{
    int j = 0;
    unsigned short *start = buf;
    /* screen_base is declared as unsigned int *; the original assigned it to
     * an unsigned short * without a cast, which is a C constraint violation --
     * add the explicit cast. */
    unsigned short *base = (unsigned short *)screen_base;
    for (j = 0; j < h; j++)
    {
        memcpy(base, start, w * 2); // one RGB565 pixel occupies two bytes
        base += width;              // advance one full LCD line
        start += w;                 // advance one source line
    }
}
/*
 * Open the camera device and verify it supports video capture (step 1).
 * Returns 0 on success, -1 on failure. Stores the open fd in the
 * file-scope `fd`.
 */
static int vidioc_init(const char *path)
{
    // 1. Open the camera device
    fd = open(path, O_RDWR);
    if (fd < 0)
    {
        perror("open error");
        return -1;
    }
    // Check that this is actually a video capture device
    struct v4l2_capability vcap;
    if (ioctl(fd, VIDIOC_QUERYCAP, &vcap) < 0)
    {
        perror("VIDIOC_QUERYCAP error"); /* ioctl failed: errno is meaningful here */
        close(fd);
        return -1;
    }
    if (!(V4L2_CAP_VIDEO_CAPTURE & vcap.capabilities))
    {
        /* Not an errno failure, so report with fprintf rather than perror
         * (perror would append an unrelated errno message). */
        fprintf(stderr, "Error: No capture video device!\n");
        close(fd);
        return -1;
    }
    return 0;
}
/*
 * Enumerate and print every pixel format the capture device supports (step 2).
 */
void v4l2_print_formats(void)
{
    struct v4l2_fmtdesc v4fmt;
    memset(&v4fmt, 0, sizeof(v4fmt)); /* reserved fields must be zero before the ioctl */
    v4fmt.index = 0;
    v4fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; // enumerate the capture stream
    /* VIDIOC_ENUM_FMT fails with EINVAL once index goes past the last format. */
    while (ioctl(fd, VIDIOC_ENUM_FMT, &v4fmt) == 0)
    {
        printf("v4l2 index:%d\r\n", v4fmt.index);
        printf("v4l2 description:%s\r\n", v4fmt.description);
        /* pixelformat is a fourcc code: print it as four ASCII characters */
        unsigned char *p = (unsigned char *)&v4fmt.pixelformat;
        printf("pixelformat:%c%c%c%c\r\n", p[0], p[1], p[2], p[3]);
        /* reserved is a __u32 array; the original passed the whole array to %d,
         * which is undefined behavior -- print the first element instead. */
        printf("reserved:%u\r\n", v4fmt.reserved[0]);
        v4fmt.index++;
        printf("\r\n");
    }
}
/*
 * Configure RGB565 capture at the LCD's resolution and, if the driver
 * supports it, a 30 fps frame rate (step 3). Returns 0 on success, -1 on error.
 */
static int v4l2_set_format(void)
{
    struct v4l2_format fmt;
    struct v4l2_streamparm streamparm = {0};
    int val = 0;
    memset(&fmt, 0, sizeof(fmt));                  /* do not hand stack garbage to the driver */
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;        // capture stream
    fmt.fmt.pix.width = width;                     // frame width
    fmt.fmt.pix.height = height;                   // frame height
    fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB565; /* request RGB565 (matches the LCD; the
                                                    * original comment wrongly said YUYV) */
    fmt.fmt.pix.field = V4L2_FIELD_ANY;
    val = ioctl(fd, VIDIOC_S_FMT, &fmt);
    if (val < 0)
    {
        perror("fmt error");
        close(fd);
        return -1;
    }
    /* S_FMT may silently substitute another format: verify RGB565 was accepted.
     * (The original error message wrongly mentioned JPEG.) */
    if (V4L2_PIX_FMT_RGB565 != fmt.fmt.pix.pixelformat)
    {
        fprintf(stderr, "error: the device does not support V4L2_PIX_FMT_RGB565\n");
        close(fd);
        return -1;
    }
    // Read the format back to confirm what was actually applied
    memset(&fmt, 0, sizeof(fmt));
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    val = ioctl(fd, VIDIOC_G_FMT, &fmt);
    if (val < 0)
    {
        fprintf(stderr, "get vidioc get fmt error");
        close(fd);
        return -1;
    }
    printf("########################################\r\n");
    printf("type:%d\r\n", fmt.type);
    printf("width:%d\r\n", fmt.fmt.pix.width);
    printf("height:%d\r\n", fmt.fmt.pix.height);
    /* Query the stream parameters to learn whether the frame rate is settable */
    streamparm.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (ioctl(fd, VIDIOC_G_PARM, &streamparm) < 0)
    {
        fprintf(stderr, "ioctl error: VIDIOC_G_PARM: %s\n", strerror(errno));
        return -1;
    }
    /* Only set the frame rate if the driver advertises that capability */
    if (V4L2_CAP_TIMEPERFRAME & streamparm.parm.capture.capability)
    {
        streamparm.parm.capture.timeperframe.numerator = 1;
        streamparm.parm.capture.timeperframe.denominator = 30; // 30 fps
        if (0 > ioctl(fd, VIDIOC_S_PARM, &streamparm))
        {
            fprintf(stderr, "ioctl error: VIDIOC_S_PARM: %s\n", strerror(errno));
            return -1;
        }
    }
    return 0;
}
/*
 * Request 3 mmap-able frame buffers from the driver (step 4).
 * Returns 0 on success, -1 on error.
 */
static int v4l2_init_buffer(void)
{
    struct v4l2_requestbuffers reqbuf;
    int val = 0;
    memset(&reqbuf, 0, sizeof(reqbuf)); /* reserved fields must be zeroed */
    reqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    reqbuf.count = 3;                 // ask for 3 buffers
    reqbuf.memory = V4L2_MEMORY_MMAP; // memory-mapped I/O
    val = ioctl(fd, VIDIOC_REQBUFS, &reqbuf);
    if (val < 0)
    {
        fprintf(stderr, "VIDIOC_REQBUFS error");
        close(fd);
        return -1;
    }
    return 0;
}
/*
 * mmap each of the 3 driver buffers into this process and enqueue it (step 5).
 * Fills buf_infos[] with the mapped address and length of each buffer.
 * Returns 0 on success, -1 on the first failure (the original kept going
 * with an unmapped buffer, which would crash later).
 */
static int v4l2_init_buf(void)
{
    struct v4l2_buffer mapbuf;
    int i = 0;
    int val = 0;
    memset(&mapbuf, 0, sizeof(mapbuf)); /* clear reserved fields before use */
    mapbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    mapbuf.memory = V4L2_MEMORY_MMAP;
    for (i = 0; i < 3; i++)
    {
        mapbuf.index = i;
        /* Query buffer i: the driver fills in its length and mmap offset */
        val = ioctl(fd, VIDIOC_QUERYBUF, &mapbuf);
        if (val < 0)
        {
            perror("VIDIOC_QUERYBUF error");
            return -1;
        }
        /* Map the kernel buffer into user space */
        buf_infos[mapbuf.index].start = (unsigned short *)mmap(NULL, mapbuf.length, PROT_READ | PROT_WRITE,
                                                               MAP_SHARED, fd, mapbuf.m.offset);
        if (MAP_FAILED == (void *)buf_infos[mapbuf.index].start)
        {
            perror("mmap error");
            return -1;
        }
        buf_infos[mapbuf.index].length = mapbuf.length;
        /* Enqueue the buffer so the driver can fill it once streaming starts */
        val = ioctl(fd, VIDIOC_QBUF, &mapbuf);
        if (val < 0)
        {
            perror("VIDIOC_QBUF error");
            return -1;
        }
    }
    return 0;
}
/*
 * Start streaming on the capture queue (step 6). The buffer type is kept
 * in the file-scope `type` so that v4l2_stream_off() can reuse it.
 */
static int v4l2_stream_on(void)
{
    type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (ioctl(fd, VIDIOC_STREAMON, &type) < 0)
    {
        perror("get VIDIOC_STREAMON error");
        return -1;
    }
    return 0;
}
/*
 * Stop streaming, using the same file-scope `type` value that
 * v4l2_stream_on() set (step 9).
 */
static int v4l2_stream_off(void)
{
    if (ioctl(fd, VIDIOC_STREAMOFF, &type) < 0)
    {
        perror("VIDIOC_STREAMOFF error");
        return -1;
    }
    return 0;
}
/*
 * Capture loop (steps 7-8): dequeue each filled RGB565 frame, display it
 * on the LCD, then re-queue it. Runs forever; returns -1 only if an ioctl
 * fails. The commented-out code shows how to dump each frame to a .bmp file.
 */
static int v4l2_read_data(void)
{
    char filename[32];
    int file_count = 0;
    int jpg_fd;
    /* These locals are only used by the commented-out BMP dump below;
     * silence -Wunused warnings while keeping the example intact. */
    (void)filename;
    (void)file_count;
    (void)jpg_fd;
    buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    buf.memory = V4L2_MEMORY_MMAP;
    while (1)
    {
        for (buf.index = 0; buf.index < 3; buf.index++)
        {
            // 7. Dequeue a filled frame
            if (ioctl(fd, VIDIOC_DQBUF, &buf) < 0)
            {
                perror("VIDIOC_DQBUF error");
                return -1;
            }
            // sprintf(filename,"video_raw_data%04d.bmp",file_count++);
            // jpg_fd = open(filename, O_RDWR | O_CREAT, 0666); // O_CREAT: create the file if missing
            // BMPFileHeader file_header; // BMP file header
            // BMPInfoHeader info_header; // BMP info header
            // generate_bmp_header(width, height, &file_header, &info_header);
            // write(jpg_fd, &file_header, sizeof(BMPFileHeader));
            // write(jpg_fd, &info_header, sizeof(BMPInfoHeader));
            // write(jpg_fd, buf_infos[buf.index].start, buf_infos[buf.index].length);
            // close(jpg_fd);
            lcd_show_rgb(buf_infos[buf.index].start, width, height);
            // 8. Processing done: hand the buffer back to the driver
            if (ioctl(fd, VIDIOC_QBUF, &buf) < 0)
            {
                perror("VIDIOC_QBUF error");
                return -1;
            }
        }
    }
    return 0;
}
/*
 * OV5640 capture demo: bring up the LCD and camera, configure RGB565
 * capture, map the buffers, then loop forever showing frames on the LCD.
 * Usage: ./app <video-device>
 */
int main(int argc, char const *argv[])
{
    if (argc != 2)
    {
        fprintf(stderr, "usage: %s <device>\r\n", argv[0]);
        exit(-1);
    }
    /* Step 1: framebuffer + camera device */
    if (lcd_init())
        exit(-1);
    if (vidioc_init(argv[1]))
        exit(-1);
    /* Step 2: enumerate supported pixel formats */
    v4l2_print_formats();
    /* Step 3: set format / resolution / frame rate */
    if (v4l2_set_format())
        exit(-1);
    /* Step 4: request driver frame buffers */
    if (v4l2_init_buffer())
        exit(-1);
    /* Step 5: mmap the buffers and enqueue them */
    if (v4l2_init_buf())
        exit(-1);
    /* Step 6: start streaming */
    if (v4l2_stream_on())
        exit(-1);
    /* Steps 7-8: loop forever, dequeue frames and show them on the LCD */
    v4l2_read_data();
    /* Step 9: stop streaming */
    if (v4l2_stream_off())
        exit(-1);
    /* Unmap the camera buffers and close the devices */
    for (int n = 0; n < 3; n++)
    {
        munmap(buf_infos[n].start, buf_infos[n].length);
    }
    // munmap(screen_base, screen_size);
    close(fd);
    close(lcd_fd);
    return 0;
}
最后总结一下:在移植 OV5640 的过程中出现了很多问题,经过不懈的努力终于慢慢理解了 V4L2 框架,也掌握了用 C 语言读取和存储 BMP 格式图片以及相关的图片格式知识。不过还没有试过 YUYV 转 RGB565 格式,等下次测试成功后再更新。