Displaying a Live Camera Feed with V4L2 on Linux

This article walks through how to talk to a camera on Linux with the Video4Linux2 (V4L2) API: opening the device, setting the capture format, memory-mapping the buffers, capturing and converting the data (YUV to RGB), and displaying it on an LCD screen. The focus is on setting the capture format, requesting buffers, and the actual capture loop.

Video for Linux Two (Video4Linux2), V4L2 for short, is the successor to V4L. It is the Linux API for capturing image, video, and audio data; combined with a suitable capture device and driver it can acquire still images, video, and audio, and it is widely used in video conferencing, video telephony, video surveillance, and embedded multimedia terminals.

Under Linux every peripheral is exposed as a special file, a "device file", which can be read and written much like an ordinary file. A camera driven by V4L2 normally appears as /dev/video0.
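Before doing anything else it can be useful to confirm that the node really is a capture device. Here is a minimal standalone sketch (not part of the original article) that opens /dev/video0 and prints the driver and card name using the standard VIDIOC_QUERYCAP ioctl:

#include <stdio.h>
#include <fcntl.h>
#include <unistd.h>
#include <sys/ioctl.h>
#include <linux/videodev2.h>

int main(void)
{
    int fd = open("/dev/video0", O_RDWR);
    if (fd < 0) {
        perror("open /dev/video0");
        return -1;
    }

    struct v4l2_capability cap;
    if (ioctl(fd, VIDIOC_QUERYCAP, &cap) < 0) {   /* standard V4L2 capability query */
        perror("VIDIOC_QUERYCAP");
        close(fd);
        return -1;
    }
    printf("driver: %s, card: %s\n", cap.driver, cap.card);
    if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE))
        printf("this node does not support video capture\n");

    close(fd);
    return 0;
}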

Connect the camera to the virtual machine; the video0 device should then show up under the VM's devices.

  1. Open the video0 device
int fd = open("/dev/video0", O_RDWR);
if (fd < 0) {
   perror("Open video0");
   return -1;
}
  2. Query the camera's format information
struct v4l2_fmtdesc v4fmt;
v4fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
v4fmt.index = 1;//0 MJPEG 1 YUYV
int ret = ioctl(fd, VIDIOC_ENUM_FMT, &v4fmt);
if (ret < 0) {
    perror("Get inf failed:");
    return -1;
}
printf("index:%d\n", v4fmt.index);
printf("flags:%d\n", v4fmt.flags);
printf("description:%s\n", v4fmt.description);
unsigned char *p =(unsigned char *)(&v4fmt.pixelformat);
printf("pixelformat:%c%c%c%c\n", p[0], p[1], p[2], p[3]);

The value of v4fmt.index selects which supported pixel format is reported; try other index values to see what your own camera offers. On my camera index 0 is MJPEG and index 1 is YUYV, and the rest of this article uses YUYV.
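Instead of hard-coding the index, you can keep increasing it until VIDIOC_ENUM_FMT fails, which lists every format the driver supports. A small sketch (assuming fd is the already-opened camera descriptor):

struct v4l2_fmtdesc desc;
memset(&desc, 0, sizeof(desc));
desc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
//keep increasing the index until the driver reports no more formats
while (ioctl(fd, VIDIOC_ENUM_FMT, &desc) == 0) {
    unsigned char *c = (unsigned char *)&desc.pixelformat;
    printf("format %u: %c%c%c%c (%s)\n", desc.index, c[0], c[1], c[2], c[3], desc.description);
    desc.index++;
}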

  3. Set the video format
struct v4l2_format vfmt;
memset(&vfmt, 0, sizeof(vfmt));//clear the struct before filling it in
vfmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
vfmt.fmt.pix.width = 640;
vfmt.fmt.pix.height = 480;
vfmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;//capture format: YUYV
ret = ioctl(fd, VIDIOC_S_FMT, &vfmt);
if (ret < 0) {
    perror("Set format failed");
    return -1;
}
memset(&vfmt, 0, sizeof(vfmt));
vfmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
ret = ioctl(fd, VIDIOC_G_FMT, &vfmt);
if (ret < 0) {
    perror("Get format failed");
    return -1;
}

if (vfmt.fmt.pix.width == 640 && vfmt.fmt.pix.height == 480 && vfmt.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
    printf("Set successful!\n");
}
else {
    printf("Set failed!\n");
    return -1;
}
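Note that the driver is allowed to adjust the requested width, height, and format to the closest values it supports, so the strict equality check above can fail even on a working camera. The values returned by VIDIOC_G_FMT are the ones actually in effect, and the bytesperline and sizeimage fields tell you how large each frame really is. A short sketch of a more tolerant check:

printf("actual format: %ux%u, bytesperline=%u, sizeimage=%u\n",
       vfmt.fmt.pix.width, vfmt.fmt.pix.height,
       vfmt.fmt.pix.bytesperline, vfmt.fmt.pix.sizeimage);
if (vfmt.fmt.pix.pixelformat != V4L2_PIX_FMT_YUYV) {
    printf("driver did not accept YUYV\n");
    return -1;
}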

  4. Request a queue of kernel buffers
struct v4l2_requestbuffers reqbuffer;
memset(&reqbuffer, 0, sizeof(reqbuffer));
reqbuffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
reqbuffer.count = 4;
reqbuffer.memory = V4L2_MEMORY_MMAP;//use memory-mapped buffers
ret = ioctl(fd, VIDIOC_REQBUFS, &reqbuffer);
if (ret < 0) {
    perror("Request Queue space failed");
    return -1;
}
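The driver may grant fewer buffers than requested; VIDIOC_REQBUFS writes the number it actually allocated back into reqbuffer.count, so it is worth checking (a small addition, not in the original code):

if (reqbuffer.count < 2) {
    //too few buffers to stream smoothly
    printf("only %u buffer(s) allocated\n", reqbuffer.count);
    return -1;
}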

  5. Map the buffers into user space
struct v4l2_buffer mapbuffer;
int i;
unsigned char *mptr[4];
unsigned int size[4];//remember each buffer's length so it can be unmapped later
memset(&mapbuffer, 0, sizeof(mapbuffer));
mapbuffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
mapbuffer.memory = V4L2_MEMORY_MMAP;
for (i = 0; i < 4; i++) {
    mapbuffer.index = i;
    ret = ioctl(fd, VIDIOC_QUERYBUF, &mapbuffer);
    if (ret < 0) {
        perror("Kernel space queue failed");
        return -1;
    }
    mptr[i] = (unsigned char *)mmap(NULL, mapbuffer.length, PROT_READ | PROT_WRITE, MAP_SHARED, fd, mapbuffer.m.offset);
    if (mptr[i] == MAP_FAILED) {
        perror("mmap failed");
        return -1;
    }
    size[i] = mapbuffer.length;
    //queue the buffer so the driver can start filling it
    ret = ioctl(fd, VIDIOC_QBUF, &mapbuffer);
    if (ret < 0) {
        perror("Return failed");
        return -1;
    }
}

  6. Start capturing data

Open the framebuffer device, query the virtual LCD width and height, and map the display memory into user space.

lcdfd = open("/dev/fb0", O_RDWR);
if (lcdfd < 0) {
    perror("LCD open failed:");
}
/* query the LCD parameters */
struct fb_var_screeninfo info;
int lret = ioctl(lcdfd, FBIOGET_VSCREENINFO, &info);
if (lret < 0) {
    perror("get info failed:");
}
//virtual resolution of the LCD
lcd_w = info.xres_virtual;
lcd_h = info.yres_virtual;
//on a development board, use the physical resolution instead:
//lcd_w = info.xres;
//lcd_h = info.yres;
lcdptr = (int *)mmap(NULL, lcd_w*lcd_h*4, PROT_READ | PROT_WRITE, MAP_SHARED, lcdfd, 0);
if (lcdptr == MAP_FAILED) {//mmap returns MAP_FAILED, not NULL, on error
    perror("lcd mmap failed:");
}
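The code above assumes a 32-bit-per-pixel framebuffer (the *4 in the mmap size). If you are unsure, the depth is reported in the same fb_var_screeninfo structure, so the mapping size can be derived instead of hard-coded; a sketch under that assumption (note that lcd_show_rgb below still writes 4 bytes per pixel):

int bytes_per_pixel = info.bits_per_pixel / 8;//usually 4 (32bpp) or 2 (16bpp RGB565)
lcdptr = (int *)mmap(NULL, lcd_w * lcd_h * bytes_per_pixel, PROT_READ | PROT_WRITE, MAP_SHARED, lcdfd, 0);
if (lcdptr == MAP_FAILED) {
    perror("lcd mmap failed:");
}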

The YUYV-to-RGB conversion function:

void yuyv_to_rgb(unsigned char *yuyvdata, unsigned char * rgbdata, int w, int h)
{
   int r1, g1, b1;
   int r2, g2, b2;
   //each 4-byte YUYV group (Y0 U Y1 V) encodes two pixels that share U and V
   for (int i = 0; i < w*h/2; i++) {
       unsigned char data[4];
       memcpy(data, yuyvdata + i*4, 4);
       unsigned char Y0 = data[0];
       unsigned char U0 = data[1];
       unsigned char Y1 = data[2];
       unsigned char V1 = data[3];
       r1 = Y0 + 1.4075*(V1 - 128);
       if (r1 > 255)
           r1 = 255;
       if (r1 < 0)
           r1 = 0;
       g1 = Y0 - 0.3455*(U0 - 128) - 0.7169*(V1 - 128);
       if (g1 > 255)
           g1 = 255;
       if (g1 < 0)
           g1 = 0;
       b1 = Y0 + 1.779*(U0 - 128);
       if (b1 > 255)
           b1 = 255;
       if (b1 < 0)
           b1 = 0;

       r2 = Y1 + 1.4075*(V1 - 128);
       if (r2 > 255)
           r2 = 255;
       if (r2 < 0)
           r2 = 0;
       g2 = Y1 - 0.3455*(U0 - 128) - 0.7169*(V1 - 128);
       if (g2 > 255)
           g2 = 255;
       if (g2 < 0)
           g2 = 0;
       b2 = Y1 + 1.779*(U0 - 128);
       if (b2 > 255)
           b2 = 255;
       if (b2 < 0)
           b2 = 0;

        rgbdata[i*6 + 0] = r1;
        rgbdata[i*6 + 1] = g1;
        rgbdata[i*6 + 2] = b1;
        rgbdata[i*6 + 3] = r2;
        rgbdata[i*6 + 4] = g2;
        rgbdata[i*6 + 5] = b2;
    }
}
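The floating-point coefficients above cost several multiplications and conversions per pixel, which can be slow on boards without an FPU. A common alternative is a fixed-point version of the same conversion with the constants scaled by 256; this is a sketch, not the article's code:

static inline unsigned char clamp_u8(int v)
{
    return v < 0 ? 0 : (v > 255 ? 255 : v);
}

void yuyv_to_rgb_fixed(unsigned char *yuyvdata, unsigned char *rgbdata, int w, int h)
{
    for (int i = 0; i < w * h / 2; i++) {
        int y0 = yuyvdata[i*4 + 0];
        int u  = yuyvdata[i*4 + 1] - 128;
        int y1 = yuyvdata[i*4 + 2];
        int v  = yuyvdata[i*4 + 3] - 128;
        //the article's constants scaled by 256 and rounded (1.4075, 0.3455, 0.7169, 1.779)
        int rd = (360 * v) >> 8;
        int gd = (88 * u + 184 * v) >> 8;
        int bd = (455 * u) >> 8;
        rgbdata[i*6 + 0] = clamp_u8(y0 + rd);
        rgbdata[i*6 + 1] = clamp_u8(y0 - gd);
        rgbdata[i*6 + 2] = clamp_u8(y0 + bd);
        rgbdata[i*6 + 3] = clamp_u8(y1 + rd);
        rgbdata[i*6 + 4] = clamp_u8(y1 - gd);
        rgbdata[i*6 + 5] = clamp_u8(y1 + bd);
    }
}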

Display the RGB data on the LCD screen:

void lcd_show_rgb(unsigned char *rgbdata, int w, int h)
{
    unsigned int *ptr = (unsigned int *)lcdptr;
    for (int i = 0; i < h; i++) {
        for (int j = 0; j < w; j++) {
            memcpy(ptr + j, rgbdata + j*3, 3);//write 3 bytes into one 32-bit pixel
        }
        ptr += lcd_w;//advance one framebuffer row
        rgbdata += w*3;//advance one image row
    }
}
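On most 32-bit framebuffers the byte order within a pixel is B, G, R, X, so copying the R byte first lands it in the blue channel; this is the most likely cause of the blue tint mentioned at the end of the article. A variant of lcd_show_rgb that composes each pixel explicitly, assuming a little-endian 32bpp BGRX framebuffer (a sketch, not the original code):

void lcd_show_rgb_bgrx(unsigned char *rgbdata, int w, int h)
{
    unsigned int *ptr = (unsigned int *)lcdptr;
    for (int i = 0; i < h; i++) {
        for (int j = 0; j < w; j++) {
            unsigned char r = rgbdata[j*3 + 0];
            unsigned char g = rgbdata[j*3 + 1];
            unsigned char b = rgbdata[j*3 + 2];
            //pack as 0x00RRGGBB, which a little-endian BGRX framebuffer stores as B,G,R,X
            ptr[j] = (r << 16) | (g << 8) | b;
        }
        ptr += lcd_w;      //advance one framebuffer row
        rgbdata += w*3;    //advance one image row
    }
}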

Start capturing:

int type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
ret = ioctl(fd, VIDIOC_STREAMON, &type);
if (ret < 0) {
     perror("Start failed:");
     return -1;
}
//dequeue one filled frame from the queue
unsigned char rgbdata[640*480*3];
while (1) {
    struct v4l2_buffer readbuffer;
    memset(&readbuffer, 0, sizeof(readbuffer));
    readbuffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    readbuffer.memory = V4L2_MEMORY_MMAP;
    ret = ioctl(fd, VIDIOC_DQBUF, &readbuffer);
    if (ret < 0) {
        perror("Capture failed:");
    }
    //convert the frame and show it on the LCD
    yuyv_to_rgb(mptr[readbuffer.index], rgbdata, 640, 480);
    lcd_show_rgb(rgbdata, 640, 480);
    //hand the buffer back to the kernel
    ret = ioctl(fd, VIDIOC_QBUF, &readbuffer);
    if (ret < 0) {
        perror("return failed:");
    }
}
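Because the device was opened without O_NONBLOCK, VIDIOC_DQBUF simply blocks until a frame is ready. If you want a timeout, for example to notice an unplugged camera, you can wait on the descriptor with select() before dequeuing; a sketch of the idea (needs <sys/select.h>):

fd_set fds;
struct timeval tv;
FD_ZERO(&fds);
FD_SET(fd, &fds);
tv.tv_sec = 2;          //give up after 2 seconds without a frame
tv.tv_usec = 0;
int r = select(fd + 1, &fds, NULL, NULL, &tv);
if (r <= 0) {
    perror("select timeout/error");
} else {
    //a frame is ready: VIDIOC_DQBUF will not block now
}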

  7. Stop capturing and release the mapped buffers
ret = ioctl(fd, VIDIOC_STREAMOFF, &type);
if (ret < 0) {
    perror("Stop failed:");
    return -1;
}

//unmap the capture buffers
for (i = 0; i < 4; i++) {
    munmap(mptr[i], size[i]);
}
close(fd);
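The cleanup above releases only the camera; the framebuffer mapping and descriptor opened earlier can be released the same way (a small addition, not in the original code):

munmap(lcdptr, lcd_w * lcd_h * 4);//assumes the 32bpp mapping made earlier
close(lcdfd);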

Complete code:

#include <stdio.h>
#include <sys/types.h>
#include <sys/stat.h>
#include <fcntl.h>
#include <stdlib.h>
#include <unistd.h>
#include <sys/ioctl.h>
#include <linux/videodev2.h>
#include <sys/mman.h>
#include <string.h>
#include <linux/fb.h>
#include <memory.h>

int lcdfd = 0;
int *lcdptr = NULL;
int lcd_w = 800, lcd_h = 480;

void yuyv_to_rgb(unsigned char *yuyvdata, unsigned char * rgbdata, int w, int h)
{
	int r1, g1, b1;
	int r2, g2, b2;
	//each 4-byte YUYV group (Y0 U Y1 V) encodes two pixels that share U and V
	for (int i = 0; i < w*h/2; i++) {
		unsigned char data[4];
		memcpy(data, yuyvdata + i*4, 4);
		unsigned char Y0 = data[0];
		unsigned char U0 = data[1];
		unsigned char Y1 = data[2];
		unsigned char V1 = data[3];
		r1 = Y0 + 1.4075*(V1 - 128);
		if (r1 > 255)
			r1 = 255;
		if (r1 < 0)
			r1 = 0;
		g1 = Y0 - 0.3455*(U0 - 128) - 0.7169*(V1 - 128);
		if (g1 > 255)
			g1 = 255;
		if (g1 < 0)
			g1 = 0;
		b1 = Y0 + 1.779*(U0 - 128);
		if (b1 > 255)
			b1 = 255;
		if (b1 < 0)
			b1 = 0;

		r2 = Y1 + 1.4075*(V1 - 128);
		if (r2 > 255)
			r2 = 255;
		if (r2 < 0)
			r2 = 0;
		g2 = Y1 - 0.3455*(U0 - 128) - 0.7169*(V1 - 128);
		if (g2 > 255)
			g2 = 255;
		if (g2 < 0)
			g2 = 0;
		b2 = Y1 + 1.779*(U0 - 128);
		if (b2 > 255)
			b2 = 255;
		if (b2 < 0)
			b2 = 0;

		rgbdata[i*6 + 0] = r1;
		rgbdata[i*6 + 1] = g1;
		rgbdata[i*6 + 2] = b1;
		rgbdata[i*6 + 3] = r2;
		rgbdata[i*6 + 4] = g2;
		rgbdata[i*6 + 5] = b2;
	}
}



void lcd_show_rgb(unsigned char *rgbdata, int w, int h)
{
	unsigned int *ptr = (unsigned int *)lcdptr;
	for (int i = 0; i < h; i++) {
		for (int j = 0; j < w; j++) {
			memcpy(ptr + j, rgbdata + j*3, 3);
		}
		ptr += lcd_w;//advance one framebuffer row
		rgbdata += w*3;//advance one image row
	}
}

int main()
{
	lcdfd = open("/dev/fb0", O_RDWR);
	if (lcdfd < 0) {
		perror("LCD open failed:");
	}
	/* query the LCD parameters */
	struct fb_var_screeninfo info;
	int lret = ioctl(lcdfd, FBIOGET_VSCREENINFO, &info);
	if (lret < 0) {
		perror("get info failed:");
	}
	//virtual resolution of the LCD
	lcd_w = info.xres_virtual;
	lcd_h = info.yres_virtual;
	//on a development board, use the physical resolution instead:
	//lcd_w = info.xres;
	//lcd_h = info.yres;
	lcdptr = (int *)mmap(NULL, lcd_w*lcd_h*4, PROT_READ | PROT_WRITE, MAP_SHARED, lcdfd, 0);
	if (lcdptr == MAP_FAILED) {//mmap returns MAP_FAILED, not NULL, on error
		perror("lcd mmap failed:");
	}
	/* open the camera device */
	int fd = open("/dev/video0", O_RDWR);
	if (fd < 0) {
		perror("Open video0:");
		return -1;
	}

	/* query the camera's format information */
	struct v4l2_fmtdesc v4fmt;
	v4fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	v4fmt.index = 1;//0 MJPEG 1 YUYV
	int ret = ioctl(fd, VIDIOC_ENUM_FMT, &v4fmt);
	if (ret < 0) {
		perror("Get inf failed:");
		return -1;
	}
	printf("index:%d\n", v4fmt.index);
	printf("flags:%d\n", v4fmt.flags);
	printf("description:%s\n", v4fmt.description);
	unsigned char *p =(unsigned char *)(&v4fmt.pixelformat);
	printf("pixelformat:%c%c%c%c\n", p[0], p[1], p[2], p[3]);
	
	/* set the video format */
	struct v4l2_format vfmt;
	memset(&vfmt, 0, sizeof(vfmt));//clear the struct before filling it in
	vfmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	vfmt.fmt.pix.width = 640;
	vfmt.fmt.pix.height = 480;
	vfmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;//capture format: YUYV
	ret = ioctl(fd, VIDIOC_S_FMT, &vfmt);
	if (ret < 0) {
		perror("Set format failed:");
		return -1;
	}
	memset(&vfmt, 0, sizeof(vfmt));
	vfmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	ret = ioctl(fd, VIDIOC_G_FMT, &vfmt);
	if (ret < 0) {
		perror("Get format failed:");
		return -1;
	}
	
	if (vfmt.fmt.pix.width == 640 && vfmt.fmt.pix.height == 480 && vfmt.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
		printf("Set successful!\n");
	}
	else {
		printf("Set failed!\n");
		return -1;
	}

	/* request a queue of kernel buffers */
	struct v4l2_requestbuffers reqbuffer;
	memset(&reqbuffer, 0, sizeof(reqbuffer));
	reqbuffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	reqbuffer.count = 4;
	reqbuffer.memory = V4L2_MEMORY_MMAP;//use memory-mapped buffers
	ret = ioctl(fd, VIDIOC_REQBUFS, &reqbuffer);
	if (ret < 0) {
		perror("Request Queue space failed:");
		return -1;
	}
	
	/* map the buffers into user space */
	struct v4l2_buffer mapbuffer;
	int i;
	unsigned char *mptr[4];
	unsigned int size[4];//remember each buffer's length so it can be unmapped later
	memset(&mapbuffer, 0, sizeof(mapbuffer));
	mapbuffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	mapbuffer.memory = V4L2_MEMORY_MMAP;
	for (i = 0; i < 4; i++) {
		mapbuffer.index = i;
		ret = ioctl(fd, VIDIOC_QUERYBUF, &mapbuffer);
		if (ret < 0) {
			perror("Kernel space queue failed:");
			return -1;
		}
		mptr[i] = (unsigned char *)mmap(NULL, mapbuffer.length, PROT_READ | PROT_WRITE, MAP_SHARED, fd, mapbuffer.m.offset);
		if (mptr[i] == MAP_FAILED) {
			perror("mmap failed:");
			return -1;
		}
		size[i] = mapbuffer.length;
		//queue the buffer so the driver can start filling it
		ret = ioctl(fd, VIDIOC_QBUF, &mapbuffer);
		if (ret < 0) {
			perror("Return failed:");
			return -1;
		}
	}

	/* start capturing */
	int type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	ret = ioctl(fd, VIDIOC_STREAMON, &type);
	if (ret < 0) {
		perror("Start failed:");
		return -1;
	}
	//dequeue one filled frame from the queue
	unsigned char rgbdata[640*480*3];
	while (1) {
		struct v4l2_buffer readbuffer;
		memset(&readbuffer, 0, sizeof(readbuffer));
		readbuffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
		readbuffer.memory = V4L2_MEMORY_MMAP;
		ret = ioctl(fd, VIDIOC_DQBUF, &readbuffer);
		if (ret < 0) {
			perror("Capture failed:");
		}
		//convert the frame and show it on the LCD
		yuyv_to_rgb(mptr[readbuffer.index], rgbdata, 640, 480);
		lcd_show_rgb(rgbdata, 640, 480);
		//hand the buffer back to the kernel
		ret = ioctl(fd, VIDIOC_QBUF, &readbuffer);
		if (ret < 0) {
			perror("return failed:");
		}
	}
	//stop capturing
	ret = ioctl(fd, VIDIOC_STREAMOFF, &type);
	if (ret < 0) {
		perror("Stop failed:");
		return -1;
	}

	//unmap the capture buffers
	for (i = 0; i < 4; i++) {
		munmap(mptr[i], size[i]);
	}
	close(fd);
	return 0;
}

Testing the code
Press CTRL+ALT+F1 to switch to a text console.
Change into the directory containing the code.
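Assuming the source file is saved as v4l2_lcd.c (the article does not give a file name), it needs nothing beyond the standard toolchain, since V4L2 and the framebuffer are plain kernel interfaces. You may need root (or membership in the video group) to open /dev/video0 and /dev/fb0:

gcc v4l2_lcd.c -o v4l2_lcd
sudo ./v4l2_lcd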

You should now see the live feed from the camera on the screen.
(The picture comes out with a slight blue tint; the conversion function is probably not entirely accurate...)
