V4L2(Video4Linux version 2):
- 优点:
原生Linux支持:V4L2是Linux内核的一部分,因此对于在Linux系统上进行视频捕获的应用来说,V4L2是一种自然的选择。
功能丰富:V4L2提供了广泛的功能和选项,允许对视频采集设备进行细粒度的控制。
稳定性:V4L2经过长时间的发展和实践,已经被广泛使用并且比较成熟稳定。
- 缺点:
学习曲线陡峭:V4L2的使用可能相对复杂,对于初学者来说需要花费一些时间来了解其API和概念。
编码繁琐:使用V4L2需要编写大量的代码来设置设备、请求缓冲区、处理帧等,可能会增加开发工作量。
#include <errno.h>
#include <fcntl.h>
#include <pthread.h>
#include <signal.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <unistd.h>

#include <sys/ioctl.h>
#include <sys/mman.h>

#include <linux/videodev2.h>

#include <zconf.h>
#include <bits/types/sig_atomic_t.h>
#define VIDEO_DEVICE "/dev/video0" /* capture device node to open */
#define OUTPUT_FILE "output.yuv"   /* raw YUYV frames are appended here */
#define WIDTH 640                  /* requested capture width in pixels */
#define HEIGHT 480                 /* requested capture height in pixels */
/*
 * Enumerate and print every pixel format the capture device supports.
 *
 * fd: open file descriptor of a V4L2 capture device.
 *
 * The FourCC lives in a u32 (`pixelformat`) and is NOT NUL-terminated,
 * so it must be printed with a bounded "%.4s" — the original unbounded
 * "%s" read past the field into adjacent struct bytes.
 */
void querySupportedPixelFormats(int fd) {
    struct v4l2_fmtdesc fmt_desc;
    memset(&fmt_desc, 0, sizeof(fmt_desc));
    fmt_desc.index = 0;
    fmt_desc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    printf("Supported Pixel Formats:\n");
    /* VIDIOC_ENUM_FMT returns -1/EINVAL once the index runs past the last format. */
    while (ioctl(fd, VIDIOC_ENUM_FMT, &fmt_desc) != -1) {
        /* `description` is a NUL-terminated human-readable name filled in by the driver. */
        printf("%d. %.4s (%s)\n", fmt_desc.index + 1,
               (const char *)&fmt_desc.pixelformat, fmt_desc.description);
        fmt_desc.index++;
    }
}
/*
 * Print the currently negotiated capture resolution and pixel format.
 *
 * fd: open file descriptor of a V4L2 capture device.
 *
 * Fixes two print bugs: width/height are __u32, so "%u" is the correct
 * specifier (not "%d"), and the FourCC is a u32 immediately followed by
 * the `field` member — printing it with unbounded "%s" read garbage.
 */
void queryCameraResolution(int fd) {
    struct v4l2_format fmt;
    memset(&fmt, 0, sizeof(fmt));
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (ioctl(fd, VIDIOC_G_FMT, &fmt) == -1) {
        perror("Error getting format");
        return;
    }
    printf("Camera Resolution: %ux%u\n", fmt.fmt.pix.width, fmt.fmt.pix.height);
    /* Bound the FourCC to exactly 4 bytes; it is not NUL-terminated. */
    printf("%.4s\n", (const char *)&fmt.fmt.pix.pixelformat);
}
/* One frame buffer mmap'ed from the driver (V4L2_MEMORY_MMAP). */
struct buffer {
void *start; /* address returned by mmap() */
size_t length; /* size of the mapping in bytes */
};
/* Capture state shared between main() and the capture thread. */
int fd;                         /* V4L2 device file descriptor */
struct buffer *buffers;         /* array of mmap'ed frame buffers */
unsigned int n_buffers;         /* number of entries in buffers[] */
/* Shutdown flag set from the SIGINT handler and polled by the capture
 * thread.  NOTE(review): volatile sig_atomic_t is the correct type for
 * signal-handler communication, but it is not a formal cross-thread
 * synchronization primitive — consider atomic_int if this grows. */
volatile sig_atomic_t stop = 0;

/*
 * SIGINT handler: request shutdown.
 *
 * printf() is not async-signal-safe (POSIX signal-safety(7)) and may
 * deadlock or corrupt stdio state if the signal lands mid-printf in
 * another context, so the notice is emitted with write(), which is
 * async-signal-safe.  Short writes are deliberately ignored here.
 */
void signalHandler(int signo) {
    if (signo == SIGINT) {
        static const char msg[] = "\nReceived SIGINT. Cleaning up...\n";
        ssize_t ignored = write(STDOUT_FILENO, msg, sizeof(msg) - 1);
        (void)ignored;
        stop = 1;
    }
}
/*
 * Append one raw YUYV frame to OUTPUT_FILE.
 *
 * data: pointer to the frame bytes (driver-filled mmap buffer).
 * size: number of valid bytes (v4l2_buffer.bytesused).
 *
 * The original ignored the fwrite()/fclose() results, so disk-full or
 * I/O errors silently dropped frames; both are now reported.  Opening
 * in append mode per frame is kept for interface compatibility, though
 * a long-lived FILE* would be cheaper.
 */
void processFrame(const void *data, size_t size) {
    if (size == 0) {
        return; /* DQBUF can legitimately report an empty buffer */
    }
    FILE *outputFile = fopen(OUTPUT_FILE, "ab");
    if (!outputFile) {
        perror("Error opening output file");
        return;
    }
    if (fwrite(data, size, 1, outputFile) != 1) {
        perror("Error writing frame data");
    }
    /* fclose() flushes; a failure here also means the frame was lost. */
    if (fclose(outputFile) == EOF) {
        perror("Error closing output file");
    }
}
/*
 * Capture loop thread: repeatedly dequeue a filled buffer, pass it to
 * processFrame(), and hand it back to the driver, until `stop` is set.
 *
 * arg is unused; always returns NULL.
 *
 * Key fix: SIGINT interrupts the blocking VIDIOC_DQBUF ioctl with
 * errno == EINTR.  The original treated that as a fatal error; we now
 * loop back so the `stop` flag set by the handler is honored cleanly.
 */
void *videoCaptureThread(void *arg) {
    (void)arg;
    struct v4l2_buffer buf;
    while (!stop) {
        memset(&buf, 0, sizeof(buf));
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        /* Blocks until the driver has a filled frame for us. */
        if (ioctl(fd, VIDIOC_DQBUF, &buf) == -1) {
            if (errno == EINTR || errno == EAGAIN) {
                continue; /* interrupted by a signal / not ready: re-check stop */
            }
            perror("Error dequeuing buffer");
            break;
        }
        /* bytesused, not length: the driver may fill less than the mapping. */
        processFrame(buffers[buf.index].start, buf.bytesused);
        /* Return the buffer to the driver's incoming queue. */
        if (ioctl(fd, VIDIOC_QBUF, &buf) == -1) {
            perror("Error queuing buffer");
            break;
        }
    }
    return NULL;
}
/*
 * Open the capture device, negotiate WIDTHxHEIGHT YUYV, mmap a ring of
 * driver buffers, stream frames from a worker thread, and tear
 * everything down on SIGINT.
 *
 * Returns 0 on success, -1 on any setup failure.
 *
 * Rewritten with goto-based cleanup: the original leaked the mmap'ed
 * buffers and the `buffers` allocation on every error path after they
 * were created.  Also adds the capability check that VIDIOC_QUERYCAP
 * exists for, and reports when the driver adjusts the requested format.
 */
int main() {
    int ret = -1;
    struct v4l2_capability cap;
    struct v4l2_format fmt;
    struct v4l2_requestbuffers reqbuf;
    enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

    /* Install the handler before anything that can block. */
    signal(SIGINT, signalHandler);

    fd = open(VIDEO_DEVICE, O_RDWR);
    if (fd == -1) {
        perror("Error opening device");
        return -1;
    }

    if (ioctl(fd, VIDIOC_QUERYCAP, &cap) == -1) {
        perror("Error querying capabilities");
        goto out_close;
    }
    /* Refuse nodes that cannot do mmap streaming capture at all. */
    if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) ||
        !(cap.capabilities & V4L2_CAP_STREAMING)) {
        fprintf(stderr, "%s does not support streaming video capture\n",
                VIDEO_DEVICE);
        goto out_close;
    }

    /* Negotiate the capture format. */
    memset(&fmt, 0, sizeof(fmt));
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    fmt.fmt.pix.width = WIDTH;
    fmt.fmt.pix.height = HEIGHT;
    fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV; /* packed YUV 4:2:2 */
    fmt.fmt.pix.field = V4L2_FIELD_NONE;
    if (ioctl(fd, VIDIOC_S_FMT, &fmt) == -1) {
        perror("Error setting format");
        goto out_close;
    }
    /* S_FMT may silently adjust the request; tell the user what we got. */
    if (fmt.fmt.pix.width != WIDTH || fmt.fmt.pix.height != HEIGHT) {
        fprintf(stderr, "Warning: driver adjusted resolution to %ux%u\n",
                fmt.fmt.pix.width, fmt.fmt.pix.height);
    }

    /* Ask the driver for a ring of mmap-able buffers. */
    memset(&reqbuf, 0, sizeof(reqbuf));
    reqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    reqbuf.memory = V4L2_MEMORY_MMAP;
    reqbuf.count = 4; /* driver may grant fewer; reqbuf.count holds the real number */
    if (ioctl(fd, VIDIOC_REQBUFS, &reqbuf) == -1) {
        perror("Error requesting buffers");
        goto out_close;
    }

    buffers = calloc(reqbuf.count, sizeof(*buffers));
    if (!buffers) {
        perror("Error allocating memory for buffers");
        goto out_close;
    }

    /* Map each driver buffer into our address space. */
    for (n_buffers = 0; n_buffers < reqbuf.count; ++n_buffers) {
        struct v4l2_buffer buf;
        memset(&buf, 0, sizeof(buf));
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        buf.index = n_buffers;
        if (ioctl(fd, VIDIOC_QUERYBUF, &buf) == -1) {
            perror("Error querying buffer");
            goto out_unmap;
        }
        buffers[n_buffers].length = buf.length;
        buffers[n_buffers].start = mmap(NULL, buf.length,
                                        PROT_READ | PROT_WRITE, MAP_SHARED,
                                        fd, buf.m.offset);
        if (buffers[n_buffers].start == MAP_FAILED) {
            perror("Error mapping buffer");
            goto out_unmap; /* n_buffers still excludes this failed index */
        }
    }

    /* Queue every buffer so the driver can start filling them. */
    for (unsigned int i = 0; i < n_buffers; ++i) {
        struct v4l2_buffer buf;
        memset(&buf, 0, sizeof(buf));
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        buf.index = i;
        if (ioctl(fd, VIDIOC_QBUF, &buf) == -1) {
            perror("Error queuing buffer");
            goto out_unmap;
        }
    }

    if (ioctl(fd, VIDIOC_STREAMON, &type) == -1) {
        perror("Error starting capture");
        goto out_unmap;
    }

    pthread_t captureThread;
    if (pthread_create(&captureThread, NULL, videoCaptureThread, NULL) != 0) {
        perror("Error creating capture thread");
        ioctl(fd, VIDIOC_STREAMOFF, &type);
        goto out_unmap;
    }

    /* Wait for SIGINT.  Re-checking `stop` closes most of the window
     * where a bare pause() could miss a signal delivered just before it. */
    while (!stop) {
        pause();
    }
    pthread_join(captureThread, NULL);

    if (ioctl(fd, VIDIOC_STREAMOFF, &type) == -1) {
        perror("Error stopping capture");
        goto out_unmap;
    }
    ret = 0;

out_unmap:
    for (unsigned int i = 0; i < n_buffers; ++i) {
        if (buffers[i].start != NULL && buffers[i].start != MAP_FAILED) {
            munmap(buffers[i].start, buffers[i].length);
        }
    }
    free(buffers);
    buffers = NULL;
out_close:
    close(fd);
    return ret;
}