Create v4l2.c
#include <errno.h>
#include <fcntl.h>
#include <linux/videodev2.h>
#include <stdint.h>
#include <stdio.h>
#include <string.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <unistd.h>
#include <stdlib.h>
#include <opencv2/core/core.hpp>
#include <opencv2/highgui/highgui.hpp>
typedef unsigned char uchar;
uchar *buffer; // pointer to the mmap'ed frame buffer
#define IMAGEWIDTH 640
#define IMAGEHEIGHT 480
#define TRUE 1
#define FALSE 0
#define FILE_VIDEO1 "/dev/video0"
static int fd; // device file descriptor
struct v4l2_streamparm setfps; // v4l2_streamparm describes streaming parameters (e.g. frame rate)
struct v4l2_capability cap; // device capabilities: video capture, streaming, audio I/O, etc.
struct v4l2_fmtdesc fmtdesc; // used to enumerate the image formats the device supports (VIDIOC_ENUM_FMT)
struct v4l2_format fmt,fmtack; // the embedded struct v4l2_pix_format carries capture width/height and pixel format, e.g. V4L2_PIX_FMT_YUYV
struct v4l2_requestbuffers req; // request to the driver for frame buffers, including how many
struct v4l2_buffer buf; // represents one frame buffer inside the driver
enum v4l2_buf_type type; // buffer/stream type
int init_v4l2(void); // open and configure the device
int v4l2_grab(void); // set up buffers and start capturing
int main()
{
printf("first~~\n");
if(init_v4l2() == FALSE){ // open and configure the camera
printf("Init fail~~\n");
exit(1);
}
printf("second~~\n");
if(v4l2_grab() == FALSE){
printf("grab fail~~\n");
exit(2);
}
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; // stream/buffer type; for a capture device this is always V4L2_BUF_TYPE_VIDEO_CAPTURE
buf.memory = V4L2_MEMORY_MMAP; // memory-mapped I/O, so V4L2_MEMORY_MMAP
printf("third~~\n");
#if 1
cvNamedWindow("one",CV_WINDOW_AUTOSIZE);
IplImage* img;
CvMat cvmat;
int i = 100;
double t;
while(1){
t = (double)cvGetTickCount(); // read the tick counter to time one loop iteration
ioctl(fd,VIDIOC_DQBUF,&buf);
buf.index = 0;
cvmat = cvMat(IMAGEHEIGHT,IMAGEWIDTH,CV_8UC3,(void*)buffer); // wrap the MJPEG byte stream in a CvMat header so cvDecodeImage can decode it
//t = (double)cvGetTickCount();
img = cvDecodeImage(&cvmat,1);
//t=(double)cvGetTickCount()-t;
//printf("used time is %gms\n",(t/(cvGetTickFrequency()*1000)));
if(!img){ // decoding can fail on a corrupt frame; re-queue the buffer and try again
printf("No img\n");
ioctl(fd,VIDIOC_QBUF,&buf);
continue;
}
cvShowImage("one",img);
cvReleaseImage(&img);
ioctl(fd,VIDIOC_QBUF,&buf); // the driver keeps two buffer queues internally: an incoming (capture) queue and an outgoing (done) queue.
// For a capture device, a buffer moves to the outgoing queue once it has been filled with data;
// VIDIOC_DQBUF takes it out for processing, and VIDIOC_QBUF puts it back on the incoming queue afterwards.
if((cvWaitKey(1)&255) == 27) exit(0);
t=(double)cvGetTickCount()-t;
printf("used time is %gms\n",(t/(cvGetTickFrequency()*1000)));
}
#endif
ioctl(fd,VIDIOC_STREAMOFF,&type); // stop capturing; after VIDIOC_STREAMOFF the driver no longer captures video data
return 0;
}
int init_v4l2(void){
if ((fd = open(FILE_VIDEO1, O_RDWR)) == -1){
printf("Opening video device error\n");
return FALSE;
}
if (ioctl(fd, VIDIOC_QUERYCAP, &cap) == -1){ // query the device's capabilities
printf("unable Querying Capabilities\n");
return FALSE;
}
else
{
printf( "Driver Caps:\n"
" Driver: \"%s\"\n"
" Card: \"%s\"\n"
" Bus: \"%s\"\n"
" Version: %d\n"
" Capabilities: %x\n",
cap.driver,
cap.card,
cap.bus_info,
cap.version,
cap.capabilities);
}
if((cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) == V4L2_CAP_VIDEO_CAPTURE){
printf("Camera device %s: support capture\n",FILE_VIDEO1);
}
if((cap.capabilities & V4L2_CAP_STREAMING) == V4L2_CAP_STREAMING){
printf("Camera device %s: support streaming.\n",FILE_VIDEO1);
}
fmtdesc.index = 0;
fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
printf("Support format: \n");
while(ioctl(fd,VIDIOC_ENUM_FMT,&fmtdesc) != -1){ // enumerate the image formats this device supports
printf("\t%d. %s\n",fmtdesc.index+1,fmtdesc.description);
fmtdesc.index++;
}
//set fmt
fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
fmt.fmt.pix.width = IMAGEWIDTH;
fmt.fmt.pix.height = IMAGEHEIGHT;
fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_MJPEG; // V4L2_PIX_FMT_YUYV is another common choice here
fmt.fmt.pix.field = V4L2_FIELD_NONE;
if (ioctl(fd, VIDIOC_S_FMT, &fmt) == -1){ // set the capture format: image width/height and pixel format (e.g. MJPEG or YUYV)
printf("Setting Pixel Format error\n");
return FALSE;
}
if(ioctl(fd,VIDIOC_G_FMT,&fmt) == -1){ // read the format back to see what the driver actually accepted
printf("Unable to get format\n");
return FALSE;
}
printf("fmt.type:\t%d\n",fmt.type); // print the negotiated format
printf("pix.pixelformat:\t%c%c%c%c\n",fmt.fmt.pix.pixelformat & 0xFF,(fmt.fmt.pix.pixelformat >> 8) & 0xFF,\
(fmt.fmt.pix.pixelformat >> 16) & 0xFF, (fmt.fmt.pix.pixelformat >> 24) & 0xFF);
printf("pix.width:\t%d\n",fmt.fmt.pix.width);
printf("pix.height:\t%d\n",fmt.fmt.pix.height);
printf("pix.field:\t%d\n",fmt.fmt.pix.field);
/*
setfps.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
setfps.parm.capture.timeperframe.numerator = 100;
setfps.parm.capture.timeperframe.denominator = 100;
printf("init %s is OK\n",FILE_VIDEO1);
*/
return TRUE;
}
int v4l2_grab(void){
//struct v4l2_requestbuffers req = {0};
//4 request frame buffers from the driver (only one is used here)
req.count = 1;
req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
req.memory = V4L2_MEMORY_MMAP;
if (ioctl(fd, VIDIOC_REQBUFS, &req) == -1) // negotiate memory-mapped I/O and allocate the buffers in the driver
{
printf("Requesting Buffer error\n");
return FALSE;
}
//5 mmap the buffers (no malloc is needed: mmap() below supplies the memory)
unsigned int n_buffers;
for(n_buffers = 0;n_buffers < req.count; n_buffers++){
//struct v4l2_buffer buf = {0};
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
buf.index = n_buffers;
if(ioctl(fd, VIDIOC_QUERYBUF, &buf) == -1){ // query the allocated buffer: its state, its offset in device memory and its length
printf("Querying Buffer error\n");
return FALSE;
}
buffer = (uchar*)mmap (NULL, buf.length, PROT_READ | PROT_WRITE, MAP_SHARED, fd, buf.m.offset);
if(buffer == MAP_FAILED){
printf("buffer map error\n");
return FALSE;
}
printf("Length: %d\nAddress: %p\n", buf.length, buffer);
printf("Image Length: %d\n", buf.bytesused);
}
//6 queue
for(n_buffers = 0;n_buffers <req.count;n_buffers++){
buf.index = n_buffers;
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
if(ioctl(fd,VIDIOC_QBUF,&buf)){ // put an empty buffer onto the driver's incoming (capture) queue
printf("queue buffer error\n");
return FALSE;
}
}
//7 starting
type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if(ioctl(fd,VIDIOC_STREAMON,&type) == -1){ // start streaming
printf("stream on error\n");
return FALSE;
}
return TRUE;
}
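Although the file is named v4l2.c, it includes the OpenCV C++ headers, so it has to be built as C++. A plausible build line, assuming an OpenCV 2.x installation with pkg-config support (the legacy cv* C API used above was removed in OpenCV 4), is:

g++ -x c++ v4l2.c -o v4l2 `pkg-config --cflags --libs opencv`

Run it with a UVC camera attached as /dev/video0; pressing Esc in the preview window exits.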
The C++ version
camera.h
#ifndef _CAMERA_H_
#define _CAMERA_H_
#include <vector>
#include "base/base.h"
class Camera : public BaseClass
{
public:
/* singleton accessor, must be implemented */
static Camera * getInstace(void);
/* parent virtual functions, must be implemented */
int handler(int argc, char * const argv[]);
void releaseResource(void);
/* member function */
int openCamera(const std::string path);
int queryCameraCap(struct v4l2_capability * cap);
int enumCameraInput(void);
int setCameraInput(int index);
int enumCameraFormat(void);
int setCameraFormat(int format, int width, int height);
int requestCameraBufs(int nr_bufs);
int queryCameraBuf(int index);
void * mmapCameraBufs(int index);
int putCameraBufsToQueue(int index);
int cameraStreamOn(void);
int pollCameraStream(void);
int CamearaBufsDequeue(void);
int readCameraStreamData(int index, char * buf);
int CamearaBufsInqueue(int index);
int cameraStreamOff(void);
int munmapCameraBufs(int nr_bufs);
int closeCamera(void);
private:
static Camera * sInstace;
std::vector <void*> addr;
std::vector <int> v4l2BufferLen;
std::vector <int> v4l2BufferOffset;
int fd;
int num_bufs = 5;
};
#endif
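camera.h pulls in base/base.h, which is not part of this post. So that the listings can be read (and compiled) on their own, a minimal stand-in is assumed to look roughly like the sketch below: BaseClass only declares the handler/releaseResource interface, and OK/NG are the status codes, with NG negative so that the ret < 0 checks in camera.c behave as intended.

// base/base.h -- hypothetical minimal stand-in, not the author's real header
#ifndef _BASE_H_
#define _BASE_H_

#define OK 0      // success
#define NG (-1)   // failure; assumed negative so that "ret < 0" checks work

class BaseClass
{
public:
    virtual ~BaseClass() {}
    // each module parses its own command line and runs its main loop
    virtual int handler(int argc, char * const argv[]) = 0;
    virtual void releaseResource(void) = 0;
};
#endif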
camera.c
#include <iostream>
#include <sys/types.h>
#include <sys/stat.h>
#include <fcntl.h>
#include <unistd.h>
#include <sys/mman.h>
#include <poll.h>
#include <linux/videodev2.h>
#include <sys/ioctl.h>
#include <getopt.h>
#include <string.h>
#include "camera/camera.h"
Camera * Camera::sInstace = nullptr;
Camera * Camera::getInstace(void)
{
return (nullptr == sInstace)? (sInstace = new Camera()) : sInstace;
}
void Camera::releaseResource(void)
{
if (nullptr != sInstace)
{
delete sInstace;
sInstace = nullptr; // avoid a dangling pointer if getInstace() is called again
}
}
int Camera::openCamera(const std::string path)
{
fd = open(path.c_str(), O_RDWR);
if (fd < 0)
{
std::cout << "open " << path << "failed\n";
return -NG;
}
return OK;
}
int Camera::queryCameraCap(struct v4l2_capability * cap)
{
int ret = ioctl(fd, VIDIOC_QUERYCAP, cap);
if (ret < 0)
{
std::cout << "ioctl VIDIOC_QUERYCAP failed\n";
return NG;
}
return OK;
}
int Camera::enumCameraInput(void)
{
struct v4l2_input input = {0};
while (!ioctl(fd, VIDIOC_ENUMINPUT, &input))
{
std::cout << "input: " << input.name << "\n";
++input.index;
}
return input.index;
}
int Camera::setCameraInput(int index)
{
int input = index; // VIDIOC_S_INPUT takes a pointer to the input index (an int), not a struct v4l2_input
int ret = ioctl(fd, VIDIOC_S_INPUT, &input);
if (ret < 0)
{
std::cout << "ioctl VIDIOC_S_INPUT failed\n";
return NG;
}
return OK;
}
int Camera::enumCameraFormat(void)
{
struct v4l2_fmtdesc fmtdesc = {0};
fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
while (!ioctl(fd, VIDIOC_ENUM_FMT, &fmtdesc))
{
std::cout << "fmt: " << fmtdesc.description << "\n";
fmtdesc.index++;
}
return fmtdesc.index;
}
int Camera::setCameraFormat(int format, int width, int height)
{
struct v4l2_format v4l2_fmt = {0};
v4l2_fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
v4l2_fmt.fmt.pix.width = width;
v4l2_fmt.fmt.pix.height = height;
v4l2_fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_MJPEG; // note: the format argument is currently ignored and MJPEG is hard-coded
v4l2_fmt.fmt.pix.field = V4L2_FIELD_ANY;
int ret = ioctl(fd, VIDIOC_S_FMT, &v4l2_fmt);
if (ret < 0)
{
std::cout << "ioctl VIDIOC_S_FMT failed\n";
return NG;
}
return OK;
}
int Camera::requestCameraBufs(int nr_bufs)
{
struct v4l2_requestbuffers req = {0}; // zero the reserved fields before the ioctl
req.count = nr_bufs;
req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
req.memory = V4L2_MEMORY_MMAP;
int ret = ioctl(fd, VIDIOC_REQBUFS, &req);
if (ret < 0)
{
std::cout << "ioctl VIDIOC_REQBUFS failed\n";
return NG;
}
return OK;
}
int Camera::queryCameraBuf(int index)
{
struct v4l2_buffer v4l2_buffer;
memset(&v4l2_buffer, 0, sizeof(struct v4l2_buffer));
v4l2_buffer.index = index;
v4l2_buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
v4l2_buffer.memory = V4L2_MEMORY_MMAP;
int ret = ioctl(fd, VIDIOC_QUERYBUF, &v4l2_buffer);
if(ret < 0)
{
std::cout << "ioctl VIDIOC_QUERYBUF failed\n";
return NG;
}
v4l2BufferLen.push_back(v4l2_buffer.length);
v4l2BufferOffset.push_back(v4l2_buffer.m.offset);
return OK;
}
void * Camera::mmapCameraBufs(int index)
{
void * mmapAddr = mmap(NULL /* start anywhere */, v4l2BufferLen[index],
PROT_READ | PROT_WRITE, MAP_SHARED,
fd, v4l2BufferOffset[index]);
// mmap() signals failure with MAP_FAILED rather than nullptr, so translate it for the caller
return (MAP_FAILED == mmapAddr) ? nullptr : mmapAddr;
}
int Camera::putCameraBufsToQueue(int index)
{
struct v4l2_buffer v4l2_buffer;
memset(&v4l2_buffer, 0, sizeof(struct v4l2_buffer));
v4l2_buffer.index = index;
v4l2_buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
v4l2_buffer.memory = V4L2_MEMORY_MMAP;
int ret = ioctl(fd, VIDIOC_QBUF, &v4l2_buffer);
if (ret < 0)
{
std::cout << "ioctl VIDIOC_QBUF failed\n";
return NG;
}
return OK;
}
int Camera::cameraStreamOn(void)
{
enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
int ret = ioctl(fd, VIDIOC_STREAMON, &type);
if (ret < 0)
{
std::cout << "ioctl VIDIOC_STREAMON failed\n";
return NG;
}
return OK;
}
int Camera::pollCameraStream(void)
{
struct pollfd poll_fds[1];
poll_fds[0].fd = fd;
poll_fds[0].events = POLLIN;
int ret = poll(poll_fds, 1, 10000);
return ret;
}
int Camera::CamearaBufsDequeue(void)
{
struct v4l2_buffer buffer;
memset(&buffer, 0, sizeof(struct v4l2_buffer));
buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buffer.memory = V4L2_MEMORY_MMAP;
int ret = ioctl(fd, VIDIOC_DQBUF, &buffer);
if (ret < 0)
{
std::cout << "ioctl VIDIOC_DQBUF failed\n";
return NG;
}
return buffer.index;
}
int Camera::readCameraStreamData(int index, char * buf)
{
memcpy(buf, addr[index], v4l2BufferLen[index]);
std::cout << "addr:" << addr[index] << "\n"
<< "len:" << v4l2BufferLen[index] <<"\n";
return OK;
}
int Camera::CamearaBufsInqueue(int index)
{
struct v4l2_buffer v4l2_buf;
memset(&v4l2_buf, 0, sizeof(struct v4l2_buffer));
v4l2_buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
v4l2_buf.memory = V4L2_MEMORY_MMAP;
v4l2_buf.index = index;
int ret = ioctl(fd, VIDIOC_QBUF, &v4l2_buf);
if (ret < 0)
{
std::cout << "ioctl VIDIOC_QBUF failed\n";
return NG;
}
return OK;
}
int Camera::cameraStreamOff(void)
{
enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
int ret = ioctl(fd, VIDIOC_STREAMOFF, &type);
if (ret < 0)
{
std::cout << "ioctl VIDIOC_STREAMOFF failed\n";
return NG;
}
return OK;
}
int Camera::munmapCameraBufs(int nr_bufs)
{
for(int i=0; i<nr_bufs; ++i)
munmap(addr[i], v4l2BufferLen[i]);
return OK; // the function is declared int, so return a status
}
int Camera::closeCamera(void)
{
close(fd);
return OK;
}
int Camera::handler(int argc, char * const argv[])
{
int ret = -1;
const char * string = "v:p:"; // option string for getopt_long: -v <device> (only -v is handled below)
int opt = 0;
int optionIndex = 0;
struct v4l2_capability cap = {0};
std::string deviceBuf;
while(-1 != (opt = getopt_long(argc, argv, string, NULL, &optionIndex)))
{
if ('v' == opt)
{
deviceBuf = optarg;
break;
}
}
if (deviceBuf.empty())
{
std::cout << "deviceBuf is empty\n";
return NG;
}
ret = openCamera(deviceBuf);
if (ret < 0)
{
std::cout << "openCamera fail\n";
return NG;
}
ret = queryCameraCap(&cap);
if (ret < 0)
{
std::cout << "queryCameraCap fail\n";
return NG;
}
if(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)
std::cout << "v4l2 dev support capture\n";
if(cap.capabilities & V4L2_CAP_VIDEO_OUTPUT)
std::cout << "v4l2 dev support output\n";
if(cap.capabilities & V4L2_CAP_VIDEO_OVERLAY)
std::cout << "v4l2 dev support overlay\n";
if(cap.capabilities & V4L2_CAP_STREAMING)
std::cout << "v4l2 dev support streaming\n";
if(cap.capabilities & V4L2_CAP_READWRITE)
std::cout << "v4l2 dev support read write\n";
ret = enumCameraInput();
if (ret < 0)
{
std::cout << "enumCameraInput fail\n";
return NG;
}
ret = setCameraInput(0);
if (ret < 0)
{
std::cout << "setCameraInput fail\n";
return NG;
}
ret = enumCameraFormat();
if (ret < 0)
{
std::cout << "enumCameraFormat fail\n";
return NG;
}
ret = setCameraFormat(0, 1920, 1080);
if (ret < 0)
{
std::cout << "setCameraFormat fail\n";
return NG;
}
ret = requestCameraBufs(num_bufs);
if (ret < 0)
{
std::cout << "requestCameraBufs fail\n";
return NG;
}
for (int i=0; i<num_bufs; i++)
{
ret = queryCameraBuf(i);
if (ret < 0)
{
std::cout << "queryCameraBuf fail\n";
return NG;
}
void * pAddr = mmapCameraBufs(i);
if (nullptr == pAddr)
{
std::cout << "mmapCameraBufs fail\n";
return NG;
}
addr.push_back(pAddr);
ret = putCameraBufsToQueue(i);
if (ret < 0)
{
std::cout << "putCameraBufsToQueue fail\n";
return NG;
}
}
ret = cameraStreamOn();
if (ret < 0)
{
std::cout << "cameraStreamOn fail\n";
return NG;
}
while (1)
{
ret = pollCameraStream();
if (ret > 0)
{
// a full 1920x1080 RGBA frame is about 8 MB, too large for the stack, so use a heap buffer
std::vector<char> buf(1920 * 1080 * 4);
int check = CamearaBufsDequeue();
if (check < 0)
{
continue;
}
readCameraStreamData(check, buf.data());
CamearaBufsInqueue(check);
}
else if (0 == ret)
{
// poll() timed out: no frame arrived within 10 seconds, just poll again
}
else
{
// poll() failed
}
}
return OK;
}
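The post ends with the class implementation; the project's real entry point is not shown. As a usage illustration only, a minimal main could simply hand the command line to the singleton, with -v carrying the device node:

// main.cpp -- hypothetical entry point, for illustration only
#include "camera/camera.h"

int main(int argc, char * const argv[])
{
    // e.g. ./camera_demo -v /dev/video0
    Camera * cam = Camera::getInstace();
    int ret = cam->handler(argc, argv);   // runs the capture loop; returns early only on a setup error
    cam->releaseResource();
    return ret;
}

Built together with camera.c (compiled as C++) and the base header, it needs at least -std=c++11, since num_bufs uses an in-class member initializer.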