最近在做一个linux摄像头的应用程序,主要功能是:arm板子跑linux系统,进行摄像头视频采集,捕获一帧视频然后保存成图片。功能很简单,但是我却搞了很久,过程中遇到了很多问题,在此写下点滴记录以备忘,还有很多问题待解决……
硬件平台:arm Cortex-A8实验箱 + ov3640 CMOS摄像头
软件平台: (1)开发平台:xp系统上装的virtualbox-2.6.8虚拟机 + ubuntu10.10
(2)arm板子系统:linux 2.6.35 内核 + qtopia文件系统
一、摄像头程序:
1、源代码:camera3640.c
#include "classroom.h"
/*******************************************************************************************************************************************************************/
extern char * chpt_lcd_mmap_addr;     // LCD framebuffer mmap pointer
extern unsigned int int_lcd_width;    // LCD width in pixels
extern unsigned int int_lcd_height;   // LCD height in pixels
extern unsigned int int_lcd_pixel;    // LCD pixel depth
extern unsigned int cameratimes;      // number of frames captured so far
extern const unsigned int camMaxtime; // maximum number of capture iterations
struct buffer   // one mmap'ed frame buffer handed out by the driver
{
    void * start;   // userspace address of the mapped buffer
    size_t length;  // buffer size in bytes
}*buffers;
const char * CameraName = "/dev/video0"; // camera device node
int cam_fd = -1;                         // camera file descriptor
static int n_buffers = 0;                // number of buffers actually mapped
unsigned int times = 0;
unsigned int bufferLenth = 0;            // length of the most recently dequeued frame
- /*******************************************************************************************************************************************************************/
- void cameraOpen(void)
- {
- cam_fd = open( CameraName, O_RDWR | O_NONBLOCK, 0 ); //阻塞方式打开摄像头
- if(cam_fd < 0)
- {
- printf("Open fimc0 error.\n");
- exit(1);
- }
- }
- void cameraInit(void)
- {
- struct v4l2_capability cap;
- int ret = 0;
- unsigned int min;
- struct v4l2_input input;
- ret = ioctl( cam_fd, VIDIOC_QUERYCAP, &cap );
- if( ret < 0 )
- {
- printf("set VIDIOC_QUERYCAP error.\n");
- exit(1);
- }
- if( !(cap.capabilities & V4L2_CAP_STREAMING) )
- {
- printf("%s can not streaming.\n");
- exit(1);
- }
- input.index = 0;
- if ((ioctl(cam_fd, VIDIOC_S_INPUT, &input)) < 0) //单输入模式
- {
- printf("set s_input error.\n");
- exit(1);
- }
- //设置视频的格式
- struct v4l2_format fmt;
- CLEAR(fmt);
- fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; //数据流类型,永远是:V4L2_BUF_TYPE_VIDEO_CAPTURE
- fmt.fmt.pix.width = int_lcd_width; //800 宽,必须是16的倍数
- fmt.fmt.pix.height = int_lcd_height; //400 高,必须是16的倍数
- fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB32; //视频数据存储类型,例如是YUV4:2:2还是RGB
- fmt.fmt.pix.field = V4L2_FIELD_ANY;
- if ( ioctl(cam_fd, VIDIOC_S_FMT, &fmt) == -1 )
- {
- printf("set format error\n");
- }
- //如果该视频设备驱动不支持你所设定的图像格式,视频驱动会重新修改struct v4l2_format结构体变量的值为该视频设备所支持的图像格式,
- //所以在程序设计中,设定完所有的视频格式后,要获取实际的视频格式,要重新读取struct v4l2_format结构体变量。
- if(ioctl(cam_fd, VIDIOC_G_FMT, &fmt) == -1)
- {
- printf("Unable to get format\n");
- exit(1);
- }
- {
- printf("fmt.type:\t\t%d\n",fmt.type);
- printf("pix.pixelformat:\t%c%c%c%c\n",fmt.fmt.pix.pixelformat & 0xFF, (fmt.fmt.pix.pixelformat >> 8) & 0xFF,
- (fmt.fmt.pix.pixelformat >> 16) & 0xFF, (fmt.fmt.pix.pixelformat >> 24) & 0xFF);
- printf("pix.height:\t\t%d\n",fmt.fmt.pix.height);
- printf("pix.width:\t\t%d\n",fmt.fmt.pix.width);
- printf("pix.field:\t\t%d\n",fmt.fmt.pix.field);
- }
- //printf("real format is %d X %d,pixel is %d!\n",fmt.fmt.pix.width,fmt.fmt.pix.height,fmt.fmt.pix.pixelformat);
- if( int_lcd_width != fmt.fmt.pix.width )
- {
- int_lcd_width = fmt.fmt.pix.width;
- fprintf(stderr,"Image width set to %i by device %s.\n", int_lcd_width, CameraName );
- }
- if( int_lcd_height != fmt.fmt.pix.height )
- {
- int_lcd_height = fmt.fmt.pix.height;
- fprintf(stderr, "Image height set to %i by device %s.\n", int_lcd_height, CameraName );
- }
- min = fmt.fmt.pix.width * 2;
- if( fmt.fmt.pix.bytesperline < min )
- {
- fmt.fmt.pix.bytesperline = min;
- }
- min = fmt.fmt.pix.bytesperline * fmt.fmt.pix.height;
- if( fmt.fmt.pix.sizeimage < min )
- {
- fmt.fmt.pix.sizeimage = min;
- }
- mmapInit();
- }
/*
 * Request 4 mmap-able frame buffers from the driver (VIDIOC_REQBUFS),
 * query each one's offset/length (VIDIOC_QUERYBUF) and map it into this
 * process.  Fills the global buffers[] array; exits on any failure.
 */
void mmapInit(void) // memory-mapped I/O setup
{
    struct v4l2_requestbuffers req;
    CLEAR (req);
    req.count = 4; // ask for 4 buffers
    req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    req.memory = V4L2_MEMORY_MMAP;
    if (-1 == xioctl(cam_fd, VIDIOC_REQBUFS, &req))// request the buffers; count is how many we want
    {
        if (EINVAL == errno)
        {
            fprintf(stderr, "%s does not support memory mapping\n", CameraName);
            exit(EXIT_FAILURE);
        }
        else
        {
            errno_exit("VIDIOC_REQBUFS");
        }
    }
    if (req.count < 2) // the driver may grant fewer buffers than requested
    {
        fprintf(stderr, "Insufficient buffer memory on %s\n", CameraName);
        exit(EXIT_FAILURE);
    }
    buffers = (struct buffer*)calloc(req.count, sizeof(*buffers));// userspace bookkeeping array
    if (!buffers)
    {
        fprintf(stderr, "Out of memory\n");
        exit(EXIT_FAILURE);
    }
    for (n_buffers = 0; n_buffers < req.count; ++n_buffers)// map each granted buffer
    {
        struct v4l2_buffer buf; // describes one driver-side frame buffer
        CLEAR (buf);
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        buf.index = n_buffers;
        if (-1 == xioctl(cam_fd, VIDIOC_QUERYBUF, &buf))// fetch its offset and length
        {
            errno_exit("VIDIOC_QUERYBUF");
        }
        buffers[n_buffers].length = buf.length;
        // map the device buffer into our address space with mmap()
        buffers[n_buffers].start = mmap(NULL, buf.length, PROT_READ | PROT_WRITE, MAP_SHARED, cam_fd, buf.m.offset);
        if (MAP_FAILED == buffers[n_buffers].start)
        {
            errno_exit("mmap");
        }
    }
}
/*
 * Queue every mapped buffer to the driver, then start the capture
 * stream with VIDIOC_STREAMON.  Exits on any failure.
 */
void captureStart(void)
{
    unsigned int i;
    enum v4l2_buf_type type;
    for ( i = 0; i < n_buffers; ++i )
    {
        struct v4l2_buffer buf;
        CLEAR (buf);
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        buf.index = i;
        if (-1 == xioctl(cam_fd, VIDIOC_QBUF, &buf))// enqueue this mapped buffer for capture
        {
            errno_exit("VIDIOC_QBUF");
        }
    }
    type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if ( -1 == xioctl(cam_fd, VIDIOC_STREAMON, &type) )// start streaming
    {
        errno_exit("VIDIOC_STREAMON");
    }
}
/*
 * Capture loop: wait with select() (2-second timeout) until the camera
 * fd is readable, then read and process one frame via frameRead().
 * count is 1, so exactly one frame is captured before returning.
 */
void mainLoop(void)
{
    unsigned int count;
    count = 1;
    while (count-- > 0)
    {
        for ( ; ; )
        {
            fd_set fds;
            struct timeval tv;
            int r;
            FD_ZERO(&fds); // clear the descriptor set
            FD_SET(cam_fd, &fds); // add the camera fd to the set
            tv.tv_sec = 2; // wait up to 2 seconds for a frame
            tv.tv_usec = 0;
            r = select(cam_fd + 1, &fds, NULL, NULL, &tv); // block until readable (frame ready) or timeout
            if (-1 == r)
            {
                if (EINTR == errno)
                    continue; // interrupted by a signal: retry
                errno_exit("select");
            }
            if (0 == r)
            {
                fprintf (stderr, "select timeout\n");
                exit(EXIT_FAILURE);
            }
            if ( frameRead() )// frame readable: process it and leave the inner loop
            {
                break;
            }
        }
    }
}
/*
 * Dequeue one filled buffer, hand it to imageProcess(), re-queue it,
 * and return the buffer's start address (0/NULL on EAGAIN, i.e. no
 * frame ready on the non-blocking fd).
 *
 * NOTE(review): the returned pointer refers to a buffer that has just
 * been re-queued to the driver, so its contents may be overwritten by
 * the next frame; the only caller (mainLoop) uses the result purely as
 * a success flag.
 */
unsigned char* frameRead(void)
{
    struct v4l2_buffer buf;
    CLEAR (buf);
    buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    buf.memory = V4L2_MEMORY_MMAP;
    if (-1 == xioctl(cam_fd, VIDIOC_DQBUF, &buf)) // dequeue a filled buffer
    {
        switch (errno)
        {
            case EAGAIN: // no frame available yet
                return 0;
            case EIO:
            default:
                errno_exit("VIDIOC_DQBUF");
        }
    }
    assert (buf.index < n_buffers);
    imageProcess(buffers[buf.index].start); // save/display the frame
    bufferLenth = buffers[buf.index].length;
    if (-1 == xioctl(cam_fd, VIDIOC_QBUF, &buf))// give the buffer back to the driver
    {
        errno_exit("VIDIOC_QBUF");
    }
    return (unsigned char*)buffers[buf.index].start;
}
- void imageProcess(const void* p)
- {
- unsigned char* src = (unsigned char*)p;//摄像头采集的图像数据
- cameratimes++;
- printf("\n--------- this is %d times.\n",cameratimes);
- printf("========= bufferLenth = %d.\n",bufferLenth);
- printf("+++++++++ string length = %d.\n",strlen(src));
- if(cameratimes >= camMaxtime)
- {
- //productBmp(src); //生成bmp图片
- jpgImageProduct(src); //生成jpg图片
- memcpy( chpt_lcd_mmap_addr, src, int_lcd_width*int_lcd_height*4 ); //在LCD液晶屏显示
- }
- }
/* Print a perror-style diagnostic for the failed call named by s, then abort. */
void errno_exit(const char * s)
{
    const int saved = errno;

    fprintf( stderr, "%s error %d, %s\n", s, saved, strerror(saved) );
    exit(EXIT_FAILURE);
}
/* ioctl() wrapper that transparently retries when interrupted by a signal (EINTR). */
int xioctl( int ffd, int request, void * argp)
{
    int rc;

    for (;;)
    {
        rc = ioctl( ffd, request, argp );
        if (rc != -1 || errno != EINTR)
            break;
    }
    return rc;
}
2、执行流程:
(1)打开设备:cameraOpen()
(2)设备初始化:cameraInit()
(3)建立内存映射:mmapInit()
(4)开始视频采集并捕获图像数据:captureStart()
(5)循环采集:mainLoop()
(6)读取数据:frameRead()
(7)数据处理:imageProcess()
3、常见错误及解决方法:
(1) No capture device info
VIDIOC_REQBUFS error 19, No such device
这个错误纠结了很久很久,在网上搜了很久,在群里问了很多人,都没解决,最后求助出售此实验箱设备公司的工程师,他也没给出原因,只是给了我一个可以运行的例程,我就自己对照了下,然后调试,查出来,原因在于没有设置视频输入(VIDIOC_S_INPUT)。
二、保存jpg图片程序:
1、源代码:jpg.c
/*
 * jpg.c
 *
 * Converts a captured 32-bit frame to packed 24-bit RGB and compresses
 * it to a JPEG file with libjpeg.
 */
#include "classroom.h"
//
#define jpgImageName "test.jpg"  // output file name
extern char * chpt_lcd_mmap_addr;
extern unsigned int int_lcd_height;
extern unsigned int int_lcd_width;
extern unsigned int int_lcd_pixel;
static unsigned int width;   // image width used for the current JPEG
static unsigned int height;  // image height used for the current JPEG
static unsigned int channel;
static unsigned char jpegQuality = 100;  // libjpeg quality setting (0-100)
- //
- static void jpegWrite(unsigned char* img)
- {
- struct jpeg_compress_struct cinfo;
- struct jpeg_error_mgr jerr;
- JSAMPROW row_pointer[1];
- FILE *outfile = fopen( jpgImageName, "w");
- // try to open file for saving
- if (!outfile) {
- errno_exit("jpeg");
- }
- // create jpeg data
- cinfo.err = jpeg_std_error( &jerr );
- jpeg_create_compress(&cinfo);
- jpeg_stdio_dest(&cinfo, outfile);
- // set image parameters
- cinfo.image_width = width;
- cinfo.image_height = height;
- cinfo.input_components = 3;
- cinfo.in_color_space = JCS_RGB;
- // set jpeg compression parameters to default
- jpeg_set_defaults(&cinfo);
- // and then adjust quality setting
- jpeg_set_quality(&cinfo, jpegQuality, TRUE);
- // start compress
- jpeg_start_compress(&cinfo, TRUE);
- // feed data
- while (cinfo.next_scanline < cinfo.image_height)
- {
- row_pointer[0] = &img[(cinfo.image_height - cinfo.next_scanline - 1) * cinfo.image_width*3];
- jpeg_write_scanlines(&cinfo, row_pointer, 1);
- }
- // finish compression
- jpeg_finish_compress(&cinfo);
- // destroy jpeg data
- jpeg_destroy_compress(&cinfo);
- // close output file
- fclose(outfile);
- }
- void jpgImageProduct(const void* p)
- {
- usleep(500000);
- unsigned char* dst;
- unsigned char* src = (unsigned char*)p;
- unsigned int j;
- unsigned int i;
- width = int_lcd_width;
- height = int_lcd_height;
- dst = (unsigned char*)malloc (width*height*3+66);
- for (i=0; i< height; i++)
- {
- for(j=0;j<width;j++)
- {
- memcpy(dst+(i*width+j)*3, src+(i*width+j)*4+2,1);
- memcpy(dst+(i*width+j)*3+1, src+(i*width+j)*4+1,1);
- memcpy(dst+(i*width+j)*3+2, src+(i*width+j)*4,1);
- }
- }
- jpegWrite(dst);
- free(dst);
- }
/*
 * ov3640_video_probe - verify the sensor responds on the i2c bus.
 * Reads the chip version via ov3640_detect(); stores it in the driver
 * state on success, propagates the negative error code on failure.
 */
static int ov3640_video_probe(struct soc_camera_device *icd,
        struct i2c_client *client)
{
    struct ov3640 *ov3640 = to_ov3640(client);
    int ver;
    ver = ov3640_detect(client);
    if (ver < 0) {
        dev_err(&client->dev, "Unable to detect sensor, err %d\n",
            ver);
        return ver;
    }
    ov3640->ver = ver;
    dev_dbg(&client->dev, "Chip version 0x%02x detected\n", ov3640->ver);
    return 0;
}
/* Report the sensor identity for VIDIOC_DBG_G_CHIP_IDENT. */
static int ov3640_g_chip_ident(struct v4l2_subdev *sd,
        struct v4l2_dbg_chip_ident *id)
{
    struct i2c_client *client = v4l2_get_subdevdata(sd);

    if (id->match.type != V4L2_CHIP_MATCH_I2C_ADDR)
        return -EINVAL;
    if (id->match.addr != client->addr)
        return -ENODEV;

    id->revision = 0;
    id->ident = V4L2_IDENT_OV3640;
    return 0;
}
/* Debug hook: write one sensor register on behalf of VIDIOC_DBG_S_REGISTER. */
static int ov3640_s_register(struct v4l2_subdev *sd,
        struct v4l2_dbg_register *reg)
{
    struct i2c_client *client = v4l2_get_subdevdata(sd);

    if (reg->match.type != V4L2_CHIP_MATCH_I2C_ADDR)
        return -EINVAL;
    if (reg->size > 2)
        return -EINVAL;
    if (reg->match.addr != client->addr)
        return -ENODEV;
    if (ov3640_reg_write(client, reg->reg, reg->val))
        return -EIO;
    return 0;
}
/* Core subdev ops: chip identification, control get/set and (debug-only)
 * register access. */
static struct v4l2_subdev_core_ops ov3640_subdev_core_ops = {
    .g_chip_ident = ov3640_g_chip_ident,
    .g_ctrl = ov3640_g_ctrl,
    .s_ctrl = ov3640_s_ctrl,
#ifdef CONFIG_VIDEO_ADV_DEBUG
    .g_register = ov3640_g_register,
    .s_register = ov3640_s_register,
#endif
};
/* Stream on/off: program the sensor when enabling; disabling is a no-op. */
static int ov3640_s_stream(struct v4l2_subdev *sd, int enable)
{
    if (!enable)
        return 0;
    ov3640_configure(sd);
    return 0;
}
/*
 * ov3640_try_fmt - negotiate a media-bus format.
 * Snaps the requested code to the nearest entry in ov3640_fmts[] and the
 * requested geometry to the nearest entry in ov3640_sizes[], and forces
 * progressive (non-interlaced) scan.  Never fails.
 */
static int ov3640_try_fmt(struct v4l2_subdev *sd,
        struct v4l2_mbus_framefmt *mf)
{
    int i_fmt;
    int i_size;
    i_fmt = ov3640_find_datafmt(mf->code);
    mf->code = ov3640_fmts[i_fmt].code;
    mf->colorspace = ov3640_fmts[i_fmt].colorspace;
    mf->field = V4L2_FIELD_NONE;
    i_size = ov3640_find_size(mf->width, mf->height);
    mf->width = ov3640_sizes[i_size].width;
    mf->height = ov3640_sizes[i_size].height;
    return 0;
}
/*
 * ov3640_g_fmt - report the currently configured media-bus format,
 * taken from the i_size/i_fmt indices stored in the driver state.
 */
static int ov3640_g_fmt(struct v4l2_subdev *sd,
        struct v4l2_mbus_framefmt *mf)
{
    struct i2c_client *client = v4l2_get_subdevdata(sd);
    struct ov3640 *ov3640 = to_ov3640(client);
    mf->width = ov3640_sizes[ov3640->i_size].width;
    mf->height = ov3640_sizes[ov3640->i_size].height;
    mf->code = ov3640_fmts[ov3640->i_fmt].code;
    mf->colorspace = ov3640_fmts[ov3640->i_fmt].colorspace;
    mf->field = V4L2_FIELD_NONE;
    return 0;
}
/* Enumerate the media-bus pixel codes the sensor supports. */
static int ov3640_enum_fmt(struct v4l2_subdev *sd, unsigned int index,
        enum v4l2_mbus_pixelcode *code)
{
    if (index < ARRAY_SIZE(ov3640_fmts)) {
        *code = ov3640_fmts[index].code;
        return 0;
    }
    return -EINVAL;
}
/*
 * ov3640_enum_framesizes - enumerate the discrete frame sizes supported
 * for a given pixel format.  Exactly two sizes are exposed (the
 * hard-coded "index >= 2" bound below).
 */
static int ov3640_enum_framesizes(struct v4l2_subdev *sd,
        struct v4l2_frmsizeenum *frms)
{
    int ifmt;
    for (ifmt = 0; ifmt < OV3640_NUM_CAPTURE_FORMATS; ifmt++) {
        if (frms->pixel_format == ov3640_formats[ifmt].pixelformat)
            break;
    }
    /* Is requested pixelformat not found on sensor? */
    if (ifmt == OV3640_NUM_CAPTURE_FORMATS)
        return -EINVAL;
    /* Do we already reached all discrete framesizes? */
    if (frms->index >= 2)
        return -EINVAL;
    frms->type = V4L2_FRMSIZE_TYPE_DISCRETE;
    frms->discrete.width = ov3640_sizes[frms->index].width;
    frms->discrete.height = ov3640_sizes[frms->index].height;
    return 0;
}
/*
 * ov3640_enum_frameintervals - enumerate the discrete frame intervals
 * for a pixel format/size pair.  The large (QXGA) size supports only a
 * single interval; all other sizes expose up to three entries from
 * ov3640_frameintervals[].
 */
static int ov3640_enum_frameintervals(struct v4l2_subdev *sd,
        struct v4l2_frmivalenum *frmi)
{
    int ifmt;
    for (ifmt = 0; ifmt < OV3640_NUM_CAPTURE_FORMATS; ifmt++) {
        if (frmi->pixel_format == ov3640_formats[ifmt].pixelformat)
            break;
    }
    /* Is requested pixelformat not found on sensor? */
    if (ifmt == OV3640_NUM_CAPTURE_FORMATS)
        return -EINVAL;
    /* Do we already reached all discrete framesizes? */
    if ((frmi->width == ov3640_sizes[1].width) &&
        (frmi->height == ov3640_sizes[1].height)) {
        /* FIXME: The only frameinterval supported by QXGA capture is
         * 2/15 fps
         */
        if (frmi->index != 0)
            return -EINVAL;
    } else {
        if (frmi->index >= 3)
            return -EINVAL;
    }
    frmi->type = V4L2_FRMIVAL_TYPE_DISCRETE;
    frmi->discrete.numerator =
        ov3640_frameintervals[frmi->index].numerator;
    frmi->discrete.denominator =
        ov3640_frameintervals[frmi->index].denominator;
    return 0;
}
/*
 * ov3640_g_parm - VIDIOC_G_PARM handler: report the current capture
 * timeperframe.  The whole streamparm is cleared after the type check
 * and then repopulated (cparm stays valid across the memset because it
 * points into *a).
 */
static int ov3640_g_parm(struct v4l2_subdev *sd, struct v4l2_streamparm *a)
{
    struct i2c_client *client = v4l2_get_subdevdata(sd);
    struct ov3640 *ov3640 = to_ov3640(client);
    struct v4l2_captureparm *cparm = &a->parm.capture;
    if (a->type != V4L2_BUF_TYPE_VIDEO_CAPTURE)
        return -EINVAL;
    memset(a, 0, sizeof(*a));
    a->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    cparm->capability = V4L2_CAP_TIMEPERFRAME;
    cparm->timeperframe = ov3640->timeperframe;
    return 0;
}
/*
 * ov3640_s_parm - VIDIOC_S_PARM handler: validate and adopt a new
 * capture timeperframe.  On rejection the previous value is restored
 * and -EINVAL returned; on success the caller's fract is echoed back.
 */
static int ov3640_s_parm(struct v4l2_subdev *sd, struct v4l2_streamparm *a)
{
    int rval = 0;
    struct i2c_client *client = v4l2_get_subdevdata(sd);
    struct ov3640 *ov3640 = to_ov3640(client);
    struct v4l2_fract *timeperframe = &a->parm.capture.timeperframe;
    struct v4l2_fract timeperframe_old;
    int desired_fps;
    timeperframe_old = ov3640->timeperframe;
    ov3640->timeperframe = *timeperframe;
    /* BUG FIX: the fraction comes straight from userspace; a zero
     * numerator would previously cause a divide-by-zero in the kernel. */
    if (timeperframe->numerator == 0 || timeperframe->denominator == 0) {
        rval = -EINVAL;
    } else {
        desired_fps = timeperframe->denominator / timeperframe->numerator;
        if ((desired_fps < OV3640_MIN_FPS) || (desired_fps > OV3640_MAX_FPS))
            rval = -EINVAL;
    }
    if (rval)
        ov3640->timeperframe = timeperframe_old;
    else
        *timeperframe = ov3640->timeperframe;
    return rval;
}
/* Video subdev ops: stream control, format negotiation and frame
 * size/interval enumeration. */
static struct v4l2_subdev_video_ops ov3640_subdev_video_ops = {
    .s_stream = ov3640_s_stream,
    .try_mbus_fmt = ov3640_try_fmt,
    .s_mbus_fmt = ov3640_s_fmt,
    .g_mbus_fmt = ov3640_g_fmt,
    .enum_mbus_fmt = ov3640_enum_fmt,
    .enum_framesizes = ov3640_enum_framesizes,
    .enum_frameintervals = ov3640_enum_frameintervals,
    .g_parm = ov3640_g_parm,
    .s_parm = ov3640_s_parm,
};
/* Tell the host how many initial frames to discard after stream-on. */
static int ov3640_g_skip_frames(struct v4l2_subdev *sd, u32 *frames)
{
    /* Quantity of initial bad frames to skip. Revisit. */
    *frames = 3;
    return 0;
}
/*
 * ov3640_g_interface_parms - report the sensor's bus interface settings.
 * Interface type/mode come from platform data; the serial (CSI-2)
 * parameters are hard-coded placeholders, as the existing FIXME notes.
 */
static int ov3640_g_interface_parms(struct v4l2_subdev *sd,
        struct v4l2_subdev_sensor_interface_parms *parms)
{
    struct i2c_client *client = v4l2_get_subdevdata(sd);
    struct ov3640 *ov3640 = to_ov3640(client);
    if (!parms)
        return -EINVAL;
    parms->if_type = ov3640->plat_parms->if_type;
    parms->if_mode = ov3640->plat_parms->if_mode;
    /* FIXME */
    parms->parms.serial.lanes = 2;
    parms->parms.serial.channel = OV3640_CSI2_VIRTUAL_ID;
    parms->parms.serial.phy_rate = 224000000; /* FIX: ov3640_calc_mipiclk */
    parms->parms.serial.pix_clk = 21; /* Revisit */
    return 0;
}
/* Sensor subdev ops: bad-frame skipping and interface parameter query. */
static struct v4l2_subdev_sensor_ops ov3640_subdev_sensor_ops = {
    .g_skip_frames = ov3640_g_skip_frames,
    .g_interface_parms = ov3640_g_interface_parms,
};
/* Top-level subdev ops table wired into v4l2_i2c_subdev_init(). */
static struct v4l2_subdev_ops ov3640_subdev_ops = {
    .core = &ov3640_subdev_core_ops,
    .video = &ov3640_subdev_video_ops,
    .sensor = &ov3640_subdev_sensor_ops,
};
/* soc-camera hook: accept any bus configuration (validation still TODO). */
static int ov3640_set_bus_param(struct soc_camera_device *icd,
        unsigned long flags)
{
    /* TODO: Do the right thing here, and validate bus params */
    return 0;
}
/* soc-camera hook: advertise the fixed parallel-bus signalling flags. */
static unsigned long ov3640_query_bus_param(struct soc_camera_device *icd)
{
    /* TODO: Do the right thing here, and validate bus params */
    return SOCAM_PCLK_SAMPLE_FALLING |
           SOCAM_HSYNC_ACTIVE_HIGH | SOCAM_VSYNC_ACTIVE_HIGH |
           SOCAM_DATA_ACTIVE_HIGH | SOCAM_MASTER |
           SOCAM_DATAWIDTH_10;
}
/* soc-camera ops: bus parameter hooks and the exported V4L2 controls. */
static struct soc_camera_ops ov3640_ops = {
    .set_bus_param = ov3640_set_bus_param,
    .query_bus_param = ov3640_query_bus_param,
    .controls = ov3640_controls,
    .num_controls = ARRAY_SIZE(ov3640_controls),
};
/*
 * ov3640_probe - sensor driver i2c probe handler
 * @client: i2c driver client device structure
 * @id: matched entry from ov3640_id
 *
 * Register sensor as an i2c client device and V4L2
 * device.  Returns 0 on success or a negative errno.
 */
static int ov3640_probe(struct i2c_client *client,
            const struct i2c_device_id *id)
{
    struct ov3640 *ov3640;
    struct soc_camera_device *icd = client->dev.platform_data;
    struct soc_camera_link *icl;
    int ret;

    /* moved below the declarations: a statement before declarations is
     * not valid C90, which the kernel build enforces */
    printk("---------------probe-------------");

    if (!icd) {
        dev_err(&client->dev, "OV3640: missing soc-camera data!\n");
        return -EINVAL;
    }
    icl = to_soc_camera_link(icd);
    if (!icl) {
        dev_err(&client->dev, "OV3640 driver needs platform data\n");
        return -EINVAL;
    }
    if (!icl->priv) {
        dev_err(&client->dev,
            "OV3640 driver needs i/f platform data\n");
        return -EINVAL;
    }
    ov3640 = kzalloc(sizeof(struct ov3640), GFP_KERNEL);
    if (!ov3640)
        return -ENOMEM;
    v4l2_i2c_subdev_init(&ov3640->subdev, client, &ov3640_subdev_ops);
    /* Second stage probe - when a capture adapter is there */
    icd->ops = &ov3640_ops;
    /* Set sensor default values */
    ov3640->i_size = XGA;
    ov3640->i_fmt = 0; /* First format in the list */
    ov3640->timeperframe.numerator = 1;
    ov3640->timeperframe.denominator = 15;
    ov3640->plat_parms = icl->priv;
    ret = ov3640_video_probe(icd, client);
    if (ret) {
        icd->ops = NULL;
        kfree(ov3640);
    }
    /* BUG FIX: the original returned 0 unconditionally, reporting success
     * even when ov3640_video_probe() failed and the private data had just
     * been freed.  Propagate the error instead. */
    return ret;
}
/*
 * ov3640_remove - i2c remove handler: detach from soc-camera and free
 * the driver state allocated in probe.
 */
static int ov3640_remove(struct i2c_client *client)
{
    struct ov3640 *ov3640 = to_ov3640(client);
    struct soc_camera_device *icd = client->dev.platform_data;
    icd->ops = NULL;
    client->driver = NULL;
    kfree(ov3640);
    return 0;
}
/* i2c device id table (also exported for module autoloading). */
static const struct i2c_device_id ov3640_id[] = {
    { OV3640_DRIVER_NAME, 0 },
    { },
};
MODULE_DEVICE_TABLE(i2c, ov3640_id);
/* i2c driver glue binding probe/remove to the id table above. */
static struct i2c_driver ov3640_i2c_driver = {
    .driver = {
        .name = OV3640_DRIVER_NAME,
    },
    .probe = ov3640_probe,
    .remove = ov3640_remove,
    .id_table = ov3640_id,
};
/* Module entry point: register the OV3640 i2c driver. */
static int __init ov3640_mod_init(void)
{
    /* BUG FIX: the debug printk lacked a log level and a terminating
     * newline, so it could merge with the next kernel log line. */
    printk(KERN_INFO "---------mod init-------------\n");
    return i2c_add_driver(&ov3640_i2c_driver);
}
/* Module exit point: unregister the OV3640 i2c driver. */
static void __exit ov3640_mod_exit(void)
{
    i2c_del_driver(&ov3640_i2c_driver);
}
/* Module registration and metadata. */
module_init(ov3640_mod_init);
module_exit(ov3640_mod_exit);
MODULE_DESCRIPTION("OmniVision OV3640 Camera driver");
MODULE_AUTHOR("Sergio Aguirre <saaguirre@ti.com>");
MODULE_LICENSE("GPL v2");