S5PV210 CMOS Camera Driver (Part 2)

In the __video_do_ioctl function:

struct video_device *vfd = video_devdata(file);

This retrieves the video_device associated with the opened device node.

void *fh = file->private_data;

file->private_data holds the private structure that carries the ctrl pointer.

That data was stored in fimc_open:

prv_data->ctrl = ctrl;
filp->private_data = prv_data;
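
A trimmed sketch of that path, so it is clear what the ioctl handlers later cast fh back into (the real fimc_open and the real struct fimc_prv_data carry more bookkeeping and error handling; treat this as an assumption-based outline):

struct fimc_prv_data {
    struct fimc_control *ctrl;      /* per-FIMC controller state */
    /* ... additional per-open bookkeeping in the real driver ... */
};

static int fimc_open(struct file *filp)
{
    struct fimc_control *ctrl = video_get_drvdata(video_devdata(filp));
    struct fimc_prv_data *prv_data;

    prv_data = kzalloc(sizeof(*prv_data), GFP_KERNEL);
    if (!prv_data)
        return -ENOMEM;

    prv_data->ctrl = ctrl;
    filp->private_data = prv_data;  /* read back as 'fh' in the ioctl handlers */
    return 0;
}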

switch (cmd) {

...

...

}

Each case in this switch corresponds to one V4L2 control command: the command is decoded and the matching driver callback is invoked.
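
For context, the ioctl from user space normally reaches __video_do_ioctl through video_ioctl2(), which the driver wires into its file operations when registering the video node. A sketch of that hookup (the exact contents of the fimc file_operations are an assumption; the point is that .ioctl is video_ioctl2):

/* Sketch: how the fimc video node feeds into the V4L2 ioctl core. */
static const struct v4l2_file_operations fimc_fops = {
    .owner   = THIS_MODULE,
    .open    = fimc_open,
    .release = fimc_release,
    .ioctl   = video_ioctl2,    /* copies the arg, then calls __video_do_ioctl() */
    .mmap    = fimc_mmap,
    .poll    = fimc_poll,
};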

A few of the cases relevant to the capture path:


case VIDIOC_QUERYCAP:
{
    struct v4l2_capability *cap = (struct v4l2_capability *)arg;

    if (!ops->vidioc_querycap)
        break;

    ret = ops->vidioc_querycap(file, fh, cap);
    if (!ret)
        dbgarg(cmd, "driver=%s, card=%s, bus=%s, "
                "version=0x%08x, "
                "capabilities=0x%08x\n",
                cap->driver, cap->card, cap->bus_info,
                cap->version,
                cap->capabilities);
    break;
}

case VIDIOC_S_FMT:
{
    struct v4l2_format *f = (struct v4l2_format *)arg;

    /* FIXME: Should be one dump per type */
    dbgarg(cmd, "type=%s\n", prt_names(f->type, v4l2_type_names));

    switch (f->type) {
    case V4L2_BUF_TYPE_VIDEO_CAPTURE:
        CLEAR_AFTER_FIELD(f, fmt.pix);
        v4l_print_pix_fmt(vfd, &f->fmt.pix);
        if (ops->vidioc_s_fmt_vid_cap)
            ret = ops->vidioc_s_fmt_vid_cap(file, fh, f);
        break;
    case V4L2_BUF_TYPE_VIDEO_OVERLAY:
        CLEAR_AFTER_FIELD(f, fmt.win);
        if (ops->vidioc_s_fmt_vid_overlay)
            ret = ops->vidioc_s_fmt_vid_overlay(file, fh, f);
        break;
    case V4L2_BUF_TYPE_VIDEO_OUTPUT:
        CLEAR_AFTER_FIELD(f, fmt.pix);
        v4l_print_pix_fmt(vfd, &f->fmt.pix);
        if (ops->vidioc_s_fmt_vid_out)
            ret = ops->vidioc_s_fmt_vid_out(file, fh, f);
        break;
    case V4L2_BUF_TYPE_VIDEO_OUTPUT_OVERLAY:
        CLEAR_AFTER_FIELD(f, fmt.win);
        if (ops->vidioc_s_fmt_vid_out_overlay)
            ret = ops->vidioc_s_fmt_vid_out_overlay(file, fh, f);
        break;
    case V4L2_BUF_TYPE_VBI_CAPTURE:
        CLEAR_AFTER_FIELD(f, fmt.vbi);
        if (ops->vidioc_s_fmt_vbi_cap)
            ret = ops->vidioc_s_fmt_vbi_cap(file, fh, f);
        break;
    case V4L2_BUF_TYPE_VBI_OUTPUT:
        CLEAR_AFTER_FIELD(f, fmt.vbi);
        if (ops->vidioc_s_fmt_vbi_out)
            ret = ops->vidioc_s_fmt_vbi_out(file, fh, f);
        break;
    case V4L2_BUF_TYPE_SLICED_VBI_CAPTURE:
        CLEAR_AFTER_FIELD(f, fmt.sliced);
        if (ops->vidioc_s_fmt_sliced_vbi_cap)
            ret = ops->vidioc_s_fmt_sliced_vbi_cap(file, fh, f);
        break;
    case V4L2_BUF_TYPE_SLICED_VBI_OUTPUT:
        CLEAR_AFTER_FIELD(f, fmt.sliced);
        if (ops->vidioc_s_fmt_sliced_vbi_out)
            ret = ops->vidioc_s_fmt_sliced_vbi_out(file, fh, f);
        break;
    case V4L2_BUF_TYPE_PRIVATE:
        /* CLEAR_AFTER_FIELD(f, fmt.raw_data); <- does nothing */
        if (ops->vidioc_s_fmt_type_private)
            ret = ops->vidioc_s_fmt_type_private(file, fh, f);
        break;
    }
    break;
}
case VIDIOC_TRY_FMT:
{
    struct v4l2_format *f = (struct v4l2_format *)arg;

    /* FIXME: Should be one dump per type */
    dbgarg(cmd, "type=%s\n", prt_names(f->type, v4l2_type_names));
    switch (f->type) {
    case V4L2_BUF_TYPE_VIDEO_CAPTURE:
        CLEAR_AFTER_FIELD(f, fmt.pix);
        if (ops->vidioc_try_fmt_vid_cap)
            ret = ops->vidioc_try_fmt_vid_cap(file, fh, f);
        if (!ret)
            v4l_print_pix_fmt(vfd, &f->fmt.pix);
        break;
    case V4L2_BUF_TYPE_VIDEO_OVERLAY:
        CLEAR_AFTER_FIELD(f, fmt.win);
        if (ops->vidioc_try_fmt_vid_overlay)
            ret = ops->vidioc_try_fmt_vid_overlay(file, fh, f);
        break;
    case V4L2_BUF_TYPE_VIDEO_OUTPUT:
        CLEAR_AFTER_FIELD(f, fmt.pix);
        if (ops->vidioc_try_fmt_vid_out)
            ret = ops->vidioc_try_fmt_vid_out(file, fh, f);
        if (!ret)
            v4l_print_pix_fmt(vfd, &f->fmt.pix);
        break;
    case V4L2_BUF_TYPE_VIDEO_OUTPUT_OVERLAY:
        CLEAR_AFTER_FIELD(f, fmt.win);
        if (ops->vidioc_try_fmt_vid_out_overlay)
            ret = ops->vidioc_try_fmt_vid_out_overlay(file, fh, f);
        break;
    case V4L2_BUF_TYPE_VBI_CAPTURE:
        CLEAR_AFTER_FIELD(f, fmt.vbi);
        if (ops->vidioc_try_fmt_vbi_cap)
            ret = ops->vidioc_try_fmt_vbi_cap(file, fh, f);
        break;
    case V4L2_BUF_TYPE_VBI_OUTPUT:
        CLEAR_AFTER_FIELD(f, fmt.vbi);
        if (ops->vidioc_try_fmt_vbi_out)
            ret = ops->vidioc_try_fmt_vbi_out(file, fh, f);
        break;
    case V4L2_BUF_TYPE_SLICED_VBI_CAPTURE:
        CLEAR_AFTER_FIELD(f, fmt.sliced);
        if (ops->vidioc_try_fmt_sliced_vbi_cap)
            ret = ops->vidioc_try_fmt_sliced_vbi_cap(file, fh, f);
        break;
    case V4L2_BUF_TYPE_SLICED_VBI_OUTPUT:
        CLEAR_AFTER_FIELD(f, fmt.sliced);
        if (ops->vidioc_try_fmt_sliced_vbi_out)
            ret = ops->vidioc_try_fmt_sliced_vbi_out(file, fh, f);
        break;
    case V4L2_BUF_TYPE_PRIVATE:
        /* CLEAR_AFTER_FIELD(f, fmt.raw_data); <- does nothing */
        if (ops->vidioc_try_fmt_type_private)
            ret = ops->vidioc_try_fmt_type_private(file, fh, f);
        break;
    }

    break;
}

case VIDIOC_S_INPUT:
{
    unsigned int *i = arg;

    if (!ops->vidioc_s_input)
        break;
    dbgarg(cmd, "value=%d\n", *i);
    ret = ops->vidioc_s_input(file, fh, *i);
    break;
}

case VIDIOC_REQBUFS:
{
    struct v4l2_requestbuffers *p = arg;

    if (!ops->vidioc_reqbufs)
        break;
    ret = check_fmt(ops, p->type);
    if (ret)
        break;

    if (p->type < V4L2_BUF_TYPE_PRIVATE)
        CLEAR_AFTER_FIELD(p, memory);

    ret = ops->vidioc_reqbufs(file, fh, p);
    dbgarg(cmd, "count=%d, type=%s, memory=%s\n",
            p->count,
            prt_names(p->type, v4l2_type_names),
            prt_names(p->memory, v4l2_memory_names));
    break;
}

case VIDIOC_STREAMON:
{
    enum v4l2_buf_type i = *(int *)arg;

    if (!ops->vidioc_streamon)
        break;
    dbgarg(cmd, "type=%s\n", prt_names(i, v4l2_type_names));
    ret = ops->vidioc_streamon(file, fh, i);
    break;
}


All of these ultimately call into the ioctl_ops registered on the video device:

.ioctl_ops = &fimc_v4l2_ops,

const struct v4l2_ioctl_ops fimc_v4l2_ops = {
    .vidioc_querycap = fimc_querycap,
    .vidioc_reqbufs = fimc_reqbufs,
    .vidioc_querybuf = fimc_querybuf,
    .vidioc_g_ctrl = fimc_g_ctrl,
    .vidioc_s_ctrl = fimc_s_ctrl,
    .vidioc_s_ext_ctrls = fimc_s_ext_ctrls,
    .vidioc_cropcap = fimc_cropcap,
    .vidioc_g_crop = fimc_g_crop,
    .vidioc_s_crop = fimc_s_crop,
    .vidioc_streamon = fimc_streamon,
    .vidioc_streamoff = fimc_streamoff,
    .vidioc_qbuf = fimc_qbuf,
    .vidioc_dqbuf = fimc_dqbuf,
    .vidioc_enum_fmt_vid_cap = fimc_enum_fmt_vid_capture,
    .vidioc_g_fmt_vid_cap = fimc_g_fmt_vid_capture,
    .vidioc_s_fmt_vid_cap = fimc_s_fmt_vid_capture,
    .vidioc_try_fmt_vid_cap = fimc_try_fmt_vid_capture,
    .vidioc_enum_input = fimc_enum_input,
    .vidioc_g_input = fimc_g_input,
    .vidioc_s_input = fimc_s_input,
    .vidioc_g_parm = fimc_g_parm,
    .vidioc_s_parm = fimc_s_parm,
    .vidioc_queryctrl = fimc_queryctrl,
    .vidioc_querymenu = fimc_querymenu,
    .vidioc_g_fmt_vid_out = fimc_g_fmt_vid_out,
    .vidioc_s_fmt_vid_out = fimc_s_fmt_vid_out,
    .vidioc_try_fmt_vid_out = fimc_try_fmt_vid_out,
    .vidioc_g_fbuf = fimc_g_fbuf,
    .vidioc_s_fbuf = fimc_s_fbuf,
    .vidioc_try_fmt_vid_overlay = fimc_try_fmt_overlay,
    .vidioc_g_fmt_vid_overlay = fimc_g_fmt_vid_overlay,
    .vidioc_s_fmt_vid_overlay = fimc_s_fmt_vid_overlay,
};

From user space, the application first queries the device capabilities:

ioctl(fd, VIDIOC_QUERYCAP, &cap);
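
A minimal user-space sequence for this step might look like the following (the /dev/video0 path and the error handling are illustrative assumptions):

int fd = open("/dev/video0", O_RDWR);   /* FIMC capture node; path is an assumption */
struct v4l2_capability cap;

if (fd < 0 || ioctl(fd, VIDIOC_QUERYCAP, &cap) < 0) {
    perror("VIDIOC_QUERYCAP");
    return -1;
}

/* With the fimc_querycap() shown below, this prints the driver/card strings
 * and a mask that includes V4L2_CAP_VIDEO_CAPTURE | V4L2_CAP_STREAMING. */
printf("driver=%s card=%s caps=0x%08x\n", cap.driver, cap.card, cap.capabilities);

if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)) {
    fprintf(stderr, "not a capture device\n");
    return -1;
}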

The corresponding driver function:

static int fimc_querycap(struct file *filp, void *fh,
        struct v4l2_capability *cap)
{
    struct fimc_control *ctrl = ((struct fimc_prv_data *)fh)->ctrl;

    fimc_info1("%s: called\n", __func__);

    strcpy(cap->driver, "Samsung FIMC Driver");
    strlcpy(cap->card, ctrl->vd->name, sizeof(cap->card));
    sprintf(cap->bus_info, "FIMC AHB-bus");

    cap->version = 0;
    cap->capabilities = (V4L2_CAP_VIDEO_CAPTURE | V4L2_CAP_VIDEO_OUTPUT |
            V4L2_CAP_VIDEO_OVERLAY | V4L2_CAP_STREAMING);

    return 0;
}

Next, user space selects the input device. After this call the I2C subdev is registered and eventually matched against the corresponding camera sensor driver, which is what ultimately drives the camera. The application typically enumerates the inputs first (a fuller sequence is sketched below):

    struct v4l2_input input;
    input.index = 0;

    ioctl(fd, VIDIOC_ENUMINPUT, &input);
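
VIDIOC_ENUMINPUT only enumerates; the request that actually lands in fimc_s_input() is VIDIOC_S_INPUT. A minimal sketch of the full selection step (index 0 is an assumption):

struct v4l2_input input;
int index = 0;                          /* first camera on this FIMC */

memset(&input, 0, sizeof(input));
input.index = index;
if (ioctl(fd, VIDIOC_ENUMINPUT, &input) == 0)
    printf("input %d: %s\n", input.index, input.name);

/* This is the call that reaches fimc_s_input() and triggers
 * fimc_configure_subdev() / the I2C subdev registration. */
if (ioctl(fd, VIDIOC_S_INPUT, &index) < 0) {
    perror("VIDIOC_S_INPUT");
    return -1;
}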

The corresponding driver function (for VIDIOC_S_INPUT):

int fimc_s_input(struct file *file, void *fh, unsigned int i)
{
    struct fimc_global *fimc = get_fimc_dev();
    struct fimc_control *ctrl = ((struct fimc_prv_data *)fh)->ctrl;
    int ret = 0;

    fimc_dbg("%s: index %d\n", __func__, i);

    if (i < 0 || i >= FIMC_MAXCAMS) {
        fimc_err("%s: invalid input index\n", __func__);
        return -EINVAL;
    }

    if (!fimc->camera_isvalid[i])
        return -EINVAL;

    if (fimc->camera[i].sd && ctrl->id != 2) {
        fimc_err("%s: Camera already in use.\n", __func__);
        return -EBUSY;
    }

    mutex_lock(&ctrl->v4l2_lock);
    /* If ctrl->cam is not NULL, there is one subdev already registered.
     * We need to unregister that subdev first.
     */
    if (i != fimc->active_camera) {
        fimc_release_subdev(ctrl);
        ctrl->cam = &fimc->camera[i];
        ret = fimc_configure_subdev(ctrl);
        if (ret < 0) {
            mutex_unlock(&ctrl->v4l2_lock);
            fimc_err("%s: Could not register camera sensor "
                    "with V4L2.\n", __func__);
            return -ENODEV;
        }
        fimc->active_camera = i;
    }

    if (ctrl->id == 2) {
        if (i == fimc->active_camera) {
            ctrl->cam = &fimc->camera[i];
        } else {
            mutex_unlock(&ctrl->v4l2_lock);
            return -EINVAL;
        }
    }

    mutex_unlock(&ctrl->v4l2_lock);

    return 0;
}

ret = fimc_configure_subdev(ctrl) configures the subdevice, which is an I2C subdev:

static int fimc_configure_subdev(struct fimc_control *ctrl)
{
    struct i2c_adapter *i2c_adap;
    struct i2c_board_info *i2c_info;
    struct v4l2_subdev *sd;
    unsigned short addr;
    char *name;

    /* set parent for mclk */
    if (clk_get_parent(ctrl->cam->clk->parent))
        clk_set_parent(ctrl->cam->clk->parent, ctrl->cam->srclk);

    /* set rate for mclk */
    if (clk_get_rate(ctrl->cam->clk))
        clk_set_rate(ctrl->cam->clk, ctrl->cam->clk_rate);

    i2c_adap = i2c_get_adapter(ctrl->cam->i2c_busnum);
    if (!i2c_adap)
        fimc_err("subdev i2c_adapter missing-skip registration\n");

    i2c_info = ctrl->cam->info;
    if (!i2c_info) {
        fimc_err("%s: subdev i2c board info missing\n", __func__);
        return -ENODEV;
    }

    name = i2c_info->type;
    if (!name) {
        fimc_err("subdev i2c driver name missing-skip registration\n");
        return -ENODEV;
    }

    addr = i2c_info->addr;
    if (!addr) {
        fimc_err("subdev i2c address missing-skip registration\n");
        return -ENODEV;
    }
    /*
     * NOTE: first time subdev being registered,
     * s_config is called and try to initialize subdev device
     * but in this point, we are not giving MCLK and power to subdev
     * so nothing happens but pass platform data through
     */
    sd = v4l2_i2c_new_subdev_board(&ctrl->v4l2_dev, i2c_adap,
            name, i2c_info, &addr);
    if (!sd) {
        fimc_err("%s: v4l2 subdev board registering failed\n",
                __func__);
    }

    /* Assign subdev to proper camera device pointer */
    ctrl->cam->sd = sd;

    return 0;
}

It sets up the camera clock (MCLK parent and rate) and the I2C information, then calls sd = v4l2_i2c_new_subdev_board(&ctrl->v4l2_dev, i2c_adap, name, i2c_info, &addr); to register the I2C subdevice with the kernel.
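ctrl->cam->info and ctrl->cam->i2c_busnum originate from the board file. A hypothetical fragment of that platform data is sketched below; the names, bus number and 0x3c address are illustrative assumptions, not the actual SMDKV210 board code:

/* Sketch only: where fimc_configure_subdev() gets its I2C parameters from. */
static struct i2c_board_info ov3640_i2c_info = {
    I2C_BOARD_INFO("OV3640", 0x3c),     /* type must match the sensor's i2c_device_id */
    .platform_data = &ov3640_plat,      /* struct ov3640_platform_data (assumed) */
};

static struct s3c_platform_camera ov3640_cam = {
    .type       = CAM_TYPE_ITU,         /* parallel interface */
    .i2c_busnum = 0,                    /* adapter passed to i2c_get_adapter() */
    .info       = &ov3640_i2c_info,     /* consumed by fimc_configure_subdev() */
    .clk_rate   = 24000000,             /* MCLK handed to the sensor */
};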




/* Load an i2c sub-device. */
struct v4l2_subdev *v4l2_i2c_new_subdev_board(struct v4l2_device *v4l2_dev,
        struct i2c_adapter *adapter, const char *module_name,
        struct i2c_board_info *info, const unsigned short *probe_addrs)
{
    struct v4l2_subdev *sd = NULL;
    struct i2c_client *client;

    BUG_ON(!v4l2_dev);

    if (module_name)
        request_module(module_name);

    /* Create the i2c client */
    if (info->addr == 0 && probe_addrs)
        client = i2c_new_probed_device(adapter, info, probe_addrs);
    else
        client = i2c_new_device(adapter, info);

    /* Note: by loading the module first we are certain that c->driver
       will be set if the driver was found. If the module was not loaded
       first, then the i2c core tries to delay-load the module for us,
       and then c->driver is still NULL until the module is finally
       loaded. This delay-load mechanism doesn't work if other drivers
       want to use the i2c device, so explicitly loading the module
       is the best alternative. */
    if (client == NULL || client->driver == NULL)
        goto error;

    /* Lock the module so we can safely get the v4l2_subdev pointer */
    if (!try_module_get(client->driver->driver.owner))
        goto error;
    sd = i2c_get_clientdata(client);

    /* Register with the v4l2_device which increases the module's
       use count as well. */
    if (v4l2_device_register_subdev(v4l2_dev, sd))
        sd = NULL;
    /* Decrease the module use count to match the first try_module_get. */
    module_put(client->driver->driver.owner);

    if (sd) {
        /* We return errors from v4l2_subdev_call only if we have the
           callback as the .s_config is not mandatory */
        int err = v4l2_subdev_call(sd, core, s_config,
                info->irq, info->platform_data);

        if (err && err != -ENOIOCTLCMD) {
            v4l2_device_unregister_subdev(sd);
            sd = NULL;
        }
    }

error:
    /* If we have a client but no subdev, then something went wrong and
       we must unregister the client. */
    if (client && sd == NULL)
        i2c_unregister_device(client);
    return sd;
}


This allocates and registers the i2c client:



struct i2c_client *
i2c_new_device(struct i2c_adapter *adap, struct i2c_board_info const *info)
{
    struct i2c_client *client;
    int status;

    client = kzalloc(sizeof *client, GFP_KERNEL);
    if (!client)
        return NULL;

    client->adapter = adap;

    client->dev.platform_data = info->platform_data;

    if (info->archdata)
        client->dev.archdata = *info->archdata;

    client->flags = info->flags;
    client->addr = info->addr;
    client->irq = info->irq;

    strlcpy(client->name, info->type, sizeof(client->name));

    /* Check for address validity */
    status = i2c_check_client_addr_validity(client);
    if (status) {
        dev_err(&adap->dev, "Invalid %d-bit I2C address 0x%02hx\n",
                client->flags & I2C_CLIENT_TEN ? 10 : 7, client->addr);
        goto out_err_silent;
    }

    /* Check for address business */
    status = i2c_check_addr_busy(adap, client->addr);
    if (status)
        goto out_err;

    client->dev.parent = &client->adapter->dev;
    client->dev.bus = &i2c_bus_type;
    client->dev.type = &i2c_client_type;
#ifdef CONFIG_OF
    client->dev.of_node = info->of_node;
#endif

    dev_set_name(&client->dev, "%d-%04x", i2c_adapter_id(adap),
            client->addr);
    status = device_register(&client->dev);
    if (status)
        goto out_err;

    dev_dbg(&adap->dev, "client [%s] registered with bus id %s\n",
            client->name, dev_name(&client->dev));

    return client;

out_err:
    dev_err(&adap->dev, "Failed to register i2c client %s at 0x%02x "
            "(%d)\n", client->name, client->addr, status);
out_err_silent:
    kfree(client);
    return NULL;
}

==================================

Once this function has run, the sensor driver can be matched: device_register() on the new client triggers the I2C bus match against the registered i2c drivers, and the matching driver's probe is called. Taking the OV3640 as an example:
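
The match works by name: the sensor module registers an i2c_driver whose id table carries the same string as the i2c_board_info type field. A sketch of that registration (details such as the remove handler are assumptions; the probe it points at is shown right below):

static const struct i2c_device_id ov3640_id[] = {
    { OV3640_DRIVER_NAME, 0 },      /* must equal info->type, e.g. "OV3640" */
    { },
};
MODULE_DEVICE_TABLE(i2c, ov3640_id);

static struct i2c_driver ov3640_i2c_driver = {
    .driver   = { .name = OV3640_DRIVER_NAME },
    .probe    = ov3640_probe,
    .remove   = ov3640_remove,      /* assumed to exist in the driver */
    .id_table = ov3640_id,
};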

static int ov3640_probe(struct i2c_client *client,
        const struct i2c_device_id *id)
{
    struct ov3640_state *state;
    struct v4l2_subdev *sd;

    state = kzalloc(sizeof(struct ov3640_state), GFP_KERNEL);
    if (state == NULL)
        return -ENOMEM;

    sd = &state->sd;
    strcpy(sd->name, OV3640_DRIVER_NAME);

    /* Registering subdev */
    v4l2_i2c_subdev_init(sd, client, &ov3640_ops);

    dev_info(&client->dev, "ov3640 has been probed\n");
    return 0;
}

void v4l2_i2c_subdev_init(struct v4l2_subdev *sd, struct i2c_client *client,
        const struct v4l2_subdev_ops *ops)
{
    v4l2_subdev_init(sd, ops);
    sd->flags |= V4L2_SUBDEV_FL_IS_I2C;
    /* the owner is the same as the i2c_client's driver owner */
    sd->owner = client->driver->driver.owner;
    /* i2c_client and v4l2_subdev point to one another */
    v4l2_set_subdevdata(sd, client);
    i2c_set_clientdata(client, sd);
    /* initialize name */
    snprintf(sd->name, sizeof(sd->name), "%s %d-%04x",
            client->driver->driver.name, i2c_adapter_id(client->adapter),
            client->addr);
}

v4l2_i2c_subdev_init initializes the v4l2 subdev and cross-links the two objects: the i2c client is stored in the subdev, and the subdev in the client's driver data:

v4l2_set_subdevdata(sd, client);

i2c_set_clientdata(client, sd);

===========================

Back in v4l2_i2c_new_subdev_board, sd = i2c_get_clientdata(client); picks up the subdev that the sensor driver has just stored.

The subdev is then registered:

int v4l2_device_register_subdev(struct v4l2_device *v4l2_dev,
        struct v4l2_subdev *sd)
{
    /* Check for valid input */
    if (v4l2_dev == NULL || sd == NULL || !sd->name[0])
        return -EINVAL;
    /* Warn if we apparently re-register a subdev */
    WARN_ON(sd->v4l2_dev != NULL);
    if (!try_module_get(sd->owner))
        return -ENODEV;
    sd->v4l2_dev = v4l2_dev;
    spin_lock(&v4l2_dev->lock);
    list_add_tail(&sd->list, &v4l2_dev->subdevs);
    spin_unlock(&v4l2_dev->lock);
    return 0;
}

After registration, a subdev operation is invoked, i.e. one of the methods supplied by the sensor driver:

int err = v4l2_subdev_call(sd, core, s_config,
        info->irq, info->platform_data);

#define v4l2_subdev_call(sd, o, f, args...) \
    (!(sd) ? -ENODEV : (((sd)->ops->o && (sd)->ops->o->f) ? \
        (sd)->ops->o->f((sd), ##args) : -ENOIOCTLCMD))
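
Hand-expanding the macro for this particular call makes the dispatch explicit:

/* Equivalent of v4l2_subdev_call(sd, core, s_config, info->irq, info->platform_data) */
int err;

if (!sd)
    err = -ENODEV;
else if (sd->ops->core && sd->ops->core->s_config)
    err = sd->ops->core->s_config(sd, info->irq, info->platform_data);
else
    err = -ENOIOCTLCMD;     /* the subdev does not implement this op */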

For the ov3640, this resolves to the ops below:

=====================

static const struct v4l2_subdev_core_ops ov3640_core_ops = {
    .init = ov3640_init,            /* initializing API */
    .s_config = ov3640_s_config,    /* Fetch platform data */
    .queryctrl = ov3640_queryctrl,
    .querymenu = ov3640_querymenu,
    .g_ctrl = ov3640_g_ctrl,
    .s_ctrl = ov3640_s_ctrl,
};

static const struct v4l2_subdev_video_ops ov3640_video_ops = {
    .g_fmt = ov3640_g_fmt,
    .s_fmt = ov3640_s_fmt,
    .enum_framesizes = ov3640_enum_framesizes,
    .enum_frameintervals = ov3640_enum_frameintervals,
    .enum_fmt = ov3640_enum_fmt,
    .try_fmt = ov3640_try_fmt,
    .g_parm = ov3640_g_parm,
    .s_parm = ov3640_s_parm,
};

static const struct v4l2_subdev_ops ov3640_ops = {
    .core = &ov3640_core_ops,
    .video = &ov3640_video_ops,
};

static int ov3640_s_config(struct v4l2_subdev *sd, int irq, void *platform_data)
{
    struct i2c_client *client = v4l2_get_subdevdata(sd);
    struct ov3640_state *state = to_state(sd);
    struct ov3640_platform_data *pdata;

    dev_info(&client->dev, "fetching platform data\n");

    pdata = client->dev.platform_data;

    if (!pdata) {
        dev_err(&client->dev, "%s: no platform data\n", __func__);
        return -ENODEV;
    }

    /*
     * Assign default format and resolution
     * Use configured default information in platform data
     * or without them, use default information in driver
     */
    if (!(pdata->default_width && pdata->default_height)) {
        /* TODO: assign driver default resolution */
    } else {
        state->pix.width = pdata->default_width;
        state->pix.height = pdata->default_height;
    }

    if (!pdata->pixelformat)
        state->pix.pixelformat = DEFAULT_FMT;
    else
        state->pix.pixelformat = pdata->pixelformat;

    if (!pdata->freq)
        state->freq = 24000000; /* 24MHz default */
    else
        state->freq = pdata->freq;

    if (!pdata->is_mipi) {
        state->is_mipi = 0;
        dev_info(&client->dev, "parallel mode\n");
    } else
        state->is_mipi = pdata->is_mipi;

    return 0;
}

It copies the sensor's basic configuration out of the platform data: default resolution, pixel format, master clock frequency, and whether the interface is MIPI or parallel.

=====================

User space then sets the capture format:

    fmt.type                = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    fmt.fmt.pix.width       = width;
    fmt.fmt.pix.height      = height;
    fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB32;

    xioctl(fd, VIDIOC_S_FMT, &fmt);
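
For completeness, a self-contained version of this step might look like the following (xioctl is assumed to be the usual EINTR-retrying wrapper around ioctl, as in the classic capture example; the 640x480 resolution is a placeholder):

struct v4l2_format fmt;

memset(&fmt, 0, sizeof(fmt));
fmt.type                = V4L2_BUF_TYPE_VIDEO_CAPTURE;
fmt.fmt.pix.width       = 640;                  /* placeholder resolution */
fmt.fmt.pix.height      = 480;
fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB32;
fmt.fmt.pix.field       = V4L2_FIELD_NONE;

if (xioctl(fd, VIDIOC_S_FMT, &fmt) < 0) {
    perror("VIDIOC_S_FMT");
    return -1;
}

/* The driver may adjust width/height/bytesperline; use what it returns. */
printf("negotiated %ux%u, %u bytes per line\n",
        fmt.fmt.pix.width, fmt.fmt.pix.height, fmt.fmt.pix.bytesperline);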

The corresponding driver function:

int fimc_s_fmt_vid_capture(struct file *file, void *fh, struct v4l2_format *f)
{
    struct fimc_control *ctrl = ((struct fimc_prv_data *)fh)->ctrl;
    struct fimc_capinfo *cap;
    int ret = 0;
    int depth;

    fimc_dbg("%s\n", __func__);

    if (!ctrl->cam || !ctrl->cam->sd) {
        fimc_err("%s: No capture device.\n", __func__);
        return -ENODEV;
    }
    /*
     * The first time alloc for struct cap_info, and will be
     * released at the file close.
     * Anyone has better idea to do this?
     */
    mutex_lock(&ctrl->v4l2_lock);

    if (!ctrl->cap) {
        ctrl->cap = kmalloc(sizeof(*cap), GFP_KERNEL);
        if (!ctrl->cap) {
            mutex_unlock(&ctrl->v4l2_lock);
            fimc_err("%s: no memory for "
                    "capture device info\n", __func__);
            return -ENOMEM;
        }
    }
    cap = ctrl->cap;
    memset(cap, 0, sizeof(*cap));
    memcpy(&cap->fmt, &f->fmt.pix, sizeof(cap->fmt));

    /*
     * Note that expecting format only can be with
     * available output format from FIMC
     * Following items should be handled in driver
     * bytesperline = width * depth / 8
     * sizeimage = bytesperline * height
     */
    /* This function may return 0 or -1 in case of error, hence need to
     * check here.
     */
    depth = fimc_fmt_depth(ctrl, f);
    if (depth == 0) {
        mutex_unlock(&ctrl->v4l2_lock);
        fimc_err("%s: Invalid pixel format\n", __func__);
        return -EINVAL;
    } else if (depth < 0) {
        /*
         * When the pixelformat is JPEG, the application is requesting
         * for data in JPEG compressed format.
         */
        ret = subdev_call(ctrl, video, try_fmt, f);
        if (ret < 0) {
            mutex_unlock(&ctrl->v4l2_lock);
            return -EINVAL;
        }
        cap->fmt.colorspace = V4L2_COLORSPACE_JPEG;
    } else {
        cap->fmt.bytesperline = (cap->fmt.width * depth) >> 3;
        cap->fmt.sizeimage = (cap->fmt.bytesperline * cap->fmt.height);
    }

    if (cap->fmt.colorspace == V4L2_COLORSPACE_JPEG) {
        ctrl->sc.bypass = 1;
        cap->lastirq = 1;
    }

    if (ctrl->id != 2)
        ret = subdev_call(ctrl, video, s_fmt, f);

    mutex_unlock(&ctrl->v4l2_lock);

    return ret;
}


This allocates ctrl->cap and fills in the requested format, preparing for the buffer request that follows.

User space then requests buffers:

    struct v4l2_requestbuffers req;

    CLEAR(req);

    req.count  = 4;
    req.type   = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    req.memory = V4L2_MEMORY_MMAP;

    xioctl(fd, VIDIOC_REQBUFS, &req);

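After REQBUFS, each buffer is typically queried and mapped into the process. A minimal sketch of that loop (error handling trimmed; the buffers array is assumed bookkeeping on the application side), matching the fimc_querybuf_capture handler shown below:

struct buffer { void *start; size_t length; } buffers[4];  /* assumed bookkeeping */
unsigned int n;

for (n = 0; n < req.count; n++) {           /* the driver may have adjusted count */
    struct v4l2_buffer buf;

    CLEAR(buf);
    buf.type   = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    buf.memory = V4L2_MEMORY_MMAP;
    buf.index  = n;

    xioctl(fd, VIDIOC_QUERYBUF, &buf);      /* handled by fimc_querybuf_capture() */

    buffers[n].length = buf.length;
    buffers[n].start  = mmap(NULL, buf.length,
            PROT_READ | PROT_WRITE, MAP_SHARED,
            fd, buf.m.offset);              /* offset = index * PAGE_SIZE here */
}
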
The corresponding driver functions:

static int fimc_reqbufs(struct file *filp, void *fh,
        struct v4l2_requestbuffers *b)
{
    struct fimc_control *ctrl = ((struct fimc_prv_data *)fh)->ctrl;
    int ret = -1;

    if (b->type == V4L2_BUF_TYPE_VIDEO_CAPTURE) {
        ret = fimc_reqbufs_capture(fh, b);
    } else if (b->type == V4L2_BUF_TYPE_VIDEO_OUTPUT) {
        ret = fimc_reqbufs_output(fh, b);
    } else {
        fimc_err("V4L2_BUF_TYPE_VIDEO_CAPTURE and "
                "V4L2_BUF_TYPE_VIDEO_OUTPUT are only supported\n");
        ret = -EINVAL;
    }

    return ret;
}

int fimc_querybuf_capture(void *fh, struct v4l2_buffer *b)
{
    struct fimc_control *ctrl = ((struct fimc_prv_data *)fh)->ctrl;

    if (!ctrl->cap || !ctrl->cap->bufs) {
        fimc_err("%s: no capture device info\n", __func__);
        return -ENODEV;
    }

    if (ctrl->status != FIMC_STREAMOFF) {
        fimc_err("fimc is running\n");
        return -EBUSY;
    }

    mutex_lock(&ctrl->v4l2_lock);

    b->length = ctrl->cap->bufs[b->index].length[FIMC_ADDR_Y]
            + ctrl->cap->bufs[b->index].length[FIMC_ADDR_CB]
            + ctrl->cap->bufs[b->index].length[FIMC_ADDR_CR];

    b->m.offset = b->index * PAGE_SIZE;

    ctrl->cap->bufs[b->index].state = VIDEOBUF_IDLE;

    mutex_unlock(&ctrl->v4l2_lock);

    fimc_dbg("%s: %d bytes at index: %d\n", __func__, b->length, b->index);

    return 0;
}

User space then calls ioctl(fd, VIDIOC_STREAMON, &type) to start capturing.
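
Before STREAMON the mapped buffers are queued to the driver. A minimal sketch of this step (reusing the req bookkeeping from the REQBUFS sketch above):

enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
unsigned int n;

for (n = 0; n < req.count; n++) {
    struct v4l2_buffer buf;

    CLEAR(buf);
    buf.type   = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    buf.memory = V4L2_MEMORY_MMAP;
    buf.index  = n;
    xioctl(fd, VIDIOC_QBUF, &buf);      /* hand the empty buffer to the driver */
}

xioctl(fd, VIDIOC_STREAMON, &type);     /* dispatched to fimc_streamon() below */

/* Frames are then fetched with VIDIOC_DQBUF / VIDIOC_QBUF in a loop. */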

The corresponding driver code:

static int fimc_streamon(struct file *filp, void *fh, enum v4l2_buf_type i)
{
    struct fimc_control *ctrl = ((struct fimc_prv_data *)fh)->ctrl;
    struct s3c_platform_fimc *pdata;
    int ret = -1;

    pdata = to_fimc_plat(ctrl->dev);

    if (i == V4L2_BUF_TYPE_VIDEO_CAPTURE) {
        ret = fimc_streamon_capture(fh);
    } else if (i == V4L2_BUF_TYPE_VIDEO_OUTPUT) {
        ret = fimc_streamon_output(fh);
    } else {
        fimc_err("V4L2_BUF_TYPE_VIDEO_CAPTURE and "
                "V4L2_BUF_TYPE_VIDEO_OUTPUT are only supported\n");
        ret = -EINVAL;
    }

    return ret;
}

int fimc_streamon_capture(void *fh)
{
    struct fimc_control *ctrl = ((struct fimc_prv_data *)fh)->ctrl;
    struct fimc_capinfo *cap = ctrl->cap;
    int rot;
    int ret;

    fimc_dbg("%s\n", __func__);

    if (!ctrl->cam || !ctrl->cam->sd) {
        fimc_err("%s: No capture device.\n", __func__);
        return -ENODEV;
    }

    if (ctrl->status == FIMC_STREAMON) {
        fimc_err("%s: Camera already running.\n", __func__);
        return -EBUSY;
    }

    mutex_lock(&ctrl->v4l2_lock);

    if (0 != ctrl->id)
        fimc_clk_en(ctrl, true);

    ctrl->status = FIMC_READY_ON;
    cap->irq = 0;

    fimc_hwset_enable_irq(ctrl, 0, 1);

    if (!ctrl->cam->initialized)
        fimc_camera_init(ctrl);

    if (ctrl->id != 2 &&
            ctrl->cap->fmt.colorspace != V4L2_COLORSPACE_JPEG) {
        ret = fimc_camera_start(ctrl);
        if (ret < 0) {
            fimc_reset_capture(ctrl);
            mutex_unlock(&ctrl->v4l2_lock);
            return ret;
        }
    }

    fimc_hwset_camera_type(ctrl);
    fimc_hwset_camera_polarity(ctrl);
    fimc_update_hwaddr(ctrl);

    if (cap->fmt.pixelformat != V4L2_PIX_FMT_JPEG) {
        fimc_hwset_camera_source(ctrl);
        fimc_hwset_camera_offset(ctrl);

        fimc_capture_scaler_info(ctrl);
        fimc_hwset_prescaler(ctrl, &ctrl->sc);
        fimc_hwset_scaler(ctrl, &ctrl->sc);

        fimc_hwset_output_colorspace(ctrl, cap->fmt.pixelformat);
        fimc_hwset_output_addr_style(ctrl, cap->fmt.pixelformat);
        fimc_hwset_output_area(ctrl, cap->fmt.width, cap->fmt.height);

        if (cap->fmt.pixelformat == V4L2_PIX_FMT_RGB32 ||
                cap->fmt.pixelformat == V4L2_PIX_FMT_RGB565)
            fimc_hwset_output_rgb(ctrl, cap->fmt.pixelformat);
        else
            fimc_hwset_output_yuv(ctrl, cap->fmt.pixelformat);

        fimc_hwset_output_size(ctrl, cap->fmt.width, cap->fmt.height);

        fimc_hwset_output_scan(ctrl, &cap->fmt);
        fimc_hwset_output_rot_flip(ctrl, cap->rotate, cap->flip);
        rot = fimc_mapping_rot_flip(cap->rotate, cap->flip);

        if (rot & FIMC_ROT) {
            fimc_hwset_org_output_size(ctrl, cap->fmt.height,
                    cap->fmt.width);
        } else {
            fimc_hwset_org_output_size(ctrl, cap->fmt.width,
                    cap->fmt.height);
        }
        fimc_hwset_jpeg_mode(ctrl, false);
    } else {
        fimc_hwset_output_area_size(ctrl,
                fimc_camera_get_jpeg_memsize(ctrl)/2);
        fimc_hwset_jpeg_mode(ctrl, true);
    }

    if (ctrl->cap->fmt.colorspace == V4L2_COLORSPACE_JPEG)
        fimc_hwset_scaler_bypass(ctrl);

    fimc_start_capture(ctrl);

    if (ctrl->cap->fmt.colorspace == V4L2_COLORSPACE_JPEG &&
            ctrl->id != 2) {
        struct v4l2_control cam_ctrl;

        cam_ctrl.id = V4L2_CID_CAM_CAPTURE;
        ret = subdev_call(ctrl, core, s_ctrl, &cam_ctrl);
        if (ret < 0 && ret != -ENOIOCTLCMD) {
            fimc_reset_capture(ctrl);
            mutex_unlock(&ctrl->v4l2_lock);
            fimc_err("%s: Error in V4L2_CID_CAM_CAPTURE\n",
                    __func__);
            return -EPERM;
        }
    }

    ctrl->status = FIMC_STREAMON;

    mutex_unlock(&ctrl->v4l2_lock);

    return 0;
}

static int fimc_camera_init(struct fimc_control *ctrl)
{
    int ret;

    fimc_dbg("%s\n", __func__);

    /* do nothing if already initialized */
    if (ctrl->cam->initialized)
        return 0;

    /* enable camera power if needed */
    if (ctrl->cam->cam_power)
        ctrl->cam->cam_power(1);

    /* subdev call for init */
    ret = subdev_call(ctrl, core, init, 0);
    if (ret == -ENOIOCTLCMD) {
        fimc_err("%s: init subdev api not supported\n",
                __func__);
        return ret;
    }

    if (ctrl->cam->type == CAM_TYPE_MIPI) {
        /* subdev call for sleep/wakeup:
         * no error although no s_stream api support
         */
        u32 pixelformat;
        if (ctrl->cap->fmt.pixelformat == V4L2_PIX_FMT_JPEG)
            pixelformat = V4L2_PIX_FMT_JPEG;
        else
            pixelformat = ctrl->cam->pixelformat;

        subdev_call(ctrl, video, s_stream, 0);
        s3c_csis_start(ctrl->cam->mipi_lanes, ctrl->cam->mipi_settle,
                ctrl->cam->mipi_align, ctrl->cam->width,
                ctrl->cam->height, pixelformat);
        subdev_call(ctrl, video, s_stream, 1);
    }

    ctrl->cam->initialized = 1;

    return 0;
}

===============================

This invokes the init op in the ov3640 driver:

static int ov3640_init(struct v4l2_subdev *sd, u32 val)
{
    struct i2c_client *client = v4l2_get_subdevdata(sd);
    struct ov3640_state *state = to_state(sd);
    int err = -EINVAL, i;

    v4l_info(client, "%s: camera initialization start\n", __func__);

    for (i = 0; i < OV3640_INIT_REGS; i++) {
        err = ov3640_i2c_write(sd, ov3640_init_reg[i],
                sizeof(ov3640_init_reg[i]));
        if (err < 0)
            v4l_info(client, "%s: register set failed\n", __func__);
    }

    if (err < 0) {
        /* This is preview fail */
        state->check_previewdata = 100;
        v4l_err(client,
                "%s: camera initialization failed. err(%d)\n",
                __func__, state->check_previewdata);
        return -EIO;
    }

    /* This is preview success */
    state->check_previewdata = 0;
    return 0;
}

This brings up the sensor by writing its entire init register table over I2C.
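
ov3640_i2c_write itself is not shown above. A plausible implementation, sketched under the assumption that each ov3640_init_reg[i] entry is a small byte array (register address bytes followed by the value) that can go out as one I2C write:

/* Sketch only: the real ov3640_i2c_write in the vendor driver may differ. */
static int ov3640_i2c_write(struct v4l2_subdev *sd, unsigned char i2c_data[],
        unsigned char length)
{
    struct i2c_client *client = v4l2_get_subdevdata(sd);

    /* i2c_master_send() returns the number of bytes written or a
     * negative errno; normalize to 0 / -errno for the caller. */
    int ret = i2c_master_send(client, i2c_data, length);

    return ret == length ? 0 : (ret < 0 ? ret : -EIO);
}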

===========================

Following the same pattern, every V4L2 command is dispatched to the corresponding fimc ioctl handler, which in turn uses subdev_call to invoke the concrete method implemented in the sensor driver.
