Notes on writing a simple USB camera (UVC) driver

Platform: Linux

What the USB bus driver does

1. Identify the USB device

1.1 Allocate an address

1.2 Tell the device its address (SET_ADDRESS)

1.3 Issue requests to read the descriptors

2. Find and bind the matching device driver

3. Provide USB read/write routines

When a USB device is plugged in, the USB bus driver constructs a new usb_device for it and registers it on the bus.

In total this driver has to set up:

1 driver structure: usb_driver  /* operates on the attached USB device */

2 device structures: usb_device  /* holds the information of the identified USB device */

video_device  /* holds the driver's video_device configuration and operation functions */

When a USB camera is plugged in, it is first recognized by the kernel as a USB device. The USB bus driver looks up the matching device driver and calls it: the .id_table of the driver's usb_driver structure is consulted to check whether the device is supported; if it is, the .probe member of usb_driver is called. In probe the driver does the USB-side work, storing the identified device information in a usb_device structure, then declares that this USB device is, at a higher level, a video-type device, records that configuration in a video_device structure, and registers it so the video-side operations can begin. Every operation an application performs on the camera is turned, via system calls, into a call to a member function of that video_device structure.

The driver therefore consists of three parts: (1) USB device operations; (2) video device operations; (3) data transfer between the USB device and the video device. A user-space sketch of the call path follows below.
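To make that call path concrete, here is a minimal user-space sketch. It is not part of the driver, and the device node name /dev/video0 is an assumption; the point is that open() lands in the driver's v4l2_file_operations and ioctl() is dispatched through video_ioctl2 to the driver's ioctl_ops.

/* userspace sketch: open() reaches myuvc_open, ioctl(VIDIOC_QUERYCAP) reaches myuvc_vidioc_querycap */
#include <fcntl.h>
#include <stdio.h>
#include <sys/ioctl.h>
#include <linux/videodev2.h>

int main(void)
{
    struct v4l2_capability cap;
    int fd = open("/dev/video0", O_RDWR);   /* assumed node name */

    if (fd < 0)
        return 1;
    if (ioctl(fd, VIDIOC_QUERYCAP, &cap) == 0)
        printf("driver: %s, capabilities: 0x%x\n", cap.driver, cap.capabilities);
    return 0;
}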

1、首先是USB设备的操做

1. Define the usb_driver structure, which describes this driver and the operations it performs on matching USB devices. Its three key members are:

.id_table: when a device is plugged in, the core consults .id_table to decide whether this driver supports the device.

.probe: if the device type is supported, the core calls .probe, which performs the device-specific setup.

.disconnect: called when the USB device is unplugged.

static struct video_device *myuvc_vdev;   /* the video_device this driver registers */

2. Fill in the usb_driver structure

static struct usb_driver myuvc_driver = {
    .name       = "myuvc",
    .probe      = myuvc_probe,        /* called after plug-in to decide whether the device is supported */
    .disconnect = myuvc_disconnect,
    .id_table   = myuvc_ids,
};

3. Register/unregister the usb_driver

static int myuvc_init(void)
{
    return usb_register(&myuvc_driver);
}

static void myuvc_exit(void)
{
    usb_deregister(&myuvc_driver);
}

4. Module macros

module_init(myuvc_init);

module_exit(myuvc_exit);

MODULE_LICENSE("GPL");

That is the overall skeleton of the USB side of the driver.

5. Write the usb_driver members. First .id_table; only one class of USB device is supported:

static struct usb_device_id myuvc_ids[] = {
    /* Generic USB Video Class */
    /* .probe will be called for both the VideoControl and the VideoStreaming interface */
    { USB_INTERFACE_INFO(USB_CLASS_VIDEO, 1, 0) },  /* VideoControl Interface (VCI) */
    { USB_INTERFACE_INFO(USB_CLASS_VIDEO, 2, 0) },  /* VideoStreaming Interface (VSI) */
    {}
};
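The original snippet stops here, but an exported ID table is normally also announced to user space with MODULE_DEVICE_TABLE so hotplug tooling can autoload the module; adding it is optional for the walkthrough:

MODULE_DEVICE_TABLE(usb, myuvc_ids);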

Once the device is matched, .probe is called. A first version of .probe:

static struct usb_device *myuvc_udev;    /* global: the underlying USB device */

static struct video_device *myuvc_vdev;  /* global: the video_device we register */

static int myuvc_probe(struct usb_interface *intf, const struct usb_device_id *id)
{
    static int cnt = 0;

    printk("myuvc_probe: cnt = %d\n", cnt++);

    if (cnt == 2) {  /* second call: the VideoStreaming interface */
        /* 1. Get the usb_device behind this interface and remember it */
        struct usb_device *dev = interface_to_usbdev(intf);
        myuvc_udev = dev;

        /* Allocate the video_device structure */
        myuvc_vdev = video_device_alloc();

        /* 2. Set up the video_device: every operation the application performs on the
         *    camera is turned by the system call layer into a call to these members */
        myuvc_vdev->release   = myuvc_release;
        myuvc_vdev->fops      = &myuvc_fops;
        myuvc_vdev->ioctl_ops = &myuvc_ioctl_ops;

        /* 3. Register the video_device */
        video_register_device(myuvc_vdev, VFL_TYPE_GRABBER, -1);
    }

    return 0;
}

Then .disconnect:

static void myuvc_disconnect(struct usb_interface *intf)
{
    static int cnt = 0;

    printk("myuvc_disconnect: cnt = %d\n", cnt++);

    if (cnt == 2)
    {
        video_unregister_device(myuvc_vdev);
        video_device_release(myuvc_vdev);
    }
}

7. Write the video_device member functions, i.e. the video-side operations

myuvc_release: /* release callback */

static void myuvc_release(struct video_device *vdev)
{
}

myuvc_ioctl_ops: ioctl is the mechanism a driver uses to manage a device's I/O channel, i.e. to send control and configuration commands to the device. For a capture driver this covers querying capabilities, negotiating the data format, setting up buffers, and starting/stopping the data transfer.
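For orientation, the order in which a typical mmap-based capture application issues these ioctls is roughly the following; this is a sketch of the common V4L2 sequence, not something mandated by this driver (fd, cap, fmt, req, buf and type stand for the obvious user-space variables):

/* typical user-space call order for an mmap-based capture application */
ioctl(fd, VIDIOC_QUERYCAP, &cap);      /* is it a capture device? */
ioctl(fd, VIDIOC_S_FMT, &fmt);         /* negotiate pixel format and resolution */
ioctl(fd, VIDIOC_REQBUFS, &req);       /* ask the driver to allocate buffers */
ioctl(fd, VIDIOC_QUERYBUF, &buf);      /* get each buffer's offset, then mmap() it */
ioctl(fd, VIDIOC_QBUF, &buf);          /* queue every buffer */
ioctl(fd, VIDIOC_STREAMON, &type);     /* start streaming */
/* loop: poll(), VIDIOC_DQBUF, consume the frame, VIDIOC_QBUF again */
ioctl(fd, VIDIOC_STREAMOFF, &type);    /* stop streaming */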

static int myuvc_vidioc_querycap(struct file *file, void *priv, struct v4l2_capability *cap)
{
    cap->capabilities = V4L2_CAP_VIDEO_CAPTURE |  /* it is a video capture device */
                        V4L2_CAP_STREAMING;       /* data is obtained via the streaming ioctls */
    return 0;
}

static int myuvc_vidioc_enum_fmt_vid_cap(struct file *file, void *priv,
                                         struct v4l2_fmtdesc *f)
{
    return 0;
}

static int myuvc_vidioc_g_fmt_vid_cap(struct file *file, void *priv, struct v4l2_format *f)
{
    return 0;
}

static int myuvc_vidioc_try_fmt_vid_cap(struct file *file, void *priv,
                                        struct v4l2_format *f)
{
    return 0;
}

static int myuvc_vidioc_s_fmt_vid_cap(struct file *file, void *priv,   /* set the format */
                                      struct v4l2_format *f)
{
    return 0;
}

static int myuvc_vidioc_reqbufs(struct file *file, void *priv,
                                struct v4l2_requestbuffers *p)
{
    return 0;
}

static int myuvc_vidioc_querybuf(struct file *file, void *priv, struct v4l2_buffer *p)
{
    return 0;
}

static int myuvc_vidioc_qbuf(struct file *file, void *priv, struct v4l2_buffer *p)
{
    return 0;
}

static int myuvc_vidioc_streamon(struct file *file, void *priv, enum v4l2_buf_type i)
{
    return 0;
}

static int myuvc_vidioc_dqbuf(struct file *file, void *priv, struct v4l2_buffer *p)
{
    return 0;
}

static int myuvc_vidioc_streamoff(struct file *file, void *priv, enum v4l2_buf_type i)
{
    return 0;
}

/* When the application calls ioctl(cmd), the system call layer runs video_ioctl2 from video_device; based on cmd the kernel dispatches to the matching member of myuvc_ioctl_ops */

static const struct v4l2_ioctl_ops myuvc_ioctl_ops = {
    // identifies it as a capture device
    .vidioc_querycap          = myuvc_vidioc_querycap,

    /* enumerate, get, try and set the data format */
    .vidioc_enum_fmt_vid_cap  = myuvc_vidioc_enum_fmt_vid_cap,
    .vidioc_g_fmt_vid_cap     = myuvc_vidioc_g_fmt_vid_cap,
    .vidioc_try_fmt_vid_cap   = myuvc_vidioc_try_fmt_vid_cap,
    .vidioc_s_fmt_vid_cap     = myuvc_vidioc_s_fmt_vid_cap,

    /* buffer handling: request / query / queue / dequeue */
    .vidioc_reqbufs           = myuvc_vidioc_reqbufs,
    .vidioc_querybuf          = myuvc_vidioc_querybuf,
    .vidioc_qbuf              = myuvc_vidioc_qbuf,
    .vidioc_dqbuf             = myuvc_vidioc_dqbuf,

    // start / stop streaming
    .vidioc_streamon          = myuvc_vidioc_streamon,
    .vidioc_streamoff         = myuvc_vidioc_streamoff,
};

myuvc_fops: the entry points we do not need yet are left as empty functions

/* Called via the system call layer when the application open()s the device file */
static int myuvc_open(struct file *file)
{
    return 0;
}

static int myuvc_close(struct file *file)
{
    return 0;
}

static int myuvc_mmap(struct file *file, struct vm_area_struct *vma)
{
    return 0;
}

static unsigned int myuvc_poll(struct file *file, struct poll_table_struct *wait)
{
    return 0;
}

static const struct v4l2_file_operations myuvc_fops = {
    .owner   = THIS_MODULE,
    .open    = myuvc_open,
    .release = myuvc_close,
    .mmap    = myuvc_mmap,
    .ioctl   = video_ioctl2, /* V4L2 ioctl handler */
    .poll    = myuvc_poll,
};

8. Implement the myuvc_ioctl_ops member functions

struct frame_desc {
    int width;
    int height;
};

static struct v4l2_format myuvc_format;  /* the format this driver currently uses */

static struct frame_desc frames[] = {{640, 480}, {352, 288}, {320, 240}, {176, 144}, {160, 120}};  /* resolutions supported by this driver */

static int frame_idx = 1;  /* default resolution index: frames[1] = {352, 288} */

/* A2: see uvc_v4l2_do_ioctl for reference */

static int myuvc_vidioc_querycap(struct file *file, void *priv, struct v4l2_capability *cap)
{
    memset(cap, 0, sizeof *cap);  /* clear the whole structure, then fill it in */
    strcpy(cap->driver, "myuvc");
    strcpy(cap->card, "myuvc");
    cap->version = 1;
    cap->capabilities = V4L2_CAP_VIDEO_CAPTURE | V4L2_CAP_STREAMING;
    return 0;
}

/* A3: enumerate which pixel formats this camera supports; see the uvc_fmts array for reference */

static int myuvc_vidioc_enum_fmt_vid_cap(struct file *file, void *priv,
                                         struct v4l2_fmtdesc *f)
{
    /* this driver supports exactly one format */
    if (f->index >= 1)
        return -EINVAL;

    strcpy(f->description, "4:2:2, packed, YUYV");
    f->pixelformat = V4L2_PIX_FMT_YUYV;
    return 0;
}

/* A4: return the format currently in use */
static int myuvc_vidioc_g_fmt_vid_cap(struct file *file, void *priv,
                                      struct v4l2_format *f)
{
    memcpy(f, &myuvc_format, sizeof(myuvc_format));
    return 0;
}

/* A5: test whether a format is supported; force it to one of the supported settings */
static int myuvc_vidioc_try_fmt_vid_cap(struct file *file, void *priv,
                                        struct v4l2_format *f)
{
    if (f->type != V4L2_BUF_TYPE_VIDEO_CAPTURE)
        return -EINVAL;

    if (f->fmt.pix.pixelformat != V4L2_PIX_FMT_YUYV)
        return -EINVAL;

    /* the supported resolutions were determined by reading the descriptors by hand */
    f->fmt.pix.width  = frames[frame_idx].width;
    f->fmt.pix.height = frames[frame_idx].height;
    f->fmt.pix.bytesperline = (f->fmt.pix.width * bBitsPerPixel) >> 3;  /* bBitsPerPixel is taken from the format descriptor, 16 for YUYV */
    f->fmt.pix.sizeimage = f->fmt.pix.height * f->fmt.pix.bytesperline;

    return 0;
}
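A quick worked example of those two lines, assuming the default 352x288 YUYV frame (bBitsPerPixel = 16):

bytesperline = 352 * 16 / 8 = 704 bytes
sizeimage    = 288 * 704    = 202752 bytes per frame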

/* A6: see myvivi_vidioc_s_fmt_vid_cap for reference */
static int myuvc_vidioc_s_fmt_vid_cap(struct file *file, void *priv,
                                      struct v4l2_format *f)
{
    int ret = myuvc_vidioc_try_fmt_vid_cap(file, NULL, f);

    if (ret < 0)
        return ret;

    memcpy(&myuvc_format, f, sizeof(myuvc_format));
    return 0;
}

static int myuvc_free_buffers(void)
{
    if (myuvc_queue.mem)
    {
        vfree(myuvc_queue.mem);
        memset(&myuvc_queue, 0, sizeof(myuvc_queue));
        myuvc_queue.mem = NULL;
    }
    return 0;
}

/* Structures modelled on uvc_video_queue */

struct myuvc_buffer {
    struct v4l2_buffer buf;
    int state;
    int vma_use_count;      /* has this buffer been mmap'ed? */
    wait_queue_head_t wait; /* a process reading this buffer sleeps here if no data is ready */
    struct list_head stream;
    struct list_head irq;
};

struct myuvc_queue {
    void *mem;
    int count;
    int buf_size;
    struct myuvc_buffer buffer[32];
    struct list_head mainqueue;   /* consumed by the application */
    struct list_head irqqueue;    /* filled by the low-level driver */
};

static struct myuvc_queue myuvc_queue;

/* A7: allocate a number of buffers from which the application will read video data
 * reference: uvc_alloc_buffers */

static int myuvc_vidioc_reqbufs(struct file *file, void *priv,
                                struct v4l2_requestbuffers *p)
{
    int nbuffers = p->count;
    int bufsize = PAGE_ALIGN(myuvc_format.fmt.pix.sizeimage);
    unsigned int i;
    void *mem = NULL;
    int ret;

    if ((ret = myuvc_free_buffers()) < 0)
        goto done;

    /* Bail out if no buffers should be allocated. */
    if (nbuffers == 0)
        goto done;

    /* Decrement the number of buffers until allocation succeeds. */
    for (; nbuffers > 0; --nbuffers) {
        mem = vmalloc_32(nbuffers * bufsize);
        if (mem != NULL)
            break;
    }

    if (mem == NULL) {
        ret = -ENOMEM;
        goto done;
    }

    /* the buffers are allocated as one contiguous block */
    memset(&myuvc_queue, 0, sizeof(myuvc_queue));
    INIT_LIST_HEAD(&myuvc_queue.mainqueue);
    INIT_LIST_HEAD(&myuvc_queue.irqqueue);

    for (i = 0; i < nbuffers; ++i) {
        myuvc_queue.buffer[i].buf.index = i;
        myuvc_queue.buffer[i].buf.m.offset = i * bufsize;
        myuvc_queue.buffer[i].buf.length = myuvc_format.fmt.pix.sizeimage;
        myuvc_queue.buffer[i].buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        myuvc_queue.buffer[i].buf.sequence = 0;
        myuvc_queue.buffer[i].buf.field = V4L2_FIELD_NONE;
        myuvc_queue.buffer[i].buf.memory = V4L2_MEMORY_MMAP;
        myuvc_queue.buffer[i].buf.flags = 0;
        myuvc_queue.buffer[i].state = VIDEOBUF_IDLE;
        init_waitqueue_head(&myuvc_queue.buffer[i].wait);
    }

    myuvc_queue.mem = mem;
    myuvc_queue.count = nbuffers;
    myuvc_queue.buf_size = bufsize;
    ret = nbuffers;

done:
    return ret;
}

/* A8: query a buffer's status, e.g. its offset (which the application can then mmap)
 * reference: uvc_query_buffer */

static int myuvc_vidioc_querybuf(struct file *file, void *priv,
                                 struct v4l2_buffer *v4l2_buf)
{
    int ret = 0;

    if (v4l2_buf->index >= myuvc_queue.count) {
        ret = -EINVAL;
        goto done;
    }

    memcpy(v4l2_buf, &myuvc_queue.buffer[v4l2_buf->index].buf, sizeof(*v4l2_buf));

    /* update the flags */
    if (myuvc_queue.buffer[v4l2_buf->index].vma_use_count)
        v4l2_buf->flags |= V4L2_BUF_FLAG_MAPPED;

    switch (myuvc_queue.buffer[v4l2_buf->index].state) {
    case VIDEOBUF_ERROR:
    case VIDEOBUF_DONE:
        v4l2_buf->flags |= V4L2_BUF_FLAG_DONE;
        break;
    case VIDEOBUF_QUEUED:
    case VIDEOBUF_ACTIVE:
        v4l2_buf->flags |= V4L2_BUF_FLAG_QUEUED;
        break;
    case VIDEOBUF_IDLE:
    default:
        break;
    }

done:
    return ret;
}
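On the application side, the offset returned by VIDIOC_QUERYBUF is exactly what gets passed to mmap(). Continuing the earlier user-space sketch (fd and req are assumed to come from the open() and VIDIOC_REQBUFS calls shown above):

/* userspace sketch: map each driver buffer into the process */
struct v4l2_buffer buf;
void *bufs[32];
unsigned int i;

for (i = 0; i < req.count; i++) {
    memset(&buf, 0, sizeof(buf));
    buf.type   = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    buf.memory = V4L2_MEMORY_MMAP;
    buf.index  = i;
    ioctl(fd, VIDIOC_QUERYBUF, &buf);      /* driver reports length and m.offset */
    bufs[i] = mmap(NULL, buf.length, PROT_READ | PROT_WRITE,
                   MAP_SHARED, fd, buf.m.offset);
}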

/* A10: queue a buffer; the low-level hardware code will later fill the buffers on this queue
 * reference: uvc_queue_buffer */

static int myuvc_vidioc_qbuf(struct file *file, void *priv, struct v4l2_buffer *v4l2_buf)
{
    struct myuvc_buffer *buf;

    /* 0. The v4l2_buf passed in by the application may be bogus, so validate it */
    if (v4l2_buf->type != V4L2_BUF_TYPE_VIDEO_CAPTURE ||
        v4l2_buf->memory != V4L2_MEMORY_MMAP) {
        return -EINVAL;
    }

    if (v4l2_buf->index >= myuvc_queue.count) {
        return -EINVAL;
    }

    buf = &myuvc_queue.buffer[v4l2_buf->index];
    if (buf->state != VIDEOBUF_IDLE) {
        return -EINVAL;
    }

    /* 1. Change the state */
    buf->state = VIDEOBUF_QUEUED;
    buf->buf.bytesused = 0;

    /* 2. Put the buffer on both lists */

    /* List 1: for the application.
     * While a buffer has no data it sits on mainqueue;
     * once it has data, the application dequeues it from mainqueue.
     */
    list_add_tail(&buf->stream, &myuvc_queue.mainqueue);

    /* List 2: for the code that produces data.
     * When data arrives, the first buffer on irqqueue is taken and filled.
     */
    list_add_tail(&buf->irq, &myuvc_queue.irqqueue);

    return 0;
}

/* A11: start streaming
 * reference: uvc_video_enable(video, 1), i.e. uvc_commit_video / uvc_init_video */

static int myuvc_vidioc_streamon(struct file *file, void *priv, enum v4l2_buf_type i)
{
    /* 1. Send the streaming parameters to the camera */
    /* 2. Allocate and set up the URBs */
    /* 3. Submit the URBs to start receiving data */
    return 0;
}

/* A13: after poll/select reports data, the application dequeues the buffer
 * reference: uvc_dequeue_buffer */

static int myuvc_vidioc_dqbuf(struct file *file, void *priv, struct v4l2_buffer *v4l2_buf)
{
    /* once data is ready, take the buffer off mainqueue */
    struct myuvc_buffer *buf;
    int ret = 0;

    if (list_empty(&myuvc_queue.mainqueue)) {
        ret = -EINVAL;
        goto done;
    }

    buf = list_first_entry(&myuvc_queue.mainqueue, struct myuvc_buffer, stream);

    switch (buf->state) {
    case VIDEOBUF_ERROR:
        ret = -EIO;
        /* fall through: the buffer is still handed back to the application */
    case VIDEOBUF_DONE:
        buf->state = VIDEOBUF_IDLE;
        break;
    case VIDEOBUF_IDLE:
    case VIDEOBUF_QUEUED:
    case VIDEOBUF_ACTIVE:
    default:
        ret = -EINVAL;
        goto done;
    }

    list_del(&buf->stream);

done:
    return ret;
}
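From the application's point of view the dequeue step looks roughly like this, continuing the user-space sketch above (fd and bufs[] come from the earlier fragments; process_frame is a placeholder for whatever consumes the frame):

/* userspace sketch: wait for a frame, dequeue it, process it, requeue it */
struct pollfd pfd = { .fd = fd, .events = POLLIN };
struct v4l2_buffer buf;

poll(&pfd, 1, -1);                         /* sleep until the driver wakes us */

memset(&buf, 0, sizeof(buf));
buf.type   = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
ioctl(fd, VIDIOC_DQBUF, &buf);             /* take a filled buffer off mainqueue */

process_frame(bufs[buf.index], buf.bytesused);  /* placeholder for the consumer */

ioctl(fd, VIDIOC_QBUF, &buf);              /* give the buffer back to the driver */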


/* A17: stop streaming
 * reference: uvc_video_enable(video, 0) */

static int myuvc_vidioc_streamoff(struct file *file, void *priv, enum v4l2_buf_type t)
{
    /* 1. Kill the URBs */
    /* 2. Free the URBs */
    /* 3. Switch the VideoStreaming interface back to altsetting 0 */
    usb_set_interface(myuvc_udev, myuvc_streaming_intf, 0);
    return 0;
}

9. Implement the data-transfer side: myuvc_vidioc_streamon

9.1 Set the camera's streaming parameters: which format to use, which frame (resolution) within that format, and which interface/altsetting the USB side should use.

/* The uvc_streaming_control structure describes the parameters that have to be negotiated with the camera */

struct myuvc_streaming_control {
    __u16 bmHint;
    __u8  bFormatIndex;   /* video format index from a format descriptor of this video interface */
    __u8  bFrameIndex;    /* video frame index from a frame descriptor */
    __u32 dwFrameInterval;
    __u16 wKeyFrameRate;
    __u16 wPFrameRate;
    __u16 wCompQuality;
    __u16 wCompWindowSize;
    __u16 wDelay;
    __u32 dwMaxVideoFrameSize;
    __u32 dwMaxPayloadTransferSize;
    __u32 dwClockFrequency;
    __u8  bmFramingInfo;
    __u8  bPreferedVersion;
    __u8  bMinVersion;
    __u8  bMaxVersion;
};
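The three functions that follow implement the standard UVC probe/commit negotiation for these parameters; in outline:

/* UVC negotiation outline (implemented below):
 * 1. SET_CUR on VS_PROBE_CONTROL   - propose the parameters            (myuvc_try_streaming_params)
 * 2. GET_CUR on VS_PROBE_CONTROL   - read back what the camera accepted (myuvc_get_streaming_params)
 * 3. SET_CUR on VS_COMMIT_CONTROL  - commit the final parameters        (myuvc_set_streaming_params)
 */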

static int myuvc_streaming_intf;  /* interface number of the VideoStreaming interface, set in .probe */

static int myuvc_control_intf;    /* interface number of the VideoControl interface */

/* Reference: uvc_v4l2_try_format / uvc_probe_video / uvc_set_video_ctrl(video, probe, 1) */

static int myuvc_try_streaming_params(struct myuvc_streaming_control *ctrl)
{
    __u8 *data;        /* packet holding the parameters sent to the camera */
    __u16 size;        /* packet size */
    int ret;
    __u8 type = USB_TYPE_CLASS | USB_RECIP_INTERFACE;   /* request type */
    unsigned int pipe; /* control pipe to the device */

    memset(ctrl, 0, sizeof *ctrl);  /* start from all-zero parameters */

    ctrl->bmHint = 1;  /* hint: keep dwFrameInterval fixed */
    ctrl->bFormatIndex = 1;
    ctrl->bFrameIndex  = frame_idx + 1;
    ctrl->dwFrameInterval = 333333;

    size = uvc_version >= 0x0110 ? 34 : 26;

    /* allocate the data packet */
    data = kzalloc(size, GFP_KERNEL);
    if (data == NULL)
        return -ENOMEM;

    /* serialize the parameters into the packet (little-endian on the wire) */
    *(__le16 *)&data[0] = cpu_to_le16(ctrl->bmHint);
    data[2] = ctrl->bFormatIndex;
    data[3] = ctrl->bFrameIndex;
    *(__le32 *)&data[4]  = cpu_to_le32(ctrl->dwFrameInterval);
    *(__le16 *)&data[8]  = cpu_to_le16(ctrl->wKeyFrameRate);
    *(__le16 *)&data[10] = cpu_to_le16(ctrl->wPFrameRate);
    *(__le16 *)&data[12] = cpu_to_le16(ctrl->wCompQuality);
    *(__le16 *)&data[14] = cpu_to_le16(ctrl->wCompWindowSize);
    *(__le16 *)&data[16] = cpu_to_le16(ctrl->wDelay);
    put_unaligned_le32(ctrl->dwMaxVideoFrameSize, &data[18]);
    put_unaligned_le32(ctrl->dwMaxPayloadTransferSize, &data[22]);

    if (size == 34) {
        put_unaligned_le32(ctrl->dwClockFrequency, &data[26]);
        data[30] = ctrl->bmFramingInfo;
        data[31] = ctrl->bPreferedVersion;
        data[32] = ctrl->bMinVersion;
        data[33] = ctrl->bMaxVersion;
    }

    /* choose the control pipe and transfer direction */
    pipe = (SET_CUR & 0x80) ? usb_rcvctrlpipe(myuvc_udev, 0)
                            : usb_sndctrlpipe(myuvc_udev, 0);
    type |= (SET_CUR & 0x80) ? USB_DIR_IN : USB_DIR_OUT;

    /* send the parameter packet */
    ret = usb_control_msg(myuvc_udev, pipe, SET_CUR, type,
                          VS_PROBE_CONTROL << 8, 0 << 8 | myuvc_streaming_intf,
                          data, size, 5000);

    /* free the packet once it has been sent */
    kfree(data);

    return (ret < 0) ? ret : 0;
}

static int myuvc_get_streaming_params(struct myuvc_streaming_control *ctrl)
{
    __u8 *data;
    __u16 size;
    int ret;
    __u8 type = USB_TYPE_CLASS | USB_RECIP_INTERFACE;
    unsigned int pipe;

    size = uvc_version >= 0x0110 ? 34 : 26;
    data = kmalloc(size, GFP_KERNEL);
    if (data == NULL)
        return -ENOMEM;

    pipe = (GET_CUR & 0x80) ? usb_rcvctrlpipe(myuvc_udev, 0)
                            : usb_sndctrlpipe(myuvc_udev, 0);
    type |= (GET_CUR & 0x80) ? USB_DIR_IN : USB_DIR_OUT;

    ret = usb_control_msg(myuvc_udev, pipe, GET_CUR, type,
                          VS_PROBE_CONTROL << 8,
                          0 << 8 | myuvc_streaming_intf, data, size, 5000);
    if (ret < 0)
        goto done;

    ctrl->bmHint = le16_to_cpup((__le16 *)&data[0]);
    ctrl->bFormatIndex = data[2];
    ctrl->bFrameIndex = data[3];
    ctrl->dwFrameInterval = le32_to_cpup((__le32 *)&data[4]);
    ctrl->wKeyFrameRate = le16_to_cpup((__le16 *)&data[8]);
    ctrl->wPFrameRate = le16_to_cpup((__le16 *)&data[10]);
    ctrl->wCompQuality = le16_to_cpup((__le16 *)&data[12]);
    ctrl->wCompWindowSize = le16_to_cpup((__le16 *)&data[14]);
    ctrl->wDelay = le16_to_cpup((__le16 *)&data[16]);
    ctrl->dwMaxVideoFrameSize = get_unaligned_le32(&data[18]);
    ctrl->dwMaxPayloadTransferSize = get_unaligned_le32(&data[22]);

    if (size == 34) {
        ctrl->dwClockFrequency = get_unaligned_le32(&data[26]);
        ctrl->bmFramingInfo = data[30];
        ctrl->bPreferedVersion = data[31];
        ctrl->bMinVersion = data[32];
        ctrl->bMaxVersion = data[33];
    } else {
        //ctrl->dwClockFrequency = video->dev->clock_frequency;
        ctrl->bmFramingInfo = 0;
        ctrl->bPreferedVersion = 0;
        ctrl->bMinVersion = 0;
        ctrl->bMaxVersion = 0;
    }

done:
    kfree(data);
    return (ret < 0) ? ret : 0;
}

static int myuvc_set_streaming_params(struct myuvc_streaming_control *ctrl)
{
    __u8 *data;
    __u16 size;
    int ret;
    __u8 type = USB_TYPE_CLASS | USB_RECIP_INTERFACE;
    unsigned int pipe;

    size = uvc_version >= 0x0110 ? 34 : 26;
    data = kzalloc(size, GFP_KERNEL);
    if (data == NULL)
        return -ENOMEM;

    *(__le16 *)&data[0] = cpu_to_le16(ctrl->bmHint);
    data[2] = ctrl->bFormatIndex;
    data[3] = ctrl->bFrameIndex;
    *(__le32 *)&data[4]  = cpu_to_le32(ctrl->dwFrameInterval);
    *(__le16 *)&data[8]  = cpu_to_le16(ctrl->wKeyFrameRate);
    *(__le16 *)&data[10] = cpu_to_le16(ctrl->wPFrameRate);
    *(__le16 *)&data[12] = cpu_to_le16(ctrl->wCompQuality);
    *(__le16 *)&data[14] = cpu_to_le16(ctrl->wCompWindowSize);
    *(__le16 *)&data[16] = cpu_to_le16(ctrl->wDelay);
    put_unaligned_le32(ctrl->dwMaxVideoFrameSize, &data[18]);
    put_unaligned_le32(ctrl->dwMaxPayloadTransferSize, &data[22]);

    if (size == 34) {
        put_unaligned_le32(ctrl->dwClockFrequency, &data[26]);
        data[30] = ctrl->bmFramingInfo;
        data[31] = ctrl->bPreferedVersion;
        data[32] = ctrl->bMinVersion;
        data[33] = ctrl->bMaxVersion;
    }

    pipe = (SET_CUR & 0x80) ? usb_rcvctrlpipe(myuvc_udev, 0)
                            : usb_sndctrlpipe(myuvc_udev, 0);
    type |= (SET_CUR & 0x80) ? USB_DIR_IN : USB_DIR_OUT;

    ret = usb_control_msg(myuvc_udev, pipe, SET_CUR, type,
                          VS_COMMIT_CONTROL << 8,
                          0 << 8 | myuvc_streaming_intf, data, size, 5000);

    kfree(data);
    return (ret < 0) ? ret : 0;
}

/* A11: start streaming
 * reference: uvc_video_enable(video, 1), i.e. uvc_commit_video / uvc_init_video */

static int myuvc_vidioc_streamon(struct file *file, void *priv, enum v4l2_buf_type i)
{
    int ret;

    /* 1. Send the streaming parameters to the camera: which format, and which
     *    frame (resolution) within that format; see uvc_set_video_ctrl / uvc_get_video_ctrl
     * 1.1 Fill a data packet according to uvc_streaming_control (set by hand, or read back and modify)
     * 1.2 Send the packet with usb_control_msg
     */

    /* a. Try the desired parameters, to check they are supported */
    myuvc_try_streaming_params(&myuvc_params);

    /* b. Read the parameters back */
    myuvc_get_streaming_params(&myuvc_params);

    /* c. Commit the parameters */
    myuvc_set_streaming_params(&myuvc_params);

    /* d. Select the altsetting of the VideoStreaming interface:
     * d.1 determine the required bandwidth from myuvc_params;
     * d.2 find the altsetting whose endpoint wMaxPacketSize satisfies that bandwidth.
     * Determined by hand here:
     *   myuvc_get_streaming_params() reports dwMaxPayloadTransferSize, so
     *   bandwidth = myuvc_params.dwMaxPayloadTransferSize = 800
     *   and "lsusb -v -d 0x1b3b:" shows
     *     wMaxPacketSize     0x0320 1x 800 bytes
     *     bAlternateSetting  5
     */
    usb_set_interface(myuvc_udev, myuvc_streaming_intf,
                      myuvc_streaming_bAlternateSetting);

    /* 2. Allocate and set up the URBs */
    /* 3. Submit the URBs to receive data */

    return 0;
}

static int myuvc_probe(struct usb_interface *intf,
                       const struct usb_device_id *id)
{
    static int cnt = 0;
    struct usb_device *dev = interface_to_usbdev(intf);

    myuvc_udev = dev;

    printk("myuvc_probe: cnt = %d\n", cnt++);

    if (cnt == 1)
    {
        myuvc_control_intf = intf->cur_altsetting->desc.bInterfaceNumber;
    }
    else if (cnt == 2)
    {
        myuvc_streaming_intf = intf->cur_altsetting->desc.bInterfaceNumber;

        myuvc_vdev = video_device_alloc();
        myuvc_vdev->release   = myuvc_release;
        myuvc_vdev->fops      = &myuvc_fops;
        myuvc_vdev->ioctl_ops = &myuvc_ioctl_ops;
        video_register_device(myuvc_vdev, VFL_TYPE_GRABBER, -1);
    }

    return 0;
}

9.2 Allocate and set up the URBs

All USB communication in the Linux kernel goes through a USB request block, or urb. A request block is described by struct urb, defined in include/linux/usb.h.

A urb sends data to, or receives data from, a specific endpoint of a specific USB device, asynchronously. A USB device driver may allocate many urbs for one endpoint, or reuse a single urb for several different endpoints, as its needs dictate. Every endpoint in a device maintains a queue of urbs, so several urbs can be outstanding on the same endpoint before the queue drains. The typical life cycle of a urb is:

it is created by a USB device driver;

assigned to a particular endpoint of a particular USB device;

submitted to the USB core by the device driver;

handed by the USB core to the host controller driver for that device;

processed by the host controller, which performs the actual USB transfer;

when the urb completes, the host controller driver notifies the device driver.

A urb can be cancelled at any time by the driver that submitted it, or by the USB core if the device is removed from the system. Urbs are created dynamically and carry an internal reference count, so they are freed automatically when the last user releases them.
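As a minimal illustration of that life cycle, here is a hedged sketch using a hypothetical bulk-in endpoint rather than the isochronous endpoints this driver actually uses; udev, ep_addr, buffer, buffer_len and my_complete are assumed to exist in the caller:

/* sketch: allocate, fill and submit one bulk-in urb; my_complete is a hypothetical completion handler */
struct urb *urb = usb_alloc_urb(0, GFP_KERNEL);           /* 1. create */
usb_fill_bulk_urb(urb, udev,                              /* 2. bind to device and endpoint */
                  usb_rcvbulkpipe(udev, ep_addr),
                  buffer, buffer_len,
                  my_complete, NULL);                     /* completion handler + context */
ret = usb_submit_urb(urb, GFP_KERNEL);                    /* 3. submit to the USB core */
/* ... the host controller performs the transfer; my_complete() runs when it finishes ... */
usb_free_urb(urb);                                        /* drop our reference when done */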

struct myuvc_queue {
    void *mem;
    int count;
    int buf_size;
    struct myuvc_buffer buffer[32];  /* the frame buffers */

    struct urb *urb[32];             /* the URBs */
    char *urb_buffer[32];            /* the URB transfer buffers */
    dma_addr_t urb_dma[32];
    unsigned int urb_size;

    struct list_head mainqueue;      /* consumed by the application */
    struct list_head irqqueue;       /* filled by the low-level driver */
};

/* Reference: uvc_video_complete / uvc_video_decode_isoc */

static void myuvc_video_complete(struct urb *urb)
{
    u8 *src;   /* source of the data */
    u8 *dest;  /* destination of the data */
    int ret, i;
    int len;
    int maxlen;
    int nbytes;
    struct myuvc_buffer *buf;

    switch (urb->status) {
    case 0:
        break;
    default:
        printk("Non-zero status (%d) in video completion handler.\n", urb->status);
        return;
    }

    /* take the first buffer from the irqqueue */
    if (!list_empty(&myuvc_queue.irqqueue))
    {
        buf = list_first_entry(&myuvc_queue.irqqueue, struct myuvc_buffer, irq);

        for (i = 0; i < urb->number_of_packets; ++i) {
            if (urb->iso_frame_desc[i].status < 0) {
                printk("USB isochronous frame lost (%d).\n", urb->iso_frame_desc[i].status);
                continue;
            }

            /* the image data delivered by the camera sits in the urb buffer */
            src = urb->transfer_buffer + urb->iso_frame_desc[i].offset;
            dest = myuvc_queue.mem + buf->buf.m.offset + buf->buf.bytesused;
            len = urb->iso_frame_desc[i].actual_length;

            /* Validate the payload. Payload layout:
             *   data[0] : header length
             *   data[1] : header flags / error status
             */
            if (len < 2 || src[0] < 2 || src[0] > len)
                continue;

            /* Skip payloads marked with the error bit ("error frames"). */
            if (src[1] & UVC_STREAM_ERR) {
                printk("Dropping payload (error bit set).\n");
                continue;
            }

            /* payload length without the header */
            len -= src[0];

            /* how much room is left in the buffer */
            maxlen = buf->buf.length - buf->buf.bytesused;
            nbytes = min(len, maxlen);

            /* copy the data */
            memcpy(dest, src + src[0], nbytes);
            buf->buf.bytesused += nbytes;

            /* has a full frame been received? */
            if (len > maxlen) {
                buf->state = VIDEOBUF_DONE;
            }

            /* Mark the buffer as done if the EOF marker is set. */
            if (src[1] & UVC_STREAM_EOF && buf->buf.bytesused != 0) {
                printk("Frame complete (EOF found).\n");
                if (len == 0)
                    printk("EOF in empty payload.\n");
                buf->state = VIDEOBUF_DONE;
            }
        }

        /* Once a whole frame has been received:
         * remove the buffer from irqqueue and
         * wake up any process waiting for data.
         */
        if (buf->state == VIDEOBUF_DONE ||
            buf->state == VIDEOBUF_ERROR)
        {
            list_del(&buf->irq);
            wake_up(&buf->wait);
        }
    }

    /* resubmit the URB */
    if ((ret = usb_submit_urb(urb, GFP_ATOMIC)) < 0) {
        printk("Failed to resubmit video URB (%d).\n", ret);
    }
}

/* URB initialisation; reference: uvc_init_video_isoc */

static int myuvc_alloc_init_urbs(void)
{
    u16 psize;     /* maximum number of bytes the isochronous endpoint can carry per packet */
    u32 size;      /* maximum size of one video frame */
    int i, j, npackets;
    struct urb *urb;

    psize = wMaxPacketSize;
    size = myuvc_params.dwMaxVideoFrameSize;
    npackets = DIV_ROUND_UP(size, psize);
    if (npackets > 32)
        npackets = 32;

    size = myuvc_queue.urb_size = psize * npackets;

    /* 1. Allocate the URB transfer buffers */
    for (i = 0; i < MYUVC_URBS; ++i) {
        /* 1.1 Allocate the i-th transfer buffer (npackets packets' worth of data);
         *     it is recorded in myuvc_queue */
        myuvc_queue.urb_buffer[i] = usb_buffer_alloc(
                myuvc_udev, size, GFP_KERNEL | __GFP_NOWARN,
                &myuvc_queue.urb_dma[i]);

        /* 1.2 Allocate the urb itself; its transfer_buffer will point at the buffer
         *     above, and the urb will later be submitted to the USB core */
        myuvc_queue.urb[i] = usb_alloc_urb(npackets, GFP_KERNEL);

        if (!myuvc_queue.urb_buffer[i] || !myuvc_queue.urb[i])
        {
            myuvc_uninit_urbs();
            return -ENOMEM;
        }
    }

    /* 2. Set up the URBs */
    for (i = 0; i < MYUVC_URBS; ++i) {
        urb = myuvc_queue.urb[i];

        urb->dev = myuvc_udev;
        urb->context = NULL;
        urb->pipe = usb_rcvisocpipe(myuvc_udev, myuvc_bEndpointAddress);
        urb->transfer_flags = URB_ISO_ASAP | URB_NO_TRANSFER_DMA_MAP;
        urb->interval = 1;
        urb->transfer_buffer = myuvc_queue.urb_buffer[i];
        urb->transfer_dma = myuvc_queue.urb_dma[i];

        /* myuvc_video_complete is called when the URB completes */
        urb->complete = myuvc_video_complete;
        urb->number_of_packets = npackets;
        urb->transfer_buffer_length = size;

        for (j = 0; j < npackets; ++j) {
            urb->iso_frame_desc[j].offset = j * psize;
            urb->iso_frame_desc[j].length = psize;
        }
    }

    return 0;
}

static int myuvc_vidioc_streamon(struct file *file, void *priv, enum v4l2_buf_type i)
{
    int ret;

    /* 1. Negotiate the streaming parameters with the camera */
    myuvc_try_streaming_params(&myuvc_params);
    myuvc_get_streaming_params(&myuvc_params);
    myuvc_set_streaming_params(&myuvc_params);
    myuvc_print_streaming_params(&myuvc_params);

    /* d. Select the altsetting of the VideoStreaming interface:
     * d.1 determine the required bandwidth from myuvc_params;
     * d.2 find the altsetting whose endpoint wMaxPacketSize satisfies that bandwidth.
     * Determined by hand here:
     *   bandwidth = myuvc_params.dwMaxPayloadTransferSize = 800
     *   "lsusb -v -d 0x1b3b:" shows
     *     wMaxPacketSize     0x0320 1x 800 bytes
     *     bAlternateSetting  5
     */
    usb_set_interface(myuvc_udev, myuvc_streaming_intf,
                      myuvc_streaming_bAlternateSetting);

    /* 2. Allocate and set up the URBs (USB Request Blocks) */
    myuvc_alloc_init_urbs();

    /* 3. Submit the URBs to receive data (added in the next step) */

    return 0;
}

9.3 Submit the URBs to the USB bus driver

static int myuvc_vidioc_streamon(struct file *file, void *priv, enum v4l2_buf_type i)
{
    int n, ret;

    /* 1. Negotiate the streaming parameters and select the altsetting */
    myuvc_try_streaming_params(&myuvc_params);
    myuvc_get_streaming_params(&myuvc_params);
    myuvc_set_streaming_params(&myuvc_params);
    myuvc_print_streaming_params(&myuvc_params);

    usb_set_interface(myuvc_udev, myuvc_streaming_intf,
                      myuvc_streaming_bAlternateSetting);

    /* 2. Allocate and set up the URBs (USB Request Blocks) */
    myuvc_alloc_init_urbs();

    /* 3. Submit the URBs to start receiving data */
    for (n = 0; n < MYUVC_URBS; ++n) {
        if ((ret = usb_submit_urb(myuvc_queue.urb[n], GFP_KERNEL)) < 0) {
            printk("Failed to submit URB %u (%d).\n", n, ret);
            myuvc_uninit_urbs();
            return ret;
        }
    }

    return 0;
}
