spcaview/spcaserver Source Code Analysis (1)

struct vdIn {
    int fd;
    char *videodevice;
    struct video_mmap vmmap;            // memory-mapped capture request
    struct video_capability videocap;
    int mmapsize;
    struct video_mbuf videombuf;
    struct video_picture videopict;
    struct video_window videowin;
    struct video_channel videochan;
    struct video_param videoparam;
    int cameratype;
    char *cameraname;
    char bridge[9];
    int sizenative;                     // available size in jpeg
    int sizeothers;                     // other palettes
    int palette;                        // available palette
    int norme;                          // set spca506 usb video grabber
    int channel;                        // set spca506 usb video grabber
    int grabMethod;
    unsigned char *pFramebuffer;        // pointer into the mmap'ed capture buffer
    unsigned char *ptframe[4];          // after convertframe, pointers to the data to be sent over the network
    int framelock[4];                   // in-use flags for ptframe[]
    pthread_mutex_t grabmutex;
    int framesizeIn;                    // determined during init
    volatile int frame_cour;            // index of the frame currently ready to be sent
    int bppIn;                          // depth
    int hdrwidth;
    int hdrheight;
    int formatIn;                       // palette
    int signalquit;
};
The values of the individual format (palette) constants are defined in /include/linux/videodev.h in the Linux kernel source.
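For orientation, the constants used below look roughly like this. The values of the standard palettes come from the V4L1 header; VIDEO_PALETTE_JPEG only exists in the spca5xx-patched videodev.h, so treat its exact value as an assumption:

#define VIDEO_PALETTE_RGB565    3       // 565 16-bit RGB
#define VIDEO_PALETTE_RGB24     4       // 24-bit RGB
#define VIDEO_PALETTE_RGB32     5       // 32-bit RGB
#define VIDEO_PALETTE_YUV420P  15       // YUV 4:2:0 planar
#define VIDEO_PALETTE_JPEG     21       // added by the spca5xx patch (assumed value)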

Starting from main(), we trace int format.

1.1 Initializing int format

int format = VIDEO_PALETTE_YUV420P;

1.2 Determining the requested format from the command-line arguments

if (strcmp (argv[i], "-f") == 0) {
    if (i + 1 >= argc) {
        printf ("No parameter specified with -f, aborting.\n");
        exit (1);
    }
    mode = strdup (argv[i + 1]);

    if (strncmp (mode, "r32", 3) == 0) {
        format = VIDEO_PALETTE_RGB32;
    } else if (strncmp (mode, "r24", 3) == 0) {
        format = VIDEO_PALETTE_RGB24;
    } else if (strncmp (mode, "r16", 3) == 0) {
        format = VIDEO_PALETTE_RGB565;
    } else if (strncmp (mode, "yuv", 3) == 0) {
        format = VIDEO_PALETTE_YUV420P;
    } else if (strncmp (mode, "jpg", 3) == 0) {
        format = VIDEO_PALETTE_JPEG;
    } else {
        format = VIDEO_PALETTE_YUV420P;
    }
}

2. Initializing the device

init_videoIn (&videoIn, videodevice, width, height, format, grabmethod)

2.1

vd->formatIn = format;
vd->bppIn = GetDepth (vd->formatIn);            // derive vd->bppIn (the depth) from the format
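GetDepth is not listed here; a minimal sketch of what it presumably does, with the per-palette bit depths being assumptions based on the usual V4L1 meanings:

// Sketch: map a VIDEO_PALETTE_* value to its bits per pixel (assumed values).
static int GetDepth (int format)
{
    switch (format) {
    case VIDEO_PALETTE_JPEG:    return 8;   // compressed stream, nominal depth
    case VIDEO_PALETTE_YUV420P: return 12;  // 1.5 bytes per pixel
    case VIDEO_PALETTE_RGB565:  return 16;
    case VIDEO_PALETTE_RGB24:   return 24;
    case VIDEO_PALETTE_RGB32:   return 32;
    default:                    return 0;   // unknown palette
    }
}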

2.2 Entering init_v4l (vd)

2.2.1

probePalette (vd)                               // writes the five palette types into the video_picture structure one by one; after each set, a get is performed and the returned palette is compared with the requested one. If they match, that palette is usable.

probeSize (vd)                                 // same idea: writes the seven width*height combinations into video_window and checks which sizes are usable. A sketch of this set-then-get probing follows.
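A minimal sketch of the probing idea for palettes, assuming the V4L1 VIDIOCSPICT/VIDIOCGPICT ioctls (probeSize would do the same with VIDIOCSWIN/VIDIOCGWIN):

#include <sys/ioctl.h>
#include <linux/videodev.h>

// Sketch: test whether one palette is accepted by the driver.
// Set the candidate palette, read the picture parameters back,
// and treat the palette as usable only if it survived unchanged.
static int probe_one_palette (struct vdIn *vd, int palette, int depth)
{
    struct video_picture pict = vd->videopict;

    pict.palette = palette;
    pict.depth = depth;
    if (ioctl (vd->fd, VIDIOCSPICT, &pict) < 0)
        return 0;                       // driver rejected the set
    if (ioctl (vd->fd, VIDIOCGPICT, &pict) < 0)
        return 0;
    return (pict.palette == palette);   // unchanged means available
}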

check_palettesize (vd)                   // first converts the size: int needsize = convertsize(vd->hdrwidth, vd->hdrheight). convertsize() maps w*h onto one of seven size classes: VGA, PAL, SIF, CIF, QPAL, QSIF, QCIF. These seven macros are defined in spcav4l.h:

#define MASQ 1
#define VGA MASQ
#define PAL (MASQ << 1)
#define SIF (MASQ << 2)
#define CIF (MASQ << 3)
#define QPAL (MASQ << 4)
#define QSIF (MASQ << 5)
#define QCIF (MASQ << 6)

needsize should therefore take one of these seven values.
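A sketch of how convertsize might perform this classification. The exact width/height pairs associated with each name are assumptions here; the authoritative table is in spcav4l.c:

// Sketch: classify a requested resolution into one of the seven size masks.
// The width/height pairs are assumptions, not copied from spcav4l.c.
static int convertsize (int width, int height)
{
    if (width == 640 && height == 480) return VGA;
    if (width == 384 && height == 288) return PAL;
    if (width == 352 && height == 288) return SIF;
    if (width == 320 && height == 240) return CIF;
    if (width == 192 && height == 144) return QPAL;
    if (width == 176 && height == 144) return QSIF;
    if (width == 160 && height == 120) return QCIF;
    return 0;                              // unsupported size
}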

Next, int needpalette = 0; needpalette = checkpalette(vd). Inside checkpalette(vd), convertpalette(vd->formatIn) checks whether the palette is available: depending on vd->formatIn it maps to one of jpeg, yuv420p, rgb24, rgb565 or rgb32:

#define JPG MASQ                // JPEG
#define YUV420P (MASQ << 1)
#define RGB24 (MASQ << 2)
#define RGB565 (MASQ << 3)
#define RGB32 (MASQ << 4)
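convertpalette presumably just translates the V4L1 palette constant into one of these masks; a minimal sketch under that assumption:

// Sketch: translate formatIn (a VIDEO_PALETTE_* value) into the local bitmask.
static int convertpalette (int palette)
{
    switch (palette) {
    case VIDEO_PALETTE_JPEG:    return JPG;
    case VIDEO_PALETTE_YUV420P: return YUV420P;
    case VIDEO_PALETTE_RGB24:   return RGB24;
    case VIDEO_PALETTE_RGB565:  return RGB565;
    case VIDEO_PALETTE_RGB32:   return RGB32;
    default:                    return 0;   // unsupported palette
    }
}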

Based on the value of needpalette, check whether it is actually available. The result is obtained with palette = paletteconvert(needpalette); if (palette) is non-zero, it is written into the mmap request:

      vd->vmmap.height = vd->hdrheight;
      vd->vmmap.width = vd->hdrwidth;
      vd->vmmap.format = palette;

and a VIDIOCMCAPTURE is issued to test whether capture really works with these settings. If it does, vd->formatIn = palette.

Based on needsize and vd->sizeothers, check whether other palette/size combinations are also supported: the test is whether the requested palette and size are available, otherwise the next available palette and size are returned. The palette is chosen in the preference order jpeg, yuv420p, rgb24, rgb565, rgb32.
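A sketch of that preference-order fallback, assuming the set of usable palettes is kept as a bitmask of the JPG/YUV420P/RGB24/RGB565/RGB32 flags (the function name is illustrative):

// Sketch: pick the best available palette in the order
// jpeg > yuv420p > rgb24 > rgb565 > rgb32.
static int select_palette (int available_mask)
{
    if (available_mask & JPG)     return VIDEO_PALETTE_JPEG;
    if (available_mask & YUV420P) return VIDEO_PALETTE_YUV420P;
    if (available_mask & RGB24)   return VIDEO_PALETTE_RGB24;
    if (available_mask & RGB565)  return VIDEO_PALETTE_RGB565;
    if (available_mask & RGB32)   return VIDEO_PALETTE_RGB32;
    return 0;                     // nothing usable
}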

2.2.2

vd->videopict.palette = vd->formatIn;            // fill in the video_picture structure

vd->videopict.depth = GetDepth (vd->formatIn);
vd->bppIn = GetDepth (vd->formatIn);

2.2.3

vd->framesizeIn = (vd->hdrwidth * vd->hdrheight * vd->bppIn) >> 3;       // size in bytes of one input frame
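For example, assuming 640x480 YUV420P with bppIn = 12, framesizeIn = (640 * 480 * 12) >> 3 = 460800 bytes, i.e. 1.5 bytes per pixel.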

erreur = SetVideoPict (vd);
erreur = GetVideoPict (vd);
if (vd->formatIn != vd->videopict.palette ||
    vd->bppIn != vd->videopict.depth)
    exit_fatal ("couldn't set video palette Abort !");
if (erreur < 0)
    exit_fatal ("couldn't set video palette Abort !");

2.2.4

Capture of two frames of video data is started; the pointer to the mmap'ed data is pFramebuffer.
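A sketch of that initial setup, assuming the standard V4L1 mmap flow (VIDIOCGMBUF, mmap, then VIDIOCMCAPTURE to queue the first two buffers); error handling is reduced to the minimum:

#include <sys/ioctl.h>
#include <sys/mman.h>
#include <linux/videodev.h>

// Sketch: map the driver's capture buffer and queue two frames.
static int start_capture (struct vdIn *vd)
{
    if (ioctl (vd->fd, VIDIOCGMBUF, &vd->videombuf) < 0)
        return -1;
    vd->mmapsize = vd->videombuf.size;
    vd->pFramebuffer = mmap (NULL, vd->mmapsize, PROT_READ | PROT_WRITE,
                             MAP_SHARED, vd->fd, 0);
    if (vd->pFramebuffer == MAP_FAILED)
        return -1;

    vd->vmmap.width = vd->hdrwidth;
    vd->vmmap.height = vd->hdrheight;
    vd->vmmap.format = vd->formatIn;
    for (vd->vmmap.frame = 0; vd->vmmap.frame < 2; vd->vmmap.frame++)
        if (ioctl (vd->fd, VIDIOCMCAPTURE, &vd->vmmap) < 0)
            return -1;
    vd->vmmap.frame = 0;                   // grab() will wait on buffer 0 first
    return 0;
}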

3. Start the video capture thread with pthread_create (&w1, NULL, (void *) grab, NULL) and enter the grab function

3.1 vd->vmmap.format = vd->formatIn;

VIDIOCSYNC: wait for the frame whose capture was started during init to complete
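A sketch of one iteration of that loop, assuming two buffers were queued during init as in 2.2.4 (locking and error details omitted):

// Sketch of one grab-loop iteration.
int f = vd->vmmap.frame;

if (ioctl (vd->fd, VIDIOCSYNC, &f) < 0)              // wait for buffer f to finish
    exit_fatal ("VIDIOCSYNC failed");

// ... compress vd->pFramebuffer + vd->videombuf.offsets[f] here ...

if (ioctl (vd->fd, VIDIOCMCAPTURE, &vd->vmmap) < 0)  // re-queue buffer f
    exit_fatal ("VIDIOCMCAPTURE failed");

vd->vmmap.frame = (f + 1) % 2;                       // next iteration waits on the other buffer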

3.2 Once the capture is complete, the frame is JPEG-compressed; there is quite a lot going on here

jpegsize = convertframe (vd->ptframe[vd->frame_cour] + sizeof(struct frame_t),
                         vd->pFramebuffer + vd->videombuf.offsets[vd->vmmap.frame],
                         vd->hdrwidth, vd->hdrheight, vd->formatIn, qualite);

Inside int convertframe (unsigned char *dst, unsigned char *src, int width, int height, int formatIn, int qualite):

switch (formatIn) selects the compression path for each palette and returns the size of the compressed data. If the palette is VIDEO_PALETTE_JPEG, the hardware has already compressed the captured data, so no further software compression is needed. Every palette other than VIDEO_PALETTE_JPEG has to be encoded with UINT32 encode_image (UINT8 *input_ptr, UINT8 *output_ptr, UINT32 quality_factor, UINT32 image_format, UINT32 image_width, UINT32 image_height), where image_format is the input palette. The compression uses Huffman coding; the details are in huffman.c and encode.c.
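A sketch of that dispatch, assuming the encode_image signature quoted above; the get_jpegsize helper in the JPEG branch is hypothetical and only stands for "determine how many bytes the hardware actually delivered":

#include <string.h>

// Sketch: pick the compression path based on the input palette.
int convertframe (unsigned char *dst, unsigned char *src,
                  int width, int height, int formatIn, int qualite)
{
    int jpegsize = 0;

    switch (formatIn) {
    case VIDEO_PALETTE_JPEG:
        // The hardware already delivered a JPEG stream: just copy it.
        jpegsize = get_jpegsize (src, width * height);   // hypothetical helper
        memcpy (dst, src, jpegsize);
        break;
    case VIDEO_PALETTE_YUV420P:
    case VIDEO_PALETTE_RGB24:
    case VIDEO_PALETTE_RGB565:
    case VIDEO_PALETTE_RGB32:
        // Software JPEG encoding (huffman.c / encode.c).
        jpegsize = encode_image (src, dst, qualite, formatIn, width, height);
        break;
    default:
        break;
    }
    return jpegsize;
}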

4. Start the remote network transmission thread: at the point where accept() blocks, a new thread is created with pthread_create (&server_th, NULL, (void *) service, &new_sock)

The service thread first reads the client's request structure from the connection: read (sock, (unsigned char *) &message, sizeof (struct client_t)). The contents of message determine how the data is transmitted. It then enters a send loop and, based on framelock and frame_cour, sends whichever compressed frame is not currently in use.
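A sketch of that send loop. It assumes struct frame_t carries the size of the compressed payload in a field called size (the field name is a guess) and leaves out pacing and the per-client options read from message:

#include <unistd.h>

// Sketch: lock the most recently finished frame, push it to the client, release it.
for (;;) {
    int f = vd->frame_cour;                 // frame the grab thread finished last

    if (vd->framelock[f]) {                 // buffer busy, try again shortly
        usleep (1000);
        continue;
    }

    pthread_mutex_lock (&vd->grabmutex);
    vd->framelock[f]++;                     // mark the buffer as in use
    pthread_mutex_unlock (&vd->grabmutex);

    struct frame_t *hdr = (struct frame_t *) vd->ptframe[f];
    int total = sizeof (struct frame_t) + hdr->size;   // header + JPEG payload (field name assumed)

    int err = write (sock, vd->ptframe[f], total);

    pthread_mutex_lock (&vd->grabmutex);
    vd->framelock[f]--;                     // release the buffer
    pthread_mutex_unlock (&vd->grabmutex);

    if (err < 0)
        break;                              // client went away
}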

 

 