DM8148 ARM-side capture and display program: design notes


/*
 * WARNING: This is just a test application. Don't fill bug reports, flame me,
 * curse me on 7 generations :-).
 */
extern "C"
{
#include <stdio.h>
#include <string.h>
#include <fcntl.h>
#include <unistd.h>
#include <stdint.h>
#include <stdlib.h>
#include <errno.h>
#include <getopt.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <sys/select.h>
#include <sys/time.h>
    
#include <linux/fb.h>
    
#include <linux/videodev.h>


}

#include "opencv/highgui.h"
#include "opencv/cv.h"
#include "opencv/cxcore.h"
#include "opencv/cvaux.h"


#include "video_cap.h"

#include "displayFb.h"

int main( int argc, char** argv )
{
    // declare the IplImage pointers
    IplImage* pImg = NULL;
    IplImage* pCannyImg = NULL;
    IplImage* pGRAYImg = NULL;

    VideoDevice * m_pVideoDev = new VideoDevice;

   

    
    // load the test image; if it loads, run Canny edge detection on a grayscale copy
   // if( argc == 2 )
    {
        if((pImg = cvLoadImage("lena.bmp")) != NULL )
        {
            unsigned char * image = (unsigned char *)pImg->imageData;
            // allocate space for the Canny edge image and the grayscale copy
            pCannyImg = cvCreateImage(cvGetSize(pImg),
                                      IPL_DEPTH_8U,
                                      1);
            pGRAYImg = cvCreateImage(cvGetSize(pImg),
                                      IPL_DEPTH_8U,
                                      1);
            // Canny edge detection (cvLoadImage returns BGR data, so convert with CV_BGR2GRAY)
            cvCvtColor(pImg, pGRAYImg, CV_BGR2GRAY);
            cvCanny(pGRAYImg, pCannyImg, 50, 150, 3);

            // whiten the top-left quadrant of the source image, then save both results
            for(int h = 0; h < (pImg->height-1)/2; h++)
                for(int w = 0; w < (pImg->width-1)/2; w++)
                {
                    image[h*pImg->widthStep + 3*w + 0] = 255;
                    image[h*pImg->widthStep + 3*w + 1] = 255;
                    image[h*pImg->widthStep + 3*w + 2] = 255;
                }
            cvSaveImage("dd.bmp", pImg);
            cvSaveImage("cc.bmp", pCannyImg);
          
            
            // create display windows (disabled on the headless target)
            // cvNamedWindow("src", 1);
            // cvNamedWindow("canny", 1);

            // show the images
            // cvShowImage( "src", pImg );
            // cvShowImage( "canny", pCannyImg );

            // cvWaitKey(0); // wait for a key press

            // destroy the windows
            // cvDestroyWindow( "src" );
            // cvDestroyWindow( "canny" );

            // release the images
            cvReleaseImage( &pImg );
            cvReleaseImage( &pCannyImg );
            cvReleaseImage( &pGRAYImg );

            goto end;
        }
        else
        {
            goto end;
        }
    }
    
   end:
    delete m_pVideoDev;   // allocated with plain new, so plain delete (not delete[])
    return 0;
}

The above is the main program. Note that the VideoDevice object is created there but not exercised yet; the capture class itself follows below.

USB capture part:

Header file (video_cap.h):

#ifndef VIDEO_CAP_H
#define VIDEO_CAP_H
extern "C"
{
#include <stdio.h>
#include <string.h>
#include <stdlib.h>
#include <stdint.h>
#include <errno.h>
#include <fcntl.h>
#include <unistd.h>
#include <getopt.h>

#include <sys/ioctl.h>
#include <sys/mman.h>
#include <sys/select.h>
#include <sys/time.h>

#include <linux/fb.h>
#include <linux/videodev.h>
}

#include <asm/types.h>
#include <linux/videodev2.h>


#define NUM_OF_BUF  2
#define WIDTH       320
#define HEIGHT      240
#define IMAGE_SIZE_OF_CAM  (WIDTH * HEIGHT *2)

#define CLEAR(x) memset(&(x), 0, sizeof(x))

static const signed short redAdjust[] = {
		-161,-160,-159,-158,-157,-156,-155,-153,
		-152,-151,-150,-149,-148,-147,-145,-144,
		-143,-142,-141,-140,-139,-137,-136,-135,
		-134,-133,-132,-131,-129,-128,-127,-126,
		-125,-124,-123,-122,-120,-119,-118,-117,
		-116,-115,-114,-112,-111,-110,-109,-108,
		-107,-106,-104,-103,-102,-101,-100, -99,
		-98, -96, -95, -94, -93, -92, -91, -90,
		-88, -87, -86, -85, -84, -83, -82, -80,
		-79, -78, -77, -76, -75, -74, -72, -71,
		-70, -69, -68, -67, -66, -65, -63, -62,
		-61, -60, -59, -58, -57, -55, -54, -53,
		-52, -51, -50, -49, -47, -46, -45, -44,
		-43, -42, -41, -39, -38, -37, -36, -35,
		-34, -33, -31, -30, -29, -28, -27, -26,
		-25, -23, -22, -21, -20, -19, -18, -17,
		-16, -14, -13, -12, -11, -10,  -9,  -8,
		-6,  -5,  -4,  -3,  -2,  -1,   0,   1,
		2,   3,   4,   5,   6,   7,   9,  10,
		11,  12,  13,  14,  15,  17,  18,  19,
		20,  21,  22,  23,  25,  26,  27,  28,
		29,  30,  31,  33,  34,  35,  36,  37,
		38,  39,  40,  42,  43,  44,  45,  46,
		47,  48,  50,  51,  52,  53,  54,  55,
		56,  58,  59,  60,  61,  62,  63,  64,
		66,  67,  68,  69,  70,  71,  72,  74,
		75,  76,  77,  78,  79,  80,  82,  83,
		84,  85,  86,  87,  88,  90,  91,  92,
		93,  94,  95,  96,  97,  99, 100, 101,
		102, 103, 104, 105, 107, 108, 109, 110,
		111, 112, 113, 115, 116, 117, 118, 119,
		120, 121, 123, 124, 125, 126, 127, 128,
};

static const signed short greenAdjust1[] = {
		34,  34,  33,  33,  32,  32,  32,  31,
		31,  30,  30,  30,  29,  29,  28,  28,
		28,  27,  27,  27,  26,  26,  25,  25,
		25,  24,  24,  23,  23,  23,  22,  22,
		21,  21,  21,  20,  20,  19,  19,  19,
		18,  18,  17,  17,  17,  16,  16,  15,
		15,  15,  14,  14,  13,  13,  13,  12,
		12,  12,  11,  11,  10,  10,  10,   9,
		9,   8,   8,   8,   7,   7,   6,   6,
		6,   5,   5,   4,   4,   4,   3,   3,
		2,   2,   2,   1,   1,   0,   0,   0,
		0,   0,  -1,  -1,  -1,  -2,  -2,  -2,
		-3,  -3,  -4,  -4,  -4,  -5,  -5,  -6,
		-6,  -6,  -7,  -7,  -8,  -8,  -8,  -9,
		-9, -10, -10, -10, -11, -11, -12, -12,
		-12, -13, -13, -14, -14, -14, -15, -15,
		-16, -16, -16, -17, -17, -17, -18, -18,
		-19, -19, -19, -20, -20, -21, -21, -21,
		-22, -22, -23, -23, -23, -24, -24, -25,
		-25, -25, -26, -26, -27, -27, -27, -28,
		-28, -29, -29, -29, -30, -30, -30, -31,
		-31, -32, -32, -32, -33, -33, -34, -34,
		-34, -35, -35, -36, -36, -36, -37, -37,
		-38, -38, -38, -39, -39, -40, -40, -40,
		-41, -41, -42, -42, -42, -43, -43, -44,
		-44, -44, -45, -45, -45, -46, -46, -47,
		-47, -47, -48, -48, -49, -49, -49, -50,
		-50, -51, -51, -51, -52, -52, -53, -53,
		-53, -54, -54, -55, -55, -55, -56, -56,
		-57, -57, -57, -58, -58, -59, -59, -59,
		-60, -60, -60, -61, -61, -62, -62, -62,
		-63, -63, -64, -64, -64, -65, -65, -66,
};

static const signed short greenAdjust2[] = {
		74,  73,  73,  72,  71,  71,  70,  70,
		69,  69,  68,  67,  67,  66,  66,  65,
		65,  64,  63,  63,  62,  62,  61,  60,
		60,  59,  59,  58,  58,  57,  56,  56,
		55,  55,  54,  53,  53,  52,  52,  51,
		51,  50,  49,  49,  48,  48,  47,  47,
		46,  45,  45,  44,  44,  43,  42,  42,
		41,  41,  40,  40,  39,  38,  38,  37,
		37,  36,  35,  35,  34,  34,  33,  33,
		32,  31,  31,  30,  30,  29,  29,  28,
		27,  27,  26,  26,  25,  24,  24,  23,
		23,  22,  22,  21,  20,  20,  19,  19,
		18,  17,  17,  16,  16,  15,  15,  14,
		13,  13,  12,  12,  11,  11,  10,   9,
		9,   8,   8,   7,   6,   6,   5,   5,
		4,   4,   3,   2,   2,   1,   1,   0,
		0,   0,  -1,  -1,  -2,  -2,  -3,  -4,
		-4,  -5,  -5,  -6,  -6,  -7,  -8,  -8,
		-9,  -9, -10, -11, -11, -12, -12, -13,
		-13, -14, -15, -15, -16, -16, -17, -17,
		-18, -19, -19, -20, -20, -21, -22, -22,
		-23, -23, -24, -24, -25, -26, -26, -27,
		-27, -28, -29, -29, -30, -30, -31, -31,
		-32, -33, -33, -34, -34, -35, -35, -36,
		-37, -37, -38, -38, -39, -40, -40, -41,
		-41, -42, -42, -43, -44, -44, -45, -45,
		-46, -47, -47, -48, -48, -49, -49, -50,
		-51, -51, -52, -52, -53, -53, -54, -55,
		-55, -56, -56, -57, -58, -58, -59, -59,
		-60, -60, -61, -62, -62, -63, -63, -64,
		-65, -65, -66, -66, -67, -67, -68, -69,
		-69, -70, -70, -71, -71, -72, -73, -73,
};

static const signed short blueAdjust[] = {
		-276,-274,-272,-270,-267,-265,-263,-261,
		-259,-257,-255,-253,-251,-249,-247,-245,
		-243,-241,-239,-237,-235,-233,-231,-229,
		-227,-225,-223,-221,-219,-217,-215,-213,
		-211,-209,-207,-204,-202,-200,-198,-196,
		-194,-192,-190,-188,-186,-184,-182,-180,
		-178,-176,-174,-172,-170,-168,-166,-164,
		-162,-160,-158,-156,-154,-152,-150,-148,
		-146,-144,-141,-139,-137,-135,-133,-131,
		-129,-127,-125,-123,-121,-119,-117,-115,
		-113,-111,-109,-107,-105,-103,-101, -99,
		-97, -95, -93, -91, -89, -87, -85, -83,
		-81, -78, -76, -74, -72, -70, -68, -66,
		-64, -62, -60, -58, -56, -54, -52, -50,
		-48, -46, -44, -42, -40, -38, -36, -34,
		-32, -30, -28, -26, -24, -22, -20, -18,
		-16, -13, -11,  -9,  -7,  -5,  -3,  -1,
		0,   2,   4,   6,   8,  10,  12,  14,
		16,  18,  20,  22,  24,  26,  28,  30,
		32,  34,  36,  38,  40,  42,  44,  46,
		49,  51,  53,  55,  57,  59,  61,  63,
		65,  67,  69,  71,  73,  75,  77,  79,
		81,  83,  85,  87,  89,  91,  93,  95,
		97,  99, 101, 103, 105, 107, 109, 112,
		114, 116, 118, 120, 122, 124, 126, 128,
		130, 132, 134, 136, 138, 140, 142, 144,
		146, 148, 150, 152, 154, 156, 158, 160,
		162, 164, 166, 168, 170, 172, 175, 177,
		179, 181, 183, 185, 187, 189, 191, 193,
		195, 197, 199, 201, 203, 205, 207, 209,
		211, 213, 215, 217, 219, 221, 223, 225,
		227, 229, 231, 233, 235, 238, 240, 242,
};

#define SIZE 256
#define XSIZE 320
#define YSIZE 240
#define IMGSIZE (XSIZE * YSIZE)

typedef struct RGB
{
	unsigned char r;
	unsigned char g;
	unsigned char b;
} RGB;



class VideoDevice 
{
   
public:
    VideoDevice();
    //VideoDevice();
    int open_device(char * dev_name);
    int close_device();
    int init_device();
    int start_capturing();
    int stop_capturing();
    int uninit_device();
    int get_frame(void **, size_t*);
    int unget_frame();
public:

    inline void yuv_to_rgb24_N(unsigned char y, unsigned char u, unsigned char v, unsigned char *rgb);
    void YUVToRGB24_N(unsigned char *buf, unsigned char *rgb, int width, int height);
    void YUVToRGB24_8_N(unsigned char *buf, unsigned char *rgb, int width, int height);
    void rgb24_to_rgb565_N(unsigned char  *rgb24, unsigned char *rgb16, int width, int height);
    void rgb24_to_rgb565_6_N(unsigned char  *rgb24, unsigned char *rgb16, int width, int height);
    int convert_yuv_to_rgb_pixel_N(int y, int u, int v);
    int convert_yuv_to_rgb_buffer_N(unsigned char *yuv, unsigned char *rgb,unsigned int width, unsigned int height);
    int yuv_to_rgb16_N(unsigned char y,unsigned char u,unsigned char v,unsigned char * rgb);
    int convert_m_N(unsigned char * buf,unsigned char * rgb,int width,int height);
    inline void  yuv2rgb565_N(const int y, const int u, const int v, unsigned short &rgb565);
    void  ConvertYUYVtoRGB565_N(const void *yuyv_data, void *rgb565_data,const unsigned int w, const unsigned int h);
    void table_init_N();
    inline void calc_lum_N();


private:
    int init_mmap();

    struct buffer
    {
        void * start;
        size_t length;
    };
     
    int fd;
    buffer* buffers;
    unsigned int n_buffers;
    int index;
};

#endif // VIDEO_CAP_H
Source file (video_cap.cpp):

#include "video_cap.h"


VideoDevice::VideoDevice()
{
   // this->dev_name = dev_name;
    this->fd = -1;
    this->buffers = NULL;
    this->n_buffers = 0;
    this->index = -1;
}

int VideoDevice::open_device(char * dev_name)
{
    fd = open(dev_name, O_RDWR/*|O_NONBLOCK*/, 0);
    // fd = open(dev_name.toStdString().c_str(), O_RDWR|O_NONBLOCK, 0);

    if(-1 == fd)
    {
        //emit display_error(tr("open: %1").arg(QString(strerror(errno))));
        return -1;
    }
    return 0;
}

int VideoDevice::close_device()
{
    if(-1 == close(fd))
    {
       // emit display_error(tr("close: %1").arg(QString(strerror(errno))));
        return -1;
    }
    return 0;
}

int VideoDevice::init_device()
{
    v4l2_capability cap;
    v4l2_cropcap cropcap;
    v4l2_crop crop;
    v4l2_format fmt;

    if(-1 == ioctl(fd, VIDIOC_QUERYCAP, &cap))
    {
        if(EINVAL == errno)
        {
           // emit display_error(tr("%1 is no V4l2 device").arg(dev_name));
        }
        else
        {
            //emit display_error(tr("VIDIOC_QUERYCAP: %1").arg(QString(strerror(errno))));
        }
        return -1;
    }

    if(!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE))
    {
       // emit display_error(tr("%1 is no video capture device").arg(dev_name));
        return -1;
    }

    if(!(cap.capabilities & V4L2_CAP_STREAMING))
    {
        //emit display_error(tr("%1 does not support streaming i/o").arg(dev_name));
        return -1;
    }

    CLEAR(fmt);

    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    fmt.fmt.pix.width = WIDTH;
    fmt.fmt.pix.height = HEIGHT;
    fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;
    fmt.fmt.pix.field = V4L2_FIELD_INTERLACED;
#if 1
    if(-1 == ioctl(fd, VIDIOC_S_FMT, &fmt))
    {
        //emit display_error(tr("VIDIOC_S_FMT").arg(QString(strerror(errno))));
        return -1;
    }
#endif

    if(-1 == init_mmap())
    {
        return -1;
    }

    return 0;
}


int VideoDevice::init_mmap()
{
    v4l2_requestbuffers req;
    CLEAR(req);
    //NUM_OF_BUF
    //req.count = 4;
    req.count = NUM_OF_BUF;
    req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    req.memory = V4L2_MEMORY_MMAP;

    if(-1 == ioctl(fd, VIDIOC_REQBUFS, &req))
    {
        if(EINVAL == errno)
        {
            //emit display_error(tr("%1 does not support memory mapping").arg(dev_name));
            return -1;
        }
        else
        {
           // emit display_error(tr("VIDIOC_REQBUFS %1").arg(QString(strerror(errno))));
            return -1;
        }
    }

    if(req.count < 2)
    {
        //emit display_error(tr("Insufficient buffer memory on %1").arg(dev_name));
        return -1;
    }

    buffers = (buffer*)calloc(req.count, sizeof(*buffers));

    if(!buffers)
    {
        //emit display_error(tr("out of memory"));
        return -1;
    }

    for(n_buffers = 0; n_buffers < req.count; ++n_buffers)
    {
        v4l2_buffer buf;
        CLEAR(buf);

        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        buf.index = n_buffers;

        if(-1 == ioctl(fd, VIDIOC_QUERYBUF, &buf))
        {
            //emit display_error(tr("VIDIOC_QUERYBUF: %1").arg(QString(strerror(errno))));
            return -1;
        }

        buffers[n_buffers].length = buf.length;
        buffers[n_buffers].start =
                mmap(NULL, // start anywhere
                     buf.length,
                     PROT_READ | PROT_WRITE,
                     MAP_SHARED,
                     fd, buf.m.offset);

        if(MAP_FAILED == buffers[n_buffers].start)
        {
            //emit display_error(tr("mmap %1").arg(QString(strerror(errno))));
            return -1;
        }
    }
    return 0;

}


int VideoDevice::start_capturing()
{
    unsigned int i;
    for(i = 0; i < n_buffers; ++i)
    {
        v4l2_buffer buf;
        CLEAR(buf);

        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory =V4L2_MEMORY_MMAP;
        buf.index = i;
        //        fprintf(stderr, "n_buffers: %d\n", i);

        if(-1 == ioctl(fd, VIDIOC_QBUF, &buf))
        {
            //emit display_error(tr("VIDIOC_QBUF: %1").arg(QString(strerror(errno))));
            return -1;
        }
    }

    v4l2_buf_type type;
    type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

    if(-1 == ioctl(fd, VIDIOC_STREAMON, &type))
    {
        //emit display_error(tr("VIDIOC_STREAMON: %1").arg(QString(strerror(errno))));
        return -1;
    }
    return 0;
}


int VideoDevice::stop_capturing()
{
    v4l2_buf_type type;
    type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

    if(-1 == ioctl(fd, VIDIOC_STREAMOFF, &type))
    {
        //emit display_error(tr("VIDIOC_STREAMOFF: %1").arg(QString(strerror(errno))));
        return -1;
    }
    return 0;
}

int VideoDevice::uninit_device()
{
    unsigned int i;
    for(i = 0; i < n_buffers; ++i)
    {
        if(-1 == munmap(buffers[i].start, buffers[i].length))
        {
           // emit display_error(tr("munmap: %1").arg(QString(strerror(errno))));
            return -1;
        }

    }
    free(buffers);
    return 0;
}

int VideoDevice::get_frame(void **frame_buf, size_t* len)
{
    v4l2_buffer queue_buf;
    CLEAR(queue_buf);

    queue_buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    queue_buf.memory = V4L2_MEMORY_MMAP;

    if(-1 == ioctl(fd, VIDIOC_DQBUF, &queue_buf))
    {
        switch(errno)
        {
        case EAGAIN:
            //            perror("dqbuf");
            return -1;
        case EIO:
            return -1 ;
        default:
           // emit display_error(tr("VIDIOC_DQBUF: %1").arg(QString(strerror(errno))));
            return -1;
        }
    }

    *frame_buf = buffers[queue_buf.index].start;
    *len = buffers[queue_buf.index].length;
    index = queue_buf.index;

    return 0;

}

int VideoDevice::unget_frame()
{
    if(index != -1)
    {
        v4l2_buffer queue_buf;
        CLEAR(queue_buf);

        queue_buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        queue_buf.memory = V4L2_MEMORY_MMAP;
        queue_buf.index = index;

        if(-1 == ioctl(fd, VIDIOC_QBUF, &queue_buf))
        {
            //emit display_error(tr("VIDIOC_QBUF: %1").arg(QString(strerror(errno))));
            return -1;
        }
        return 0;
    }
    return -1;
}




RGB in[IMGSIZE];                 // raw input pixels to convert
unsigned char out[IMGSIZE * 3];  // conversion result (planar Y, U, V)

unsigned short Y_R[SIZE], Y_G[SIZE], Y_B[SIZE];
unsigned short U_R[SIZE], U_G[SIZE], U_B[SIZE];
unsigned short V_R[SIZE], V_G[SIZE], V_B[SIZE];  // lookup tables

    inline void VideoDevice::yuv_to_rgb24_N(unsigned char y, unsigned char u, unsigned char v,  unsigned char *rgb) {

    register int r, g, b;


    r = (1192 * (y - 16) + 1634 * (v - 128)) >> 10;
    g = (1192 * (y - 16) - 833 * (v - 128) - 400 * (u - 128)) >> 10;
    b = (1192 * (y - 16) + 2066 * (u - 128)) >> 10;

    r = r > 255 ? 255 : r < 0 ? 0 : r;
    g = g > 255 ? 255 : g < 0 ? 0 : g;
    b = b > 255 ? 255 : b < 0 ? 0 : b;

    *rgb = r;
    rgb++;
    *rgb = g;
    rgb++;
    *rgb = b;



#if 0
    rgb16 = (int) (((r >> 3) << 11) | ((g >> 2) << 5) | ((b >> 3) << 0));

    *rgb = (unsigned char) (rgb16 & 0xFF);
    rgb++;
    *rgb = (unsigned char) ((rgb16 & 0xFF00) >> 8);
#endif
}

     void VideoDevice::YUVToRGB24_N(unsigned char *buf, unsigned char *rgb, int width, int height) {


    int  y = 0;
    int blocks;

    blocks = (width * height) * 2;
    int j = 0;

    for (y = 0; y < blocks; y += 4) {
        unsigned char Y1, Y2, U1, V1;

        Y1 = buf[y + 0];
        U1 = buf[y + 1];
        Y2 = buf[y + 2];
        V1 = buf[y + 3];


        yuv_to_rgb24_N(Y1, U1, V1, &rgb[j]);
        yuv_to_rgb24_N(Y2, U1, V1, &rgb[j + 3]);
        j +=6;
    }

}

     void VideoDevice::YUVToRGB24_8_N(unsigned char *buf, unsigned char *rgb, int width, int height) {


    int  y = 0;
    int blocks;

    blocks = (width * height) * 2;
    int j = 0;

    for (y = 0; y < blocks; y += 8) {
        unsigned char Y1, Y2, U1, V1;
        unsigned char Y1_1, Y2_1, U1_1, V1_1;

        Y1 = buf[y + 0];
        U1 = buf[y + 1];
        Y2 = buf[y + 2];
        V1 = buf[y + 3];

        // V1 = buf[y + 1];
        //U1 = buf[y + 3];

        Y1_1 = buf[y + 4];
        U1_1 = buf[y + 5];
        Y2_1 = buf[y + 6];
        V1_1 = buf[y + 7];

        // V1_1 = buf[y + 7];
        // U1_1 = buf[y + 5];




        yuv_to_rgb24_N(Y1, U1, V1, &rgb[j]);
        yuv_to_rgb24_N(Y2, U1, V1, &rgb[j + 3]);

        yuv_to_rgb24_N(Y1_1, U1_1, V1_1, &rgb[j + 6]);
        yuv_to_rgb24_N(Y2_1, U1_1, V1_1, &rgb[j + 9]);
        j +=12;
    }

}


     void VideoDevice::rgb24_to_rgb565_N(unsigned char  *rgb24, unsigned char *rgb16, int width, int height)
{
    int i = 0, j = 0;
    for (i = 0; i < width*height*3; i += 3)
    {
        rgb16[j] = rgb24[i] >> 3; // B
        rgb16[j] |= ((rgb24[i+1] & 0x1C) << 3); // G
        rgb16[j+1] = rgb24[i+2] & 0xF8; // R
        rgb16[j+1] |= (rgb24[i+1] >> 5); // G
        j += 2;
    }
}


     void VideoDevice::rgb24_to_rgb565_6_N(unsigned char  *rgb24, unsigned char *rgb16, int width, int height)
{
    int i = 0, j = 0;
    for (i = 0; i < width*height*3; i += 6)
    {
        rgb16[j] = rgb24[i] >> 3; // B
        rgb16[j] |= ((rgb24[i+1] & 0x1C) << 3); // G
        rgb16[j+1] = rgb24[i+2] & 0xF8; // R
        rgb16[j+1] |= (rgb24[i+1] >> 5); // G

        rgb16[j+2] = rgb24[i+3] >> 3; // B
        rgb16[j+2] |= ((rgb24[i+4] & 0x1C) << 3); // G
        rgb16[j+3] = rgb24[i+5] & 0xF8; // R
        rgb16[j+3] |= (rgb24[i+4] >> 5); // G

        j += 4;
    }
}

     int VideoDevice::convert_yuv_to_rgb_pixel_N(int y, int u, int v)
{
    unsigned int pixel32 = 0;
    unsigned char *pixel = (unsigned char *)&pixel32;
    int r, g, b;
    r = y + (1.370705 * (v-128));
    g = y - (0.698001 * (v-128)) - (0.337633 * (u-128));
    b = y + (1.732446 * (u-128));
    if(r > 255) r = 255;
    if(g > 255) g = 255;
    if(b > 255) b = 255;
    if(r < 0) r = 0;
    if(g < 0) g = 0;
    if(b < 0) b = 0;
    pixel[0] = r * 220 / 256;
    pixel[1] = g * 220 / 256;
    pixel[2] = b * 220 / 256;
    return pixel32;
}
/* YUV to RGB conversion */
     int VideoDevice::convert_yuv_to_rgb_buffer_N(unsigned char *yuv, unsigned char *rgb, unsigned int width, unsigned int height)
{
    unsigned int in, out = 0;
    unsigned int pixel_16;
    unsigned char pixel_24[3];
    unsigned int pixel32;
    int y0, u, y1, v;
    for(in = 0; in < width * height * 2; in += 4) {
        pixel_16 =
                yuv[in + 3] << 24 |
                yuv[in + 2] << 16 |
                yuv[in + 1] <<  8 |
                yuv[in + 0];
        y0 = (pixel_16 & 0x000000ff);
        u  = (pixel_16 & 0x0000ff00) >>  8;
        y1 = (pixel_16 & 0x00ff0000) >> 16;
        v  = (pixel_16 & 0xff000000) >> 24;
        pixel32 = convert_yuv_to_rgb_pixel_N(y0, u, v);
        pixel_24[0] = (pixel32 & 0x000000ff);
        pixel_24[1] = (pixel32 & 0x0000ff00) >> 8;
        pixel_24[2] = (pixel32 & 0x00ff0000) >> 16;
        rgb[out++] = pixel_24[0];
        rgb[out++] = pixel_24[1];
        rgb[out++] = pixel_24[2];
        pixel32 = convert_yuv_to_rgb_pixel_N(y1, u, v);
        pixel_24[0] = (pixel32 & 0x000000ff);
        pixel_24[1] = (pixel32 & 0x0000ff00) >> 8;
        pixel_24[2] = (pixel32 & 0x00ff0000) >> 16;
        rgb[out++] = pixel_24[0];
        rgb[out++] = pixel_24[1];
        rgb[out++] = pixel_24[2];
    }
    return 0;
}


     int VideoDevice::yuv_to_rgb16_N(unsigned char y,unsigned char u,unsigned char v,unsigned char * rgb)
{

    register int r,g,b;
    int rgb16;

    r = (1192 * (y - 16) + 1634 * (v - 128) ) >> 10;
    g = (1192 * (y - 16) - 833 * (v - 128) - 400 * (u -128) ) >> 10;
    b = (1192 * (y - 16) + 2066 * (u - 128) ) >> 10;

    r = r > 255 ? 255 : r < 0 ? 0 : r;
    g = g > 255 ? 255 : g < 0 ? 0 : g;
    b = b > 255 ? 255 : b < 0 ? 0 : b;

    rgb16 = (int)(((r >> 3)<<11) | ((g >> 2) << 5)| ((b >> 3) << 0));

    *rgb = (unsigned char)(rgb16 & 0xFF);
    rgb++;
    *rgb = (unsigned char)((rgb16 & 0xFF00) >> 8);
    return 0;

}
    int VideoDevice::convert_m_N(unsigned char * buf,unsigned char * rgb,int width,int height)
{
    int y=0;
    int blocks;

    blocks = (width * height) * 2;

    for (y = 0; y < blocks; y+=4) {
        unsigned char Y1, Y2, U, V;

        ///
        Y1 = buf[y + 0];
        U = buf[y + 1];
        Y2 = buf[y + 2];
        V = buf[y + 3];
        //

        yuv_to_rgb16_N(Y1, U, V, &rgb[y]);
        yuv_to_rgb16_N(Y2, U, V, &rgb[y + 2]);
        //yuv_to_rgb16(Y1, 0x80, 0x80, &rgb[y]);
        //yuv_to_rgb16(Y2, 0x80, 0x80, &rgb[y + 2]);
    }

    return 0;
}

/* YUV to RGB565 conversion using the adjustment tables */


    inline void  VideoDevice::yuv2rgb565_N(const int y, const int u, const int v, unsigned short &rgb565)
{
    int r, g, b;

    r = y + redAdjust[v];
    g = y + greenAdjust1[u] + greenAdjust2[v];
    b = y + blueAdjust[u];

#define CLAMP(x) if (x < 0) x = 0 ; else if (x > 255) x = 255
    CLAMP(r);
    CLAMP(g);
    CLAMP(b);
#undef CLAMP
    rgb565 = (unsigned short)(((r >> 3) << 11) | ((g >> 2) << 5) | (b >> 3));
}

    void  VideoDevice::ConvertYUYVtoRGB565_N(const void *yuyv_data, void *rgb565_data, const unsigned int w, const unsigned int h)
{

    const unsigned char *src = (unsigned char *)yuyv_data;
    unsigned short *dst = (unsigned short *)rgb565_data;

    for (unsigned int i = 0, j = 0; i < w * h * 2; i += 4, j += 2) {
        int y, u, v;
        unsigned short rgb565;

        y = src[i + 0];
        u = src[i + 1];
        v = src[i + 3];
        yuv2rgb565_N(y, u, v, rgb565);
        dst[j + 0] = rgb565;


        y = src[i + 2];
        yuv2rgb565_N(y, u, v, rgb565);
        dst[j + 1] = rgb565;

    }
}



    void VideoDevice::table_init_N()
{
    int i;

    for(i = 0; i < SIZE; i++)
    {
        Y_R[i] = (i * 1224) >> 12;  // lookup tables for the Y component
        Y_G[i] = (i * 2404) >> 12;
        Y_B[i] = (i * 467)  >> 12;

        U_R[i] = (i * 602)  >> 12;  // lookup tables for the U component
        U_G[i] = (i * 1183) >> 12;
        U_B[i] = (i * 1785) >> 12;

        V_R[i] = (i * 2519) >> 12;  // lookup tables for the V component
        V_G[i] = (i * 2109) >> 12;
        V_B[i] = (i * 409)  >> 12;
    }
}



   inline void VideoDevice::calc_lum_N()
{
    int i;

    for(i = 0; i < IMGSIZE; i += 2)  // process two pixels per iteration
    {
        out[i]               = Y_R[in[i].r] + Y_G[in[i].g] + Y_B[in[i].b]; // Y
        out[i + IMGSIZE]     = U_B[in[i].b] - U_R[in[i].r] - U_G[in[i].g]; // U
        out[i + 2 * IMGSIZE] = V_R[in[i].r] - V_G[in[i].g] - V_B[in[i].b]; // V

        out[i + 1]               = Y_R[in[i + 1].r] + Y_G[in[i + 1].g] + Y_B[in[i + 1].b]; // Y
        out[i + 1 + IMGSIZE]     = U_B[in[i + 1].b] - U_R[in[i + 1].r] - U_G[in[i + 1].g]; // U
        out[i + 1 + 2 * IMGSIZE] = V_R[in[i + 1].r] - V_G[in[i + 1].g] - V_B[in[i + 1].b]; // V
    }
}
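
The class above is only a capture wrapper; the post does not show how it is driven. Below is a minimal capture-loop sketch under stated assumptions: the /dev/video0 path, the 100-frame count and the capture_demo name are illustrative, not taken from the original code.

// Hypothetical capture-loop sketch: open the camera, stream WIDTH x HEIGHT YUYV
// frames, convert each one to RGB565 and hand it to the display code.
#include "video_cap.h"

int capture_demo()
{
    VideoDevice dev;

    if (dev.open_device((char *)"/dev/video0") < 0)        // device path is an assumption
        return -1;
    if (dev.init_device() < 0 || dev.start_capturing() < 0)
        return -1;

    static unsigned char rgb565[WIDTH * HEIGHT * 2];        // 2 bytes per RGB565 pixel

    for (int frame = 0; frame < 100; ++frame)               // grab a fixed number of frames
    {
        void  *yuyv = NULL;
        size_t len  = 0;

        if (dev.get_frame(&yuyv, &len) < 0)                 // dequeue one filled buffer
            continue;

        dev.ConvertYUYVtoRGB565_N(yuyv, rgb565, WIDTH, HEIGHT);

        // ... copy rgb565 into the framebuffer here (see the display sketch below) ...

        dev.unget_frame();                                   // requeue the buffer
    }

    dev.stop_capturing();
    dev.uninit_device();
    dev.close_device();
    return 0;
}

Since O_NONBLOCK is commented out in open_device(), get_frame() will block until a frame is ready, so the loop does not need an explicit select().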



Makefile:

#LIB = -L/usr/lib/mysql -lmysqlclient -L./ -lcfg
#INCLUDE = -I/usr/include/mysql 
#LIB = -L/home/omap/omap3530/dvsdk/dvsdk_3_00_02_44/linuxlibs/lib -ljpeg
#INCLUDE = -I/home/omap/omap3530/dvsdk/dvsdk_3_00_02_44/linuxlibs/include
#LIB = -L/home/omap/lib -ljpeg
#INCLUDE = -I/home/omap/work/opencv/build/include
#

ROOTDIR = .

KERNEL_DIR =/opt/DM8148/ezsdk/board-support/linux-2.6.37-psp04.04.00.01
#This is root directory where Source is available. Change this path
#to the one where source files are kept
INSTALL_DIR = ./

#this is the path to the executable where all the executables are kept
EXE_DIR = ./bin

#This is path to the Include files of Library
#LIB_INC = $(INSTALL_DIR)/User_Library

#This is the path the include folder of the kernel directory. The include
#folder should contain all the necessary header files
INCLUDE_DIR = $(KERNEL_DIR)/include -I$(KERNEL_DIR)/arch/arm/include/ -I$(KERNEL_DIR)/arch/arm/plat-omap/include/

#This is the prefix applied to the gcc when compiling applications and library
COMPILER_PREFIX = arm-none-linux-gnueabi
CROSS_COMPILE = $(COMPILER_PREFIX)-

#This is the toolchain base folder
#TOOLCHAIN_DIR = /opt/codesourcery/2009q1-203
TOOLCHAIN_DIR = /opt/DM8148/arm-2009q1

#add cflags
CPPFLAGS += -march=armv7-a -mcpu=cortex-a8 -mfpu=neon -mfloat-abi=softfp
CPPFLAGS += -I$(TOOLCHAIN_DIR)/$(COMPILER_PREFIX)/libc/usr/include -I$(KERNEL_DIR)

#Libc path (note: LDFLAGS is assigned again further down)
LDFLAGS = -isystem $(TOOLCHAIN_DIR)/$(COMPILER_PREFIX)/libc/lib

#This is the path to the Directory containing Library
LIB_DIR = $(TOOLCHAIN_DIR)/$(COMPILER_PREFIX)/libc/lib


CFLAGS = -g -Os \
	-march=armv7-a \
	-fno-strict-aliasing \
	-fno-common \
	-ffixed-r8 \
	-msoft-float \
	-I$(INCLUDE_DIR) \
	-pipe \
	-mno-thumb-interwork \
	-Wall \
	-Wstrict-prototypes \
	-fno-stack-protector \
	 -D__EXPORTED_HEADERS__

#LDFLAGS = -L/opt/codesourcery/arm-none-linux-gnueabi/libc/lib
LDFLAGS = -L/opt/DM8148/arm-2009q1/arm-none-linux-gnueabi/libc/lib

LIBS := -lpthread

#
# Platform specific options
#
CFLAGS += -DCONFIG_TI81XX




#$(CROSS_COMPILE)gcc $(CFLAGS) $(LDFLAGS) $(LIBS) -o $@ $(addsuffix .c,${@F})



#CC =/opt/DM8148/arm-2009q1/bin/arm-none-linux-gnueabi-gcc


OPENCV2_2_INCLUDE_FILE=/opt/DM8148/opencv/opencv8148/include/
OPENCV2_2_LIB_FILE=/opt/DM8148/opencv/opencv8148/lib


INCLUDE = -I$(OPENCV2_2_INCLUDE_FILE)
LIB += -L$(OPENCV2_2_LIB_FILE) -lopencv_core -lopencv_ml -lopencv_highgui -lopencv_objdetect -lopencv_imgproc

#the OpenCV include path must reach the compiler as well (the original CPPFLAG variable was never referenced)
CPPFLAGS += -Wall -O2 -g $(INCLUDE)

TARGET = exe
SRC = $(wildcard *.cpp)
OBJ = $(patsubst %.cpp, %.o, $(SRC))



%.o : %.c
	$(CROSS_COMPILE)gcc -c $(CPPFLAGS) $(CFLAGS) $< -o $@
%.o : %.cpp
	$(CROSS_COMPILE)g++ -c $(CPPFLAGS) $(CFLAGS) $< -o $@
#link with g++ so that libstdc++ is pulled in for the C++ sources
$(TARGET) : $(OBJ)
	$(CROSS_COMPILE)g++ $(CPPFLAGS) $(CFLAGS) $(LDFLAGS) $^ -o $@ $(LIB) $(LIBS)
.PHONY: clean
clean:
	$(RM) $(TARGET) $(OBJ)

Display header file (displayFb.h):

#ifndef _DISPLAYFB_H_
#define _DISPLAYFB_H_


extern "C"
{
#include <unistd.h>
#include <stdlib.h>
#include <stdio.h>
#include <errno.h>
#include <sys/ioctl.h>
#include <sys/stat.h>
#include <sys/mman.h>
#include <sys/time.h>
#include <fcntl.h>
#include <string.h>
#include <linux/fb.h>
#include <linux/ti81xxfb.h>
#include <linux/ti81xxvin.h>
#include <linux/videodev2.h>
}


#endif
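
The display header only pulls in the framebuffer and TI81xx headers; the display code itself is not part of this post. As a rough sketch of what the framebuffer side usually looks like (the /dev/fb0 node, a 16-bpp RGB565 mode and the display_rgb565 name are assumptions, not taken from the original):

// Hypothetical framebuffer display sketch, assuming /dev/fb0 is already
// configured for 16-bpp RGB565 and is at least w x h pixels.
#include "displayFb.h"

static int display_rgb565(const unsigned short *frame, int w, int h)
{
    int fb = open("/dev/fb0", O_RDWR);                     // device node is an assumption
    if (fb < 0)
        return -1;

    struct fb_var_screeninfo vinfo;
    struct fb_fix_screeninfo finfo;
    if (ioctl(fb, FBIOGET_VSCREENINFO, &vinfo) < 0 ||
        ioctl(fb, FBIOGET_FSCREENINFO, &finfo) < 0)
    {
        close(fb);
        return -1;
    }

    unsigned char *fbmem = (unsigned char *)mmap(NULL, finfo.smem_len,
                                                 PROT_READ | PROT_WRITE,
                                                 MAP_SHARED, fb, 0);
    if (fbmem == MAP_FAILED)
    {
        close(fb);
        return -1;
    }

    // copy line by line so the framebuffer stride (line_length) is respected
    for (int y = 0; y < h && y < (int)vinfo.yres; y++)
        memcpy(fbmem + y * finfo.line_length,
               frame + y * w,
               (w < (int)vinfo.xres ? w : (int)vinfo.xres) * 2);

    munmap(fbmem, finfo.smem_len);
    close(fb);
    return 0;
}

In a real application the framebuffer would normally be opened and mmap()ed once at start-up and the mapping reused for every frame; the per-call open here just keeps the sketch self-contained.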


