V4L2 stream capture with Qt display


Note: I have recently been working on a video display project that captures frames with V4L2 and displays them with Qt. Since I was not familiar with V4L2 at first, I downloaded resources online and built on someone else's wrapper library. It worked out of the box for displaying video, and I only needed some Qt knowledge to add a few features. The full project is available at: https://download.csdn.net/download/u013142545/87778099

Drawback of that project: it wraps the V4L2 API and only supports raw RGB24/YUV data. My current project needs a raw MJPEG stream, so I decided to drop the third-party V4L2 wrapper and develop against the native API instead. Below I record the steps and references from the rewrite, both so I can trace my own work later and in the hope that it is useful to others.


Introduction to V4L2
PS: there are many good write-ups online covering how to configure and query the relevant parameters; they are easy to find by searching.
For example: https://www.cnblogs.com/surpassal/archive/2012/12/19/zed_webcam_lab1.html
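
As a quick sanity check of what the driver reports about itself, a minimal capability query looks roughly like this (a sketch, not taken from the project above; it assumes fd is an already-opened /dev/videoX descriptor — the open() call is shown before the main listing further below — and needs <stdio.h>, <sys/ioctl.h> and <linux/videodev2.h>):

    struct v4l2_capability cap;
    if (ioctl(fd, VIDIOC_QUERYCAP, &cap) == 0)
    {
        // driver/card/bus strings and a capability bitmask filled in by the kernel
        printf("driver: %s, card: %s, bus: %s, capabilities: 0x%08x\n",
               (const char *)cap.driver, (const char *)cap.card,
               (const char *)cap.bus_info, cap.capabilities);
    }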

Since the Qt display side only handles RGB image formats, the raw MJPEG data pulled from V4L2 has to be converted to RGB. You can use the FFmpeg libraries or the jpeg library (libjpeg) for this. I originally converted with FFmpeg (a sketch of that route follows the libjpeg code below), but then found a blog post that converts with libjpeg, and the code can be copied and used as-is: https://blog.csdn.net/u011736505/article/details/107411528
The code is as follows:
#include <jpeglib.h> // remember to link with -ljpeg when building
// Convert an MJPEG frame to RGB24
int v4l2_ops::MJPEG2RGB(uint8_t* data_frame, int bytesused)
{
    // variables:

    struct jpeg_decompress_struct cinfo;
    struct jpeg_error_mgr jerr;
    unsigned int width, height;
    // data points to the mjpeg frame received from v4l2.
    unsigned char *data = data_frame;
    size_t data_size =  bytesused;

    // all the pixels after conversion to RGB.
    unsigned char *pixels; // buffer for the decoded RGB result
    int pixel_size = 0;    // bytes per pixel
    if ( data == NULL  || data_size <= 0)
    {
        printf("Empty data!\n");
        return -1;
    }
    uint8_t h1 = 0xFF;
    uint8_t h2 = 0xD8; // first two bytes of a JPEG (the SOI marker)

//	if(*(data) != h1 || *(data+1) != h2)
//	{
//		// bad header
//		printf("wrong header\n");
//		return -2;
//	}
    // ... In the initialization of the program:
    cinfo.err = jpeg_std_error(&jerr);
    jpeg_create_decompress(&cinfo);
    jpeg_mem_src(&cinfo, data, data_size);
    int rc = jpeg_read_header(&cinfo, TRUE);
    if(rc != 1)
    {
        printf("Not a jpg frame.\n");
        jpeg_destroy_decompress(&cinfo); // avoid leaking the decompress object
        return -2;
    }
    jpeg_start_decompress(&cinfo);
    width = cinfo.output_width;
    height = cinfo.output_height;
    pixel_size = cinfo.output_components; // components per pixel, 3 for RGB
    int bmp_size = width * height * pixel_size;
    pixels = (unsigned char *)malloc(bmp_size);

    // ... Every frame:

    while (cinfo.output_scanline < cinfo.output_height)
    {
        unsigned char *temp_array[] ={ pixels + (cinfo.output_scanline) * width * pixel_size };
        jpeg_read_scanlines(&cinfo, temp_array, 1);
    }

    jpeg_finish_decompress(&cinfo);
    jpeg_destroy_decompress(&cinfo);

    // Optionally write the decompressed bitmap out to a PPM file (P6, binary pixmap)
    // just to verify the decode:


    //static int cnt = 0;

    //char fname[25] = { 0 };        // file name
    if (/*FMT == V4L2_PIX_FMT_MJPEG*/1)
    {
//        sprintf(fname, "output_%d.ppm", cnt++); // cnt is a global frame counter

//        char buf[50];		//for header
//        rc = sprintf(buf, "P6 %d %d 255\n", width, height);
//        FILE *fd = fopen(fname, "w");
//        fwrite(buf, rc, 1, fd);
//        fwrite(pixels, bmp_size, 1, fd);
//        fflush(fd);
//        fclose(fd);

        // Wrap the decoded buffer in a QImage; QPixmap::fromImage() makes a deep
        // copy, so freeing `pixels` afterwards is safe.
        QImage img = QImage(pixels, width, height, width * pixel_size, QImage::Format_RGB888);

        emit get_one_frame(QPixmap::fromImage(img));

    }

    free(pixels); // release the RGB buffer

    return 0;
}
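
As the comment at the top of the function says, the jpeg library has to be linked in. In a qmake project that is roughly the following (a sketch; the include/library paths are placeholders and depend on your toolchain, especially when cross-compiling for an ARM board):

# .pro file
LIBS += -ljpeg
# if libjpeg is installed to a non-standard prefix, also something like:
# INCLUDEPATH += /path/to/jpeg/include
# LIBS += -L/path/to/jpeg/lib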

For usage and build notes on the jpeg library (libjpeg), see: https://blog.51cto.com/u_11822586/5631965
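
For reference, since I originally converted with FFmpeg, here is a minimal sketch of that route using libavcodec + libswscale. It assumes a reasonably recent FFmpeg (the send/receive decoding API); the function name, return convention and error handling are my own simplifications, not taken from any project above, and in real code you would keep the codec context and SwsContext as members instead of recreating them per frame:

extern "C" {
#include <libavcodec/avcodec.h>
#include <libswscale/swscale.h>
}

// Decode one MJPEG frame into a malloc'd RGB24 buffer (caller frees), or return NULL.
static unsigned char *mjpeg_to_rgb24_ffmpeg(const uint8_t *jpeg, int size, int *out_w, int *out_h)
{
    const AVCodec *codec = avcodec_find_decoder(AV_CODEC_ID_MJPEG);
    AVCodecContext *ctx = avcodec_alloc_context3(codec);
    if (!codec || !ctx || avcodec_open2(ctx, codec, NULL) < 0)
    {
        avcodec_free_context(&ctx);
        return NULL;
    }

    AVPacket *pkt = av_packet_alloc();
    pkt->data = const_cast<uint8_t *>(jpeg); // point at the V4L2 buffer, no copy
    pkt->size = size;

    AVFrame *frame = av_frame_alloc();
    unsigned char *rgb = NULL;
    if (avcodec_send_packet(ctx, pkt) == 0 && avcodec_receive_frame(ctx, frame) == 0)
    {
        int w = frame->width, h = frame->height;
        // convert whatever the MJPEG decoder produced (usually YUVJ422P/420P) to RGB24
        SwsContext *sws = sws_getContext(w, h, (AVPixelFormat)frame->format,
                                         w, h, AV_PIX_FMT_RGB24,
                                         SWS_BILINEAR, NULL, NULL, NULL);
        rgb = (unsigned char *)malloc(w * h * 3);
        uint8_t *dst[1]     = { rgb };
        int dst_stride[1]   = { 3 * w };
        sws_scale(sws, frame->data, frame->linesize, 0, h, dst, dst_stride);
        sws_freeContext(sws);
        *out_w = w;
        *out_h = h;
    }
    av_frame_free(&frame);
    av_packet_free(&pkt);
    avcodec_free_context(&ctx);
    return rgb; // can be wrapped with QImage(rgb, w, h, 3 * w, QImage::Format_RGB888)
}

Link with -lavcodec -lavutil -lswscale (added to LIBS in the .pro file the same way as -ljpeg above).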

With that, the preparation work is done. The core V4L2 setup and capture code is as follows:
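The listing starts at step 2; step 1, opening the device, is not shown there, so here is roughly what it looks like (the device node /dev/video0 is an assumption, adjust it to your camera; the listing as a whole needs <fcntl.h>, <unistd.h>, <sys/ioctl.h>, <sys/mman.h> and <linux/videodev2.h>):

    // 1. Open the capture device
    int fd = open("/dev/video0", O_RDWR);
    if(fd < 0)
    {
        perror("Failed to open video device");
        return;
    }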
//2. Query the formats supported by the camera: ioctl(fd, request, struct matching the request)
    struct v4l2_fmtdesc v4fmt;
    v4fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; // video capture device
    int i=0;
    while(1)
    {
        v4fmt.index = i++;
        int ret = ioctl(fd, VIDIOC_ENUM_FMT, &v4fmt);
        if(ret < 0)
        {
            perror("获取摄像头格式失败");
            break;
        }
        printf("index=%d\n", v4fmt.index);
        printf("flags=%d\n", v4fmt.flags);
        printf("description=%s\n", v4fmt.description);
        unsigned char *p = (unsigned char *)&v4fmt.pixelformat;
        printf("pixelformat=%c%c%c%c\n", p[0],p[1],p[2],p[3]);
        printf("reserved=%d\n", v4fmt.reserved[0]);
    }
    // Check whether the device is a video capture device
    struct v4l2_capability vcap;
    ioctl(fd, VIDIOC_QUERYCAP, &vcap);
    if (!(V4L2_CAP_VIDEO_CAPTURE & vcap.capabilities)) {
        perror("Error: 无USB视频采集设备!\n");
        return;
    }
    // Enumerate the pixel formats supported by the camera
    struct v4l2_fmtdesc fmtdesc;
    fmtdesc.index = 0;
    fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    printf("All formats supported by the USB camera:\n");
    while(ioctl(fd,VIDIOC_ENUM_FMT,&fmtdesc) == 0){
        printf("v4l2_format%d:%s\n",fmtdesc.index,fmtdesc.description);
        fmtdesc.index++;
    }
    // Enumerate the frame sizes supported for MJPEG
    struct v4l2_frmsizeenum frmsize;
    frmsize.index = 0;
    frmsize.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    printf("All frame sizes supported for MJPEG:\n");
    // frmsize.pixel_format = V4L2_PIX_FMT_YUYV;
    frmsize.pixel_format = V4L2_PIX_FMT_MJPEG;
    while(ioctl(fd,VIDIOC_ENUM_FRAMESIZES,&frmsize) == 0){
        printf("frame_size<%u*%u>\n",frmsize.discrete.width,frmsize.discrete.height);
        frmsize.index++;
    }
    // Enumerate frame intervals (i.e. frame rates) at a given frame size
    struct v4l2_frmivalenum frmival;
    frmival.index = 0;
    frmival.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    frmival.pixel_format = V4L2_PIX_FMT_MJPEG;
    frmival.width = 640;
    frmival.height = 480;
    while(ioctl(fd,VIDIOC_ENUM_FRAMEINTERVALS,&frmival) == 0){
        printf("frame_interval under frame_size <%d*%d> support %dfps\n",frmival.width,frmival.height,frmival.discrete.denominator / frmival.discrete.numerator);
        frmival.index++;
    }
    // 3. Set the capture format
    struct v4l2_format vfmt;
    memset(&vfmt, 0, sizeof(vfmt));
    vfmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; // video capture
    vfmt.fmt.pix.width = 640;  // capture width; set it to a resolution your camera supports
    vfmt.fmt.pix.height = 480; // capture height
    vfmt.fmt.pix.pixelformat = V4L2_PIX_FMT_MJPEG; // capture pixel format
    int ret = ioctl(fd, VIDIOC_S_FMT, &vfmt);
    if(ret < 0)
    {
        perror("设置格式失败");
    }

    memset(&vfmt, 0, sizeof(vfmt));
    vfmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    ret  = ioctl(fd, VIDIOC_G_FMT, &vfmt);
    if(ret < 0)
    {
        perror("USB摄像头获取格式失败");
    }

    if(vfmt.fmt.pix.width == 640 && vfmt.fmt.pix.height == 480 &&
        vfmt.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG)
    {
        printf("Format set successfully\n");
    }else
    {
        printf("Format setting failed\n");
    }


    set_camera_cfg();

    get_fmt_info();


    // 4. Request kernel buffers
    struct v4l2_requestbuffers reqbuffer;
    memset(&reqbuffer, 0, sizeof(reqbuffer));
    reqbuffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    reqbuffer.count = 4;                 // request 4 buffers from the kernel
    reqbuffer.memory = V4L2_MEMORY_MMAP; // memory-mapped I/O
    ret  = ioctl(fd, VIDIOC_REQBUFS, &reqbuffer);
    if(ret < 0)
    {
        perror("申请队列缓冲区失败");
    }
    // 5. Map the kernel buffer queue into user address space
    unsigned char *mptr[4]; // user-space start addresses of the mapped buffers
    unsigned int  size[4];
    struct v4l2_buffer mapbuffer;
    memset(&mapbuffer, 0, sizeof(mapbuffer));
    // initialize type, memory and index
    mapbuffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    mapbuffer.memory = V4L2_MEMORY_MMAP;
    for(int i=0; i<4; i++)
    {
        mapbuffer.index = i;
        ret = ioctl(fd, VIDIOC_QUERYBUF, &mapbuffer); // query one kernel buffer so it can be mapped
        if(ret < 0)
        {
            perror("查询内核空间队列失败");
        }
        mptr[i] = (unsigned char *)mmap(NULL, mapbuffer.length, PROT_READ|PROT_WRITE,
                                         MAP_SHARED, fd, mapbuffer.m.offset);
        size[i]=mapbuffer.length;

        // enqueue the buffer
        ret  = ioctl(fd, VIDIOC_QBUF, &mapbuffer);
        if(ret < 0)
        {
            perror("入队失败");
        }
    }
    // 6. Start streaming
    int type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    ret = ioctl(fd, VIDIOC_STREAMON, &type);
    if(ret < 0)
    {
        perror("采集失败");
    }


    int i_test = 0;

    // 7. Read frames
    struct v4l2_buffer readbuffer;
    memset(&readbuffer, 0, sizeof(readbuffer));
    readbuffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    readbuffer.memory = V4L2_MEMORY_MMAP;

    while(1)
    {
        ret = ioctl(fd, VIDIOC_DQBUF, &readbuffer);
        if(ret < 0)
        {
            perror("读取帧数据失败");
        }

        //printf("readbuffer len:%d\n",readbuffer.length);

        // Save the captured frame to a .jpg file (disabled)
//        char fileNameStr[20];
//        memset(fileNameStr,0,sizeof(fileNameStr));
//        sprintf(fileNameStr,"%d.jpg",i_test);
//        FILE *file=fopen(fileNameStr, "w+");// saved in the current directory; change the path to save elsewhere,
//        // e.g. to /tmp: FILE *file=fopen("/tmp/my.jpg", "w+");
//        fwrite(mptr[readbuffer.index], readbuffer.length, 1, file);
//        fclose(file);

        //QImage img = QImage(mptr[readbuffer.index], cur_resolutin.width(), cur_resolutin.height(),2,QImage::Format_RGB888);
        //QImage img = QImage(fileNameStr);

        //MJPEGToRGB(mptr[readbuffer.index],readbuffer.length,outBuffer);
        MJPEG2RGB(mptr[readbuffer.index],readbuffer.length);
        //QImage img = QImage(outBuffer, cur_resolutin.width(), cur_resolutin.height(), QImage::Format_RGB888);

        //emit get_one_frame(QPixmap::fromImage(img));


        //saveVideoFile(mptr[readbuffer.index],readbuffer.length);


        if(myApp()->GetRecordingFlag())
        {

            if(!m_createFileFlag)
            {
                fp = new QFile(myApp()->GetRecordingFileName());
                if(!fp->open(QIODevice::ReadWrite | QIODevice::Append))
                {
                    qDebug()<<"====open "<<myApp()->GetRecordingFileName()<<"error!";
                    myApp()->SetRecordingIconVisble(false);
                    //continue;
                }else{
                    m_createFileFlag = true;
                    myApp()->SetRecordingIconVisble(true);
                    qDebug()<<"====open "<<myApp()->GetRecordingFileName()<<"successful!";

//                    QThread::msleep(2);
//                    fp->flush();
//                    fp->close();
//                    fp = nullptr;

                }
            }

            if(m_createFileFlag)
            {

                int queueLen = m_mpeg_pic_data.length();
                int removeIndex = 0;

                srand(time(NULL)); // NOTE: seeding every frame is unnecessary; once at startup is enough


                /// If 10 or more frames are already buffered, do not queue any more
                if(queueLen >= 10)
                {
                    qDebug()<<"====m_mpeg_pic_data.len:"<<m_mpeg_pic_data.length()<<"nothing to do";
                }else if(queueLen > 5 && queueLen < 10){
                    /// Randomly drop two buffered frames, then queue the new one

                    Mjpeg_Pic_Data MjpegData;

                    for(int i = 0; i < 2; i++)
                    {
                        // modulo the current queue length keeps the index valid
                        removeIndex = rand() % m_mpeg_pic_data.length();


                        mpeg_pic_dataMutex.lock();
                        MjpegData = m_mpeg_pic_data.takeAt(removeIndex);
                        mpeg_pic_dataMutex.unlock();

                        if(MjpegData.picDataBuffer != nullptr)
                        {
                            free(MjpegData.picDataBuffer);
                            MjpegData.picDataBuffer = nullptr;
                        }

                    }


                    // then queue the new frame
                    MjpegData.picDataLen = readbuffer.length;
                    MjpegData.picDataBuffer = (unsigned char *)malloc(readbuffer.length);

                    memcpy(MjpegData.picDataBuffer,mptr[readbuffer.index],readbuffer.length);

                    mpeg_pic_dataMutex.lock();
                    m_mpeg_pic_data.push_back(MjpegData);
                    mpeg_pic_dataMutex.unlock();


                }else{

                    //unsigned char *buf = (unsigned char *)malloc(readbuffer.length);
                    // remember to free picDataBuffer once the frame has been written out
                    Mjpeg_Pic_Data tmpMjpegData;
                    tmpMjpegData.picDataLen = readbuffer.length;
                    tmpMjpegData.picDataBuffer = (unsigned char *)malloc(readbuffer.length);

                    memcpy(tmpMjpegData.picDataBuffer,mptr[readbuffer.index],readbuffer.length);

                    mpeg_pic_dataMutex.lock();
                    m_mpeg_pic_data.push_back(tmpMjpegData);
                    //qDebug()<<"**************m_mpeg_pic_data len:"<<m_mpeg_pic_data.length();
                    mpeg_pic_dataMutex.unlock();


                    //                QThread::msleep(2);
                    //                fp->flush();
                    //                fp->close();
                    //                fp = nullptr;

                }
                emit save_video_logic();
            }
        }

        // re-enqueue the buffer
        ret = ioctl(fd, VIDIOC_QBUF, &readbuffer);
        if(ret < 0)
        {
            perror("放回队列失败");
            goto END;
        }

        i_test++;

//        QFile removeFile(fileNameStr);
//        removeFile.remove();


    }

    // Thread exit logic

//    if(fp != nullptr)
//    {
//        fp->flush();
//        fp->close();
//        fp = nullptr;
//    }

END:
    // 8. Stop streaming
    ret = ioctl(fd, VIDIOC_STREAMOFF, &type);
    // 9. Unmap the buffers
    for(int i=0; i<4; i++){
        munmap(mptr[i], size[i]);
    }
    // 10. Close the device
    close(fd);
    quit();
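
For completeness, two pieces that the listing above assumes but does not show. First, on the Qt side, the get_one_frame(QPixmap) signal emitted in MJPEG2RGB() can simply be connected to a QLabel; the widget and member names here are my own assumptions, not taken from the original project:

    // somewhere in the main window, after creating the capture object m_v4l2Ops
    connect(m_v4l2Ops, &v4l2_ops::get_one_frame, this, [this](const QPixmap &pix) {
        ui->videoLabel->setPixmap(pix.scaled(ui->videoLabel->size(), Qt::KeepAspectRatio));
    });

Second, the save_video_logic() signal emitted while recording needs a consumer that drains m_mpeg_pic_data and appends the raw MJPEG frames to fp. A minimal sketch (the slot name is an assumption):

void v4l2_ops::on_save_video_logic()
{
    while (true)
    {
        mpeg_pic_dataMutex.lock();
        if (m_mpeg_pic_data.isEmpty())
        {
            mpeg_pic_dataMutex.unlock();
            break;
        }
        Mjpeg_Pic_Data frame = m_mpeg_pic_data.takeFirst();
        mpeg_pic_dataMutex.unlock();

        if (fp != nullptr && frame.picDataBuffer != nullptr)
            fp->write((const char *)frame.picDataBuffer, frame.picDataLen);

        free(frame.picDataBuffer); // the capture loop malloc'd this buffer
    }
}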
    

V4L2 code reference: https://blog.csdn.net/weixin_45107609/article/details/126966097

PS: I can't claim much credit here; all of this builds on the work of others, and standing on their shoulders is what let me swap the project over to the new framework quickly. Some references could not be listed; my thanks to all of them!

Running on Linux (Ubuntu) it reaches 30 fps, and on an embedded ARM board it still manages about 15 fps.
On the ARM board, taking screenshots and recording both consume resources: the frame rate is around 12 fps while taking a screenshot and about 9 fps while recording. Recording drops frames noticeably, but it does not stall; with the wrapped V4L2 library mentioned earlier, recording showed obvious stuttering.

Here is a screenshot of the capture, at 640*480:


The full project can be downloaded from the CSDN link given at the beginning of this post (points required).
