V4L2采集UVC摄像头数据,并经过x264编码成H264视频/YUV视频/BMP图像保存,选择性通过Socket转发到服务端

1 篇文章 0 订阅
1 篇文章 0 订阅

V4L2采集UVC摄像头数据,并经过x264编码成H264视频/YUV视频/BMP图像保存,选择性通过Socket转发到服务端

参考链接

https://blog.csdn.net/qq_21193563/article/details/78692908
https://blog.csdn.net/li_wen01/article/details/56282443

说明

  1. 基于Ubuntu 18.04系统,工程代码编写IDE为Clion,基于CMake编译。
  2. x264的各个版本的下载链接如下:http://download.videolan.org/pub/videolan/x264/snapshots/ 本工程所用x264-snapshot-20170903-2245,版本为152,适用于ubuntu18.04自带的libx264.so.152动态库。
  3. 如果用H264进行编码,需要安装ffmpeg,本工程用x264进行编码,视频压缩后是yuv原视频大小的1/10左右。
  4. 工程代码共涵盖了以下几部分:
    a. V4L2图像数据采集,需要注意采集格式有YUYV(YUV422)/YUV420/BGR24等。采集格式不同,后面转换到RGB图像的时候会有色差。
    b.采集到的原始数据YUV保存,可以用yuv播放器进行查看,注意需要选择好对应的格式和分辨率。YUV播放器在本章节末给出。
    c.yuv数据转RGB,如果单纯需要保存成BMP,则需要对YUV数据进行转换到RGB,此处最好看一下yuv和RGB的图像结构体,转换错误会有很大色差。
    d.BMP图像保存,需要BMP文件头等信息。
    e.如果在本地保存视频,需要确定好分辨率和帧数(fps),本工程用x264进行编码,注意编码的格式i_csp = x264_CSP_I422。在对yuv数据编码时,需要对数据进行分离,参考本工程代码:convertFrameToX264Img函数段。编码有错误,会出现绿屏、花屏。

代码段(仅关键代码)-V4L2数据采集

//1. Open the UVC camera device node (e.g. /dev/video0) for read/write.
    gv4l2.fd = open(cameraID,O_RDWR);
    if (gv4l2.fd < 0)
    {
        std::cout << "open failed" << std::endl;
        return -1;
    }
    //2. Query the first pixel format the camera advertises (index 0 only).
    gv4l2.gfmtdesc.index = 0;
    gv4l2.gfmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    ret = ioctl(gv4l2.fd,VIDIOC_ENUM_FMT,&gv4l2.gfmtdesc);
    if (ret < 0)
    {
        std::cout << "获取摄像头支持的格式 Failed" << std::endl;
        return -1;
    }
    std::cout << "v4l2 camera description: " << gv4l2.gfmtdesc.description << std::endl;
    //3. Set the capture format: resolution and packed YUYV (YUV 4:2:2).
    gv4l2.gfmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    gv4l2.gfmt.fmt.pix.width = CARAMER_WEIGHT;
    gv4l2.gfmt.fmt.pix.height = CARAMER_HIGHT;
    gv4l2.gfmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;// packed 4:2:2, 2 bytes per pixel
    //gv4l2.gfmt.fmt.pix.pixelformat= V4L2_PIX_FMT_YUV420;
    //gv4l2.gfmt.fmt.pix.pixelformat = V4L2_PIX_FMT_BGR32;
    // NOTE(review): a V4L2 driver may silently adjust width/height/pixelformat;
    // gfmt is not re-read after VIDIOC_S_FMT to confirm what was actually granted.
    ret = ioctl(gv4l2.fd,VIDIOC_S_FMT,&gv4l2.gfmt);
    if (ret < 0)
    {
        std::cout << "设置视频格式 FAILED" << std::endl;
        return -1;
    }
    std::cout << "v4l2 camera description: " << gv4l2.gfmtdesc.description << std::endl;
    //4. Request 4 memory-mapped frame buffers from the kernel.
    gv4l2.greqbuff.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    gv4l2.greqbuff.count = 4;
    gv4l2.greqbuff.memory = V4L2_MEMORY_MMAP;
    ret = ioctl(gv4l2.fd,VIDIOC_REQBUFS,&gv4l2.greqbuff);
    if (ret < 0)
    {
        std::cout << "申请内核缓冲区队列 FAILED" << std::endl;
        return -1;
    }
    //5. Map each kernel buffer into user space, and
    //6. enqueue it so the driver can start filling it.
    gv4l2.gmapbuff.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    for (int i = 0; i < 4; i++)
    {
        gv4l2.gmapbuff.index = i;
        ret = ioctl(gv4l2.fd, VIDIOC_QUERYBUF, &gv4l2.gmapbuff);
        if (ret < 0)
        {
            printf("查询内核空间队列失败\r\n");
            return -1;
        }
        // NOTE(review): mmap() result is not checked against MAP_FAILED;
        // a mapping failure here would crash on first buffer access.
        gv4l2.mptr[i] = (unsigned char *)mmap(NULL, gv4l2.gmapbuff.length, PROT_READ | PROT_WRITE, MAP_SHARED, gv4l2.fd, gv4l2.gmapbuff.m.offset);
        gv4l2.size[i] = gv4l2.gmapbuff.length;
        ret = ioctl(gv4l2.fd, VIDIOC_QBUF, &gv4l2.gmapbuff);
        if (ret < 0)
        {
            printf("内核空间队列放回失败\r\n");
            return -1;
        }
    }
    //7. Start streaming.
    int type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    ret = ioctl(gv4l2.fd,VIDIOC_STREAMON,&type);
    if (ret < 0)
    {
        std::cout << "采集 FAILED" << std::endl;
        return -1;
    }

    // Optional raw-YUV dump of captured frames (disabled):
    //std::ofstream _yuvOut;
    //_yuvOut.open("./yuvCamera.yuv",std::ios::out|std::ios::binary);

    // Initialise the x264 encoder; encoded stream goes to ./myx264.h264.
    this->initX264Encoder(this->x264Encoder,"./myx264.h264");

    while (true)
    {
        //8. Capture loop: dequeue a filled buffer, process it, re-queue it.
        gv4l2.greadbuff.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        // DQBUF blocks until the driver has a filled frame available.
        ret = ioctl(gv4l2.fd,VIDIOC_DQBUF,&gv4l2.greadbuff);
        if (ret < 0)
        {
            std::cout << "图像数据 GET FAILED" << std::endl;
            break;
        }
        std::cout << "图像数据 LENGTH = " << gv4l2.greadbuff.length << std::endl;
        // Convert the packed YUYV frame to RGB for the socket/BMP paths.
        this->YuyvToRGB(gv4l2.mptr[gv4l2.greadbuff.index],gv4l2.rgbdata,CARAMER_WEIGHT,CARAMER_HIGHT);
        // NOTE(review): sizeof(gv4l2.rgbdata) is the full buffer size only if
        // rgbdata is a fixed-size array, not a pointer -- confirm its declaration.
        std::cout << "RGB LENGTH = " << sizeof(gv4l2.rgbdata)<< std::endl;

        // Encode the raw YUYV frame (not the RGB copy) to H.264.
        encoderImg(this->x264Encoder,(char *)gv4l2.mptr[gv4l2.greadbuff.index]);

        try
        {
            // Forward the RGB frame to the server when connected.
            // NOTE(review): send() may transmit fewer bytes than requested;
            // partial sends are not handled here.
            if (this->_socketConnectFlag)
            {
                ret = send(this->_socket_fd,gv4l2.rgbdata,sizeof(gv4l2.rgbdata),0);
                if (ret <0)
                {
                    std::cout<<"send to server is error"<<std::endl;
                }
            }
        }catch(std::exception e1){
            // NOTE(review): send() reports errors via its return value, not
            // exceptions, so this handler is effectively dead code; it also
            // catches by value (slicing) -- prefer catch by const reference.
            std::cout<<"CONNECT is error"<<std::endl;
        }

        /*
         * Optional raw-YUV dump of the captured frame (disabled).
         * */
        //_yuvOut.write((char *)gv4l2.mptr[gv4l2.greadbuff.index],gv4l2.greadbuff.length);
        //_yuvOut.seekp(0,std::ios::end);

        // Optional BMP snapshot of the RGB frame (disabled).
        //_rgbToBMP.rgbaToBmpFile((char *)this->_path.data(),(char *)gv4l2.rgbdata,CARAMER_WEIGHT,CARAMER_HIGHT,3);

        // Hand the buffer back to the driver for reuse.
        ret = ioctl(gv4l2.fd,VIDIOC_QBUF,&gv4l2.greadbuff);
        if (ret < 0)
        {
            std::cout << "放入内核队列 FAILED" << std::endl;
            break;
        }

    }

    //9. Stop streaming.
    //_yuvOut.close();
    ret = ioctl(gv4l2.fd,VIDIOC_STREAMOFF,&type);
    if (ret < 0)
    {
        std::cout << "停止采集 FAILED" << std::endl;
        return -1;
    }
    //10. Unmap the kernel buffers.
    for (int i = 0; i < 4; i++) {
        ret = munmap(gv4l2.mptr[i],gv4l2.size[i]);
    }
    //11. Close the device.
    close(gv4l2.fd);

x264编码

/**
 * Initialise an x264 encoder for packed-YUYV (4:2:2) camera frames.
 *
 * Opens @p filePath for the raw Annex-B H.264 output, fills an x264_param_t
 * with a low-latency "veryfast"/"zerolatency" configuration matching the
 * capture resolution and frame rate, allocates the input/output pictures
 * and opens the encoder handle.
 *
 * @param x264Encoder  encoder state to populate (file handle, params, pics).
 * @param filePath     path of the output .h264 elementary-stream file.
 *
 * On any unrecoverable failure (file open or allocation) the process exits,
 * matching the existing allocation-failure handling in this function.
 */
void v4l2::initX264Encoder(X264Encoder &x264Encoder,char *filePath)
{
    // Output file for the encoded Annex-B byte stream.
    // Bug fix: the result was previously unchecked; a failed open would
    // crash later when the encode path fwrite()s through a NULL FILE*.
    x264Encoder.m_x264Fp = fopen(filePath, "wb");
    if (x264Encoder.m_x264Fp == NULL)
        exit(1);

    x264Encoder.m_pX264Param = (x264_param_t *)malloc(sizeof(x264_param_t));
    if (x264Encoder.m_pX264Param == NULL)
        exit(1);
    // x264_param_default_preset() calls x264_param_default() internally,
    // so the separate default call that used to precede it was redundant.
    x264_param_default_preset(x264Encoder.m_pX264Param, "veryfast", "zerolatency");
    // NOTE(review): apply_profile runs BEFORE i_csp is set to I422 below.
    // The baseline profile only supports 4:2:0, so applying it after the
    // csp change would be rejected by x264; kept in the original order to
    // preserve behaviour -- confirm the intended profile for 4:2:2 input.
    x264_param_apply_profile(x264Encoder.m_pX264Param, "baseline");
    x264Encoder.m_pX264Param->i_threads = X264_THREADS_AUTO; // let x264 pick the thread count

    // Frame geometry must match the V4L2 capture settings.
    x264Encoder.m_pX264Param->i_width = CARAMER_WEIGHT;   // encoded image width
    x264Encoder.m_pX264Param->i_height = CARAMER_HIGHT;   // encoded image height

    // Constant frame rate (b_vfr_input = 0): timestamps derive from fps.
    x264Encoder.m_pX264Param->b_vfr_input = 0;
    int m_frameRate = VENC_FPS;
    x264Encoder.m_pX264Param->i_fps_num = m_frameRate; // fps numerator
    x264Encoder.m_pX264Param->i_fps_den = 1;           // fps denominator
    x264Encoder.m_pX264Param->i_timebase_den = x264Encoder.m_pX264Param->i_fps_num;
    x264Encoder.m_pX264Param->i_timebase_num = x264Encoder.m_pX264Param->i_fps_den;
    x264Encoder.m_pX264Param->b_intra_refresh = 0;
    x264Encoder.m_pX264Param->b_annexb = 1;            // start codes -> playable as a raw .h264 file
    //m_pX264Param->b_repeat_headers = 0;
    x264Encoder.m_pX264Param->i_keyint_max = m_frameRate; // roughly one keyframe per second

    x264Encoder.m_pX264Param->i_csp = X264_CSP_I422;   // planar 4:2:2 input; see convertFrameToX264Img
    x264Encoder.m_pX264Param->i_log_level = X264_LOG_INFO;

    x264Encoder.m_x264iNal = 0;
    x264Encoder.m_pX264Nals = NULL;

    // Input picture: x264_picture_alloc() allocates the I422 plane buffers.
    x264Encoder.m_pX264Pic_in = (x264_picture_t *)malloc(sizeof(x264_picture_t));
    if (x264Encoder.m_pX264Pic_in == NULL)
        exit(1);
    memset(x264Encoder.m_pX264Pic_in, 0, sizeof(x264_picture_t));
    x264_picture_alloc(x264Encoder.m_pX264Pic_in, X264_CSP_I422, x264Encoder.m_pX264Param->i_width, x264Encoder.m_pX264Param->i_height);
    x264Encoder.m_pX264Pic_in->i_type = X264_TYPE_AUTO;

    // Output picture is filled in by x264_encoder_encode() during encoding.
    x264Encoder.m_pX264Pic_out = (x264_picture_t *)malloc(sizeof(x264_picture_t));
    if (x264Encoder.m_pX264Pic_out == NULL)
        exit(1);
    memset(x264Encoder.m_pX264Pic_out, 0, sizeof(x264_picture_t));
    x264_picture_init(x264Encoder.m_pX264Pic_out);

    x264Encoder.m_pX264Handle = x264_encoder_open(x264Encoder.m_pX264Param);
    assert(x264Encoder.m_pX264Handle);
}

/**
 * De-interleave one packed YUYV (YUY2, 4:2:2) frame into the three planar
 * buffers of an x264 input picture (X264_CSP_I422).
 *
 * Packed layout per pixel pair is Y0 U Y1 V.  For a WxH frame the planar
 * sizes are: Y = W*H bytes, U = V = W*H/2 bytes each.
 *
 * @param x264InImg  destination picture; plane[0..2] must already be
 *                   allocated for I422 at CARAMER_WEIGHT x CARAMER_HIGHT
 *                   (done by x264_picture_alloc in initX264Encoder).
 * @param RGBData    despite the name, this is the raw packed YUYV frame
 *                   from V4L2, CARAMER_WEIGHT * CARAMER_HIGHT * 2 bytes.
 */
void convertFrameToX264Img(x264_image_t *x264InImg,char *RGBData)
{
    //RGB方式
//    int srcSize = CARAMER_HIGHT*CARAMER_WEIGHT;
//    x264InImg->plane[0] = (uint8_t *)RGBData;
//    x264InImg->plane[1] = (uint8_t *)RGBData + srcSize;
//    x264InImg->plane[2] = (uint8_t *)RGBData + srcSize;
    // YUYV -> I422 plane split.
    char *y = (char *)x264InImg->plane[0];
    char *u = (char *)x264InImg->plane[1];
    char *v = (char *)x264InImg->plane[2];
    int index_y = 0;
    int index_u = 0;
    int index_v = 0;
    // Walk the packed buffer 4 bytes (= 2 pixels) at a time.
    // Bug fix: the original bound was W*H*2 - 4, which skipped the final
    // Y0/U/Y1/V group and left the last two pixels of every frame
    // unwritten in the destination planes.
    const int num = CARAMER_WEIGHT * CARAMER_HIGHT * 2;
    for(int i=0; i<num; i=i+4)
    {
        *(y + (index_y++)) = *(RGBData + i);     // Y0
        *(u + (index_u++)) = *(RGBData + i + 1); // U
        *(y + (index_y++)) = *(RGBData + i + 2); // Y1
        *(v + (index_v++)) = *(RGBData + i + 3); // V
    }
}

播放效果

在这里插入图片描述

工程下载链接,没有积分请留言

工程代码:
https://download.csdn.net/download/yo_ike/16013568
yuv图像播放器:
https://download.csdn.net/download/yo_ike/16013598

不妥的地方,请大佬们给予指点,感激不尽。

  • 2
    点赞
  • 22
    收藏
    觉得还不错? 一键收藏
  • 8
    评论

“相关推荐”对你有帮助么?

  • 非常没帮助
  • 没帮助
  • 一般
  • 有帮助
  • 非常有帮助
提交
评论 8
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值