Linux下的x11录屏、V4l2摄像头获取、alsa录音、及视频拼接

工作中需在Linux下完成双录功能开发,在这里记录一下用到的相关知识点做一个备份。
1、录屏基于x11的linux底层库进行开发,注意img的内存释放问题。
2、基于V4l2的linux底层库进行开发,相对而言流程比较复杂,注意可以查看摄像头对应的VID&PID设备号,同一型号的摄像头他们的ID是一致的。
3、基于alsa库进行录音设备采集,生成的PCM文件经过ffmpeg重采样就可以完成编码了。注意编码的时候最好保持采样率的一致,不然声音听起来会比较怪的。

1、x11录屏

查看博客和ffmpeg中x11grab中的录屏部分源码提取

xcb_get_image_reply_t *img;    // last screen-grab reply (malloc'd by xcb; must be freed by the consumer)
xcb_get_image_cookie_t iq;     // cookie for the in-flight xcb_get_image request
xcb_generic_error_t *e = NULL; // error-reply slot filled by xcb_get_image_reply (malloc'd by xcb when set)
xcb_connection_t *conn;        // connection to the X server, opened in InitVideoInput()
xcb_screen_t *screen;          // screen selected in InitVideoInput()
int m_iDesktopWidth = 1280;    // grab size; overwritten with the real screen size in InitVideoInput()
int m_iDesktopHeight = 1024;
// Connect to the X server, locate the preferred screen and record its
// dimensions in m_iDesktopWidth/m_iDesktopHeight.
// Returns true on success, false when the X connection could not be made.
bool InitVideoInput()
{
    int screen_num = 1;

    conn = xcb_connect(NULL, &screen_num); // screen_num receives the preferred screen index

    if (xcb_connection_has_error(conn))
    {
        qDebug() << "xcb_connection_has_error faild!";
        // bug fix: the original bool function fell off the end with no
        // return statement at all (undefined behavior) and kept going
        // after a failed connection.
        return false;
    }

    qDebug() << "xcb_connection_has_error success!" << screen_num;

    // Walk the root iterator forward to the preferred screen.
    const xcb_setup_t * setup = xcb_get_setup(conn);
    xcb_screen_iterator_t iter = xcb_setup_roots_iterator (setup);

    for (int i = 0; i < screen_num; ++i)
    {
        xcb_screen_next (&iter);
    }

    screen = iter.data;

    qDebug() << "Informations of screen " << screen->root << screen->width_in_pixels << screen->height_in_pixels;
    m_iDesktopWidth = screen->width_in_pixels;
    m_iDesktopHeight = screen->height_in_pixels;

    qDebug() << "xcb success!";
    return true;
}
while(1)
{
        // xcb grab: request a full-screen Z-pixmap image and block for the reply.
        iq = xcb_get_image(conn, XCB_IMAGE_FORMAT_Z_PIXMAP, screen->root, 0, 0, m_iDesktopWidth, m_iDesktopHeight, ~0);
        img = xcb_get_image_reply(conn, iq, &e);
        if (e)
        {
            qDebug() << "xcb_get_image_reply faild!";
            // bug fix: the error reply is malloc'd by xcb and must be freed,
            // otherwise every failed grab leaks.
            free(e);
            e = NULL;
            continue;
        }
        if (!img)
        {
            qDebug() << "img = NULL faild!";
            continue;
        }
        uint8_t *data = xcb_get_image_data(img);
        int length = xcb_get_image_data_length(img);
        // ... consume `data` / `length` (encode, copy out, ...) here ...

        // bug fix: xcb_get_image_reply() allocates the reply with malloc;
        // freeing it after use prevents a per-frame leak (this is the
        // "img 的内存释放问题" the article warns about).
        free(img);
        img = NULL;
}

2、V4l2摄像头获取

这个还需要深究V4L2库进行分析


FILE* fpfile;   // YUV debug-dump file, opened in StartCamera(), closed in StopCamera()
char* filename; // heap path of the dump file (new[] in StartCamera, delete[] in StopCamera)

int CMBCamera::StartCamera(int &width, int &height, std::string strCameraVPid)
{
    int ret = 0;

    //输出YUV文件测试
    QDateTime localTime = QDateTime::currentDateTime();//获取系统时 间
    QString currentTime = localTime.toString("yyyyMMddhhmmss");//格式转换
    char* LocalTime = qstringToChar(currentTime); //QString 转char*
    filename = new char[50];
    strcpy(filename, SAVEPICTURE); //拼接字 符串
    strcat(filename, LocalTime);
    strcat(filename, PICTURETAIL);
    fpfile = fopen(filename, "wb+"); //打开文件, 以写格式
    if(fpfile == NULL)
    {
        qDebug()<< "create yuv file failure ";
        return 0;
    }
	
    qDebug() << "StartCamera Start! width = " << width << " height = " << height << " strCameraVPid = " << strCameraVPid.c_str();
    m_CurrentWidth = width;
    m_CurrentHeight = height;
    m_strCameraVPid = GetVPID(strCameraVPid);
    bStart = true;
    ret = open_device();
    if(ret == 0)
    {
        qDebug() << "StartCamera Fail -> open_device Fail!";
        return 0;
    }
    printSolution(fd);
    ret = init_device();
    if(ret == 0)
    {
        qDebug() << "StartCamera Fail -> init_device Fail!";
        return 0;
    }
    start_capturing();
    sleep(1);
    initFrameCallBackFun();//读取数据

    //适配摄像头大小
    if(m_CurrentWidth != width || m_CurrentHeight != height)
    {
        width = m_CurrentWidth;
        height = m_CurrentHeight;
    }

    m_pCacheThread = new Poco::Thread();
    m_pCacheRa = new Poco::RunnableAdapter<CMBCamera>(*this,&CMBCamera::CacheVideoFrame);//处理数据
    m_pCacheThread->start(*m_pCacheRa);

    qDebug() << "StartCamera Success! m_CurrentWidth = " << m_CurrentWidth << " m_CurrentHeight = " << m_CurrentHeight;
    return 1;
}

// Tear everything down in reverse order of StartCamera().
// Always returns 0.
int CMBCamera::StopCamera()
{
    // Close the YUV debug-dump file.
    delete [] filename; // delete[] on NULL is a safe no-op
    filename = NULL;
    // bug fix: fclose(NULL) is undefined behavior — guard against
    // StopCamera() being called without a successful StartCamera().
    if (fpfile != NULL)
    {
        fclose(fpfile);
        fpfile = NULL;
    }

    bStart = false;     // signal the reader loop to exit
    StopReadThread();
    stop_capturing();
    uninit_device();
    close_device();
    this->fd = -1;
    free(setfps);       // free(NULL) is a safe no-op
    setfps = NULL;
    //free(CaptureVideoBuffer);
    sleep(1);           // let in-flight consumers drain
    qDebug() << "StopCamera Success!";

    return 0;
}

// Close the device fd and forget the enumerated format list.
void CMBCamera::close_device(void)
{
    g_vcV4l2Pix.clear();

    // bug fix: the original called close(fd) unconditionally, so a second
    // call (or a call before open) would close(-1) and log a bogus error.
    if (fd != -1 && -1 == close(fd))
        qDebug() << "close";
    fd = -1;
    qDebug() << "close_device() ok\n";
}

void CMBCamera::uninit_device(void)
{
    unsigned int i;
    switch (io)
    {
    case IO_METHOD_READ:
        free(buffers[0].start);
        break;
    case IO_METHOD_MMAP:
        for (i = 0; i < n_buffers; ++i)
            if (-1 == munmap(buffers[i].start, buffers[i].length))
                qDebug() << "munmap";
        break;
    case IO_METHOD_USERPTR:
        for (i = 0; i < n_buffers; ++i)
            free(buffers[i].start);
        break;
    }
    free(buffers);
    buffers = NULL;
    qDebug() << "uninit_device() ok\n";
    //MBLogInfo("uninit_device() ok");
}


// Stop the video stream.  read() i/o has nothing to undo; the streaming
// modes (mmap / user-pointer) need VIDIOC_STREAMOFF.
void CMBCamera::stop_capturing(void)
{
    if (io == IO_METHOD_MMAP || io == IO_METHOD_USERPTR)
    {
        enum v4l2_buf_type bufType = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        if (xioctl(fd, VIDIOC_STREAMOFF, &bufType) == -1)
            qDebug() << "VIDIOC_STREAMOFF";
    }
    qDebug() << "stop_capturing() ok\n";
}

// Wait for the frame-reading thread to finish and release it.
// Gives the capture loop 100 ms to notice that bStart was cleared first.
bool CMBCamera::StopReadThread()
{
    qDebug() << "StopReadThread()\n";
    QThread::msleep(100);

    if (m_ThreadRead != NULL)
    {
        m_ThreadRead->join();
        delete m_ThreadRead;
        m_ThreadRead = NULL;
    }
    return true;
}

// Frame delivery is driven by the reader thread — just start it.
void CMBCamera::initFrameCallBackFun()
{
    StartReadThread();
}

// Lazily create the Poco thread and run this object's run() on it.
bool CMBCamera::StartReadThread()
{
    qDebug() << "StartReadThread()";
    if (m_ThreadRead == NULL)
    {
        m_ThreadRead = new Poco::Thread();
        qDebug() << "Create m_Thread=" << m_ThreadRead;
    }
    m_ThreadRead->start(*this);

    qDebug() << "bReadThreadStarted leave.";
    return true;
}


// Poco::Runnable entry point — delegate to the static trampoline.
void CMBCamera::run()
{
    ThreadWrapperThreadFunc(this);
    printf("run() end!!\n");
}

void* CMBCamera::ThreadWrapperThreadFunc(void* me)
{
    CMBCamera* reporter = static_cast<CMBCamera*>(me);
    if (NULL != reporter)
        reporter->FrameThreadFunc();
    else
        qDebug() << "me = " << me ;
    return NULL;
}

void CMBCamera::FrameThreadFunc()
{
    void* buf = NULL;

    lock_dev.lock();
    for (;;)
    {
        // printf("FrameThreadFunc() bStart:%d\n", bStart);
        fd_set fds;
        struct timeval tv;
        int r;
        if (fd <= 0 || !bStart)
        {
            break;
            //          usleep(100);
            //          continue;
        }
        FD_ZERO(&fds);
        FD_SET(fd, &fds);

        tv.tv_sec = 1;
        tv.tv_usec = 0;
        if (fd <= 0 || !bStart)
        {
            break;
        }
        r = select(fd + 1, &fds, NULL, NULL, &tv);
        if (-1 == r)
        {
            if (EINTR == errno)
                continue;
            // errno_exit("=======================================select");
        }
        if (0 == r)
        {
            fprintf(stderr, "======================================select timeout\n");
            exit(EXIT_FAILURE);
            continue;
        }

        if (fd <= 0 || !bStart)
        {
            break;
        }
        read_frame();
    }
    qDebug() << "FrameThreadFunc() return";
    //MBLogInfo("FrameThreadFunc() return");
    // StopReadThread();
    lock_dev.unlock();
    return;
}


// Pull one frame from the device using the configured i/o method and
// publish it into m_cacheBuf (consumers are woken via m_cacheEvent).
// Returns 1 on success, 0 when no frame was delivered (EAGAIN or error).
int CMBCamera::read_frame(void)
{
    //printf("read_frame()\n");
    struct v4l2_buffer buf;
    unsigned int i;

    switch (io)
    {
    case IO_METHOD_READ:
        if (-1 == read(fd, buffers[0].start, buffers[0].length))
        {
            switch (errno)
            {
            case EAGAIN:
                // non-blocking fd: no data ready yet
                return 0;
            case EIO:
                // EIO deliberately falls through to the default handler
                // (transient error, same treatment as any other failure)
            default:
                qDebug() << "read";
                return 0;
            }
        }
        if (fd <= 0 || !bStart)
            break;
        printf("--------222-----\r\n");
        //videoFrameCB(buffers[0].start, buffers[0].length, NULL, NULL);
        // Hand the frame to the cache thread under the buffer mutex.
        m_cacheBufMutex.lock();
        m_cacheBuf.assign((const unsigned char*)buffers[0].start, buffers[0].length);
        m_cacheBufMutex.unlock();
        m_cacheEvent.set();
        // process_image(buffers[0].start, buffers[0].length);
        break;
    case IO_METHOD_MMAP:
        CLEAR(buf);
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        if (-1 == xioctl(fd, VIDIOC_DQBUF, &buf)) // dequeue a filled frame
        {
            switch (errno)
            {
            case EAGAIN:
                return 0;
            case EIO:
                // EIO falls through, as in the READ case
            default:
                char buf_[50] = { 0x0 };
                sprintf(buf_, "VIDIOC_DQBUF fd:%d,isStart:%d", fd, bStart);
                //errno_exit(buf_);
                // 22 == EINVAL: typically seen while the device is torn down
                if (errno == 22)
                    return 0;
                // NOTE(review): for other errno values execution continues
                // below with buf.index from a *failed* DQBUF — confirm this
                // is intended.
                // return 0;
            }
        }
        if (fd <= 0 || !bStart)
            break;
        assert(buf.index < n_buffers);
        //      printf("---------3333----\r\n");
        //videoFrameCB(buffers[buf.index].start, buffers[buf.index].length, NULL,NULL);
        // Copy the dequeued frame out under the buffer mutex, then wake consumers.
        m_cacheBufMutex.lock();
        m_cacheBuf.assign((const unsigned char *)(buffers[buf.index].start), buffers[buf.index].length);
        m_cacheBufMutex.unlock();
        m_cacheEvent.set();

        // process_image(buffers[buf.index].start, buffers[buf.index].length);
        if (-1 == xioctl(fd, VIDIOC_QBUF, &buf)) // re-queue the buffer for the driver
        {
            qDebug() << "VIDIOC_QBUF";
            return 0;
        }


        break;
    case IO_METHOD_USERPTR:
        CLEAR(buf);
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_USERPTR;
        if (-1 == xioctl(fd, VIDIOC_DQBUF, &buf))
        {
            switch (errno)
            {
            case EAGAIN:
                return 0;
            case EIO:
                // EIO falls through, as above
            default:
                qDebug() << "VIDIOC_DQBUF";
                return 0;
            }
        }
        // Map the returned user pointer back to our buffer index.
        for (i = 0; i < n_buffers; ++i)
            if (buf.m.userptr == (unsigned long)buffers[i].start &&
                buf.length == buffers[i].length)
                break;
        if (fd <= 0 || !bStart)
            break;
        assert(i < n_buffers);
        printf("----------hhjjj=======\r\n");
        //videoFrameCB((void*)buf.m.userptr, 0, NULL, NULL);
        // process_image((void*)buf.m.userptr, 0);
        if (-1 == xioctl(fd, VIDIOC_QBUF, &buf)) // re-queue the buffer
        {
            qDebug() << "VIDIOC_QBUF";
            return 0;
        }
        break;
    }
    return 1;
}
// Locate the /dev/videoN node matching m_strCameraVPid (probing indices
// 0..9) and open it read/write, non-blocking.
// Returns 1 on success, 0 on failure.
int CMBCamera::open_device(void)
{
    qDebug() << "CMBCamera::open_device Start!";
    bool bFind = false;
    int nCameraId = 0;
    for (int i = 0; i < 10; i++) // probe /dev/video0 .. /dev/video9
    {
        if (isVaildCamera(i, m_strCameraVPid) == 0)
        {
            bFind = true;
            nCameraId = i;
            break;
        }
    }
    if(!bFind)
    {
        qDebug() << "CMBCamera::open_device isVaildCamera Fail!";
        return 0;
    }

    // bug fix: dev_name keeps pointing at this buffer after the function
    // returns (init_device() uses it in error messages), so the storage
    // must outlive the call.  The original used a plain stack array,
    // leaving dev_name dangling.
    static char szDevName[256];
    snprintf(szDevName, sizeof(szDevName), "/dev/video%d", nCameraId);

    qDebug() << "CMBCamera::open_device" << szDevName;

    struct stat st;
    dev_name = szDevName;
    if (-1 == stat(szDevName, &st))
    {
        fprintf(stderr, "Cannot identify %s: %d, %s\n", szDevName, errno,strerror(errno));
        qDebug() << "open_device() stat Fail!";
        return 0;
    }
    if (!S_ISCHR(st.st_mode))
    {
        fprintf(stderr, "%s is no device\n", szDevName);
        qDebug() << "open_device() S_ISCHR Fail!";
        return 0;
    }
    fd = open(szDevName, O_RDWR | O_NONBLOCK, 0);
    if (-1 == fd)
    {
        // bug fix: the original format string contained mojibake escapes
        // ("\E2\80?") left over from the curly quotes in the v4l2 sample code.
        fprintf(stderr, "Cannot open '%s': %d, %s\n", szDevName, errno,
            strerror(errno));
        qDebug() << "open_device() open Fail!";
        return 0;
    }
    qDebug() << "open_device() ok";
    return 1;
}

void CMBCamera::printSolution(int fd)
{  //获取摄像头所支持的分辨率
    enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    struct v4l2_fmtdesc fmt_1;
    struct v4l2_frmsizeenum frmsize;
    structFromatRecord formatRecord;

   // struct v4l2_pix_format pixFormat;

   // ClearVideoInParamsList();
    qDebug() << "printSolution Camera Support param";
    fmt_1.index = 0;
    fmt_1.type = type;
    while (ioctl(fd, VIDIOC_ENUM_FMT, &fmt_1) >= 0)
    {
        frmsize.pixel_format = fmt_1.pixelformat;
        frmsize.index = 0;
        while (ioctl(fd, VIDIOC_ENUM_FRAMESIZES, &frmsize) >= 0)
        {
            //获取支持的宽高格式
            //V4L2_PIX_FMT_YUYV 1448695129
            //V4L2_PIX_FMT_YYUV 1448434009
            //V4L2_PIX_FMT_MJPEG    1196444237
            formatRecord.width = frmsize.discrete.width;
            formatRecord.height = frmsize.discrete.height;
            formatRecord.pixfromat = fmt_1.pixelformat;//fmt_1.pixelformat;
            //formatRecord.fps = getfps.parm.capture.timeperframe.denominator;
            g_vcV4l2Pix.push_back(formatRecord);

            if (frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE)
            {
                qDebug() << "1.V4L2_FRMSIZE_TYPE_DISCRETE line:" << __LINE__ << " : " << formatRecord.pixfromat << "-  " << frmsize.discrete.width << "X" << frmsize.discrete.height;
            }
            else if (frmsize.type == V4L2_FRMSIZE_TYPE_STEPWISE)
            {
                qDebug() << "2.V4L2_FRMSIZE_TYPE_STEPWISE line:" << __LINE__ << " : " << formatRecord.pixfromat << "-  " << frmsize.discrete.width << "X" << frmsize.discrete.height;
            }

            frmsize.index++;
        }

        fmt_1.index++;
    }
}

// ioctl wrapper that retries when the call is interrupted by a signal (EINTR).
int CMBCamera::xioctl(int fd, int request, void* arg)
{
    int rc = ioctl(fd, request, arg);
    while (rc == -1 && errno == EINTR)
        rc = ioctl(fd, request, arg);
    return rc;
}

// Negotiate the capture configuration with the driver:
//  1. verify V4L2 capture + i/o-method capabilities,
//  2. reset cropping to the default rectangle (errors here are benign),
//  3. pick MJPEG/YUYV and the closest supported resolution,
//  4. set the frame rate and allocate buffers for the i/o method.
// Returns 1 on success, 0 on failure.
int CMBCamera::init_device()
{
    struct v4l2_capability cap;
    struct v4l2_cropcap cropcap;
    struct v4l2_crop crop;
    struct v4l2_format fmt;
    unsigned int min;
    int i_pixret = -1;
    if (-1 == xioctl(fd, VIDIOC_QUERYCAP, &cap))
    {
        if (EINVAL == errno)
        {
            fprintf(stderr, "%s is no V4L2 device\n", dev_name);
            qDebug() << "no V4L2 device!";
            return 0;
        }
        else
        {
            // NOTE(review): QUERYCAP failed with a non-EINVAL errno but the
            // original logged "Success" and carried on — confirm intent.
            qDebug() << "VIDIOC_QUERYCAP Success";
        }
    }

    if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE))
    {
        fprintf(stderr, "%s is no video capture device\n", dev_name);
        qDebug() << "no video capture device";
        return 0;
    }

    switch (io)
    {
    case IO_METHOD_READ:
        if (!(cap.capabilities & V4L2_CAP_READWRITE))
        {
            fprintf(stderr, "%s does not support read i/o\n", dev_name);
            qDebug() << "not support read i/o";
            return 0;
        }
        break;
    case IO_METHOD_MMAP:
    case IO_METHOD_USERPTR:
        if (!(cap.capabilities & V4L2_CAP_STREAMING))
        {
            fprintf(stderr, "%s does not support streaming i/o\n", dev_name);
            qDebug() << "not support streaming i/o";
            return 0;
        }
        break;
    }

    // Reset cropping to the full default rectangle; failures are ignored
    // because many drivers do not support cropping at all.
    CLEAR(cropcap);
    cropcap.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (0 == xioctl(fd, VIDIOC_CROPCAP, &cropcap))
    {
        crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        crop.c = cropcap.defrect;
        if (-1 == xioctl(fd, VIDIOC_S_CROP, &crop))
        {
            switch (errno)
            {
            case EINVAL:
                // cropping not supported
                break;
            default:
                break;
            }
        }
    }

    CLEAR(fmt);
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

    // Pick the best pixel format and snap m_CurrentWidth/Height to the
    // closest resolution the camera actually supports.
    i_pixret = GetCurrentCamera();
    if(i_pixret == 1)
    {
        fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_MJPEG;
    }
    else if(i_pixret == 0)
    {
        fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;
    }
    else {
        qDebug() << "GetCurrentCamera error!";
        return 0;
    }
    fmt.fmt.pix.width = m_CurrentWidth;
    fmt.fmt.pix.height = m_CurrentHeight;
    m_iCameraFormat = fmt.fmt.pix.pixelformat;
    fmt.fmt.pix.field = V4L2_FIELD_INTERLACED;
    if (-1 == xioctl(fd, VIDIOC_S_FMT, &fmt))
        qDebug() << "VIDIOC_S_FMT";

    // Request 30 fps.
    // bug fix: re-initialisation leaked the previous calloc'd block.
    if (setfps == NULL)
        setfps = (struct v4l2_streamparm*)calloc(1, sizeof(struct v4l2_streamparm));
    if(set_camera_streamparm(fd, setfps,  30) != 0)
    {
        qDebug() << "set_camera_streamparm  30FPS fail!";
    }

    // Sanity-clamp the driver-reported stride/size (bytesperline assumes
    // 2 bytes per pixel, i.e. YUYV).
    min = fmt.fmt.pix.width * 2;
    if (fmt.fmt.pix.bytesperline < min)
        fmt.fmt.pix.bytesperline = min;
    min = fmt.fmt.pix.bytesperline * fmt.fmt.pix.height;
    if (fmt.fmt.pix.sizeimage < min)
        fmt.fmt.pix.sizeimage = min;

    struct v4l2_streamparm  getfps;
    CLEAR(getfps); // bug fix: the struct was passed to the driver uninitialized
    getfps.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    int ret = ioctl(fd, VIDIOC_G_PARM, &getfps);
    if(ret)
    {
        qDebug() << "VIDIOC_G_PARM to get fps failed : " << ret;
    }
    else
    {
        qDebug() << "VIDIOC_G_PARM to get fps succeed : FPS = " << getfps.parm.capture.timeperframe.denominator;
    }

    // Allocate capture buffers for the configured i/o method.
    switch (io)
    {
    case IO_METHOD_READ:
        init_read(fmt.fmt.pix.sizeimage);
        break;
    case IO_METHOD_MMAP:
        init_mmap();
        break;
    case IO_METHOD_USERPTR:
        init_userp(fmt.fmt.pix.sizeimage);
        break;
    }

    qDebug() << "init_device() ok\n";
    return 1;
}
//根据传入的摄像头宽高 找到匹配的摄像头支持的宽高
// Pick the supported resolution closest (L1 distance) to the requested
// m_CurrentWidth x m_CurrentHeight, preferring MJPEG over YUYV, and write
// the chosen size back into m_CurrentWidth/m_CurrentHeight.
// Returns 1 for MJPEG, 0 for YUYV, -1 when neither format is available.
// bug fix: the original dereferenced g_vcV4l2Pix[0] unconditionally
// (crash on an empty list) and seeded both minima from element 0 even when
// that element was not of the matching format, which could leave
// iIndexMJPEG/iIndexYUY2 pointing at an entry of the wrong format.
int CMBCamera::GetCurrentCamera()
{
    bool bIsMJPEG = false;
    bool bIsYUY2 = false;
    int iminMJPEG = 0;
    int iminYUY2 = 0;
    int iIndexMJPEG = 0;
    int iIndexYUY2 = 0;
    for(size_t i = 0; i < g_vcV4l2Pix.size(); i++)
    {
        int diff = std::abs((int)g_vcV4l2Pix[i].width - m_CurrentWidth) +
                   std::abs((int)g_vcV4l2Pix[i].height - m_CurrentHeight);
        if(g_vcV4l2Pix[i].pixfromat == V4L2_PIX_FMT_MJPEG)
        {
            // first MJPEG entry always becomes the initial minimum
            if(!bIsMJPEG || diff < iminMJPEG)
            {
                iminMJPEG = diff;
                iIndexMJPEG = (int)i;
            }
            bIsMJPEG = true;
        }
        else if(g_vcV4l2Pix[i].pixfromat == V4L2_PIX_FMT_YUYV)
        {
            if(!bIsYUY2 || diff < iminYUY2)
            {
                iminYUY2 = diff;
                iIndexYUY2 = (int)i;
            }
            bIsYUY2 = true;
        }
    }
    if(bIsMJPEG)
    {
        m_CurrentWidth = g_vcV4l2Pix[iIndexMJPEG].width;
        m_CurrentHeight = g_vcV4l2Pix[iIndexMJPEG].height;
        return 1;
    }
    else if (bIsYUY2)
    {
        m_CurrentWidth = g_vcV4l2Pix[iIndexYUY2].width;
        m_CurrentHeight = g_vcV4l2Pix[iIndexYUY2].height;
        return 0;
    }
    return -1; // also reached when g_vcV4l2Pix is empty
}

void CMBCamera::start_capturing(void)
{
    unsigned int i;
    enum v4l2_buf_type type;
    switch (io)
    {
    case IO_METHOD_READ:

        break;
    case IO_METHOD_MMAP:
        for (i = 0; i < n_buffers; ++i)
        {
            struct v4l2_buffer buf;
            CLEAR(buf);
            buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
            buf.memory = V4L2_MEMORY_MMAP;
            buf.index = i;
            if (-1 == xioctl(fd, VIDIOC_QBUF, &buf))
                qDebug() << "VIDIOC_QBUF";
        }
        type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        if (-1 == xioctl(fd, VIDIOC_STREAMON, &type))
            qDebug() << "VIDIOC_STREAMON";
        break;
    case IO_METHOD_USERPTR:
        for (i = 0; i < n_buffers; ++i)
        {
            struct v4l2_buffer buf;
            CLEAR(buf);
            buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
            buf.memory = V4L2_MEMORY_USERPTR;
            buf.m.userptr = (unsigned long)buffers[i].start;
            buf.length = buffers[i].length;
            if (-1 == xioctl(fd, VIDIOC_QBUF, &buf))
                qDebug() << "VIDIOC_QBUF";
        }

        type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        if (-1 == xioctl(fd, VIDIOC_STREAMON, &type))
            qDebug() << "VIDIOC_STREAMON";
        break;
    }

    qDebug() << "start_capturing() ok\n";
}

// Allocate the single user-space buffer used by read() i/o.
// Aborts the process on allocation failure (matching the v4l2 sample).
void CMBCamera::init_read(unsigned int buffer_size)
{
    buffers = (buffer*)calloc(1, sizeof(*buffers));
    if (buffers == NULL)
    {
        fprintf(stderr, "Out of memory\n");
        qDebug() << "Out of memory Fail!";
        exit(EXIT_FAILURE);
    }
    buffers[0].length = buffer_size;
    buffers[0].start = malloc(buffer_size);
    if (buffers[0].start == NULL)
    {
        fprintf(stderr, "Out of memory\n");
        qDebug() << "Out of memory2 Fail!";
        exit(EXIT_FAILURE);
    }
}

// Request 4 mmap buffers from the driver, query each one's offset/length
// and map it into our address space (recorded in buffers[]/n_buffers).
// Aborts the process if the driver does not support memory mapping.
void CMBCamera::init_mmap(void)
{
    struct v4l2_requestbuffers req;
    CLEAR(req);
    req.count = 4; // request 4 buffers; the driver may grant fewer/more in req.count
    req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    req.memory = V4L2_MEMORY_MMAP;
    if (-1 == xioctl(fd, VIDIOC_REQBUFS, &req))
    {
        if (EINVAL == errno)
        {
            fprintf(stderr,
                "%s does not support "
                "memory mapping\n",
                dev_name);
            qDebug() << "init_mmap xioctl Fail!";
               exit(EXIT_FAILURE);
        }
        else
        {
            qDebug() << "VIDIOC_REQBUFS";
        }
    }
    if (req.count < 2)
    {
        // fewer than 2 buffers makes streaming pointless, but this is
        // only logged — the original exit() is commented out
        fprintf(stderr, "Insufficient buffer memory on %s\n", dev_name);
        // exit(EXIT_FAILURE);
    }
    buffers = (buffer*)calloc(req.count, sizeof(*buffers));
    if (!buffers)
    {
        fprintf(stderr, "Out of memory\n");
        qDebug() << "init_mmap Out of memory3 Fail!";
         exit(EXIT_FAILURE);
    }
    for (n_buffers = 0; n_buffers < req.count; ++n_buffers)
    {
        struct v4l2_buffer buf;
        CLEAR(buf);
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        buf.index = n_buffers;
        // ask the driver where this buffer lives (offset into the device)
        if (-1 == xioctl(fd, VIDIOC_QUERYBUF, &buf))
            qDebug() << "VIDIOC_QUERYBUF";
        buffers[n_buffers].length = buf.length;
        buffers[n_buffers].start = mmap(NULL, buf.length, PROT_READ | PROT_WRITE,
            MAP_SHARED, fd, buf.m.offset);
        // NOTE(review): a MAP_FAILED entry is only logged here and later
        // used as a frame buffer — confirm whether this should be fatal.
        if (MAP_FAILED == buffers[n_buffers].start)
           qDebug() << "mmap";
    }

}

// Allocate user-pointer capture buffers and register the i/o mode with the
// driver.  Aborts the process if user-pointer i/o is unsupported or memory
// runs out (matching the v4l2 sample).
void CMBCamera::init_userp(unsigned int buffer_size)
{
    struct v4l2_requestbuffers req;
    CLEAR(req);
    req.count = 4;
    req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    req.memory = V4L2_MEMORY_USERPTR;
    if (-1 == xioctl(fd, VIDIOC_REQBUFS, &req))
    {
        if (EINVAL == errno)
        {
            fprintf(stderr,
                "%s does not support "
                "user pointer i/o\n",
                dev_name);
            qDebug() << "xioctl does not support user pointer i/o";
               exit(EXIT_FAILURE);
        }
        else
        {
            qDebug() << "VIDIOC_REQBUFS";
        }
    }
    // bug fix: the driver may adjust req.count; the original hard-coded 4
    // for both the allocation and the loop, desynchronising n_buffers from
    // what the driver actually granted.
    buffers = (buffer*)calloc(req.count, sizeof(*buffers));
    if (!buffers)
    {
        fprintf(stderr, "Out of memory\n");
        qDebug() << "Out of memory";
        exit(EXIT_FAILURE);
    }
    for (n_buffers = 0; n_buffers < req.count; ++n_buffers)
    {
        buffers[n_buffers].length = buffer_size;
        buffers[n_buffers].start = malloc(buffer_size);
        if (!buffers[n_buffers].start)
        {
            fprintf(stderr, "Out of memory\n");
            qDebug() << "Out of memory2";
            exit(EXIT_FAILURE);
        }
    }
}

3、alsa 录音


bool IniitAudioInfo()
{
    int err = -1;
   snd_pcm_hw_params_t *hwParam = NULL;
   snd_pcm_uframes_t frames= 32;

   snd_output_t* log;

   err = snd_output_stdio_attach(&log, stdout, 0);

   const char *name = "plughw:2,0";
    //if ((err = snd_pcm_open(&captureHandle, name, SND_PCM_STREAM_CAPTURE, 0)) < 0)
    if ((err = snd_pcm_open(&captureHandle, "default", SND_PCM_STREAM_CAPTURE, 0)) < 0)
    {
        qDebug() << "snd_pcm_open faild!" << snd_strerror(err);
        return false;
    }

    if ((err = snd_pcm_hw_params_malloc(&hwParam)) < 0)
    {
        qDebug() << "snd_pcm_hw_params_malloc faild!" << snd_strerror(err);
        return false;
    }


    if ((err = snd_pcm_hw_params_any(captureHandle, hwParam)) < 0)
    {
        qDebug() << "snd_pcm_hw_params_any faild!" << snd_strerror(err);
        return false;
    }

    if ((err = snd_pcm_hw_params_set_access(captureHandle, hwParam, SND_PCM_ACCESS_RW_INTERLEAVED)) < 0)
    {
        qDebug() << "snd_pcm_hw_params_set_access faild!" << snd_strerror(err);
        return false;
    }

    //16位
    if ((err = snd_pcm_hw_params_set_format(captureHandle, hwParam, SND_PCM_FORMAT_S16_LE)) < 0)
    {
        qDebug() << "snd_pcm_hw_params_set_format faild!" << snd_strerror(err);
        return false;
    }


    //shuang通道
    if ((err = snd_pcm_hw_params_set_channels(captureHandle, hwParam, 2)) < 0)
    {
        qDebug() << "snd_pcm_hw_params_set_channels faild!" << snd_strerror(err);
        return false;
    }

    int dir = 0;;
    unsigned int val = 48000;
    //设置采样率,如果采样率不支持,会用硬件支持最接近的采样率
    snd_pcm_hw_params_set_rate_near(captureHandle, hwParam,&val, &dir);

    unsigned int buffer_time,period_time;
    //获取最大的缓冲时间,buffer_time单位为us,500000us=0.5s
    snd_pcm_hw_params_get_buffer_time_max(hwParam, &buffer_time, 0);
    //printf("max_buffer_time:%d\n",buffer_time);
    if ( buffer_time >500000)
        buffer_time = 500000;

    //设置缓冲时间
    snd_pcm_hw_params_set_buffer_time_near(captureHandle, hwParam, &buffer_time, 0);

    //设置采样周期时间,计算方法38帧/秒,48000/38=1263点/帧
    period_time = 26315;
    snd_pcm_hw_params_set_period_time_near(captureHandle, hwParam, &period_time, 0);

    //让这些参数设置到PCM设备
    snd_pcm_hw_params(captureHandle, hwParam);

    //这个frames并不是指帧率,而是1263采样点数/帧
    snd_pcm_hw_params_get_period_size(hwParam,&frames, &dir);

   // LenTotal = frames * 4 * 2;//两倍申请
   int  LenTotal = 12000;//两倍申请
   if(m_Audiobuffer  == NULL)
   {
       m_Audiobuffer =  (unsigned char *) malloc(LenTotal);
   }
    if(m_Audiobuffer == NULL)
    {
        qDebug() << "snd_pcm_hw_params_set_channels faild!" << snd_strerror(err);
        return false;
    }
   qDebug() << "snd_pcm_open success!";
//    int size = frames * 4;

//    FILE *fp = NULL;
//    int file_len = 0;

//    fp = fopen("record_dump.raw", "w+");
//    if(fp == NULL) {
//        qDebug() << "open file fail!\n";
//        exit(1);
//    }
//    int rc;
//    int i = 1000;
//    frames = 1263;
//    while (i--) {
//        memset(m_Audiobuffer, 0, size);
//        rc = snd_pcm_readi(captureHandle, m_Audiobuffer, frames);
//       // printf("snd_pcm_readi,frames:%d, m_Audiobuffer:%s\n",frames, m_Audiobuffer);
//        if (rc == -EPIPE)
//        {
//            /* EPIPE means overrun */
//            qDebug() << "overrun occurred\n";
//            snd_pcm_prepare(captureHandle);
//            continue ;
//        } else if (rc < 0) {
//            qDebug() << "error from read: " << snd_strerror(rc);
//            continue ;
//        } else if (rc != (int)frames) {
//            qDebug() << "short read, read %d frames\n";
//            continue ;
//        }

//        if (fp) {
//            //file_len = fwrite(m_Audiobuffer, 1, frames, fp);
//            file_len = fwrite(m_Audiobuffer, 1, size, fp);
//           qDebug() << "fwrite:"<< file_len << " bytes \n" ;  //5024字节

//        } else {
//          qDebug() << "%s[%d] file open fail\n" << __func__<<  __LINE__;
//        }
//    }
//    fclose(fp);
//    //close(fd_f);
//    snd_pcm_drain(captureHandle);
//    snd_pcm_close(captureHandle);
//    free(m_Audiobuffer);
   return true;
}

这里注意参数的设置,并且重采样的时候采样率要保持一致 否则可能声音会不清楚

 // Read one chunk of captured PCM; it still needs resampling before encoding.
        snd_pcm_uframes_t frames= 1024;
        sizeReadi = snd_pcm_readi(captureHandle, m_Audiobuffer, frames);
        if(sizeReadi < 0)
        {
            // read error (e.g. overrun) — skip this round
            continue;
        }
        // Stage the raw samples into the FFmpeg input-frame buffer.
        memcpy(m_pinputFramebuf,m_Audiobuffer,m_inputFrameBuffsize);


        // Wall-clock PTS relative to the first audio sample, rescaled into
        // the audio stream's time base.
        m_AudioTimePts = av_rescale_q(av_gettime()-m_first_aud_time, time_base_q, m_pAudio_st->time_base);
        m_inputFrame->pkt_pts = m_inputFrame->pts = m_AudioTimePts;

        // Resample to the encoder's format (FLTP / stereo / 48 kHz).
        AVFrame *pOutFrame = NULL;
        ret = AudioConvert(m_inputFrame, AV_SAMPLE_FMT_FLTP, 2, 48000, &pOutFrame);
        if(ret != 0)
        {
            qDebug() << "AudioConvert Fail!";
            continue;
        }

// ---------------- resampling implementation ----------------
// Resample one audio frame to the requested format/channels/rate using a
// lazily-created, cached SwrContext (m_pSwrCtx).
// Returns 0 on success and stores the converted frame (caller frees) in
// *ppOutFrame; negative on failure.
int32_t CManager::AudioConvert(
    const AVFrame* pInFrame,      // input audio frame
    AVSampleFormat eOutSmplFmt,   // output sample format
    int32_t        nOutChannels,  // output channel count (1 = mono, else stereo)
    int32_t        nOutSmplRate,  // output sample rate
    AVFrame**      ppOutFrame)    // [out] converted audio frame
{
    AVFrame*           pOutFrame = nullptr;

    // Lazily create the resampler the first time through; it is reused for
    // every subsequent frame (the input format is assumed not to change).
    int64_t nInChnlLayout = av_get_default_channel_layout(pInFrame->channels);
    int64_t nOutChnlLayout = (nOutChannels == 1) ? AV_CH_LAYOUT_MONO : AV_CH_LAYOUT_STEREO;

    if(m_pSwrCtx == NULL)
    {
        m_pSwrCtx = swr_alloc();
        if (m_pSwrCtx == nullptr)
        {
            qDebug() << "swr_alloc Fail";
            return -1;
        }
        swr_alloc_set_opts(m_pSwrCtx,
            nOutChnlLayout, eOutSmplFmt, nOutSmplRate, nInChnlLayout,
            (enum AVSampleFormat)(pInFrame->format), pInFrame->sample_rate,
            0, nullptr);

        swr_init(m_pSwrCtx);
    }

    // Worst-case output sample count, used to size the output buffer.
    int64_t nCvtBufSamples = av_rescale_rnd(pInFrame->nb_samples, nOutSmplRate, pInFrame->sample_rate, AV_ROUND_UP);

    pOutFrame = av_frame_alloc();
    if (pOutFrame == nullptr) // bug fix: the allocation result was not checked
    {
        qDebug() << "av_frame_alloc Fail";
        return -2;
    }
    pOutFrame->format = eOutSmplFmt;
    pOutFrame->nb_samples = (int)nCvtBufSamples;
    pOutFrame->channel_layout = (uint64_t)nOutChnlLayout;
    pOutFrame->sample_rate = nOutSmplRate; // bug fix: sample_rate was never set on the output frame
    int res = av_frame_get_buffer(pOutFrame, 0); // allocate the sample buffers
    if (res < 0)
    {
        qDebug() << "av_frame_get_buffer Fail";
        av_frame_free(&pOutFrame);
        return -2;
    }

    // Convert; returns the number of samples actually produced.
    int nCvtedSamples = swr_convert(m_pSwrCtx,
        const_cast<uint8_t**>(pOutFrame->data),
        (int)nCvtBufSamples,
        const_cast<const uint8_t**>(pInFrame->data),
        pInFrame->nb_samples);
    if (nCvtedSamples <= 0)
    {
        qDebug() << "nCvtedSamples <= 0";
        av_frame_free(&pOutFrame);
        return -3;
    }
    pOutFrame->nb_samples = nCvtedSamples;
    pOutFrame->pts = pInFrame->pts;         // carry the timestamps through unchanged
    pOutFrame->pkt_pts = pInFrame->pkt_pts; // NOTE: pkt_pts is deprecated in newer FFmpeg

    (*ppOutFrame) = pOutFrame;
    return 0;
}

4、录音和摄像头视频拼接

// Stitch the camera image into the right-hand edge of the composite
// YUV420P frame (m_pFrameYUV): full-resolution Y plane first, then the
// quarter-size U and V planes.  iUPos/iVPos are presumably the U/V plane
// offsets inside m_pVideoYuv420Buffer and the iHalf*/i_HalfWidht values
// half-width/half-height of the respective images (4:2:0 subsampling) —
// they are declared outside this snippet, TODO confirm.
int nYIndex = 0;
int nUVIndex = 0;
for (int i = 0; i < m_nCameraHeigth; i++)
{
   // copy one camera Y row to the right edge of the composite Y plane
   memcpy(m_pFrameYUV->data[0] + (i)*m_nWidth + (m_nWidth - m_nCameraWidth), m_pVideoYuv420Buffer+ nYIndex*m_nCameraWidth, m_nCameraWidth);
   nYIndex++;
}
for (int i = 0; i < iHalfCameraHeight; i++)
{
   // U and V rows are half width / half height
   memcpy(m_pFrameYUV->data[1] + (i)*(i_HalfWidht) + (i_HalfWidht - iHalfCameraWidht), m_pVideoYuv420Buffer + iUPos + nUVIndex*iHalfCameraWidht, iHalfCameraWidht);
   memcpy(m_pFrameYUV->data[2] + (i)*(i_HalfWidht) + (i_HalfWidht - iHalfCameraWidht),  m_pVideoYuv420Buffer + iVPos + nUVIndex*iHalfCameraWidht, iHalfCameraWidht);
   nUVIndex++;
}
  • 0
    点赞
  • 6
    收藏
    觉得还不错? 一键收藏
  • 0
    评论

“相关推荐”对你有帮助么?

  • 非常没帮助
  • 没帮助
  • 一般
  • 有帮助
  • 非常有帮助
提交
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值