kinect2.0

转载自 https://www.cnblogs.com/leihui/p/6013386.html

1.获取图像深度数据:

  基于Depth Basic -D2D Example修改


HRESULT CMotionRecognition::GetDepthImage(){

    // Acquire the latest depth frame from the reader, cache the frame
    // dimensions into the members nDepthWidth/nDepthHeight, and hand the
    // raw 16-bit depth buffer to ConvertMat_depth for copying.
    // Returns E_FAIL when no reader exists; otherwise the HRESULT of the
    // last SDK call attempted.
    if (!m_pDepthFrameReader)
    {
        return E_FAIL;
    }

    IDepthFrame    * pDepthFrame = nullptr;

    HRESULT hr = m_pDepthFrameReader->AcquireLatestFrame(&pDepthFrame);

    if (SUCCEEDED(hr)){
        IFrameDescription * pFrameDescription = nullptr;
        USHORT nDepthMinReliableDistance = 0;
        USHORT nDepthMaxDistance = 0;
        UINT16 *pBuffer = nullptr;
        UINT nBufferSize = 0;

        // NOTE: the original wrapped this first call in a second
        // `if (SUCCEEDED(hr))` that was always true here; removed.
        hr = pDepthFrame->get_FrameDescription(&pFrameDescription);

        if (SUCCEEDED(hr))
        {
            hr = pFrameDescription->get_Width(&nDepthWidth);
        }

        if (SUCCEEDED(hr))
        {
            hr = pFrameDescription->get_Height(&nDepthHeight);
        }

        if (SUCCEEDED(hr))
        {
            hr = pDepthFrame->get_DepthMinReliableDistance(&nDepthMinReliableDistance);
        }

        if (SUCCEEDED(hr))
        {
            // Clamp to the sensor's reliable far limit. (The original
            // assigned USHRT_MAX first — a dead store, since this call
            // immediately overwrote it. To visualise the full, less
            // reliable range instead, replace this call with
            // `nDepthMaxDistance = USHRT_MAX;`.)
            hr = pDepthFrame->get_DepthMaxReliableDistance(&nDepthMaxDistance);
        }

        if (SUCCEEDED(hr))
        {
            // AccessUnderlyingBuffer exposes the frame's own buffer without
            // a copy; it is only valid until pDepthFrame is released, so
            // ConvertMat_depth must copy the data out before we return.
            hr = pDepthFrame->AccessUnderlyingBuffer(&nBufferSize, &pBuffer);
        }

        if (SUCCEEDED(hr))
        {
            ConvertMat_depth(pBuffer, nDepthMinReliableDistance, nDepthMaxDistance);
        }

        SafeRelease(pFrameDescription);
    }

    SafeRelease(pDepthFrame);

    return hr;
}

2.获取图像颜色数据:

  基于Color Basic-D2D Example修改

HRESULT CMotionRecognition::GetColorImage(){

    // Pull the newest color frame, obtain a BGRA pixel buffer (either the
    // frame's own buffer or a conversion into m_pColorRGBX) and forward it
    // to ConvertMat_color. Also caches nColorWidth/nColorHeight.
    // Returns E_FAIL when no reader or conversion buffer is available.
    if (!m_pColorFrameReader)
    {
        return E_FAIL;
    }

    IColorFrame * pFrame = nullptr;

    HRESULT hr = m_pColorFrameReader->AcquireLatestFrame(&pFrame);

    if (SUCCEEDED(hr))
    {
        INT64 relativeTime = 0;
        IFrameDescription * pDescription = nullptr;
        ColorImageFormat rawFormat = ColorImageFormat_None;
        UINT pixelBytes = 0;
        RGBQUAD * pPixels = nullptr;

        hr = pFrame->get_RelativeTime(&relativeTime);

        if (SUCCEEDED(hr))
            hr = pFrame->get_FrameDescription(&pDescription);

        if (SUCCEEDED(hr))
            hr = pDescription->get_Width(&nColorWidth);

        if (SUCCEEDED(hr))
            hr = pDescription->get_Height(&nColorHeight);

        if (SUCCEEDED(hr))
            hr = pFrame->get_RawColorImageFormat(&rawFormat);

        if (SUCCEEDED(hr))
        {
            if (rawFormat == ColorImageFormat_Bgra)
            {
                // Frame is already BGRA: borrow its buffer, no copy needed.
                hr = pFrame->AccessRawUnderlyingBuffer(&pixelBytes, reinterpret_cast<BYTE**>(&pPixels));
            }
            else if (m_pColorRGBX)
            {
                // Convert the raw format into the preallocated member buffer.
                pPixels = m_pColorRGBX;
                pixelBytes = nColorWidth * nColorHeight * sizeof(RGBQUAD);
                hr = pFrame->CopyConvertedFrameDataToArray(pixelBytes, reinterpret_cast<BYTE*>(pPixels), ColorImageFormat_Bgra);
            }
            else
            {
                // No conversion target available.
                hr = E_FAIL;
            }
        }

        if (SUCCEEDED(hr))
            ConvertMat_color(pPixels, nColorWidth, nColorHeight);

        SafeRelease(pDescription);
    }

    SafeRelease(pFrame);

    return hr;
}

3.处理图像数据函数

1/2中各有一个ConvertMat_*函数,它负责处理获取的图像数据(深度数据和颜色数据),因为点云的转换需要深度数据和图像颜色数据,注意在这里还可以创建OpenCV的Mat.

但这里只给出将获取的数据转存到pDepthBuffer(类中的一个成员)中的案例.

ConvertMat_depth()

void CMotionRecognition::ConvertMat_depth(const UINT16* _pBuffer, USHORT nMinDepth, USHORT nMaxDepth)
{
    const UINT16
        * pBuffer   = _pBuffer,
        * pBufferEnd = _pBuffer + (nDepthWidth * nDepthHeight);

    UINT16 * pDepthBufferTmp    = pDepthBuffer;

    while (pBuffer < pBufferEnd)
    {
        *pDepthBufferTmp = *pBuffer;

        ++pDepthBufferTmp;
        ++pBuffer;
    }

}
ConvertMat_color()
void CMotionRecognition::ConvertMat_color(const RGBQUAD* _pBuffer, int nWidth, int nHeight)
{
    // Copy nWidth * nHeight BGRA pixels from the frame buffer into the
    // member buffer m_pColorRGBX (assumed preallocated large enough —
    // TODO confirm at the allocation site, not visible here).
    const int nPixels = nWidth * nHeight;

    for (int i = 0; i < nPixels; ++i)
    {
        m_pColorRGBX[i] = _pBuffer[i];
    }
}

4.合成为点云:

  基于CoordinateMappingBasics-D2D Example修改

osg::ref_ptr<osg::Node> CMotionRecognition::AssembleAsPointCloud(float _angle, int _axisX, int _axisY, int _axisZ)
{
    // Fuse the cached depth buffer (pDepthBuffer) with the cached color
    // buffer (m_pColorRGBX) into a colored OSG point cloud: each valid
    // depth pixel is mapped to camera space for its 3-D position and to
    // color space for its BGRA color. Returns nullptr on failure.
    // The _angle/_axis* parameters are part of the interface but unused
    // in this implementation.
    if (!m_pKinectSensor)
    {
        // BUG FIX: the original returned E_FAIL (an HRESULT) from a
        // function whose return type is osg::ref_ptr<osg::Node>.
        return nullptr;
    }

    // Vertex positions (camera space, meters) and per-vertex colors.
    osg::ref_ptr<osg::Vec3Array> point3dVec = new osg::Vec3Array();
    osg::ref_ptr<osg::Vec4Array> colorVec = new osg::Vec4Array();

    // Local — renamed from the misleading member-style `m_pCoordinateMapper`.
    ICoordinateMapper * pCoordinateMapper = nullptr;

    HRESULT hr = m_pKinectSensor->get_CoordinateMapper(&pCoordinateMapper);
    if (FAILED(hr) || !pCoordinateMapper)
    {
        // BUG FIX: the original never checked hr before dereferencing.
        return nullptr;
    }

    for (int y = 0; y < nDepthHeight; ++y)
    {
        for (int x = 0; x < nDepthWidth; ++x)
        {
            DepthSpacePoint depthSpacePoint = { static_cast<float>(x), static_cast<float>(y) };
            UINT16 currDepth = pDepthBuffer[y * nDepthWidth + x];

            // Map the depth pixel into color space to find its color.
            ColorSpacePoint colorSpacePoint = { 0.0f, 0.0f };
            pCoordinateMapper->MapDepthPointToColorSpace(depthSpacePoint, currDepth, &colorSpacePoint);
            int colorX = static_cast<int>(std::floor(colorSpacePoint.X + 0.5f));
            int colorY = static_cast<int>(std::floor(colorSpacePoint.Y + 0.5f));

            // Skip pixels that fall outside the color image; this keeps
            // colorVec and point3dVec the same length (one color per
            // vertex). The original evaluated this same condition twice.
            if (colorX < 0 || colorX >= nColorWidth || colorY < 0 || colorY >= nColorHeight)
            {
                continue;
            }

            RGBQUAD color = m_pColorRGBX[colorY * nColorWidth + colorX];
            colorVec->push_back(osg::Vec4f(color.rgbBlue / 255.0f, color.rgbGreen / 255.0f, color.rgbRed / 255.0f, 1.0f));

            // Map the same depth pixel into camera space for its position.
            CameraSpacePoint cameraSpacePoint = { 0.0f, 0.0f, 0.0f };
            pCoordinateMapper->MapDepthPointToCameraSpace(depthSpacePoint, currDepth, &cameraSpacePoint);
            point3dVec->push_back(osg::Vec3(cameraSpacePoint.X, cameraSpacePoint.Y, cameraSpacePoint.Z));
        }
    }

    // LEAK FIX: get_CoordinateMapper hands back an AddRef'ed COM interface;
    // the original never released it.
    SafeRelease(pCoordinateMapper);

    // Leaf node holding the drawable geometry.
    osg::ref_ptr<osg::Geode> geode = new osg::Geode();
    osg::ref_ptr<osg::Geometry> geom = new osg::Geometry();

    geom->setVertexArray(point3dVec.get());
    geom->setColorArray(colorVec.get());
    // One color per vertex.
    geom->setColorBinding(osg::Geometry::BIND_PER_VERTEX);
    // Render the vertex array as raw points.
    geom->addPrimitiveSet(new osg::DrawArrays(osg::PrimitiveSet::POINTS, 0, point3dVec->size()));
    geode->addDrawable(geom.get());

    return geode;
}







评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值