#include "stdafx.h"
#include "kinect.h"
#include <iostream>
#include <opencv2\opencv.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <time.h>
#include <opencv2/core/core.hpp>
#include <thread>
#include <mutex>
//#include "PracticalSocket.h"
#include <queue>
using namespace cv;
using namespace std;
// Function template: safely release a COM interface pointer.
// Calls Release() on the pointed-to interface (decrementing its COM
// reference count) and nulls the caller's pointer so it cannot be
// double-released or used after release.
// @param pInterfaceToRelease  reference to the interface pointer; set to
//                             nullptr after release. Safe to call with a
//                             null pointer (no-op).
template<class Interface>
inline void SafeRelease(Interface *& pInterfaceToRelease)
{
    if (pInterfaceToRelease != nullptr)  // nullptr over NULL macro (C++11)
    {
        pInterfaceToRelease->Release();
        pInterfaceToRelease = nullptr;   // prevent dangling / double release
    }
}
int main()
{
setUseOptimized(true);
int depthWidth = 512;
int depthHeight = 424;
HRESULT hResult = S_OK;
//ColorSpacePoint* pointsCloud = nullptr;
IKinectSensor* pSensor;
ICoordinateMapper* pCoordinateMapper;
hResult = GetDefaultKinectSensor(&pSensor);
if (FAILED(hResult)) { cerr << "Error:GetDefaultKinectSensor" << endl; return -1; }
IMultiSourceFrameReader* m_pMultiFrameReader;
// IBodyFrameSource* m_pBodyFrameSource;
//IBodyFrameReader* m_pBodyFrameReader;
hResult = pSensor->Open();
//pSensor->get_BodyFrameSource(&m_pBodyFrameSource);
// 获取多数据源到读取器
pSensor->OpenMultiSourceFrameReader(
FrameSourceTypes::FrameSourceTypes_Color |
FrameSourceTypes::FrameSourceTypes_Infrared |
FrameSourceTypes::FrameSourceTypes_Depth,
&m_pMultiFrameReader);
if (FAILED(hResult)) { cerr << "Error:IKinect
Kinect的RGB和Depth数据融合
最新推荐文章于 2022-11-08 18:54:44 发布
本文详细探讨了Kinect传感器的RGB和Depth数据融合过程,揭示了如何将彩色图像与深度信息有效地结合,用于3D重建、物体识别等领域。通过理解Kinect的工作原理,读者将能够更好地应用RGB-D数据在实际项目中。
摘要由CSDN通过智能技术生成