Reading Color, Depth, and Skeleton Data from the Kinect

Configuring the Kinect

1. Basic Setup

To connect to a Kinect you first need to install the official SDK packages:
https://pan.baidu.com/s/10hA-hDb8S-WivEhCtpe45w
Extraction code: 5ncv
Help yourself if you need them~
The archive contains roughly the following:
KinectDeveloperToolkit-v1.8.0-Setup + KinectSDK-v1.8-Setup + OpenCV-2.3.1-win-superpack (required)
In the VS2010 project, the C++ directories need to be set up.
First, the OpenCV configuration:
(1) VC++ Directories -> Include Directories: C:\opencv\opencv\build\include\opencv2, C:\opencv\opencv\build\include\opencv, and C:\opencv\opencv\build\include
(2) VC++ Directories -> Library Directories: C:\opencv\opencv\build\x64\vc10\lib (I'm on a 64-bit system; note this is the lib folder holding the .lib files, while the matching bin folder with the DLLs must be on PATH at run time)
(3) Linker -> Input -> Additional Dependencies (231 is the OpenCV version number, i.e. 2.3.1):
opencv_calib3d231d.lib
opencv_contrib231d.lib
opencv_core231d.lib
opencv_features2d231d.lib
opencv_flann231d.lib
opencv_gpu231d.lib
opencv_highgui231d.lib
opencv_imgproc231d.lib
opencv_legacy231d.lib
opencv_ml231d.lib
opencv_ts231d.lib
opencv_video231d.lib
(4) Release configuration (Additional Dependencies) [not used here]:
opencv_contrib231.lib
opencv_core231.lib
opencv_features2d231.lib
opencv_flann231.lib
opencv_gpu231.lib
opencv_highgui231.lib
opencv_imgproc231.lib
opencv_legacy231.lib
opencv_ml231.lib
opencv_objdetect231.lib
opencv_ts231.lib
opencv_video231.lib
Then the Kinect configuration:
(1) Add C:\Program Files\Microsoft SDKs\Kinect\v1.6\inc to the include directories;
(2) Add C:\Program Files\Microsoft SDKs\Kinect\v1.6\lib\x86 to the library directories;
(3) Add Kinect10.lib to Linker -> Input -> Additional Dependencies.
Header includes (as far as I can remember, haha |ू・ω・` )) — note XnCppWrapper.h comes from OpenNI and, as far as I can tell, is only needed for the XnPoint3D type that shows up later:
#include <XnCppWrapper.h>
#include "opencv/cv.h"
#include "opencv/highgui.h"
#include "NuiApi.h"
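
Incidentally, if you'd rather not retype the Linker -> Input list for every build configuration, the same import libraries can be pulled in from source with #pragma comment. A minimal sketch, assuming the Debug library names listed above:

// Alternative to Linker -> Input -> Additional Dependencies (MSVC only).
// The include/library directories above still need to be configured.
#pragma comment(lib, "Kinect10.lib")
#pragma comment(lib, "opencv_core231d.lib")
#pragma comment(lib, "opencv_highgui231d.lib")
#pragma comment(lib, "opencv_imgproc231d.lib")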

2. Code Modules

OK, all the groundwork is done, so let's write the code.

My idea was to hook the Kinect straight up to MFC controls for simple color-frame and skeleton-frame capture.
So, in mfcCamera.cpp:
1. Opening the color camera

void mfcCamera::OnBnClickedopencamera()
{
	// TODO: Add your control notification handler code here
	kinectable = true;
	cv::Mat image;
	image.create(480, 640, CV_8UC3);
	// 1. Initialize NUI
	HRESULT hr = NuiInitialize(NUI_INITIALIZE_FLAG_USES_COLOR);
	if (FAILED(hr))
	{
		AfxMessageBox(_T("Failed to open the Kinect"));
		return;
	}
	AfxMessageBox(_T("Kinect opened successfully"));
	pEdit.SetWindowText(_T("Open this RGB_Camera"));
	// 2. Define the event handle
	// Create the signal event that tells us when the next frame can be read.
	// A Windows event object has two states, signaled and non-signaled;
	// we use it to wait for new data.
	HANDLE nextColorFrameEvent = CreateEvent(NULL, TRUE, FALSE, NULL);
	HANDLE colorStreamHandle = NULL; // handle to the color stream, used to pull the data

	// 3. Open the Kinect's color stream and keep its handle in colorStreamHandle for later reads
	hr = NuiImageStreamOpen(NUI_IMAGE_TYPE_COLOR, NUI_IMAGE_RESOLUTION_640x480,
		0, 2, nextColorFrameEvent, &colorStreamHandle);
	if (FAILED(hr)) // check whether the stream opened correctly
	{
		AfxMessageBox(_T("Failed to open the color stream"));
		NuiShutdown();
		return;
	}
	//cv::namedWindow("colorImage", CV_WINDOW_AUTOSIZE);
	CRect rect;
	CWnd *pWnd = GetDlgItem(IDC_camera);
	pWnd->GetClientRect(&rect); // get the control size; the image is shown inside the static control
	int x = rect.Width();
	int y = rect.Height();
	// 4. Start reading color frames
	while (flag)
	{
		const NUI_IMAGE_FRAME *pImageFrame = NULL;

		// 4.1 Block until new data arrives
		if (WaitForSingleObject(nextColorFrameEvent, INFINITE) == 0)
		{
			// 4.2 Fetch the frame from the stream handle we opened; the data address is stored in pImageFrame
			hr = NuiImageStreamGetNextFrame(colorStreamHandle, 0, &pImageFrame);
			if (FAILED(hr))
			{
				AfxMessageBox(_T("Could not get a color frame"));
				NuiShutdown();
				return;
			}

			INuiFrameTexture *pTexture = pImageFrame->pFrameTexture; // object holding the frame data, similar to a single-level Direct3D texture (no mip-mapping)
			NUI_LOCKED_RECT LockedRect; // color pixels are 32-bit BGRA; depth pixels are 16-bit

			// 4.3 Lock the frame into LockedRect, which carries two fields:
			// Pitch (bytes per row) and pBits (address of the first byte).
			// Locking keeps the Kinect from modifying the buffer while we read it.
			pTexture->LockRect(0, &LockedRect, NULL, 0); // note that the color and depth buffers have different layouts
			// 4.4 Check that the data is valid
			if (LockedRect.Pitch != 0)
			{
				// 4.5 Convert the data into an OpenCV Mat
				for (int i = 0; i < image.rows; i++)
				{
					uchar *ptr = image.ptr<uchar>(i);  // pointer to row i

					// each byte is one color channel, so plain uchar works
					uchar *pBuffer = (uchar*)(LockedRect.pBits) + i * LockedRect.Pitch;
					for (int j = 0; j < image.cols; j++)
					{
						ptr[3 * j] = pBuffer[4 * j];      // source pixels are 4 bytes: 0-1-2 are B-G-R, the 4th is currently unused
						ptr[3 * j + 1] = pBuffer[4 * j + 1];
						ptr[3 * j + 2] = pBuffer[4 * j + 2];
					}
				}
				cv::Mat dst;
				cv::resize(image, dst, cv::Size(x, y), 0, 0, 1);
				cv::imshow("view", dst);
				cv::waitKey(30);
				//imshow("colorImage", dst); // show the image
			}
			else
			{
				AfxMessageBox(_T("Frame data is invalid"));
			}

			// 5. We are done with this frame, so unlock it
			pTexture->UnlockRect(0);
			// 6. Release the frame to make room for the next one
			NuiImageStreamReleaseFrame(colorStreamHandle, pImageFrame);
		}

		if (cvWaitKey(20) == 27)
			break;
	}
	// 7. Shut down the NUI connection
	NuiShutdown();
}
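
One thing the handler above glosses over: cv::imshow("view", dst) only shows up inside the dialog if the HighGUI window named "view" has been created and re-parented into the IDC_camera static control beforehand. A sketch of the usual re-parenting trick, run once (e.g., in OnInitDialog); this is my reconstruction, not code from the original project:

// Make the HighGUI window "view" render inside the IDC_camera static control.
cv::namedWindow("view", CV_WINDOW_AUTOSIZE);
HWND hView = (HWND)cvGetWindowHandle("view");        // HighGUI's child window
HWND hFrame = ::GetParent(hView);                    // HighGUI's own frame window
::SetParent(hView, GetDlgItem(IDC_camera)->m_hWnd);  // move it into the control
::ShowWindow(hFrame, SW_HIDE);                       // hide the now-empty frame

Also worth noting: the per-pixel copy in step 4.5 is just a BGRA-to-BGR conversion, so wrapping LockedRect.pBits in a cv::Mat and calling cv::cvtColor with CV_BGRA2BGR should produce the same image in one call.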

2. Opening the depth camera

void mfcCamera::OnBnClickedopendepth()
{
	// TODO: Add your control notification handler code here
	IplImage *DepthImg = cvCreateImage(cvSize(640, 480), IPL_DEPTH_8U, 1);
	cv::Mat image;
	HRESULT hr = NuiInitialize(NUI_INITIALIZE_FLAG_USES_DEPTH);
	if (FAILED(hr))
	{
		AfxMessageBox(_T("Failed to open the Kinect"));
		return;
	}
	AfxMessageBox(_T("Kinect opened successfully"));
	pEdit.SetWindowText(_T("Open this Depth_Camera"));
	CRect rect;
	CWnd *pWnd = GetDlgItem(IDC_opendepth);
	pWnd->GetClientRect(&rect); // get the control size
	int x = rect.Width();
	int y = rect.Height();
	HANDLE nextColorFrameEvent = CreateEvent(NULL, TRUE, FALSE, NULL);
	HANDLE depthStreamHandle = NULL;

	hr = NuiImageStreamOpen(NUI_IMAGE_TYPE_DEPTH, NUI_IMAGE_RESOLUTION_640x480, 0, 2, nextColorFrameEvent, &depthStreamHandle);
	if (FAILED(hr))
	{
		NuiShutdown();
		return;
	}

	int a = 1;
	CString DepthImgName;
	while (1)
	{
		const NUI_IMAGE_FRAME *pImageFrame = NULL;
		if (WaitForSingleObject(nextColorFrameEvent, INFINITE) == 0)
		{
			hr = NuiImageStreamGetNextFrame(depthStreamHandle, 0, &pImageFrame);
			if (hr != S_OK)
			{
				NuiShutdown();
				return;
			}

			INuiFrameTexture *pTexture = pImageFrame->pFrameTexture;
			NUI_LOCKED_RECT LockedRect;

			pTexture->LockRect(0, &LockedRect, NULL, 0);
			////// adjust the saved depth map so it matches the stereo rig //////
			if (LockedRect.Pitch != 0)
			{
				for (int j = 0; j < DepthImg->height; j++)
				{
					uchar *ptr = (uchar*)(DepthImg->imageData + j * DepthImg->widthStep);
					uchar *pBufferRun = (uchar*)(LockedRect.pBits + j * LockedRect.Pitch);
					USHORT *pBuffer = (USHORT*)pBufferRun;
					for (int i = 0; i < DepthImg->width; i++)
					{
						ptr[DepthImg->width - 1 - i] = 255 - (uchar)(256 * pBuffer[i] / 0x0fff);  // normalize the raw value directly (mirrored horizontally)
					}
				}
				cvShowImage("depthImage", DepthImg);
			}
			//////

			pTexture->UnlockRect(0);

			// Sleep(2000); // delay in milliseconds; this would pause for two seconds
			NuiImageStreamReleaseFrame(depthStreamHandle, pImageFrame);
		}

		if (cvWaitKey(20) == 27)
		{
			break;
		}

	/*	if (sign == 1)
		{
			break;
		}

		DepthImgName.Format("深度图像\\DepthImg%d.jpg", a);
		if (sign == 2)
		{
			cvSaveImage(DepthImgName, DepthImg);
			a++;
			sign = 0;
		}*/
	}

	NuiShutdown();
	cvDestroyAllWindows();
	cvReleaseImage(&DepthImg);
}
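
Two caveats on the normalization loop above: 256 * pBuffer[i] / 0x0fff overflows the uchar cast for raw values above 0x0fff, and the manual loop can be replaced by wrapping the locked buffer in a cv::Mat. A sketch under the same assumption the original code makes (16-bit 640x480 pixels with at most 12 significant bits):

// Drop-in replacement for the inner loops, using OpenCV's saturating convertTo.
cv::Mat raw16(480, 640, CV_16UC1, LockedRect.pBits, (size_t)LockedRect.Pitch);
cv::Mat gray8;
raw16.convertTo(gray8, CV_8UC1, 255.0 / 0x0fff);  // saturates instead of overflowing
cv::flip(gray8, gray8, 1);                        // mirror, like the width-1-i indexing
gray8 = 255 - gray8;                              // invert so near objects appear bright
cv::imshow("depthImage", gray8);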

3. The timer:

void mfcCamera::OnTimer(UINT_PTR nIDEvent)
{
	// TODO: Add your message handler code here and/or call default
	if (1 == nIDEvent) {
		OnBnClickedopencamera();  // capture from the color camera
	}
	else if (2 == nIDEvent) {
		OnBnClickedopendepth();   // capture from the depth camera
	}

	CDialogEx::OnTimer(nIDEvent);
}
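
For these timer IDs to fire, something has to start them, typically a SetTimer call in OnInitDialog or a button handler. A sketch (the 33 ms interval is my assumption, roughly 30 fps); note that since each capture handler runs its own loop until it is stopped, a single tick per camera is really all that's needed:

// Hypothetical start/stop calls for the two timers handled in OnTimer above.
SetTimer(1, 33, NULL);   // ID 1: kick off color capture
SetTimer(2, 33, NULL);   // ID 2: kick off depth capture
// ...
KillTimer(1);
KillTimer(2);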

4. Camera shutdown button (it also applies the tilt angle before shutting down):

void mfcCamera::OnBnClickedContorlcamera()
{
	// TODO: Add your control notification handler code here
	if (S_OK == NuiInitialize(NUI_INITIALIZE_FLAG_USES_COLOR)) // initialize the Kinect sensor
	{
		// NuiCameraElevationGetAngle(&m_NowDegree);
		NuiCameraElevationSetAngle(m_Num); // set the motor tilt angle

		NuiShutdown(); // shut the sensor down
	}
}
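
The tilt motor only accepts a limited range, exposed by the SDK as NUI_CAMERA_ELEVATION_MINIMUM/MAXIMUM (±27 degrees). Clamping m_Num before the call avoids an error return; a sketch:

// Keep the requested angle within the motor's supported range before applying it.
LONG angle = m_Num;
if (angle > NUI_CAMERA_ELEVATION_MAXIMUM) angle = NUI_CAMERA_ELEVATION_MAXIMUM;
if (angle < NUI_CAMERA_ELEVATION_MINIMUM) angle = NUI_CAMERA_ELEVATION_MINIMUM;
NuiCameraElevationSetAngle(angle);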

5. Grabbing skeleton frames on a worker thread and showing them in MFC

BOOL kinect_sk::InitInstance()
{
	// TODO: perform any per-thread initialization here

	Mat skeletonImage;
	skeletonImage.create(240, 320, CV_8UC3);
	CvPoint skeletonPoint[NUI_SKELETON_COUNT][NUI_SKELETON_POSITION_COUNT] = {cvPoint(0, 0)};
	bool tracked[NUI_SKELETON_COUNT] = {FALSE};

	// 1. Initialize NUI; note the USES_SKELETON flag this time
	HRESULT hr = NuiInitialize(NUI_INITIALIZE_FLAG_USES_SKELETON);
	if (FAILED(hr))
	{
		cout << "NuiInitialize failed" << endl;
		return FALSE;  // init failed; end the thread
	}

	// 2. Define the skeleton signal event handle
	HANDLE skeletonEvent = CreateEvent(NULL, TRUE, FALSE, NULL);

	// 3. Enable skeleton tracking
	hr = NuiSkeletonTrackingEnable(skeletonEvent, NUI_SKELETON_TRACKING_FLAG_ENABLE_SEATED_SUPPORT);
	if (FAILED(hr))
	{
		cout << "Could not enable skeleton tracking" << endl;
		NuiShutdown();
		return FALSE;  // init failed; end the thread
	}
	namedWindow("skeletonImage", CV_WINDOW_AUTOSIZE);

	// 4. Start reading skeleton tracking data
	while (1)
	{
		NUI_SKELETON_FRAME skeletonFrame = {0};  // the skeleton frame
		bool bFoundSkeleton = false;

		// 4.1 Block until new data arrives
		if (WaitForSingleObject(skeletonEvent, INFINITE) == 0)
		{
			// 4.2 Fetch the frame from the stream we opened; the data is written into skeletonFrame
			hr = NuiSkeletonGetNextFrame(0, &skeletonFrame);
			if (SUCCEEDED(hr))
			{
				// NUI_SKELETON_COUNT is the number of skeleton slots (i.e., people that can be detected)
				for (int i = 0; i < NUI_SKELETON_COUNT; i++)
				{
					NUI_SKELETON_TRACKING_STATE trackingState = skeletonFrame.SkeletonData[i].eTrackingState;
					// 4.3 The Kinect can detect up to six people but only tracks two full skeletons;
					// check each slot (which may be empty rather than a person) to see whether it is tracked
					if (trackingState == NUI_SKELETON_TRACKED)
					{
						bFoundSkeleton = true;
					}
				}
			}

			if (!bFoundSkeleton)
			{
				continue;
			}

			// 4.4 Smooth the skeleton frame to remove jitter
			NuiTransformSmooth(&skeletonFrame, NULL);
			skeletonImage.setTo(0);

			for (int i = 0; i < NUI_SKELETON_COUNT; i++)
			{
				// Show a skeleton only if it is tracked and the shoulder-center (neck) joint is at least inferred
				XnPoint3D skelPointsIn[24], skelPointsOut[24]; // (unused here)
				if (skeletonFrame.SkeletonData[i].eTrackingState == NUI_SKELETON_TRACKED &&
					skeletonFrame.SkeletonData[i].eSkeletonPositionTrackingState[NUI_SKELETON_POSITION_SHOULDER_CENTER] != NUI_SKELETON_POSITION_NOT_TRACKED)
				{
					float fx, fy;
					// Take every tracked joint position and map it into depth-image coordinates,
					// since the joints are drawn over the depth image.
					// NUI_SKELETON_POSITION_COUNT is the number of joints per skeleton (20);
					// here the loop stops at NUI_SKELETON_POSITION_HAND_RIGHT.
					for (int j = 0; j < /*NUI_SKELETON_POSITION_COUNT*/ NUI_SKELETON_POSITION_HAND_RIGHT; j++)
					{
						NuiTransformSkeletonToDepthImage(skeletonFrame.SkeletonData[i].SkeletonPositions[j], &fx, &fy);
						skeletonPoint[i][j].x = (int)fx;
						skeletonPoint[i][j].y = (int)fy;
					}

					for (int j = 0; j < /*NUI_SKELETON_POSITION_COUNT*/ NUI_SKELETON_POSITION_HAND_RIGHT; j++)
					{
						// a joint has three states: not tracked, tracked, or inferred from the tracked ones
						if (skeletonFrame.SkeletonData[i].eSkeletonPositionTrackingState[j] != NUI_SKELETON_POSITION_NOT_TRACKED)
						{
							circle(skeletonImage, skeletonPoint[i][j], 3, cvScalar(0, 255, 255), 5, 8, 0);
							tracked[i] = TRUE;
						}
					}

					drawSkeleton(skeletonImage, skeletonPoint[i], i);
				}
			}
			imshow("skeletonImage", skeletonImage); // show the image
		}
		else
		{
			cout << "Buffer length of received texture is bogus\r\n" << endl;
		}

		if (!Exit_thread /*cvWaitKey(20) == 27*/) // leave the loop when the exit flag is cleared
			break;
	}
	// 5. Shut down the NUI connection
	NuiShutdown();
	cvDestroyAllWindows();

	return FALSE; // all the work happens in InitInstance, so end the thread here
}
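
Since kinect_sk is a CWinThread-derived UI thread, the dialog has to launch it and later clear Exit_thread to stop the loop. A hedged sketch of that hookup (AfxBeginThread and RUNTIME_CLASS are standard MFC; the flag name is the one used above):

// Launch the skeleton thread, e.g., from a button handler in mfcCamera.
Exit_thread = TRUE;   // keep the capture loop running
AfxBeginThread(RUNTIME_CLASS(kinect_sk));

// ... and to stop it later:
Exit_thread = FALSE;  // the loop sees !Exit_thread and breaks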
void drawSkeleton(Mat &image, CvPoint pointSet[], int whichone)   
{   
	CvScalar color;   
	switch(whichone) // draw each tracked person in a different color
	{   
	case 0:   
		color = cvScalar(255);   
		break;   
	case 1:   
		color = cvScalar(0,255);   
		break;   
	case 2:   
		color = cvScalar(0, 0, 255);   
		break;   
	case 3:   
		color = cvScalar(255, 255, 0);   
		break;   
	case 4:   
		color = cvScalar(255, 0, 255);   
		break;   
	case 5:   
		color = cvScalar(0, 255, 255);   
		break;   
	}   

	if((pointSet[NUI_SKELETON_POSITION_HEAD].x!=0 || pointSet[NUI_SKELETON_POSITION_HEAD].y!=0) &&   
		(pointSet[NUI_SKELETON_POSITION_SHOULDER_CENTER].x!=0 || pointSet[NUI_SKELETON_POSITION_SHOULDER_CENTER].y!=0))   
		line(image, pointSet[NUI_SKELETON_POSITION_HEAD], pointSet[NUI_SKELETON_POSITION_SHOULDER_CENTER], color, 2);   
	if((pointSet[NUI_SKELETON_POSITION_SHOULDER_CENTER].x!=0 || pointSet[NUI_SKELETON_POSITION_SHOULDER_CENTER].y!=0) &&   
		(pointSet[NUI_SKELETON_POSITION_SPINE].x!=0 || pointSet[NUI_SKELETON_POSITION_SPINE].y!=0))   
		line(image, pointSet[NUI_SKELETON_POSITION_SHOULDER_CENTER], pointSet[NUI_SKELETON_POSITION_SPINE], color, 2);   
	if((pointSet[NUI_SKELETON_POSITION_SPINE].x!=0 || pointSet[NUI_SKELETON_POSITION_SPINE].y!=0) &&   
		(pointSet[NUI_SKELETON_POSITION_HIP_CENTER].x!=0 || pointSet[NUI_SKELETON_POSITION_HIP_CENTER].y!=0))   
		line(image, pointSet[NUI_SKELETON_POSITION_SPINE], pointSet[NUI_SKELETON_POSITION_HIP_CENTER], color, 2);   

	//left arm
	if((pointSet[NUI_SKELETON_POSITION_SHOULDER_CENTER].x!=0 || pointSet[NUI_SKELETON_POSITION_SHOULDER_CENTER].y!=0) &&   
		(pointSet[NUI_SKELETON_POSITION_SHOULDER_LEFT].x!=0 || pointSet[NUI_SKELETON_POSITION_SHOULDER_LEFT].y!=0))   
		line(image, pointSet[NUI_SKELETON_POSITION_SHOULDER_CENTER], pointSet[NUI_SKELETON_POSITION_SHOULDER_LEFT], color, 2);   
	if((pointSet[NUI_SKELETON_POSITION_SHOULDER_LEFT].x!=0 || pointSet[NUI_SKELETON_POSITION_SHOULDER_LEFT].y!=0) &&   
		(pointSet[NUI_SKELETON_POSITION_ELBOW_LEFT].x!=0 || pointSet[NUI_SKELETON_POSITION_ELBOW_LEFT].y!=0))   
		line(image, pointSet[NUI_SKELETON_POSITION_SHOULDER_LEFT], pointSet[NUI_SKELETON_POSITION_ELBOW_LEFT], color, 2);   
	if((pointSet[NUI_SKELETON_POSITION_ELBOW_LEFT].x!=0 || pointSet[NUI_SKELETON_POSITION_ELBOW_LEFT].y!=0) &&   
		(pointSet[NUI_SKELETON_POSITION_WRIST_LEFT].x!=0 || pointSet[NUI_SKELETON_POSITION_WRIST_LEFT].y!=0))   
		line(image, pointSet[NUI_SKELETON_POSITION_ELBOW_LEFT], pointSet[NUI_SKELETON_POSITION_WRIST_LEFT], color, 2);   
	if((pointSet[NUI_SKELETON_POSITION_WRIST_LEFT].x!=0 || pointSet[NUI_SKELETON_POSITION_WRIST_LEFT].y!=0) &&   
		(pointSet[NUI_SKELETON_POSITION_HAND_LEFT].x!=0 || pointSet[NUI_SKELETON_POSITION_HAND_LEFT].y!=0))   
		line(image, pointSet[NUI_SKELETON_POSITION_WRIST_LEFT], pointSet[NUI_SKELETON_POSITION_HAND_LEFT], color, 2);   

	//right arm
	if((pointSet[NUI_SKELETON_POSITION_SHOULDER_CENTER].x!=0 || pointSet[NUI_SKELETON_POSITION_SHOULDER_CENTER].y!=0) &&   
		(pointSet[NUI_SKELETON_POSITION_SHOULDER_RIGHT].x!=0 || pointSet[NUI_SKELETON_POSITION_SHOULDER_RIGHT].y!=0))   
		line(image, pointSet[NUI_SKELETON_POSITION_SHOULDER_CENTER], pointSet[NUI_SKELETON_POSITION_SHOULDER_RIGHT], color, 2);   
	if((pointSet[NUI_SKELETON_POSITION_SHOULDER_RIGHT].x!=0 || pointSet[NUI_SKELETON_POSITION_SHOULDER_RIGHT].y!=0) &&   
		(pointSet[NUI_SKELETON_POSITION_ELBOW_RIGHT].x!=0 || pointSet[NUI_SKELETON_POSITION_ELBOW_RIGHT].y!=0))   
		line(image, pointSet[NUI_SKELETON_POSITION_SHOULDER_RIGHT], pointSet[NUI_SKELETON_POSITION_ELBOW_RIGHT], color, 2);   
	if((pointSet[NUI_SKELETON_POSITION_ELBOW_RIGHT].x!=0 || pointSet[NUI_SKELETON_POSITION_ELBOW_RIGHT].y!=0) &&   
		(pointSet[NUI_SKELETON_POSITION_WRIST_RIGHT].x!=0 || pointSet[NUI_SKELETON_POSITION_WRIST_RIGHT].y!=0))   
		line(image, pointSet[NUI_SKELETON_POSITION_ELBOW_RIGHT], pointSet[NUI_SKELETON_POSITION_WRIST_RIGHT], color, 2);   
	if((pointSet[NUI_SKELETON_POSITION_WRIST_RIGHT].x!=0 || pointSet[NUI_SKELETON_POSITION_WRIST_RIGHT].y!=0) &&   
		(pointSet[NUI_SKELETON_POSITION_HAND_RIGHT].x!=0 || pointSet[NUI_SKELETON_POSITION_HAND_RIGHT].y!=0))   
		line(image, pointSet[NUI_SKELETON_POSITION_WRIST_RIGHT], pointSet[NUI_SKELETON_POSITION_HAND_RIGHT], color, 2);   

	//left leg
	if((pointSet[NUI_SKELETON_POSITION_HIP_CENTER].x!=0 || pointSet[NUI_SKELETON_POSITION_HIP_CENTER].y!=0) &&   
		(pointSet[NUI_SKELETON_POSITION_HIP_LEFT].x!=0 || pointSet[NUI_SKELETON_POSITION_HIP_LEFT].y!=0))   
		line(image, pointSet[NUI_SKELETON_POSITION_HIP_CENTER], pointSet[NUI_SKELETON_POSITION_HIP_LEFT], color, 2);   
	if((pointSet[NUI_SKELETON_POSITION_HIP_LEFT].x!=0 || pointSet[NUI_SKELETON_POSITION_HIP_LEFT].y!=0) &&   
		(pointSet[NUI_SKELETON_POSITION_KNEE_LEFT].x!=0 || pointSet[NUI_SKELETON_POSITION_KNEE_LEFT].y!=0))   
		line(image, pointSet[NUI_SKELETON_POSITION_HIP_LEFT], pointSet[NUI_SKELETON_POSITION_KNEE_LEFT], color, 2);   
	if((pointSet[NUI_SKELETON_POSITION_KNEE_LEFT].x!=0 || pointSet[NUI_SKELETON_POSITION_KNEE_LEFT].y!=0) &&   
		(pointSet[NUI_SKELETON_POSITION_ANKLE_LEFT].x!=0 || pointSet[NUI_SKELETON_POSITION_ANKLE_LEFT].y!=0))   
		line(image, pointSet[NUI_SKELETON_POSITION_KNEE_LEFT], pointSet[NUI_SKELETON_POSITION_ANKLE_LEFT], color, 2);   
	if((pointSet[NUI_SKELETON_POSITION_ANKLE_LEFT].x!=0 || pointSet[NUI_SKELETON_POSITION_ANKLE_LEFT].y!=0) &&    
		(pointSet[NUI_SKELETON_POSITION_FOOT_LEFT].x!=0 || pointSet[NUI_SKELETON_POSITION_FOOT_LEFT].y!=0))   
		line(image, pointSet[NUI_SKELETON_POSITION_ANKLE_LEFT], pointSet[NUI_SKELETON_POSITION_FOOT_LEFT], color, 2);   

	//right leg
	if((pointSet[NUI_SKELETON_POSITION_HIP_CENTER].x!=0 || pointSet[NUI_SKELETON_POSITION_HIP_CENTER].y!=0) &&   
		(pointSet[NUI_SKELETON_POSITION_HIP_RIGHT].x!=0 || pointSet[NUI_SKELETON_POSITION_HIP_RIGHT].y!=0))   
		line(image, pointSet[NUI_SKELETON_POSITION_HIP_CENTER], pointSet[NUI_SKELETON_POSITION_HIP_RIGHT], color, 2);   
	if((pointSet[NUI_SKELETON_POSITION_HIP_RIGHT].x!=0 || pointSet[NUI_SKELETON_POSITION_HIP_RIGHT].y!=0) &&   
		(pointSet[NUI_SKELETON_POSITION_KNEE_RIGHT].x!=0 || pointSet[NUI_SKELETON_POSITION_KNEE_RIGHT].y!=0))   
		line(image, pointSet[NUI_SKELETON_POSITION_HIP_RIGHT], pointSet[NUI_SKELETON_POSITION_KNEE_RIGHT],color, 2);   
	if((pointSet[NUI_SKELETON_POSITION_KNEE_RIGHT].x!=0 || pointSet[NUI_SKELETON_POSITION_KNEE_RIGHT].y!=0) &&   
		(pointSet[NUI_SKELETON_POSITION_ANKLE_RIGHT].x!=0 || pointSet[NUI_SKELETON_POSITION_ANKLE_RIGHT].y!=0))   
		line(image, pointSet[NUI_SKELETON_POSITION_KNEE_RIGHT], pointSet[NUI_SKELETON_POSITION_ANKLE_RIGHT], color, 2);   
	if((pointSet[NUI_SKELETON_POSITION_ANKLE_RIGHT].x!=0 || pointSet[NUI_SKELETON_POSITION_ANKLE_RIGHT].y!=0) &&   
		(pointSet[NUI_SKELETON_POSITION_FOOT_RIGHT].x!=0 || pointSet[NUI_SKELETON_POSITION_FOOT_RIGHT].y!=0))   
		line(image, pointSet[NUI_SKELETON_POSITION_ANKLE_RIGHT], pointSet[NUI_SKELETON_POSITION_FOOT_RIGHT], color, 2);   
}
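
drawSkeleton repeats the same guard-and-line pattern nineteen times; a table of joint pairs keeps it much shorter. A behavior-equivalent sketch (my refactor, not the original code):

// Bones as (parent, child) joint pairs; fill in the remaining pairs from above.
static const NUI_SKELETON_POSITION_INDEX bones[][2] = {
	{NUI_SKELETON_POSITION_HEAD,            NUI_SKELETON_POSITION_SHOULDER_CENTER},
	{NUI_SKELETON_POSITION_SHOULDER_CENTER, NUI_SKELETON_POSITION_SPINE},
	{NUI_SKELETON_POSITION_SPINE,           NUI_SKELETON_POSITION_HIP_CENTER},
	// ... the limb pairs exactly as in drawSkeleton above
};

for (size_t k = 0; k < sizeof(bones) / sizeof(bones[0]); k++)
{
	CvPoint p = pointSet[bones[k][0]], q = pointSet[bones[k][1]];
	// skip joints that were never filled in (still at the 0,0 default)
	if ((p.x != 0 || p.y != 0) && (q.x != 0 || q.y != 0))
		line(image, p, q, color, 2);
}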

The skeleton-frame code is worth studying alongside the SDK sample programs; you'll learn a lot from the source.
My original plan for this part was:
1. Have the Kinect capture the human skeleton, then use gesture recognition to control start/stop.
2. Use the Kinect's built-in coordinate transforms to get the joint angles of the human arm (the vector method; see the sketch after this list), wire them to the robot arm's joint angles, and get vision-driven robot control ♪(o∀o)っ. Sadly I changed research directions, so this got shelved.
3. I've also been teaching myself SLAM; if I find time I may write a few posts about it (even though I no longer need it 〒▽〒).
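
For idea 2, the vector method boils down to: take three joint positions (say shoulder, elbow, wrist), form the two bone vectors meeting at the elbow, and read the joint angle off their dot product, cos(theta) = u·v / (|u||v|). A minimal sketch of my own, not code from the shelved project (Vector4 is the SDK's skeleton position type):

#include <cmath>

// Elbow joint angle, in radians, from three skeleton joint positions.
float JointAngle(const Vector4 &shoulder, const Vector4 &elbow, const Vector4 &wrist)
{
	float ux = shoulder.x - elbow.x, uy = shoulder.y - elbow.y, uz = shoulder.z - elbow.z;
	float vx = wrist.x - elbow.x,   vy = wrist.y - elbow.y,   vz = wrist.z - elbow.z;
	float dot = ux * vx + uy * vy + uz * vz;
	float nu  = sqrtf(ux * ux + uy * uy + uz * uz);  // |u|, assumed nonzero
	float nv  = sqrtf(vx * vx + vy * vy + vz * vz);  // |v|, assumed nonzero
	return acosf(dot / (nu * nv));
}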
Well, this opening post ran a bit long, but it should all be solid material. Feel free to raise questions and we can discuss.

References:
https://blog.csdn.net/yangtrees/article/details/16106271
https://blog.csdn.net/lingling_1/article/details/40824479
The sample project is available here if you need it:
https://download.csdn.net/download/weixin_43154360/11164993
