KinectInteraction provides the following high-level features:
· Identification of up to 2 users and identification and tracking of their primary interaction hand.
· Detection services for user's hand location and state.
· Grip and grip release detection.
· Press detection.
· Information on the control targeted by the user.
I. The major types provided by the managed API are:
· HandPointer
· IInteractionClient
· InteractionFrame
· InteractionInfo
· InteractionStream
· KinectRuntimeExtensions
· UserInfo
1.HandPointer provides (see the sketch after this list):
(1) primary hand: whether this hand is the user's primary interaction hand (the hand operating the application)
(2) hand coordinates relative to the Physical Interaction Zone (PhIZ)
(3) arm extension: whether the arm is extended (used to detect a pressing motion)
(4) the hand type (left hand or right hand)
(5) the hand pointer state (is pressed, is gripping, is released, is scrolling)
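A minimal sketch of reading these properties from an InteractionHandPointer, assuming a userInfo obtained via InteractionFrame.CopyInteractionDataTo as shown in the usage sections below:
foreach (InteractionHandPointer handPointer in userInfo.HandPointers)
{
    if (!handPointer.IsTracked)
    {
        continue;                                            // no data for this hand yet
    }
    bool isPrimaryHand = handPointer.IsPrimaryForUser;       // primary interaction hand of this user
    InteractionHandType type = handPointer.HandType;         // Left, Right, or None
    double x = handPointer.X;                                // hand coordinates in the interaction zone
    double y = handPointer.Y;
    double pressExtent = handPointer.PressExtent;            // arm extension toward a press
    bool isPressed = handPointer.IsPressed;
    bool gripEvent = handPointer.HandEventType == InteractionHandEventType.Grip;  // grip event reported this frame
}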
2.InteractionFrame
An interaction frame, much like the frames provided by the other Kinect data streams (depth stream, skeleton stream, etc.), provides data about the current state of the interaction. An interaction frame contains pointers to the hand pointer and user info.
3.InteractionInfo
InteractionInfo contains information about the targeted control, including whether the control is currently a grip or a press target.
4.InteractionStream
InteractionStream is the class that manages use of the interaction stream. The interaction stream supplies interaction frames as they are generated by KinectInteraction.
The Interaction Stream provides a stream of interaction frames, similar to the stream model of the other data sources (audio stream, depth stream, skeleton stream, etc.). Interaction frames are processed to provide information on the user's interaction with the application, such as hand position, whether the hand is pressing, gripping, or releasing, and the control the user is targeting.
5.KinectRuntimeExtensions
This class includes functionality for creating and initializing an interaction stream, plus a method for accessing the raw hand pointer data (the hand pointer data provided by the HandPointer class is cleaned up to make the hand's association with the underlying controls clearer).
6.UserInfo
This class provides:
(1) whether the user is the primary user (recall that KinectInteraction can track up to two users, designating one as primary)
(2) pointers to the HandPointer objects corresponding to that user.
II. Usage
1.IInteractionClient
internal class KinectAdapter : IInteractionClient
{
    public InteractionInfo GetInteractionInfoAtLocation(int skeletonTrackingId,
        InteractionHandType handType, double x, double y)
    {
        // Return interaction metadata for the control under (x, y);
        // a minimal implementation returns a default InteractionInfo.
        return new InteractionInfo { IsPressTarget = false, IsGripTarget = false };
    }
}
2.InteractionFrame
1) CopyInteractionDataTo(UserInfo[])
UserInfo[] userInfos = new UserInfo[InteractionFrame.UserInfoArrayLength];
private void InteractionFrameReady(object sender, InteractionFrameReadyEventArgs e)
{
InteractionFrame interactionFrame = e.OpenInteractionFrame();
interactionFrame.CopyInteractionDataTo(this.userInfos);
}
2) Dispose()
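A minimal sketch of the usual disposal pattern, reusing the event args e and the userInfos array from the surrounding examples; the using block calls Dispose automatically:
using (InteractionFrame interactionFrame = e.OpenInteractionFrame())
{
    if (interactionFrame != null)
    {
        interactionFrame.CopyInteractionDataTo(this.userInfos);
    }
}   // interactionFrame.Dispose() is called here by the using statement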
3) Timestamp
long timestamp = interactionFrame.Timestamp;
4) UserInfoArrayLength
UserInfo[] userInfos = new UserInfo[InteractionFrame.UserInfoArrayLength];
3.InteractionFrameReadyEventArgs
UserInfo[] userInfos = new UserInfo[InteractionFrame.UserInfoArrayLength];
private void InteractionFrameReady(object sender, InteractionFrameReadyEventArgs e)
{
InteractionFrame interactionFrame = e.OpenInteractionFrame();
interactionFrame.CopyInteractionDataTo(this.userInfos);
}
4.InteractionHandPointer
1) HandEventType
InteractionHandPointer handPointer;
HandEventType handEventType = ConvertHandEventType(handPointer.HandEventType);
2) HandType
InteractionHandPointer handPointer;
HandType handType = ConvertHandType(handPointer.HandType);
3) IsInteractive
InteractionHandPointer handPointer;
bool isInteractive = handPointer.IsInteractive;
4) IsPressed
InteractionHandPointer handPointer;
bool isPressed = handPointer.IsPressed;
5) IsPrimaryForUser
InteractionHandPointer handPointer;
bool isPrimaryForUser = handPointer.IsPrimaryForUser;
6) IsTracked
InteractionHandPointer handPointer;
bool isTracked = handPointer.IsTracked;
7) PressExtent
double pressExtent = handPointer.PressExtent;
8) RawX
9) RawY
10) RawZ
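No example is given above for the raw coordinates; a minimal sketch, assuming handPointer was obtained from userInfo.HandPointers as in the other snippets (the Raw values are the hand pointer coordinates before any user-interface adjustment):
double rawX = handPointer.RawX;   // raw X coordinate of the hand pointer
double rawY = handPointer.RawY;   // raw Y coordinate of the hand pointer
double rawZ = handPointer.RawZ;   // raw press/extension value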
11) X
double x = handPointer.X;
12) Y
double y = handPointer.Y;
5.InteractionHandType
1) Left
2) Right
3) None
HandType ConvertHandType(InteractionHandType interactionHandType)
{
switch (interactionHandType)
{
case InteractionHandType.Left:
return HandType.Left;
case InteractionHandType.Right:
return HandType.Right;
default:
return HandType.None;
}
}
6.InteractionInfo
1) InteractionInfo()
InteractionInfo interactionInfo = new InteractionInfo
{
    IsPressTarget = false,
    IsGripTarget = false
};
2) IsGripTarget
if (!interactionInfo.IsGripTarget)
3) IsPressTarget
if (!interactionInfo.IsPressTarget)
4) PressAttractionPointX
interactionInfo.PressAttractionPointX = 0.5;  // assign a double value between 0.0 and 1.0
5) PressAttractionPointY
interactionInfo.PressAttractionPointY = 0.5;  // assign a double value between 0.0 and 1.0
6) PressTargetControlId
if (interactionInfo.PressTargetControlId == 0)
{
interactionInfo.PressTargetControlId = searchElement.GetHashCode();
}
7.InteractionStream
UserInfo[] userInfos;
userInfos = new UserInfo[InteractionFrame.UserInfoArrayLength];
Dictionary<Tuple<int, HandType>, HandPointer> handPointers =
new Dictionary<Tuple<int, HandType>, HandPointer>();
InteractionStream interactionStream;
interactionStream = new InteractionStream(KinectSensor, kinectAdapter);
interactionStream.InteractionFrameReady += InteractionFrameReady;
// Teardown when shutting down:
// interactionStream.InteractionFrameReady -= InteractionFrameReady;
// interactionStream.Dispose();
// interactionStream = null;
private void InteractionFrameReady(object sender, InteractionFrameReadyEventArgs e)
{
UserInfo[] localUserInfos = null;
long timestamp = 0;
using (InteractionFrame interactionFrame = e.OpenInteractionFrame())
{
if (interactionFrame != null)
{
interactionFrame.CopyInteractionDataTo(this.userInfos);
timestamp = interactionFrame.Timestamp;
localUserInfos = this.userInfos;
}
}
if (localUserInfos != null)
{
for (int userIndex = 0; userIndex < localUserInfos.Length; userIndex++)
{
UserInfo user = localUserInfos[userIndex];
foreach (InteractionHandPointer handPointer in user.HandPointers)
{
this.HandleHandPointerData(timestamp, user, userIndex,
handPointer);
if (localUserInfos != this.userInfos)
{
    // Double-check that the user info data being processed is still valid.
    // The client might have invalidated it by changing the KinectSensor
    // while handling a KinectRegion event.
    break;
}
}
}
}
}
public class HandPointer : INotifyPropertyChanged
{
    public event PropertyChangedEventHandler PropertyChanged;  // raising omitted in this fragment
    public bool Updated { get; set; }
}
private void BeginInteractionFrame()
{
    // Mark every known hand pointer as not yet updated for this frame.
    foreach (HandPointer handPointer in this.handPointers.Values)
    {
        handPointer.Updated = false;
    }
}
private void HandleHandPointerData(long timeStamp, UserInfo userInfo, int userIndex, InteractionHandPointer handPointer)
{
var interactionData = new InteractionFrameData
{
TimeStampOfLastUpdate = timeStamp,
TrackingId = userInfo.SkeletonTrackingId,
PlayerIndex = userIndex,
HandType = EnumHelper.ConvertHandType(handPointer.HandType),
IsTracked = handPointer.IsTracked,
IsActive = handPointer.IsActive,
IsInteractive = handPointer.IsInteractive,
IsPressed = handPointer.IsPressed,
IsPrimaryHandOfUser = handPointer.IsPrimaryForUser,
IsPrimaryUser = (userInfo.SkeletonTrackingId == this.PrimaryUserTrackingId) && (userInfo.SkeletonTrackingId != KinectPrimaryUserTracker.InvalidUserTrackingId),
HandEventType = EnumHelper.ConvertHandEventType(handPointer.HandEventType),
X = handPointer.X,
Y = handPointer.Y,
Z = handPointer.PressExtent
};
this.kinectAdapter.HandleHandPointerData(interactionData);
}
1) ProcessDepth()
private void SensorDepthFrameReady(object sender, DepthImageFrameReadyEventArgs depthImageFrameReadyEventArgs)
{
    if (KinectSensor != sender)
    {
        return;
    }
    using (DepthImageFrame depthFrame = depthImageFrameReadyEventArgs.OpenDepthImageFrame())
    {
        if (depthFrame == null) { return; }
        // Hand the depth data to the Interaction framework to be processed
        interactionStream.ProcessDepth(depthFrame.GetRawPixelData(), depthFrame.Timestamp);
    }
}
2) ProcessSkeleton()
private void SensorSkeletonFrameReady(object sender, SkeletonFrameReadyEventArgs skeletonFrameReadyEventArgs)
{
    using (SkeletonFrame skeletonFrame = skeletonFrameReadyEventArgs.OpenSkeletonFrame())
    {
        if (skeletonFrame == null) { return; }
        Skeleton[] skeletons = new Skeleton[skeletonFrame.SkeletonArrayLength];
        skeletonFrame.CopySkeletonDataTo(skeletons);
        var accelerometerReading = KinectSensor.AccelerometerGetCurrentReading();
        // Hand the skeleton data to the Interaction framework to be processed
        interactionStream.ProcessSkeleton(skeletons, accelerometerReading, skeletonFrame.Timestamp);
    }
}
3) OpenNextFrame()
public InteractionFrame OpenNextFrame(int millisecondsWait)
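A minimal polling sketch, assuming the interactionStream and userInfos fields from section 7; the 34 ms wait is only an illustrative value (roughly one frame at 30 fps):
// Poll for the next interaction frame instead of subscribing to InteractionFrameReady.
using (InteractionFrame interactionFrame = interactionStream.OpenNextFrame(34))
{
    if (interactionFrame != null)
    {
        interactionFrame.CopyInteractionDataTo(this.userInfos);
    }
}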
8.KinectRuntimeExtensions
1)KinectRuntimeExtensions.GetRawPixelData(this Microsoft.Kinect.DepthImageFrame)
a.
DepthImagePixel[] depthBuffer;
DepthImageFrame frame = depthImageFrame;
depthBuffer = frame.GetRawPixelData();
b.
interactionStream.ProcessDepth(depthFrame.GetRawPixelData(), depthFrame.Timestamp);
9.UserInfo
1) SkeletonTrackingId
int skeletonTrackingId = userInfo.SkeletonTrackingId;
2) HandPointers
foreach (InteractionHandPointer handPointer in userInfo.HandPointers) { }
10.Enumerations
1)InteractionHandEventType
| Member name | Value | Description |
| Grip | 1 | A grip event has been detected. |
| GripRelease | 2 | A grip release event has been detected. |
| None | 0 | No grip or grip-release event has been detected. |
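ConvertHandEventType is called in the snippets above but never shown; a minimal sketch, assuming the application defines its own HandEventType enum with None, Grip, and GripRelease members (mirroring the ConvertHandType helper below):
HandEventType ConvertHandEventType(InteractionHandEventType interactionHandEventType)
{
    switch (interactionHandEventType)
    {
        case InteractionHandEventType.Grip:
            return HandEventType.Grip;
        case InteractionHandEventType.GripRelease:
            return HandEventType.GripRelease;
        default:
            return HandEventType.None;
    }
}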
2)InteractionHandType
| Member name | Value | Description |
| Left | 1 | The hand is a left hand. |
| None | 0 | The hand cannot be classified as a right or left hand. |
| Right | 2 | The hand is a right hand. |
HandType ConvertHandType(InteractionHandType interactionHandType)
{
switch (interactionHandType)
{
case InteractionHandType.Left:
return HandType.Left;
case InteractionHandType.Right:
return HandType.Right;
default:
return HandType.None;
}
}
Below is a piece of my code, together with the results obtained when the program runs:
Framework:
1. Implementation of the IInteractionClient interface:
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using Microsoft.Kinect.Toolkit.Interaction;
namespace HandGesture
{
class IInteractionClientComplement : IInteractionClient
{
public InteractionInfo GetInteractionInfoAtLocation(int skeletonTrackingId, InteractionHandType handType, double x, double y)
{
InteractionInfo interactionInfo = new InteractionInfo
{
IsPressTarget = false,
IsGripTarget = false,
PressAttractionPointX=0.5,
PressAttractionPointY=0.5,
PressTargetControlId=0
};
return interactionInfo;
}
}
}
2. Initialization:
using Microsoft.Kinect.Toolkit;
using Microsoft.Kinect.Toolkit.Interaction;
//Interaction
InteractionStream interactionStream;
IInteractionClientComplement iInteractionClientComplement;
//Interaction
iInteractionClientComplement = new IInteractionClientComplement();
interactionStream = new InteractionStream(_KinectSensor, iInteractionClientComplement);
interactionStream.InteractionFrameReady += new EventHandler<InteractionFrameReadyEventArgs>(interactionStream_InteractionFrameReady);//interactionStream.OpenNextFrame(34);
3. Called from the Kinect data-stream event:
void _KinectSensor_AllFramesReady(object sender, AllFramesReadyEventArgs e)
{
// color image frame
ColorImageFrame colorFrame = e.OpenColorImageFrame();
// depth image frame
DepthImageFrame depthFrame = e.OpenDepthImageFrame();
// skeleton frame
SkeletonFrame skeletonFrame = e.OpenSkeletonFrame();
// return immediately if any frame is null
if (colorFrame == null || depthFrame == null || skeletonFrame == null)
{
return;
}
if (_KinectSensor != null)
{
Vector4 accelerometerReading = _KinectSensor.AccelerometerGetCurrentReading();
interactionStream.ProcessSkeleton(SkeletonFrameOpetate.skeletons, accelerometerReading, skeletonFrame.Timestamp); // Grip / GripRelease
interactionStream.ProcessDepth(depthFrame.GetRawPixelData(), depthFrame.Timestamp);
}
}
4. Interaction event handler:
void interactionStream_InteractionFrameReady(object sender, InteractionFrameReadyEventArgs e) // Interaction event handler
{
InteractionFrame interactionFrame = e.OpenInteractionFrame();
if (interactionFrame == null) { return; }
UserInfo[] userInfos = new UserInfo[InteractionFrame.UserInfoArrayLength];
interactionFrame.CopyInteractionDataTo(userInfos);
if (userInfos == null)
{
return;
}
for (int userIndex = 0; userIndex < userInfos.Length; userIndex++)
{
int playerIndex = userIndex;
UserInfo userInfo = userInfos[userIndex];
int skeletonTrackingId = userInfo.SkeletonTrackingId;
foreach (InteractionHandPointer interactionHandPointer in userInfo.HandPointers)
{
if (interactionHandPointer.HandType == InteractionHandType.None && interactionHandPointer.IsPrimaryForUser == false && interactionHandPointer.IsActive == false && interactionHandPointer.IsInteractive == false && interactionHandPointer.IsTracked == false)
{
continue;
}
if (interactionHandPointer.HandType == InteractionHandType.Left) // left hand
{
if (interactionHandPointer.HandEventType == InteractionHandEventType.None)
{
continue;
}
if (interactionHandPointer.HandEventType == InteractionHandEventType.Grip)
{
isLeftHandGrip = true;
}
if (interactionHandPointer.HandEventType == InteractionHandEventType.GripRelease)
{
isLeftHandGripRelease = true;
}
}
if (interactionHandPointer.HandType == InteractionHandType.Right) // right hand
{
if (interactionHandPointer.HandEventType == InteractionHandEventType.None)
{
continue;
}
if (interactionHandPointer.HandEventType == InteractionHandEventType.Grip)
{
isRightHandGrip = true;
}
if (interactionHandPointer.HandEventType == InteractionHandEventType.GripRelease)
{
isRightHandGripRelease = true;
}
}
} // end foreach
} // end for
} // end interactionStream_InteractionFrameReady