initial commit

post-projects
Cailean Finn, 1 year ago
commit b58369a6d1
  1. .gitattributes (+58)
  2. .gitignore (+76)
  3. .vsconfig (+6)
  4. Assets/Act-4 Azure.unity (+1625)
  5. Assets/Act-4 Azure.unity.meta (+7)
  6. Assets/Act-4 Barracuda.unity (+2060)
  7. Assets/Act-4 Barracuda.unity.meta (+7)
  8. Assets/Azure.meta (+8)
  9. Assets/Azure/KinectScripts.meta (+8)
  10. Assets/Azure/KinectScripts/AvatarController.cs (+1725)
  11. Assets/Azure/KinectScripts/AvatarController.cs.meta (+8)
  12. Assets/Azure/KinectScripts/AvatarControllerClassic.cs (+85)
  13. Assets/Azure/KinectScripts/AvatarControllerClassic.cs.meta (+8)
  14. Assets/Azure/KinectScripts/AvatarScaler.cs (+771)
  15. Assets/Azure/KinectScripts/AvatarScaler.cs.meta (+11)
  16. Assets/Azure/KinectScripts/BackgroundColorCamDepthImage.cs (+280)
  17. Assets/Azure/KinectScripts/BackgroundColorCamDepthImage.cs.meta (+12)
  18. Assets/Azure/KinectScripts/BackgroundColorCamInfraredImage.cs (+223)
  19. Assets/Azure/KinectScripts/BackgroundColorCamInfraredImage.cs.meta (+12)
  20. Assets/Azure/KinectScripts/BackgroundColorCamUserImage.cs (+322)
  21. Assets/Azure/KinectScripts/BackgroundColorCamUserImage.cs.meta (+12)
  22. Assets/Azure/KinectScripts/BackgroundColorImage.cs (+132)
  23. Assets/Azure/KinectScripts/BackgroundColorImage.cs.meta (+12)
  24. Assets/Azure/KinectScripts/BackgroundDepthCamColorImage.cs (+146)
  25. Assets/Azure/KinectScripts/BackgroundDepthCamColorImage.cs.meta (+12)
  26. Assets/Azure/KinectScripts/BackgroundDepthImage.cs (+132)
  27. Assets/Azure/KinectScripts/BackgroundDepthImage.cs.meta (+12)
  28. Assets/Azure/KinectScripts/BackgroundInfraredImage.cs (+128)
  29. Assets/Azure/KinectScripts/BackgroundInfraredImage.cs.meta (+12)
  30. Assets/Azure/KinectScripts/BackgroundRemovalByBodyBounds.cs (+171)
  31. Assets/Azure/KinectScripts/BackgroundRemovalByBodyBounds.cs.meta (+11)
  32. Assets/Azure/KinectScripts/BackgroundRemovalByBodyIndex.cs (+115)
  33. Assets/Azure/KinectScripts/BackgroundRemovalByBodyIndex.cs.meta (+11)
  34. Assets/Azure/KinectScripts/BackgroundRemovalByDist.cs (+93)
  35. Assets/Azure/KinectScripts/BackgroundRemovalByDist.cs.meta (+11)
  36. Assets/Azure/KinectScripts/BackgroundRemovalByGreenScreen.cs (+89)
  37. Assets/Azure/KinectScripts/BackgroundRemovalByGreenScreen.cs.meta (+11)
  38. Assets/Azure/KinectScripts/BackgroundRemovalManager.cs (+623)
  39. Assets/Azure/KinectScripts/BackgroundRemovalManager.cs.meta (+12)
  40. Assets/Azure/KinectScripts/BackgroundStaticImage.cs (+107)
  41. Assets/Azure/KinectScripts/BackgroundStaticImage.cs.meta (+12)
  42. Assets/Azure/KinectScripts/BackgroundUserBodyImage.cs (+300)
  43. Assets/Azure/KinectScripts/BackgroundUserBodyImage.cs.meta (+12)
  44. Assets/Azure/KinectScripts/BodyDataRecorderPlayer.cs (+408)
  45. Assets/Azure/KinectScripts/BodyDataRecorderPlayer.cs.meta (+12)
  46. Assets/Azure/KinectScripts/BodySlicer.cs (+655)
  47. Assets/Azure/KinectScripts/BodySlicer.cs.meta (+12)
  48. Assets/Azure/KinectScripts/CubemanController.cs (+328)
  49. Assets/Azure/KinectScripts/CubemanController.cs.meta (+8)
  50. Assets/Azure/KinectScripts/DepthIrFilterImage.cs (+202)
  51. Assets/Azure/KinectScripts/DepthIrFilterImage.cs.meta (+12)
  52. Assets/Azure/KinectScripts/Filters.meta (+8)
  53. Assets/Azure/KinectScripts/Filters/BodySpinFilter.cs (+593)
  54. Assets/Azure/KinectScripts/Filters/BodySpinFilter.cs.meta (+11)
  55. Assets/Azure/KinectScripts/Filters/BoneOrientationConstraints.cs (+336)
  56. Assets/Azure/KinectScripts/Filters/BoneOrientationConstraints.cs.meta (+8)
  57. Assets/Azure/KinectScripts/Filters/JointPositionsFilter.cs (+447)
  58. Assets/Azure/KinectScripts/Filters/JointPositionsFilter.cs.meta (+11)
  59. Assets/Azure/KinectScripts/Filters/JointVelocitiesFilter.cs (+398)
  60. Assets/Azure/KinectScripts/Filters/JointVelocitiesFilter.cs.meta (+12)
  61. Assets/Azure/KinectScripts/Filters/MahonyAHRS.cs (+302)
  62. Assets/Azure/KinectScripts/Filters/MahonyAHRS.cs.meta (+11)
  63. Assets/Azure/KinectScripts/FollowSensorTransform.cs (+80)
  64. Assets/Azure/KinectScripts/FollowSensorTransform.cs.meta (+11)
  65. Assets/Azure/KinectScripts/FollowUserJointPose.cs (+140)
  66. Assets/Azure/KinectScripts/FollowUserJointPose.cs.meta (+8)
  67. Assets/Azure/KinectScripts/ForegroundBlendRenderer.cs (+311)
  68. Assets/Azure/KinectScripts/ForegroundBlendRenderer.cs.meta (+11)
  69. Assets/Azure/KinectScripts/ForegroundToRawImage.cs (+62)
  70. Assets/Azure/KinectScripts/ForegroundToRawImage.cs.meta (+12)
  71. Assets/Azure/KinectScripts/ForegroundToRenderer.cs (+80)
  72. Assets/Azure/KinectScripts/ForegroundToRenderer.cs.meta (+12)
  73. Assets/Azure/KinectScripts/FragmentLighting.cs (+184)
  74. Assets/Azure/KinectScripts/FragmentLighting.cs.meta (+11)
  75. Assets/Azure/KinectScripts/HmdHeadMover.cs (+184)
  76. Assets/Azure/KinectScripts/HmdHeadMover.cs.meta (+12)
  77. Assets/Azure/KinectScripts/InteractionInputModule.cs (+390)
  78. Assets/Azure/KinectScripts/InteractionInputModule.cs.meta (+12)
  79. Assets/Azure/KinectScripts/InteractionManager.cs (+1123)
  80. Assets/Azure/KinectScripts/InteractionManager.cs.meta (+8)
  81. Assets/Azure/KinectScripts/Interfaces.meta (+8)
  82. Assets/Azure/KinectScripts/Interfaces/DepthSensorBase.cs (+3689)
  83. Assets/Azure/KinectScripts/Interfaces/DepthSensorBase.cs.meta (+11)
  84. Assets/Azure/KinectScripts/Interfaces/DepthSensorDescriptor.cs (+59)
  85. Assets/Azure/KinectScripts/Interfaces/DepthSensorDescriptor.cs.meta (+11)
  86. Assets/Azure/KinectScripts/Interfaces/DepthSensorInterface.cs (+168)
  87. Assets/Azure/KinectScripts/Interfaces/DepthSensorInterface.cs.meta (+11)
  88. Assets/Azure/KinectScripts/Interfaces/DummyK4AInterface.cs (+112)
  89. Assets/Azure/KinectScripts/Interfaces/DummyK4AInterface.cs.meta (+11)
  90. Assets/Azure/KinectScripts/Interfaces/Kinect2Interface.cs (+1846)
  91. Assets/Azure/KinectScripts/Interfaces/Kinect2Interface.cs.meta (+11)
  92. Assets/Azure/KinectScripts/Interfaces/Kinect4AzureInterface.cs (+2881)
  93. Assets/Azure/KinectScripts/Interfaces/Kinect4AzureInterface.cs.meta (+11)
  94. Assets/Azure/KinectScripts/Interfaces/Kinect4AzureSyncher.cs (+426)
  95. Assets/Azure/KinectScripts/Interfaces/Kinect4AzureSyncher.cs.meta (+11)
  96. Assets/Azure/KinectScripts/Interfaces/KinectFloorDetector.cs (+628)
  97. Assets/Azure/KinectScripts/Interfaces/KinectFloorDetector.cs.meta (+11)
  98. Assets/Azure/KinectScripts/Interfaces/NetClientInterface.cs (+2388)
  99. Assets/Azure/KinectScripts/Interfaces/NetClientInterface.cs.meta (+11)
  100. Assets/Azure/KinectScripts/Interfaces/RealSenseInterface.cs (+1093)

.gitattributes (+58)

@@ -0,0 +1,58 @@
# 3D models
*.3dm filter=lfs diff=lfs merge=lfs -text
*.3ds filter=lfs diff=lfs merge=lfs -text
*.blend filter=lfs diff=lfs merge=lfs -text
*.c4d filter=lfs diff=lfs merge=lfs -text
*.collada filter=lfs diff=lfs merge=lfs -text
*.dae filter=lfs diff=lfs merge=lfs -text
*.dxf filter=lfs diff=lfs merge=lfs -text
*.fbx filter=lfs diff=lfs merge=lfs -text
*.jas filter=lfs diff=lfs merge=lfs -text
*.lws filter=lfs diff=lfs merge=lfs -text
*.lxo filter=lfs diff=lfs merge=lfs -text
*.ma filter=lfs diff=lfs merge=lfs -text
*.max filter=lfs diff=lfs merge=lfs -text
*.mb filter=lfs diff=lfs merge=lfs -text
*.obj filter=lfs diff=lfs merge=lfs -text
*.ply filter=lfs diff=lfs merge=lfs -text
*.skp filter=lfs diff=lfs merge=lfs -text
*.stl filter=lfs diff=lfs merge=lfs -text
*.ztl filter=lfs diff=lfs merge=lfs -text
# Audio
*.aif filter=lfs diff=lfs merge=lfs -text
*.aiff filter=lfs diff=lfs merge=lfs -text
*.it filter=lfs diff=lfs merge=lfs -text
*.mod filter=lfs diff=lfs merge=lfs -text
*.mp3 filter=lfs diff=lfs merge=lfs -text
*.ogg filter=lfs diff=lfs merge=lfs -text
*.s3m filter=lfs diff=lfs merge=lfs -text
*.wav filter=lfs diff=lfs merge=lfs -text
*.xm filter=lfs diff=lfs merge=lfs -text
# Video
*.mp4 filter=lfs diff=lfs merge=lfs -text
# Fonts
*.otf filter=lfs diff=lfs merge=lfs -text
*.ttf filter=lfs diff=lfs merge=lfs -text
# Images
*.bmp filter=lfs diff=lfs merge=lfs -text
*.exr filter=lfs diff=lfs merge=lfs -text
*.gif filter=lfs diff=lfs merge=lfs -text
*.hdr filter=lfs diff=lfs merge=lfs -text
*.iff filter=lfs diff=lfs merge=lfs -text
*.jpeg filter=lfs diff=lfs merge=lfs -text
*.jpg filter=lfs diff=lfs merge=lfs -text
*.pict filter=lfs diff=lfs merge=lfs -text
*.png filter=lfs diff=lfs merge=lfs -text
*.psd filter=lfs diff=lfs merge=lfs -text
*.tga filter=lfs diff=lfs merge=lfs -text
*.tif filter=lfs diff=lfs merge=lfs -text
*.tiff filter=lfs diff=lfs merge=lfs -text
# Collapse Unity-generated files on GitHub
*.asset linguist-generated
*.mat linguist-generated
*.meta linguist-generated
*.prefab linguist-generated
*.unity linguist-generated
*.aar filter=lfs diff=lfs merge=lfs -text
# ONNX Models
*.onnx filter=lfs diff=lfs merge=lfs -text

.gitignore (+76)

@@ -0,0 +1,76 @@
# This .gitignore file should be placed at the root of your Unity project directory
#
# Get latest from https://github.com/github/gitignore/blob/main/Unity.gitignore
#
/[Ll]ibrary/
/[Tt]emp/
/[Oo]bj/
/[Bb]uild/
/[Bb]uilds/
/[Ll]ogs/
/[Uu]ser[Ss]ettings/
# MemoryCaptures can get excessive in size.
# They also could contain extremely sensitive data
/[Mm]emoryCaptures/
# Recordings can get excessive in size
/[Rr]ecordings/
# Uncomment this line if you wish to ignore the asset store tools plugin
# /[Aa]ssets/AssetStoreTools*
# Autogenerated Jetbrains Rider plugin
/[Aa]ssets/Plugins/Editor/JetBrains*
# Visual Studio cache directory
.vs/
# Gradle cache directory
.gradle/
# Autogenerated VS/MD/Consulo solution and project files
ExportedObj/
.consulo/
*.csproj
*.unityproj
*.sln
*.suo
*.tmp
*.user
*.userprefs
*.pidb
*.booproj
*.svd
*.pdb
*.mdb
*.opendb
*.VC.db
# Unity3D generated meta files
*.pidb.meta
*.pdb.meta
*.mdb.meta
# Unity3D generated file on crash reports
sysinfo.txt
# Builds
*.apk
*.aab
*.unitypackage
*.app
# Crashlytics generated file
crashlytics-build.properties
# Packed Addressables
/[Aa]ssets/[Aa]ddressable[Aa]ssets[Dd]ata/*/*.bin*
# Temporary auto-generated Android Assets
/[Aa]ssets/[Ss]treamingAssets/aa.meta
/[Aa]ssets/[Ss]treamingAssets/aa/*
# Dlls
*.dll

.vsconfig (+6)

@@ -0,0 +1,6 @@
{
"version": "1.0",
"components": [
"Microsoft.VisualStudio.Workload.ManagedGame"
]
}

Assets/Act-4 Azure.unity (+1625)

File diff suppressed because it is too large

Assets/Act-4 Azure.unity.meta (+7)

@@ -0,0 +1,7 @@
fileFormatVersion: 2
guid: 39a7265aa0add1a4996e7a5f6b29bdb3
DefaultImporter:
  externalObjects: {}
  userData:
  assetBundleName:
  assetBundleVariant:

Assets/Act-4 Barracuda.unity (+2060)

File diff suppressed because it is too large

Assets/Act-4 Barracuda.unity.meta (+7)

@@ -0,0 +1,7 @@
fileFormatVersion: 2
guid: 5deb965500846b64ab123fdbbbc2de24
DefaultImporter:
  externalObjects: {}
  userData:
  assetBundleName:
  assetBundleVariant:

Assets/Azure.meta (+8)

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: e41940127110d0147ae02474cfa63fe9
folderAsset: yes
DefaultImporter:
  externalObjects: {}
  userData:
  assetBundleName:
  assetBundleVariant:

Assets/Azure/KinectScripts.meta (+8)

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 5a85012640094184b9a055de55b8416c
folderAsset: yes
DefaultImporter:
  externalObjects: {}
  userData:
  assetBundleName:
  assetBundleVariant:

Assets/Azure/KinectScripts/AvatarController.cs (+1725)

File diff suppressed because it is too large

Assets/Azure/KinectScripts/AvatarController.cs.meta (+8)

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 06c52c07402f7274cab64e484b1bddc9
MonoImporter:
  serializedVersion: 2
  defaultReferences: []
  executionOrder: 0
  icon: {instanceID: 0}
  userData:

Assets/Azure/KinectScripts/AvatarControllerClassic.cs (+85)

@@ -0,0 +1,85 @@
using UnityEngine;
//using Windows.Kinect;
using System;
using System.Collections;
using System.Collections.Generic;
using System.Runtime.InteropServices;
using System.IO;
using System.Text;

namespace com.rfilkov.components
{
    /// <summary>
    /// Avatar controller is the component that transfers the captured user motion to a humanoid model (avatar).
    /// AvatarControllerClassic allows manual assignment of the model's rigged bones to the tracked body joints.
    /// </summary>
    public class AvatarControllerClassic : AvatarController
    {
        // Public bone transforms that get matched to the tracked joints. If a field is left empty, that bone simply isn't driven.
        public Transform Pelvis;
        public Transform SpineNaval;
        public Transform SpineChest;
        public Transform Neck;
        //public Transform Head;

        public Transform ClavicleLeft;
        public Transform ShoulderLeft;
        public Transform ElbowLeft;
        public Transform WristLeft;

        public Transform ClavicleRight;
        public Transform ShoulderRight;
        public Transform ElbowRight;
        public Transform WristRight;

        public Transform HipLeft;
        public Transform KneeLeft;
        public Transform AnkleLeft;
        //private Transform FootLeft = null;

        public Transform HipRight;
        public Transform KneeRight;
        public Transform AnkleRight;
        //private Transform FootRight = null;

        [Tooltip("The body root node (optional).")]
        public Transform BodyRoot;

        // maps the bones to the model transforms
        protected override void MapBones()
        {
            bones[0] = Pelvis;
            bones[1] = SpineNaval;
            bones[2] = SpineChest;
            bones[3] = Neck;
            //bones[4] = Head;

            bones[5] = ClavicleLeft;
            bones[6] = ShoulderLeft;
            bones[7] = ElbowLeft;
            bones[8] = WristLeft;

            bones[9] = ClavicleRight;
            bones[10] = ShoulderRight;
            bones[11] = ElbowRight;
            bones[12] = WristRight;

            bones[13] = HipLeft;
            bones[14] = KneeLeft;
            bones[15] = AnkleLeft;
            //bones[16] = FootLeft;

            bones[17] = HipRight;
            bones[18] = KneeRight;
            bones[19] = AnkleRight;
            //bones[20] = FootRight;

            // body root
            bodyRoot = BodyRoot;
        }
    }
}
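Since AvatarControllerClassic leaves the joint-to-bone mapping entirely to scene setup, a short sketch of wiring it from code (rather than the Inspector) may help illustrate the intent. The component and its public fields come from the file above; the ClassicAvatarSetup helper and the transform paths are illustrative assumptions, not part of this commit.

using UnityEngine;
using com.rfilkov.components;

// Hypothetical setup helper (not in this commit): assigns a rigged model's transforms
// to AvatarControllerClassic at runtime instead of dragging them in the Inspector.
public class ClassicAvatarSetup : MonoBehaviour
{
    public Transform modelRoot;   // root of the rigged humanoid model (assumed field)

    void Awake()
    {
        AvatarControllerClassic ac = gameObject.AddComponent<AvatarControllerClassic>();

        ac.BodyRoot = modelRoot;
        ac.Pelvis = modelRoot.Find("Hips");                                   // placeholder bone paths;
        ac.SpineChest = modelRoot.Find("Hips/Spine/Chest");                   // adjust them to the actual rig
        ac.Neck = modelRoot.Find("Hips/Spine/Chest/Neck");
        ac.ShoulderLeft = modelRoot.Find("Hips/Spine/Chest/LeftUpperArm");
        ac.ElbowLeft = modelRoot.Find("Hips/Spine/Chest/LeftUpperArm/LeftLowerArm");
        // ...assign the remaining joint fields (wrists, hips, knees, ankles) the same way.
        // Fields left unassigned are simply not driven, as noted in the class above.
    }
}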

Assets/Azure/KinectScripts/AvatarControllerClassic.cs.meta (+8)

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 7b38f8c0e60334143a1b54003b3a0ae5
MonoImporter:
  serializedVersion: 2
  defaultReferences: []
  executionOrder: 0
  icon: {instanceID: 0}
  userData:

Assets/Azure/KinectScripts/AvatarScaler.cs (+771)

@@ -0,0 +1,771 @@
using UnityEngine;
using System.Collections;
using com.rfilkov.kinect;
namespace com.rfilkov.components
{
/// <summary>
/// Avatar scaler is the component that scales the avatar's body according to the user's body and bone sizes.
/// </summary>
[RequireComponent(typeof(Animator))]
public class AvatarScaler : MonoBehaviour
{
[Tooltip("Index of the player, tracked by this component. 0 means the 1st player, 1 - the 2nd one, 2 - the 3rd one, etc.")]
public int playerIndex = 0;
[Tooltip("Whether the avatar is facing the player or not.")]
public bool mirroredAvatar = false;
[Tooltip("Minimum distance to the user.")]
public float minUserDistance = 1.0f;
[Tooltip("Body scale factor (incl. arms and legs) that may be used for fine tuning of model-scale.")]
[Range(0.0f, 2.0f)]
public float bodyScaleFactor = 1f;
[Tooltip("Body width scale factor that may be used for fine tuning of model-width scale.")]
[Range(0.0f, 2.0f)]
public float bodyWidthFactor = 0f;
[Tooltip("Additional scale factor for arms that may be used for fine tuning of model arm-scale.")]
[Range(0.0f, 2.0f)]
public float armScaleFactor = 0f;
[Tooltip("Additional scale factor for legs that may be used for fine tuning of model leg-scale.")]
[Range(0.0f, 2.0f)]
public float legScaleFactor = 0f;
[Tooltip("Whether the scale is updated continuously or just after the calibration pose.")]
public bool continuousScaling = true;
[Tooltip("Scale smoothing factor used in case of continuous scaling.")]
public float smoothFactor = 5f;
[Tooltip("Camera used to overlay the model over the background.")]
public Camera foregroundCamera;
[Tooltip("Plane used to render the color camera background.")]
private Transform backgroundPlane = null;
[Tooltip("Index of the depth sensor that generates the color camera background. 0 is the 1st one, 1 - the 2nd one, etc.")]
private int sensorIndex = 0;
// [Tooltip("Whether to put the clothing model hip and shoulder joints where the user joints are.")]
// public bool fixModelHipsAndShoulders = false;
[Tooltip("UI-Text to display the avatar-scaler debug messages.")]
public UnityEngine.UI.Text debugText;
// used by category selector
[System.NonSerialized]
public ulong currentUserId = 0;
// used by category selector
[System.NonSerialized]
public bool scalerInited = false;
// class references
private KinectManager kinectManager = null;
private AvatarController avtController = null;
// model transforms for scaling
private Transform bodyScaleTransform;
//private Transform bodyHipsTransform;
private Transform leftShoulderScaleTransform;
private Transform leftElbowScaleTransform;
private Transform rightShoulderScaleTransform;
private Transform rightElbowScaleTransform;
private Transform leftHipScaleTransform;
private Transform leftKneeScaleTransform;
private Transform rightHipScaleTransform;
private Transform rightKneeScaleTransform;
private Vector3 modelBodyScale = Vector3.one;
private Vector3 modelLeftShoulderScale = Vector3.one;
private Vector3 modelLeftElbowScale = Vector3.one;
private Vector3 modelRightShoulderScale = Vector3.one;
private Vector3 modelRightElbowScale = Vector3.one;
private Vector3 modelLeftHipScale = Vector3.one;
private Vector3 modelLeftKneeScale = Vector3.one;
private Vector3 modelRightHipScale = Vector3.one;
private Vector3 modelRightKneeScale = Vector3.one;
// model bone sizes and original scales
private float modelBodyHeight = 0f;
private float modelBodyWidth = 0f;
private float modelLeftUpperArmLength = 0f;
private float modelLeftLowerArmLength = 0f;
private float modelRightUpperArmLength = 0f;
private float modelRightLowerArmLength = 0f;
private float modelLeftUpperLegLength = 0f;
private float modelLeftLowerLegLength = 0f;
private float modelRightUpperLegLength = 0f;
private float modelRightLowerLegLength = 0f;
// user bone sizes
private float userBodyHeight = 0f;
private float userBodyWidth = 0f;
private float leftUpperArmLength = 0f;
private float leftLowerArmLength = 0f;
private float rightUpperArmLength = 0f;
private float rightLowerArmLength = 0f;
private float leftUpperLegLength = 0f;
private float leftLowerLegLength = 0f;
private float rightUpperLegLength = 0f;
private float rightLowerLegLength = 0f;
// user bone scale factors
private float fScaleBodyHeight = 0f;
private float fScaleBodyWidth = 0f;
private float fScaleLeftUpperArm = 0f;
private float fScaleLeftLowerArm = 0f;
private float fScaleRightUpperArm = 0f;
private float fScaleRightLowerArm = 0f;
private float fScaleLeftUpperLeg = 0f;
private float fScaleLeftLowerLeg = 0f;
private float fScaleRightUpperLeg = 0f;
private float fScaleRightLowerLeg = 0f;
// background plane rectangle
private Rect planeRect = new Rect();
private bool planeRectSet = false;
// user body lengths
private bool gotUserBodySize = false;
private bool gotUserArmsSize = false;
private bool gotUserLegsSize = false;
// mesh renderer
private SkinnedMeshRenderer meshRenderer = null;
public void Start()
{
// get references to other components
kinectManager = KinectManager.Instance;
avtController = gameObject.GetComponent<AvatarController>();
// get model transforms
Animator animatorComponent = GetComponent<Animator>();
AvatarController avatarController = GetComponent<AvatarController>();
// get mesh renderer
meshRenderer = GetComponentInChildren<SkinnedMeshRenderer>();
// use the root transform for body scale
bodyScaleTransform = transform;
if (animatorComponent && animatorComponent.GetBoneTransform(HumanBodyBones.Hips))
{
//bodyHipsTransform = animatorComponent.GetBoneTransform (HumanBodyBones.Hips);
leftShoulderScaleTransform = animatorComponent.GetBoneTransform(HumanBodyBones.LeftUpperArm);
leftElbowScaleTransform = animatorComponent.GetBoneTransform(HumanBodyBones.LeftLowerArm);
rightShoulderScaleTransform = animatorComponent.GetBoneTransform(HumanBodyBones.RightUpperArm);
rightElbowScaleTransform = animatorComponent.GetBoneTransform(HumanBodyBones.RightLowerArm);
leftHipScaleTransform = animatorComponent.GetBoneTransform(HumanBodyBones.LeftUpperLeg);
leftKneeScaleTransform = animatorComponent.GetBoneTransform(HumanBodyBones.LeftLowerLeg);
rightHipScaleTransform = animatorComponent.GetBoneTransform(HumanBodyBones.RightUpperLeg);
rightKneeScaleTransform = animatorComponent.GetBoneTransform(HumanBodyBones.RightLowerLeg);
}
else if (avatarController)
{
//bodyHipsTransform = avatarController.GetBoneTransform(avatarController.GetBoneIndexByJoint(KinectInterop.JointType.SpineBase, false));
leftShoulderScaleTransform = avatarController.GetBoneTransform(avatarController.GetBoneIndexByJoint(KinectInterop.JointType.ShoulderLeft, false));
leftElbowScaleTransform = avatarController.GetBoneTransform(avatarController.GetBoneIndexByJoint(KinectInterop.JointType.ElbowLeft, false));
rightShoulderScaleTransform = avatarController.GetBoneTransform(avatarController.GetBoneIndexByJoint(KinectInterop.JointType.ShoulderRight, false));
rightElbowScaleTransform = avatarController.GetBoneTransform(avatarController.GetBoneIndexByJoint(KinectInterop.JointType.ElbowRight, false));
leftHipScaleTransform = avatarController.GetBoneTransform(avatarController.GetBoneIndexByJoint(KinectInterop.JointType.HipLeft, false));
leftKneeScaleTransform = avatarController.GetBoneTransform(avatarController.GetBoneIndexByJoint(KinectInterop.JointType.KneeLeft, false));
rightHipScaleTransform = avatarController.GetBoneTransform(avatarController.GetBoneIndexByJoint(KinectInterop.JointType.HipRight, false));
rightKneeScaleTransform = avatarController.GetBoneTransform(avatarController.GetBoneIndexByJoint(KinectInterop.JointType.KneeRight, false));
}
else
{
// needed transforms could not be found
return;
}
// get model bone scales
modelBodyScale = bodyScaleTransform ? bodyScaleTransform.localScale : Vector3.one;
modelLeftShoulderScale = leftShoulderScaleTransform ? leftShoulderScaleTransform.localScale : Vector3.one;
modelLeftElbowScale = leftElbowScaleTransform ? leftElbowScaleTransform.localScale : Vector3.one;
modelRightShoulderScale = rightShoulderScaleTransform ? rightShoulderScaleTransform.localScale : Vector3.one;
modelRightElbowScale = rightElbowScaleTransform ? rightElbowScaleTransform.localScale : Vector3.one;
modelLeftHipScale = leftHipScaleTransform ? leftHipScaleTransform.localScale : Vector3.one;
modelLeftKneeScale = leftKneeScaleTransform ? leftKneeScaleTransform.localScale : Vector3.one;
modelRightHipScale = rightHipScaleTransform ? rightHipScaleTransform.localScale : Vector3.one;
modelRightKneeScale = rightKneeScaleTransform ? rightKneeScaleTransform.localScale : Vector3.one;
if (animatorComponent && animatorComponent.GetBoneTransform(HumanBodyBones.Hips))
{
GetModelBodyHeight(animatorComponent, ref modelBodyHeight, ref modelBodyWidth);
//Debug.Log (string.Format("MW: {0:F3}, MH: {1:F3}", modelBodyWidth, modelBodyHeight));
GetModelBoneLength(animatorComponent, HumanBodyBones.LeftUpperArm, HumanBodyBones.LeftLowerArm, ref modelLeftUpperArmLength);
GetModelBoneLength(animatorComponent, HumanBodyBones.LeftLowerArm, HumanBodyBones.LeftHand, ref modelLeftLowerArmLength);
GetModelBoneLength(animatorComponent, HumanBodyBones.RightUpperArm, HumanBodyBones.RightLowerArm, ref modelRightUpperArmLength);
GetModelBoneLength(animatorComponent, HumanBodyBones.RightLowerArm, HumanBodyBones.RightHand, ref modelRightLowerArmLength);
GetModelBoneLength(animatorComponent, HumanBodyBones.LeftUpperLeg, HumanBodyBones.LeftLowerLeg, ref modelLeftUpperLegLength);
GetModelBoneLength(animatorComponent, HumanBodyBones.LeftLowerLeg, HumanBodyBones.LeftFoot, ref modelLeftLowerLegLength);
GetModelBoneLength(animatorComponent, HumanBodyBones.RightUpperLeg, HumanBodyBones.RightLowerLeg, ref modelRightUpperLegLength);
GetModelBoneLength(animatorComponent, HumanBodyBones.RightLowerLeg, HumanBodyBones.RightFoot, ref modelRightLowerLegLength);
scalerInited = true;
}
else if (avatarController)
{
GetModelBodyHeight(avatarController, ref modelBodyHeight, ref modelBodyWidth);
//Debug.Log (string.Format("MW: {0:F3}, MH: {1:F3}", modelBodyWidth, modelBodyHeight));
GetModelBoneLength(avatarController, KinectInterop.JointType.ShoulderLeft, KinectInterop.JointType.ElbowLeft, ref modelLeftUpperArmLength);
GetModelBoneLength(avatarController, KinectInterop.JointType.ElbowLeft, KinectInterop.JointType.WristLeft, ref modelLeftLowerArmLength);
GetModelBoneLength(avatarController, KinectInterop.JointType.ShoulderRight, KinectInterop.JointType.ElbowRight, ref modelRightUpperArmLength);
GetModelBoneLength(avatarController, KinectInterop.JointType.ElbowRight, KinectInterop.JointType.WristRight, ref modelRightLowerArmLength);
GetModelBoneLength(avatarController, KinectInterop.JointType.HipLeft, KinectInterop.JointType.KneeLeft, ref modelLeftUpperLegLength);
GetModelBoneLength(avatarController, KinectInterop.JointType.KneeLeft, KinectInterop.JointType.AnkleLeft, ref modelLeftLowerLegLength);
GetModelBoneLength(avatarController, KinectInterop.JointType.HipRight, KinectInterop.JointType.KneeRight, ref modelRightUpperLegLength);
GetModelBoneLength(avatarController, KinectInterop.JointType.KneeRight, KinectInterop.JointType.AnkleRight, ref modelRightLowerLegLength);
scalerInited = true;
}
// update the scale immediately
Update();
}
public void Update()
{
if (scalerInited && kinectManager && kinectManager.IsInitialized())
{
// get the plane rectangle to be used for object overlay
if (backgroundPlane && !planeRectSet)
{
planeRectSet = true;
planeRect.width = 10f * Mathf.Abs(backgroundPlane.localScale.x);
planeRect.height = 10f * Mathf.Abs(backgroundPlane.localScale.z);
planeRect.x = backgroundPlane.position.x - planeRect.width / 2f;
planeRect.y = backgroundPlane.position.y - planeRect.height / 2f;
}
ulong userId = kinectManager.GetUserIdByIndex(playerIndex);
// check user distance and hand positions
if (userId != 0 && minUserDistance > 0f)
{
Vector3 userPos = kinectManager.GetUserPosition(userId);
//bool lHandTracked = kinectManager.IsJointTracked(userId, (int)KinectInterop.JointType.WristLeft);
//Vector3 lHandPos = lHandTracked ? kinectManager.GetJointPosition(userId, (int)KinectInterop.JointType.WristLeft) : Vector3.zero;
//bool rHandTracked = kinectManager.IsJointTracked(userId, (int)KinectInterop.JointType.WristRight);
//Vector3 rHandPos = rHandTracked ? kinectManager.GetJointPosition(userId, (int)KinectInterop.JointType.WristRight) : Vector3.zero;
if (userPos.z < minUserDistance) // ||
//!lHandTracked || (lHandPos.z - userPos.z) <= -0.3f ||
//!rHandTracked || (rHandPos.z - userPos.z) <= -0.3f)
{
// don't scale the model
userId = 0;
//Debug.Log ("Avatar scaling skipped.");
}
}
if (userId != currentUserId)
{
currentUserId = userId;
if (userId != 0)
{
GetUserBodySize(true, true, true);
if (gotUserBodySize)
{
// show the mesh
if (meshRenderer && !meshRenderer.gameObject.activeSelf)
meshRenderer.gameObject.SetActive(true);
// scale avatar initially
ScaleAvatar(0f, true);
}
else
{
// hide the mesh
if (meshRenderer && meshRenderer.gameObject.activeSelf)
meshRenderer.gameObject.SetActive(false);
// consider the user as not tracked
currentUserId = 0;
}
}
else
{
// user not tracked
gotUserBodySize = gotUserArmsSize = gotUserLegsSize = false;
}
}
}
if (currentUserId != 0 && continuousScaling)
{
// scale avatar continuously
GetUserBodySize(true, true, true);
ScaleAvatar(smoothFactor, false);
}
}
// gets the actual sizes of the user bones
public void GetUserBodySize(bool bBody, bool bArms, bool bLegs)
{
//KinectManager kinectManager = KinectManager.Instance;
if (kinectManager == null)
return;
if (bBody)
{
gotUserBodySize = GetUserBodyHeight(kinectManager, bodyScaleFactor, bodyWidthFactor, ref userBodyHeight, ref userBodyWidth);
}
if (bArms)
{
bool gotLeftArmSize = GetUserBoneLength(kinectManager, KinectInterop.JointType.ShoulderLeft, KinectInterop.JointType.ElbowLeft, armScaleFactor, ref leftUpperArmLength);
gotLeftArmSize &= GetUserBoneLength(kinectManager, KinectInterop.JointType.ElbowLeft, KinectInterop.JointType.WristLeft, armScaleFactor, ref leftLowerArmLength);
bool gotRightArmSize = GetUserBoneLength(kinectManager, KinectInterop.JointType.ShoulderRight, KinectInterop.JointType.ElbowRight, armScaleFactor, ref rightUpperArmLength);
gotRightArmSize &= GetUserBoneLength(kinectManager, KinectInterop.JointType.ElbowRight, KinectInterop.JointType.WristRight, armScaleFactor, ref rightLowerArmLength);
gotUserArmsSize = gotLeftArmSize | gotRightArmSize;
if(gotUserArmsSize)
{
EqualizeBoneLength(ref leftUpperArmLength, ref rightUpperArmLength);
EqualizeBoneLength(ref leftLowerArmLength, ref rightLowerArmLength);
}
}
if (bLegs)
{
bool gotLeftLegSize = GetUserBoneLength(kinectManager, KinectInterop.JointType.HipLeft, KinectInterop.JointType.KneeLeft, legScaleFactor, ref leftUpperLegLength);
gotLeftLegSize &= GetUserBoneLength(kinectManager, KinectInterop.JointType.KneeLeft, KinectInterop.JointType.AnkleLeft, legScaleFactor, ref leftLowerLegLength);
bool gotRightLegSize = GetUserBoneLength(kinectManager, KinectInterop.JointType.HipRight, KinectInterop.JointType.KneeRight, legScaleFactor, ref rightUpperLegLength);
gotRightLegSize &= GetUserBoneLength(kinectManager, KinectInterop.JointType.KneeRight, KinectInterop.JointType.AnkleRight, legScaleFactor, ref rightLowerLegLength);
gotUserLegsSize = gotLeftLegSize | gotRightLegSize;
if(gotUserLegsSize)
{
EqualizeBoneLength(ref leftUpperLegLength, ref rightUpperLegLength);
EqualizeBoneLength(ref leftLowerLegLength, ref rightLowerLegLength);
}
}
}
// scales the avatar as needed
public void ScaleAvatar(float fSmooth, bool bInitialScale)
{
// scale body
if (bodyScaleFactor > 0f && gotUserBodySize)
{
SetupBodyScale(bodyScaleTransform, modelBodyScale, modelBodyHeight, modelBodyWidth, userBodyHeight, userBodyWidth,
fSmooth, ref fScaleBodyHeight, ref fScaleBodyWidth);
if (avtController)
{
// recalibrate avatar position due to transform scale change
avtController.offsetCalibrated = false;
// set AC smooth-factor to 0 to prevent flickering (r618-issue)
if (avtController.smoothFactor != 0f)
{
avtController.smoothFactor = 0f;
}
}
}
// scale arms
if (/**bInitialScale &&*/ armScaleFactor > 0f && gotUserArmsSize)
{
float fLeftUpperArmLength = !mirroredAvatar ? leftUpperArmLength : rightUpperArmLength;
SetupBoneScale(leftShoulderScaleTransform, modelLeftShoulderScale, modelLeftUpperArmLength,
fLeftUpperArmLength, fScaleBodyHeight, fSmooth, ref fScaleLeftUpperArm);
float fLeftLowerArmLength = !mirroredAvatar ? leftLowerArmLength : rightLowerArmLength;
SetupBoneScale(leftElbowScaleTransform, modelLeftElbowScale, modelLeftLowerArmLength,
fLeftLowerArmLength, fScaleLeftUpperArm, fSmooth, ref fScaleLeftLowerArm);
float fRightUpperArmLength = !mirroredAvatar ? rightUpperArmLength : leftUpperArmLength;
SetupBoneScale(rightShoulderScaleTransform, modelRightShoulderScale, modelRightUpperArmLength,
fRightUpperArmLength, fScaleBodyHeight, fSmooth, ref fScaleRightUpperArm);
float fRightLowerArmLength = !mirroredAvatar ? rightLowerArmLength : leftLowerArmLength;
SetupBoneScale(rightElbowScaleTransform, modelRightElbowScale, modelRightLowerArmLength,
fRightLowerArmLength, fScaleRightUpperArm, fSmooth, ref fScaleRightLowerArm);
}
// scale legs
if (/**bInitialScale &&*/ legScaleFactor > 0 && gotUserLegsSize)
{
float fLeftUpperLegLength = !mirroredAvatar ? leftUpperLegLength : rightUpperLegLength;
SetupBoneScale(leftHipScaleTransform, modelLeftHipScale, modelLeftUpperLegLength,
fLeftUpperLegLength, fScaleBodyHeight, fSmooth, ref fScaleLeftUpperLeg);
float fLeftLowerLegLength = !mirroredAvatar ? leftLowerLegLength : rightLowerLegLength;
SetupBoneScale(leftKneeScaleTransform, modelLeftKneeScale, modelLeftLowerLegLength,
fLeftLowerLegLength, fScaleLeftUpperLeg, fSmooth, ref fScaleLeftLowerLeg);
float fRightUpperLegLength = !mirroredAvatar ? rightUpperLegLength : leftUpperLegLength;
SetupBoneScale(rightHipScaleTransform, modelRightHipScale, modelRightUpperLegLength,
fRightUpperLegLength, fScaleBodyHeight, fSmooth, ref fScaleRightUpperLeg);
float fRightLowerLegLength = !mirroredAvatar ? rightLowerLegLength : leftLowerLegLength;
SetupBoneScale(rightKneeScaleTransform, modelRightKneeScale, modelRightLowerLegLength,
fRightLowerLegLength, fScaleRightUpperLeg, fSmooth, ref fScaleRightLowerLeg);
}
if (debugText != null)
{
string sDebug = string.Format("BW: {0:F2}/{1:F3}, BH: {2:F2}/{3:F3}\nLUA: {4:F3}, LLA: {5:F3}; RUA: {6:F3}, RLA: {7:F3}\nLUL: {8:F3}, LLL: {9:F3}; RUL: {10:F3}, RLL: {11:F3}",
userBodyWidth, fScaleBodyWidth, userBodyHeight, fScaleBodyHeight,
fScaleLeftUpperArm, fScaleLeftLowerArm,
fScaleRightUpperArm, fScaleRightLowerArm,
fScaleLeftUpperLeg, fScaleLeftLowerLeg,
fScaleRightUpperLeg, fScaleRightLowerLeg);
debugText.text = sDebug;
}
}
private bool GetModelBodyHeight(Animator animatorComponent, ref float height, ref float width)
{
height = 0f;
if (animatorComponent)
{
//Transform hipCenter = animatorComponent.GetBoneTransform(HumanBodyBones.Hips);
Transform leftUpperArm = animatorComponent.GetBoneTransform(HumanBodyBones.LeftUpperArm);
Transform rightUpperArm = animatorComponent.GetBoneTransform(HumanBodyBones.RightUpperArm);
Transform leftUpperLeg = animatorComponent.GetBoneTransform(HumanBodyBones.LeftUpperLeg);
Transform rightUpperLeg = animatorComponent.GetBoneTransform(HumanBodyBones.RightUpperLeg);
if (leftUpperArm && rightUpperArm && leftUpperLeg && rightUpperLeg)
{
Vector3 posShoulderCenter = (leftUpperArm.position + rightUpperArm.position) / 2f;
Vector3 posHipCenter = (leftUpperLeg.position + rightUpperLeg.position) / 2f; // hipCenter.position
//height = (posShoulderCenter.y - posHipCenter.y);
height = (posShoulderCenter - posHipCenter).magnitude;
width = (rightUpperArm.position - leftUpperArm.position).magnitude;
return true;
}
}
return false;
}
private bool GetModelBodyHeight(AvatarController avatarController, ref float height, ref float width)
{
height = 0f;
if (avatarController)
{
Transform leftUpperArm = avatarController.GetBoneTransform(avatarController.GetBoneIndexByJoint(KinectInterop.JointType.ShoulderLeft, false));
Transform rightUpperArm = avatarController.GetBoneTransform(avatarController.GetBoneIndexByJoint(KinectInterop.JointType.ShoulderRight, false));
Transform leftUpperLeg = avatarController.GetBoneTransform(avatarController.GetBoneIndexByJoint(KinectInterop.JointType.HipLeft, false));
Transform rightUpperLeg = avatarController.GetBoneTransform(avatarController.GetBoneIndexByJoint(KinectInterop.JointType.HipRight, false));
if (leftUpperArm && rightUpperArm && leftUpperLeg && rightUpperLeg)
{
Vector3 posShoulderCenter = (leftUpperArm.position + rightUpperArm.position) / 2f;
Vector3 posHipCenter = (leftUpperLeg.position + rightUpperLeg.position) / 2f; // hipCenter.position
//height = (posShoulderCenter.y - posHipCenter.y);
height = (posShoulderCenter - posHipCenter).magnitude;
width = (rightUpperArm.position - leftUpperArm.position).magnitude;
return true;
}
}
return false;
}
private bool GetModelBoneLength(Animator animatorComponent, HumanBodyBones baseJoint, HumanBodyBones endJoint, ref float length)
{
length = 0f;
if (animatorComponent)
{
Transform joint1 = animatorComponent.GetBoneTransform(baseJoint);
Transform joint2 = animatorComponent.GetBoneTransform(endJoint);
if (joint1 && joint2)
{
length = (joint2.position - joint1.position).magnitude;
return true;
}
}
return false;
}
private bool GetModelBoneLength(AvatarController avatarController, KinectInterop.JointType baseJoint, KinectInterop.JointType endJoint, ref float length)
{
length = 0f;
if (avatarController)
{
Transform joint1 = avatarController.GetBoneTransform(avatarController.GetBoneIndexByJoint(baseJoint, false));
Transform joint2 = avatarController.GetBoneTransform(avatarController.GetBoneIndexByJoint(endJoint, false));
if (joint1 && joint2)
{
length = (joint2.position - joint1.position).magnitude;
return true;
}
}
return false;
}
private bool GetUserBodyHeight(KinectManager manager, float scaleFactor, float widthFactor, ref float height, ref float width)
{
height = 0f;
width = 0f;
Vector3 posHipLeft = GetJointPosition(manager, (int)KinectInterop.JointType.HipLeft);
Vector3 posHipRight = GetJointPosition(manager, (int)KinectInterop.JointType.HipRight);
Vector3 posShoulderLeft = GetJointPosition(manager, (int)KinectInterop.JointType.ShoulderLeft);
Vector3 posShoulderRight = GetJointPosition(manager, (int)KinectInterop.JointType.ShoulderRight);
if (posHipLeft != Vector3.zero && posHipRight != Vector3.zero &&
posShoulderLeft != Vector3.zero && posShoulderRight != Vector3.zero)
{
Vector3 posHipCenter = (posHipLeft + posHipRight) / 2f;
Vector3 posShoulderCenter = (posShoulderLeft + posShoulderRight) / 2f;
//height = (posShoulderCenter.y - posHipCenter.y) * scaleFactor;
height = (posShoulderCenter - posHipCenter).magnitude * scaleFactor;
width = (posShoulderRight - posShoulderLeft).magnitude * widthFactor;
return true;
}
return false;
}
private bool GetUserBoneLength(KinectManager manager, KinectInterop.JointType baseJoint, KinectInterop.JointType endJoint, float scaleFactor, ref float length)
{
length = 0f;
Vector3 vPos1 = GetJointPosition(manager, (int)baseJoint);
Vector3 vPos2 = GetJointPosition(manager, (int)endJoint);
if (vPos1 != Vector3.zero && vPos2 != Vector3.zero)
{
length = (vPos2 - vPos1).magnitude * scaleFactor;
return true;
}
return false;
}
private void EqualizeBoneLength(ref float boneLen1, ref float boneLen2)
{
if (boneLen1 < boneLen2)
{
boneLen1 = boneLen2;
}
else
{
boneLen2 = boneLen1;
}
}
private bool SetupBodyScale(Transform scaleTrans, Vector3 modelBodyScale, float modelHeight, float modelWidth, float userHeight, float userWidth,
float fSmooth, ref float heightScale, ref float widthScale)
{
if (modelHeight > 0f && userHeight > 0f)
{
heightScale = userHeight / modelHeight;
}
if (modelWidth > 0f && userWidth > 0f)
{
widthScale = userWidth / modelWidth;
}
else
{
widthScale = heightScale;
}
if (scaleTrans && heightScale > 0f && widthScale > 0f)
{
float depthScale = heightScale; // (heightScale + widthScale) / 2f;
Vector3 newLocalScale = new Vector3(modelBodyScale.x * widthScale, modelBodyScale.y * heightScale, modelBodyScale.z * depthScale);
if (fSmooth != 0f)
scaleTrans.localScale = Vector3.Lerp(scaleTrans.localScale, newLocalScale, fSmooth * Time.deltaTime);
else
scaleTrans.localScale = newLocalScale;
return true;
}
return false;
}
private bool SetupBoneScale(Transform scaleTrans, Vector3 modelBoneScale, float modelBoneLen, float userBoneLen, float parentScale, float fSmooth, ref float boneScale)
{
if (modelBoneLen > 0f && userBoneLen > 0f)
{
boneScale = userBoneLen / modelBoneLen;
}
float localScale = boneScale;
if (boneScale > 0f && parentScale > 0f)
{
localScale = boneScale / parentScale;
}
if (scaleTrans && localScale > 0f)
{
if (fSmooth != 0f)
scaleTrans.localScale = Vector3.Lerp(scaleTrans.localScale, modelBoneScale * localScale, fSmooth * Time.deltaTime);
else
scaleTrans.localScale = modelBoneScale * localScale;
return true;
}
return false;
}
public bool FixJointsBeforeScale()
{
Animator animatorComponent = GetComponent<Animator>();
KinectManager manager = KinectManager.Instance;
if (animatorComponent && modelBodyHeight > 0f && userBodyHeight > 0f)
{
Transform hipCenter = animatorComponent.GetBoneTransform(HumanBodyBones.Hips);
if ((hipCenter.localScale - Vector3.one).magnitude > 0.01f)
return false;
Transform leftUpperLeg = animatorComponent.GetBoneTransform(HumanBodyBones.LeftUpperLeg);
Transform rightUpperLeg = animatorComponent.GetBoneTransform(HumanBodyBones.RightUpperLeg);
Transform leftUpperArm = animatorComponent.GetBoneTransform(HumanBodyBones.LeftUpperArm);
Transform rightUpperArm = animatorComponent.GetBoneTransform(HumanBodyBones.RightUpperArm);
if (leftUpperArm && rightUpperArm && leftUpperLeg && rightUpperLeg)
{
Vector3 posHipCenter = GetJointPosition(manager, (int)KinectInterop.JointType.Pelvis);
Vector3 posHipLeft = GetJointPosition(manager, (int)KinectInterop.JointType.HipLeft);
Vector3 posHipRight = GetJointPosition(manager, (int)KinectInterop.JointType.HipRight);
Vector3 posShoulderLeft = GetJointPosition(manager, (int)KinectInterop.JointType.ShoulderLeft);
Vector3 posShoulderRight = GetJointPosition(manager, (int)KinectInterop.JointType.ShoulderRight);
if (posHipCenter != Vector3.zero && posHipLeft != Vector3.zero && posHipRight != Vector3.zero &&
posShoulderLeft != Vector3.zero && posShoulderRight != Vector3.zero)
{
SetupUnscaledJoint(hipCenter, leftUpperLeg, posHipCenter, (!mirroredAvatar ? posHipLeft : posHipRight), modelBodyHeight, userBodyHeight);
SetupUnscaledJoint(hipCenter, rightUpperLeg, posHipCenter, (!mirroredAvatar ? posHipRight : posHipLeft), modelBodyHeight, userBodyHeight);
SetupUnscaledJoint(hipCenter, leftUpperArm, posHipCenter, (!mirroredAvatar ? posShoulderLeft : posShoulderRight), modelBodyHeight, userBodyHeight);
SetupUnscaledJoint(hipCenter, rightUpperArm, posHipCenter, (!mirroredAvatar ? posShoulderRight : posShoulderLeft), modelBodyHeight, userBodyHeight);
// recalculate model joints
Start();
return true;
}
}
}
return false;
}
// gets the joint position in space
private Vector3 GetJointPosition(KinectManager manager, int joint)
{
Vector3 vPosJoint = Vector3.zero;
if (manager.IsJointTracked(currentUserId, joint))
{
if (backgroundPlane && planeRectSet)
{
// get the plane overlay position
vPosJoint = manager.GetJointPosColorOverlay(currentUserId, joint, sensorIndex, planeRect);
vPosJoint.z = backgroundPlane.position.z;
}
else if (foregroundCamera)
{
// get the background rectangle (use the portrait background, if available)
Rect backgroundRect = foregroundCamera.pixelRect;
PortraitBackground portraitBack = PortraitBackground.Instance;
if (portraitBack && portraitBack.enabled)
{
backgroundRect = portraitBack.GetBackgroundRect();
}
// get the color overlay position
vPosJoint = manager.GetJointPosColorOverlay(currentUserId, joint, sensorIndex, foregroundCamera, backgroundRect);
}
// else
if (vPosJoint == Vector3.zero)
{
vPosJoint = manager.GetJointPosition(currentUserId, joint);
}
}
return vPosJoint;
}
// sets the joint position before scaling
private bool SetupUnscaledJoint(Transform hipCenter, Transform joint, Vector3 posHipCenter, Vector3 posJoint, float modelBoneLen, float userBoneLen)
{
float boneScale = 0f;
if (modelBoneLen > 0f && userBoneLen > 0f)
{
boneScale = userBoneLen / modelBoneLen;
//boneScale = 1f;
}
if (boneScale > 0f)
{
Vector3 posDiff = (posJoint - posHipCenter) / boneScale;
if (foregroundCamera == null && backgroundPlane == null)
posDiff.z = 0f; // ignore difference in z (non-overlay mode)
Vector3 posJointNew = hipCenter.position + posDiff;
joint.position = posJointNew;
return true;
}
return false;
}
}
}
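The core of AvatarScaler is a simple ratio: each bone's target scale is the measured user bone length divided by the corresponding model bone length, divided again by the parent bone's scale so that scales do not compound down the hierarchy, and optionally smoothed over time. A stripped-down sketch of that rule, outside the Unity component plumbing, is given below; the class and method names are illustrative assumptions, not part of this commit.

using UnityEngine;

// Illustrative restatement (not in this commit) of the math used by SetupBoneScale()
// and the smoothing applied in ScaleAvatar().
public static class BoneScaleMath
{
    // Returns the local scale factor for a bone, given measured lengths and the parent's scale.
    public static float ComputeLocalScale(float modelBoneLen, float userBoneLen, float parentScale)
    {
        if (modelBoneLen <= 0f || userBoneLen <= 0f)
            return 1f;                                   // nothing measured: leave the bone unscaled here
                                                         // (the component keeps its previous estimate instead)

        float boneScale = userBoneLen / modelBoneLen;    // absolute user/model ratio for this bone
        return parentScale > 0f ? boneScale / parentScale : boneScale;
    }

    // Smoothly approaches the target scale; smoothFactor == 0 snaps immediately,
    // matching the behaviour of the component above.
    public static Vector3 SmoothScale(Vector3 current, Vector3 target, float smoothFactor)
    {
        return smoothFactor != 0f
            ? Vector3.Lerp(current, target, smoothFactor * Time.deltaTime)
            : target;
    }
}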

Assets/Azure/KinectScripts/AvatarScaler.cs.meta (+11)

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: de8f786bc13e05e4ebcbf32c68cf3515
MonoImporter:
  externalObjects: {}
  serializedVersion: 2
  defaultReferences: []
  executionOrder: 0
  icon: {instanceID: 0}
  userData:
  assetBundleName:
  assetBundleVariant:

Assets/Azure/KinectScripts/BackgroundColorCamDepthImage.cs (+280)

@@ -0,0 +1,280 @@
using UnityEngine;
using System.Collections;
using com.rfilkov.kinect;
using System;
namespace com.rfilkov.components
{
/// <summary>
/// BackgroundColorCamDepthImage is a component that displays the color camera aligned depth image on a RawImage texture, usually the scene background.
/// </summary>
public class BackgroundColorCamDepthImage : MonoBehaviour
{
[Tooltip("Depth sensor index - 0 is the 1st one, 1 - the 2nd one, etc.")]
public int sensorIndex = 0;
[Tooltip("RawImage used to display the color camera feed.")]
public UnityEngine.UI.RawImage backgroundImage;
[Tooltip("Camera used to display the background image. Set it, if you'd like to allow background image to resize, to match the color image's aspect ratio.")]
public Camera backgroundCamera;
// last camera rect width & height
private float lastCamRectW = 0;
private float lastCamRectH = 0;
// reference to the kinectManager
private KinectManager kinectManager = null;
private KinectInterop.SensorData sensorData = null;
private Vector2 initialAnchorPos = Vector2.zero;
// color-camera aligned frames
private ulong lastColorCamDepthFrameTime = 0;
// color-camera aligned texture and buffers
private RenderTexture depthImageTexture = null;
private Material depthImageMaterial = null;
private ComputeBuffer depthImageBuffer = null;
private ComputeBuffer depthHistBuffer = null;
// depth image hist data
protected int[] depthHistBufferData = null;
protected int[] equalHistBufferData = null;
protected int depthHistTotalPoints = 0;
void Start()
{
if (backgroundImage == null)
{
backgroundImage = GetComponent<UnityEngine.UI.RawImage>();
}
kinectManager = KinectManager.Instance;
sensorData = kinectManager != null ? kinectManager.GetSensorData(sensorIndex) : null;
if(sensorData != null)
{
// enable color camera aligned depth frames
sensorData.sensorInterface.EnableColorCameraDepthFrame(sensorData, true);
// create the output texture and needed buffers
depthImageTexture = KinectInterop.CreateRenderTexture(depthImageTexture, sensorData.colorImageWidth, sensorData.colorImageHeight);
depthImageMaterial = new Material(Shader.Find("Kinect/DepthHistImageShader"));
//int depthBufferLength = sensorData.colorImageWidth * sensorData.colorImageHeight >> 1;
//depthImageBuffer = KinectInterop.CreateComputeBuffer(depthImageBuffer, depthBufferLength, sizeof(uint));
depthHistBuffer = KinectInterop.CreateComputeBuffer(depthHistBuffer, DepthSensorBase.MAX_DEPTH_DISTANCE_MM + 1, sizeof(int));
depthHistBufferData = new int[DepthSensorBase.MAX_DEPTH_DISTANCE_MM + 1];
equalHistBufferData = new int[DepthSensorBase.MAX_DEPTH_DISTANCE_MM + 1];
}
}
void OnDestroy()
{
if (depthImageTexture)
{
depthImageTexture.Release();
depthImageTexture = null;
}
if (depthImageBuffer != null)
{
depthImageBuffer.Dispose();
depthImageBuffer = null;
}
if (depthHistBuffer != null)
{
depthHistBuffer.Dispose();
depthHistBuffer = null;
}
if (sensorData != null)
{
// disable color camera aligned depth frames
sensorData.sensorInterface.EnableColorCameraDepthFrame(sensorData, false);
}
}
void Update()
{
if (kinectManager && kinectManager.IsInitialized())
{
float cameraWidth = backgroundCamera ? backgroundCamera.pixelRect.width : 0f;
float cameraHeight = backgroundCamera ? backgroundCamera.pixelRect.height : 0f;
// check for new color camera aligned frames
UpdateTextureWithNewFrame();
if (backgroundImage && depthImageTexture != null && (backgroundImage.texture == null ||
backgroundImage.texture.width != depthImageTexture.width || backgroundImage.texture.height != depthImageTexture.height ||
lastCamRectW != cameraWidth || lastCamRectH != cameraHeight))
{
lastCamRectW = cameraWidth;
lastCamRectH = cameraHeight;
// enable color camera aligned depth frames
sensorData = kinectManager.GetSensorData(sensorIndex); // sensor data may be re-created after sensor-int restart
sensorData.sensorInterface.EnableColorCameraDepthFrame(sensorData, true);
// set background texture
backgroundImage.texture = depthImageTexture;
backgroundImage.rectTransform.localScale = sensorData.colorImageScale; // kinectManager.GetColorImageScale(sensorIndex);
backgroundImage.color = Color.white;
if (backgroundCamera != null)
{
// adjust image's size and position to match the stream aspect ratio
int colorImageWidth = sensorData.colorImageWidth; // kinectManager.GetColorImageWidth(sensorIndex);
int colorImageHeight = sensorData.colorImageHeight; // kinectManager.GetColorImageHeight(sensorIndex);
if (colorImageWidth == 0 || colorImageHeight == 0)
return;
RectTransform rectImage = backgroundImage.rectTransform;
float rectWidth = (rectImage.anchorMin.x != rectImage.anchorMax.x) ? cameraWidth * (rectImage.anchorMax.x - rectImage.anchorMin.x) : rectImage.sizeDelta.x;
float rectHeight = (rectImage.anchorMin.y != rectImage.anchorMax.y) ? cameraHeight * (rectImage.anchorMax.y - rectImage.anchorMin.y) : rectImage.sizeDelta.y;
if (colorImageWidth > colorImageHeight)
rectWidth = rectHeight * colorImageWidth / colorImageHeight;
else
rectHeight = rectWidth * colorImageHeight / colorImageWidth;
Vector2 pivotOffset = (rectImage.pivot - new Vector2(0.5f, 0.5f)) * 2f;
Vector2 imageScale = sensorData.colorImageScale; // (Vector2)kinectManager.GetColorImageScale(sensorIndex);
Vector2 anchorPos = rectImage.anchoredPosition + pivotOffset * imageScale * new Vector2(rectWidth, rectHeight);
if (rectImage.anchorMin.x != rectImage.anchorMax.x)
{
rectWidth = -(cameraWidth - rectWidth);
}
if (rectImage.anchorMin.y != rectImage.anchorMax.y)
{
rectHeight = -(cameraHeight - rectHeight);
}
rectImage.sizeDelta = new Vector2(rectWidth, rectHeight);
rectImage.anchoredPosition = initialAnchorPos = anchorPos;
}
}
//if (backgroundImage)
//{
// // update the anchor position, if needed
// if (sensorData != null && sensorData.sensorInterface != null)
// {
// Vector2 updatedAnchorPos = initialAnchorPos + sensorData.sensorInterface.GetBackgroundImageAnchorPos(sensorData);
// if (backgroundImage.rectTransform.anchoredPosition != updatedAnchorPos)
// {
// backgroundImage.rectTransform.anchoredPosition = updatedAnchorPos;
// }
// }
//}
}
else
{
// reset the background texture, if needed
if (backgroundImage && backgroundImage.texture != null)
{
backgroundImage.texture = null;
// disable color camera aligned depth frames
if(sensorData != null && sensorData.sensorInterface != null)
{
sensorData.sensorInterface.EnableColorCameraDepthFrame(sensorData, false);
}
}
}
//RectTransform rectTransform = backgroundImage.rectTransform;
//Debug.Log("pivot: " + rectTransform.pivot + ", anchorPos: " + rectTransform.anchoredPosition + ", \nanchorMin: " + rectTransform.anchorMin + ", anchorMax: " + rectTransform.anchorMax);
}
// checks for new color-camera aligned frames, and composes an updated depth texture, if needed
private void UpdateTextureWithNewFrame()
{
if (sensorData == null || sensorData.sensorInterface == null || sensorData.colorCamDepthImage == null)
return;
// get the updated depth frame
if (lastColorCamDepthFrameTime != sensorData.lastColorCamDepthFrameTime)
{
lastColorCamDepthFrameTime = sensorData.lastColorCamDepthFrameTime;
if (depthImageTexture.width != sensorData.colorImageWidth || depthImageTexture.height != sensorData.colorImageHeight)
{
depthImageTexture = KinectInterop.CreateRenderTexture(depthImageTexture, sensorData.colorImageWidth, sensorData.colorImageHeight);
}
Array.Clear(depthHistBufferData, 0, depthHistBufferData.Length);
Array.Clear(equalHistBufferData, 0, equalHistBufferData.Length);
depthHistTotalPoints = 0;
// get configured min & max distances
float minDistance = ((DepthSensorBase)sensorData.sensorInterface).minDepthDistance;
float maxDistance = ((DepthSensorBase)sensorData.sensorInterface).maxDepthDistance;
int depthMinDistance = (int)(minDistance * 1000f);
int depthMaxDistance = (int)(maxDistance * 1000f);
int frameLen = sensorData.colorCamDepthImage.Length;
for (int i = 0; i < frameLen; i++)
{
int depth = sensorData.colorCamDepthImage[i];
int limDepth = (depth <= DepthSensorBase.MAX_DEPTH_DISTANCE_MM) ? depth : 0;
if (limDepth > 0)
{
depthHistBufferData[limDepth]++;
depthHistTotalPoints++;
}
}
equalHistBufferData[0] = depthHistBufferData[0];
for (int i = 1; i < depthHistBufferData.Length; i++)
{
equalHistBufferData[i] = equalHistBufferData[i - 1] + depthHistBufferData[i];
}
// make depth 0 equal to the max-depth
equalHistBufferData[0] = equalHistBufferData[equalHistBufferData.Length - 1];
int depthBufferLength = sensorData.colorCamDepthImage.Length >> 1;
if(depthImageBuffer == null || depthImageBuffer.count != depthBufferLength)
{
depthImageBuffer = KinectInterop.CreateComputeBuffer(depthImageBuffer, depthBufferLength, sizeof(uint));
}
KinectInterop.SetComputeBufferData(depthImageBuffer, sensorData.colorCamDepthImage, depthBufferLength, sizeof(uint));
if (depthHistBuffer != null)
{
KinectInterop.SetComputeBufferData(depthHistBuffer, equalHistBufferData, equalHistBufferData.Length, sizeof(int));
}
depthImageMaterial.SetInt("_TexResX", sensorData.colorImageWidth);
depthImageMaterial.SetInt("_TexResY", sensorData.colorImageHeight);
depthImageMaterial.SetInt("_MinDepth", depthMinDistance);
depthImageMaterial.SetInt("_MaxDepth", depthMaxDistance);
depthImageMaterial.SetBuffer("_DepthMap", depthImageBuffer);
depthImageMaterial.SetBuffer("_HistMap", depthHistBuffer);
depthImageMaterial.SetInt("_TotalPoints", depthHistTotalPoints);
Graphics.Blit(null, depthImageTexture, depthImageMaterial);
}
}
}
}
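What UpdateTextureWithNewFrame() prepares for the shader is essentially histogram equalisation of the depth frame: a per-millimetre histogram is accumulated into a cumulative distribution, and each pixel's brightness is derived from its position in that distribution, so depth ranges with many samples get more of the grey range. The shader itself (Kinect/DepthHistImageShader) is not part of this diff, so the CPU-side sketch below only approximates the mapping; the DepthHistEqual helper and the exact formula are assumptions.

// Illustrative CPU-side approximation (not in this commit) of the histogram-equalised
// depth shading that BackgroundColorCamDepthImage sets up for its shader.
public static class DepthHistEqual
{
    // depthFrame: depth per pixel in millimetres; maxDepthMm: upper clamp, e.g. DepthSensorBase.MAX_DEPTH_DISTANCE_MM
    public static float[] Equalize(ushort[] depthFrame, int maxDepthMm)
    {
        int[] hist = new int[maxDepthMm + 1];
        int totalPoints = 0;

        foreach (ushort d in depthFrame)
        {
            if (d > 0 && d <= maxDepthMm) { hist[d]++; totalPoints++; }
        }

        // cumulative histogram; index 0 is treated as "max depth", as in the component above
        int[] cdf = new int[maxDepthMm + 1];
        cdf[0] = hist[0];
        for (int i = 1; i <= maxDepthMm; i++) cdf[i] = cdf[i - 1] + hist[i];
        cdf[0] = cdf[maxDepthMm];

        // brightness in [0,1]: nearer pixels (small cumulative count) come out brighter
        float[] shades = new float[depthFrame.Length];
        for (int i = 0; i < depthFrame.Length; i++)
        {
            int d = depthFrame[i] <= maxDepthMm ? depthFrame[i] : 0;
            shades[i] = totalPoints > 0 ? 1f - (float)cdf[d] / totalPoints : 0f;
        }

        return shades;
    }
}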

Assets/Azure/KinectScripts/BackgroundColorCamDepthImage.cs.meta (+12)

@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: bcdfb9864840151469f8a2a0ba31d093
timeCreated: 1481733120
licenseType: Store
MonoImporter:
  serializedVersion: 2
  defaultReferences: []
  executionOrder: 0
  icon: {instanceID: 0}
  userData:
  assetBundleName:
  assetBundleVariant:

Assets/Azure/KinectScripts/BackgroundColorCamInfraredImage.cs (+223)

@@ -0,0 +1,223 @@
using UnityEngine;
using System.Collections;
using com.rfilkov.kinect;
using System;
namespace com.rfilkov.components
{
/// <summary>
/// BackgroundColorCamInfraredImage is a component that displays the color camera aligned infrared image on a RawImage texture, usually the scene background.
/// </summary>
public class BackgroundColorCamInfraredImage : MonoBehaviour
{
[Tooltip("Depth sensor index - 0 is the 1st one, 1 - the 2nd one, etc.")]
public int sensorIndex = 0;
[Tooltip("RawImage used to display the color camera feed.")]
public UnityEngine.UI.RawImage backgroundImage;
[Tooltip("Camera used to display the background image. Set it, if you'd like to allow background image to resize, to match the color image's aspect ratio.")]
public Camera backgroundCamera;
// last camera rect width & height
private float lastCamRectW = 0;
private float lastCamRectH = 0;
// reference to the kinectManager
private KinectManager kinectManager = null;
private KinectInterop.SensorData sensorData = null;
private Vector2 initialAnchorPos = Vector2.zero;
// color-camera aligned frames
private ulong lastColorCamInfraredFrameTime = 0;
// color-camera aligned texture and buffers
private RenderTexture infraredImageTexture = null;
private Material infraredImageMaterial = null;
private ComputeBuffer infraredImageBuffer = null;
void Start()
{
if (backgroundImage == null)
{
backgroundImage = GetComponent<UnityEngine.UI.RawImage>();
}
kinectManager = KinectManager.Instance;
sensorData = kinectManager != null ? kinectManager.GetSensorData(sensorIndex) : null;
if(sensorData != null)
{
// enable color camera aligned infrared frames
sensorData.sensorInterface.EnableColorCameraInfraredFrame(sensorData, true, false);
// create the output texture and needed buffers
infraredImageTexture = KinectInterop.CreateRenderTexture(infraredImageTexture, sensorData.colorImageWidth, sensorData.colorImageHeight);
infraredImageMaterial = new Material(Shader.Find("Kinect/InfraredImageShader"));
//int infraredBufferLength = sensorData.colorImageWidth * sensorData.colorImageHeight >> 1;
//infraredImageBuffer = KinectInterop.CreateComputeBuffer(infraredImageBuffer, infraredBufferLength, sizeof(uint));
}
}
void OnDestroy()
{
if (infraredImageTexture)
{
infraredImageTexture.Release();
infraredImageTexture = null;
}
if (infraredImageBuffer != null)
{
infraredImageBuffer.Dispose();
infraredImageBuffer = null;
}
if (sensorData != null)
{
// disable color camera aligned infrared frames
sensorData.sensorInterface.EnableColorCameraInfraredFrame(sensorData, false, false);
}
}
void Update()
{
if (kinectManager && kinectManager.IsInitialized())
{
float cameraWidth = backgroundCamera ? backgroundCamera.pixelRect.width : 0f;
float cameraHeight = backgroundCamera ? backgroundCamera.pixelRect.height : 0f;
// check for new color camera aligned frames
UpdateTextureWithNewFrame();
if (backgroundImage && infraredImageTexture != null && (backgroundImage.texture == null ||
backgroundImage.texture.width != infraredImageTexture.width || backgroundImage.texture.height != infraredImageTexture.height ||
lastCamRectW != cameraWidth || lastCamRectH != cameraHeight))
{
lastCamRectW = cameraWidth;
lastCamRectH = cameraHeight;
// enable color camera aligned infrared frames
sensorData = kinectManager.GetSensorData(sensorIndex); // sensor data may be re-created after sensor-int restart
sensorData.sensorInterface.EnableColorCameraInfraredFrame(sensorData, true, false);
backgroundImage.texture = infraredImageTexture;
backgroundImage.rectTransform.localScale = sensorData.colorImageScale; // kinectManager.GetColorImageScale(sensorIndex);
backgroundImage.color = Color.white;
if (backgroundCamera != null)
{
// adjust image's size and position to match the stream aspect ratio
int colorImageWidth = sensorData.colorImageWidth; // kinectManager.GetColorImageWidth(sensorIndex);
int colorImageHeight = sensorData.colorImageHeight; // kinectManager.GetColorImageHeight(sensorIndex);
if (colorImageWidth == 0 || colorImageHeight == 0)
return;
RectTransform rectImage = backgroundImage.rectTransform;
float rectWidth = (rectImage.anchorMin.x != rectImage.anchorMax.x) ? cameraWidth * (rectImage.anchorMax.x - rectImage.anchorMin.x) : rectImage.sizeDelta.x;
float rectHeight = (rectImage.anchorMin.y != rectImage.anchorMax.y) ? cameraHeight * (rectImage.anchorMax.y - rectImage.anchorMin.y) : rectImage.sizeDelta.y;
if (colorImageWidth > colorImageHeight)
rectWidth = rectHeight * colorImageWidth / colorImageHeight;
else
rectHeight = rectWidth * colorImageHeight / colorImageWidth;
Vector2 pivotOffset = (rectImage.pivot - new Vector2(0.5f, 0.5f)) * 2f;
Vector2 imageScale = sensorData.colorImageScale; // (Vector2)kinectManager.GetColorImageScale(sensorIndex);
Vector2 anchorPos = rectImage.anchoredPosition + pivotOffset * imageScale * new Vector2(rectWidth, rectHeight);
if (rectImage.anchorMin.x != rectImage.anchorMax.x)
{
rectWidth = -(cameraWidth - rectWidth);
}
if (rectImage.anchorMin.y != rectImage.anchorMax.y)
{
rectHeight = -(cameraHeight - rectHeight);
}
rectImage.sizeDelta = new Vector2(rectWidth, rectHeight);
rectImage.anchoredPosition = initialAnchorPos = anchorPos;
}
}
//if (backgroundImage)
//{
// // update the anchor position, if needed
// if (sensorData != null && sensorData.sensorInterface != null)
// {
// Vector2 updatedAnchorPos = initialAnchorPos + sensorData.sensorInterface.GetBackgroundImageAnchorPos(sensorData);
// if (backgroundImage.rectTransform.anchoredPosition != updatedAnchorPos)
// {
// backgroundImage.rectTransform.anchoredPosition = updatedAnchorPos;
// }
// }
//}
}
else
{
// reset the background texture, if needed
if (backgroundImage && backgroundImage.texture != null)
{
backgroundImage.texture = null;
// disable color camera aligned infrared frames
if (sensorData != null && sensorData.sensorInterface != null)
{
sensorData.sensorInterface.EnableColorCameraInfraredFrame(sensorData, false, false);
}
}
}
//RectTransform rectTransform = backgroundImage.rectTransform;
//Debug.Log("pivot: " + rectTransform.pivot + ", anchorPos: " + rectTransform.anchoredPosition + ", \nanchorMin: " + rectTransform.anchorMin + ", anchorMax: " + rectTransform.anchorMax);
}
// checks for new color-camera aligned frames, and composes an updated infrared texture, if needed
private void UpdateTextureWithNewFrame()
{
if (sensorData == null || sensorData.sensorInterface == null || sensorData.colorCamInfraredImage == null)
return;
// get the updated infrared
if (lastColorCamInfraredFrameTime != sensorData.lastColorCamInfraredFrameTime)
{
lastColorCamInfraredFrameTime = sensorData.lastColorCamInfraredFrameTime;
if (infraredImageTexture.width != sensorData.colorImageWidth || infraredImageTexture.height != sensorData.colorImageHeight)
{
infraredImageTexture = KinectInterop.CreateRenderTexture(infraredImageTexture, sensorData.colorImageWidth, sensorData.colorImageHeight);
}
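// each uint element of the compute buffer packs two 16-bit infrared samples (the raw frame is presumably a ushort array), hence the halved buffer length below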
int infraredBufferLength = sensorData.colorCamInfraredImage.Length >> 1;
if (infraredImageBuffer == null || infraredImageBuffer.count != infraredBufferLength)
{
infraredImageBuffer = KinectInterop.CreateComputeBuffer(infraredImageBuffer, infraredBufferLength, sizeof(uint));
}
KinectInterop.SetComputeBufferData(infraredImageBuffer, sensorData.colorCamInfraredImage, infraredBufferLength, sizeof(uint));
float minInfraredValue = ((DepthSensorBase)sensorData.sensorInterface).GetMinInfraredValue();
float maxInfraredValue = ((DepthSensorBase)sensorData.sensorInterface).GetMaxInfraredValue();
infraredImageMaterial.SetInt("_TexResX", sensorData.colorImageWidth);
infraredImageMaterial.SetInt("_TexResY", sensorData.colorImageHeight);
infraredImageMaterial.SetFloat("_MinValue", minInfraredValue);
infraredImageMaterial.SetFloat("_MaxValue", maxInfraredValue);
infraredImageMaterial.SetBuffer("_InfraredMap", infraredImageBuffer);
Graphics.Blit(null, infraredImageTexture, infraredImageMaterial);
}
}
}
}
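A minimal usage sketch for the component above (assumptions: the scene already contains a configured KinectManager and a UI Canvas, and the Kinect/InfraredImageShader is available at runtime; the class and field names below are illustrative and not part of the package):

using UnityEngine;
using UnityEngine.UI;
using com.rfilkov.components;

// attaches BackgroundColorCamInfraredImage to a full-screen RawImage at runtime;
// the same wiring can of course be done directly in the Unity editor instead
public class InfraredBackgroundSetup : MonoBehaviour
{
    public RawImage targetImage;    // RawImage stretched over the canvas
    public Camera uiCamera;         // camera rendering the background / canvas

    void Awake()
    {
        var bgInfrared = targetImage.gameObject.AddComponent<BackgroundColorCamInfraredImage>();
        bgInfrared.sensorIndex = 0;               // first connected sensor
        bgInfrared.backgroundImage = targetImage;
        bgInfrared.backgroundCamera = uiCamera;   // optional - lets the image resize to the stream's aspect ratio
    }
}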

12
Assets/Azure/KinectScripts/BackgroundColorCamInfraredImage.cs.meta

@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: db2e5a5269099984a990878f9ccceb23
timeCreated: 1481733120
licenseType: Store
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

322
Assets/Azure/KinectScripts/BackgroundColorCamUserImage.cs

@ -0,0 +1,322 @@
using UnityEngine;
using System.Collections;
using com.rfilkov.kinect;
using System;
namespace com.rfilkov.components
{
/// <summary>
/// BackgroundColorCamUserImage is a component that displays the color-camera-aligned user-body image on a RawImage texture, usually the scene background.
/// </summary>
public class BackgroundColorCamUserImage : MonoBehaviour
{
[Tooltip("Depth sensor index - 0 is the 1st one, 1 - the 2nd one, etc.")]
public int sensorIndex = 0;
[Tooltip("Index of the player, tracked by this component. -1 means all players, 0 - the 1st player, 1 - the 2nd one, 2 - the 3rd one, etc.")]
public int playerIndex = -1;
[Tooltip("RawImage used to display the color camera feed.")]
public UnityEngine.UI.RawImage backgroundImage;
[Tooltip("Camera used to display the background image. Set it, if you'd like to allow background image to resize, to match the color image's aspect ratio.")]
public Camera backgroundCamera;
// last camera rect width & height
private float lastCamRectW = 0;
private float lastCamRectH = 0;
// reference to the kinectManager
private KinectManager kinectManager = null;
private KinectInterop.SensorData sensorData = null;
private Vector2 initialAnchorPos = Vector2.zero;
// color-camera aligned frames
private ulong lastColorCamDepthFrameTime = 0;
private ulong lastColorCamBodyIndexFrameTime = 0;
// color-camera aligned texture and buffers
private RenderTexture bodyImageTexture = null;
private Material bodyImageMaterial = null;
private ComputeBuffer bodyIndexBuffer = null;
private ComputeBuffer depthImageBuffer = null;
private ComputeBuffer bodyHistBuffer = null;
// body image hist data
protected int[] depthBodyBufferData = null;
protected int[] equalBodyBufferData = null;
protected int bodyHistTotalPoints = 0;
void Start()
{
if (backgroundImage == null)
{
backgroundImage = GetComponent<UnityEngine.UI.RawImage>();
}
kinectManager = KinectManager.Instance;
sensorData = kinectManager != null ? kinectManager.GetSensorData(sensorIndex) : null;
if(sensorData != null)
{
// enable color camera aligned depth & body-index frames
sensorData.sensorInterface.EnableColorCameraDepthFrame(sensorData, true);
sensorData.sensorInterface.EnableColorCameraBodyIndexFrame(sensorData, true);
// create the user texture and needed buffers
//bodyImageTexture = KinectInterop.CreateRenderTexture(bodyImageTexture, sensorData.colorImageWidth, sensorData.colorImageHeight);
bodyImageMaterial = new Material(Shader.Find("Kinect/UserHistImageShader"));
bodyHistBuffer = KinectInterop.CreateComputeBuffer(bodyHistBuffer, DepthSensorBase.MAX_DEPTH_DISTANCE_MM + 1, sizeof(int));
depthBodyBufferData = new int[DepthSensorBase.MAX_DEPTH_DISTANCE_MM + 1];
equalBodyBufferData = new int[DepthSensorBase.MAX_DEPTH_DISTANCE_MM + 1];
}
}
void OnDestroy()
{
if (bodyImageTexture)
{
bodyImageTexture.Release();
bodyImageTexture = null;
}
if (bodyIndexBuffer != null)
{
bodyIndexBuffer.Dispose();
bodyIndexBuffer = null;
}
if (depthImageBuffer != null)
{
depthImageBuffer.Dispose();
depthImageBuffer = null;
}
if (bodyHistBuffer != null)
{
bodyHistBuffer.Dispose();
bodyHistBuffer = null;
}
if (sensorData != null)
{
// disable color camera aligned depth & body-index frames
sensorData.sensorInterface.EnableColorCameraDepthFrame(sensorData, false);
sensorData.sensorInterface.EnableColorCameraBodyIndexFrame(sensorData, false);
}
}
void Update()
{
if (kinectManager && kinectManager.IsInitialized())
{
float cameraWidth = backgroundCamera ? backgroundCamera.pixelRect.width : 0f;
float cameraHeight = backgroundCamera ? backgroundCamera.pixelRect.height : 0f;
// check for new color camera aligned frames
UpdateTextureWithNewFrame();
if (backgroundImage && bodyImageTexture != null && (backgroundImage.texture == null ||
backgroundImage.texture.width != bodyImageTexture.width || backgroundImage.texture.height != bodyImageTexture.height ||
lastCamRectW != cameraWidth || lastCamRectH != cameraHeight))
{
lastCamRectW = cameraWidth;
lastCamRectH = cameraHeight;
// enable color camera aligned depth & body-index frames
sensorData = kinectManager.GetSensorData(sensorIndex); // sensor data may be re-created after sensor-int restart
sensorData.sensorInterface.EnableColorCameraDepthFrame(sensorData, true);
sensorData.sensorInterface.EnableColorCameraBodyIndexFrame(sensorData, true);
backgroundImage.texture = bodyImageTexture;
backgroundImage.rectTransform.localScale = sensorData.colorImageScale; // kinectManager.GetColorImageScale(sensorIndex);
backgroundImage.color = Color.white;
if (backgroundCamera != null)
{
// adjust image's size and position to match the stream aspect ratio
int colorImageWidth = sensorData.colorImageWidth; // kinectManager.GetColorImageWidth(sensorIndex);
int colorImageHeight = sensorData.colorImageHeight; // kinectManager.GetColorImageHeight(sensorIndex);
if (colorImageWidth == 0 || colorImageHeight == 0)
return;
RectTransform rectImage = backgroundImage.rectTransform;
float rectWidth = (rectImage.anchorMin.x != rectImage.anchorMax.x) ? cameraWidth * (rectImage.anchorMax.x - rectImage.anchorMin.x) : rectImage.sizeDelta.x;
float rectHeight = (rectImage.anchorMin.y != rectImage.anchorMax.y) ? cameraHeight * (rectImage.anchorMax.y - rectImage.anchorMin.y) : rectImage.sizeDelta.y;
if (colorImageWidth > colorImageHeight)
rectWidth = rectHeight * colorImageWidth / colorImageHeight;
else
rectHeight = rectWidth * colorImageHeight / colorImageWidth;
Vector2 pivotOffset = (rectImage.pivot - new Vector2(0.5f, 0.5f)) * 2f;
Vector2 imageScale = sensorData.colorImageScale; // (Vector2)kinectManager.GetColorImageScale(sensorIndex);
Vector2 anchorPos = rectImage.anchoredPosition + pivotOffset * imageScale * new Vector2(rectWidth, rectHeight);
if (rectImage.anchorMin.x != rectImage.anchorMax.x)
{
rectWidth = -(cameraWidth - rectWidth);
}
if (rectImage.anchorMin.y != rectImage.anchorMax.y)
{
rectHeight = -(cameraHeight - rectHeight);
}
rectImage.sizeDelta = new Vector2(rectWidth, rectHeight);
rectImage.anchoredPosition = initialAnchorPos = anchorPos;
}
}
//if (backgroundImage)
//{
// // update the anchor position, if needed
// if (sensorData != null && sensorData.sensorInterface != null)
// {
// Vector2 updatedAnchorPos = initialAnchorPos + sensorData.sensorInterface.GetBackgroundImageAnchorPos(sensorData);
// if (backgroundImage.rectTransform.anchoredPosition != updatedAnchorPos)
// {
// backgroundImage.rectTransform.anchoredPosition = updatedAnchorPos;
// }
// }
//}
}
else
{
// reset the background texture, if needed
if (backgroundImage && backgroundImage.texture != null)
{
backgroundImage.texture = null;
if (sensorData != null)
{
// disable color camera aligned depth & body-index frames
sensorData.sensorInterface.EnableColorCameraDepthFrame(sensorData, false);
sensorData.sensorInterface.EnableColorCameraBodyIndexFrame(sensorData, false);
}
}
}
//RectTransform rectTransform = backgroundImage.rectTransform;
//Debug.Log("pivot: " + rectTransform.pivot + ", anchorPos: " + rectTransform.anchoredPosition + ", \nanchorMin: " + rectTransform.anchorMin + ", anchorMax: " + rectTransform.anchorMax);
}
// checks for new color-camera aligned frames, and composes an updated user-body texture, if needed
private void UpdateTextureWithNewFrame()
{
if (sensorData == null || sensorData.sensorInterface == null || sensorData.colorCamBodyIndexImage == null || sensorData.colorCamDepthImage == null)
return;
if (sensorData.colorImageWidth == 0 || sensorData.colorImageHeight == 0 || sensorData.lastColorCamDepthFrameTime == 0 || sensorData.lastColorCamBodyIndexFrameTime == 0)
return;
// get body index frame
if (lastColorCamDepthFrameTime != sensorData.lastColorCamDepthFrameTime || lastColorCamBodyIndexFrameTime != sensorData.lastColorCamBodyIndexFrameTime)
{
lastColorCamDepthFrameTime = sensorData.lastColorCamDepthFrameTime;
lastColorCamBodyIndexFrameTime = sensorData.lastColorCamBodyIndexFrameTime;
if(bodyImageTexture == null || bodyImageTexture.width != sensorData.colorImageWidth || bodyImageTexture.height != sensorData.colorImageHeight)
{
bodyImageTexture = KinectInterop.CreateRenderTexture(bodyImageTexture, sensorData.colorImageWidth, sensorData.colorImageHeight);
}
Array.Clear(depthBodyBufferData, 0, depthBodyBufferData.Length);
Array.Clear(equalBodyBufferData, 0, equalBodyBufferData.Length);
bodyHistTotalPoints = 0;
// get configured min & max distances
float minDistance = ((DepthSensorBase)sensorData.sensorInterface).minDepthDistance;
float maxDistance = ((DepthSensorBase)sensorData.sensorInterface).maxDepthDistance;
int depthMinDistance = (int)(minDistance * 1000f);
int depthMaxDistance = (int)(maxDistance * 1000f);
int frameLen = sensorData.colorCamDepthImage.Length;
for (int i = 0; i < frameLen; i++)
{
int depth = sensorData.colorCamDepthImage[i];
int limDepth = (depth >= depthMinDistance && depth <= depthMaxDistance) ? depth : 0;
if (/**rawBodyIndexImage[i] != 255 &&*/ limDepth > 0)
{
depthBodyBufferData[limDepth]++;
bodyHistTotalPoints++;
}
}
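// turn the per-depth histogram into a cumulative one; the UserHistImageShader presumably uses it to equalize the depth-based shading of the user silhouettes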
if (bodyHistTotalPoints > 0)
{
equalBodyBufferData[0] = depthBodyBufferData[0];
for (int i = 1; i < depthBodyBufferData.Length; i++)
{
equalBodyBufferData[i] = equalBodyBufferData[i - 1] + depthBodyBufferData[i];
}
}
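// packing: the body-index frame is one byte per pixel (four indices per uint) and the depth frame two 16-bit values per uint - hence the >> 2 and >> 1 buffer lengths below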
int bodyIndexBufferLength = sensorData.colorCamBodyIndexImage.Length >> 2;
if (bodyIndexBuffer == null || bodyIndexBuffer.count != bodyIndexBufferLength)
{
bodyIndexBuffer = KinectInterop.CreateComputeBuffer(bodyIndexBuffer, bodyIndexBufferLength, sizeof(uint));
}
KinectInterop.SetComputeBufferData(bodyIndexBuffer, sensorData.colorCamBodyIndexImage, bodyIndexBufferLength, sizeof(uint));
int depthBufferLength = sensorData.colorCamDepthImage.Length >> 1;
if(depthImageBuffer == null || depthImageBuffer.count != depthBufferLength)
{
depthImageBuffer = KinectInterop.CreateComputeBuffer(depthImageBuffer, depthBufferLength, sizeof(uint));
}
KinectInterop.SetComputeBufferData(depthImageBuffer, sensorData.colorCamDepthImage, depthBufferLength, sizeof(uint));
if (bodyHistBuffer != null)
{
KinectInterop.SetComputeBufferData(bodyHistBuffer, equalBodyBufferData, equalBodyBufferData.Length, sizeof(int));
}
float minDist = minDistance; // kinectManager.minUserDistance != 0f ? kinectManager.minUserDistance : minDistance;
float maxDist = maxDistance; // kinectManager.maxUserDistance != 0f ? kinectManager.maxUserDistance : maxDistance;
bodyImageMaterial.SetInt("_TexResX", sensorData.colorImageWidth);
bodyImageMaterial.SetInt("_TexResY", sensorData.colorImageHeight);
bodyImageMaterial.SetInt("_MinDepth", (int)(minDist * 1000f));
bodyImageMaterial.SetInt("_MaxDepth", (int)(maxDist * 1000f));
bodyImageMaterial.SetBuffer("_BodyIndexMap", bodyIndexBuffer);
bodyImageMaterial.SetBuffer("_DepthMap", depthImageBuffer);
bodyImageMaterial.SetBuffer("_HistMap", bodyHistBuffer);
bodyImageMaterial.SetInt("_TotalPoints", bodyHistTotalPoints);
Color[] bodyIndexColors = kinectManager.GetBodyIndexColors();
if(playerIndex >= 0)
{
ulong userId = kinectManager.GetUserIdByIndex(playerIndex);
int bodyIndex = kinectManager.GetBodyIndexByUserId(userId);
int numBodyIndices = bodyIndexColors.Length;
Color clrNone = new Color(0f, 0f, 0f, 0f);
for (int i = 0; i < numBodyIndices; i++)
{
if (i != bodyIndex)
bodyIndexColors[i] = clrNone;
}
}
bodyImageMaterial.SetColorArray("_BodyIndexColors", bodyIndexColors);
Graphics.Blit(null, bodyImageTexture, bodyImageMaterial);
}
}
}
}

12
Assets/Azure/KinectScripts/BackgroundColorCamUserImage.cs.meta

@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: 5460d666a1cb85f418682ed53b4b3d26
timeCreated: 1481733120
licenseType: Store
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

132
Assets/Azure/KinectScripts/BackgroundColorImage.cs

@ -0,0 +1,132 @@
using UnityEngine;
using System.Collections;
using com.rfilkov.kinect;
namespace com.rfilkov.components
{
/// <summary>
/// Background color image is a component that displays the color camera feed on a RawImage texture, usually the scene background.
/// </summary>
public class BackgroundColorImage : MonoBehaviour
{
[Tooltip("Depth sensor index - 0 is the 1st one, 1 - the 2nd one, etc.")]
public int sensorIndex = 0;
[Tooltip("RawImage used to display the color camera feed.")]
public UnityEngine.UI.RawImage backgroundImage;
[Tooltip("Camera used to display the background image. Set it, if you'd like to allow background image to resize, to match the color image's aspect ratio.")]
public Camera backgroundCamera;
// last camera rect width & height
private float lastCamRectW = 0;
private float lastCamRectH = 0;
// references
private KinectManager kinectManager = null;
private KinectInterop.SensorData sensorData = null;
private Vector2 initialAnchorPos = Vector2.zero;
void Start()
{
if (backgroundImage == null)
{
backgroundImage = GetComponent<UnityEngine.UI.RawImage>();
}
kinectManager = KinectManager.Instance;
sensorData = kinectManager != null ? kinectManager.GetSensorData(sensorIndex) : null;
}
void Update()
{
if (kinectManager && kinectManager.IsInitialized())
{
float cameraWidth = backgroundCamera ? backgroundCamera.pixelRect.width : 0f;
float cameraHeight = backgroundCamera ? backgroundCamera.pixelRect.height : 0f;
Texture imageTex = kinectManager.GetColorImageTex(sensorIndex);
if (backgroundImage && imageTex != null && (backgroundImage.texture == null ||
backgroundImage.texture.width != imageTex.width || backgroundImage.texture.height != imageTex.height ||
lastCamRectW != cameraWidth || lastCamRectH != cameraHeight))
{
lastCamRectW = cameraWidth;
lastCamRectH = cameraHeight;
backgroundImage.texture = imageTex;
backgroundImage.rectTransform.localScale = sensorData.colorImageScale; // kinectManager.GetColorImageScale(sensorIndex);
backgroundImage.color = Color.white;
//Debug.Log("aPos: " + backgroundImage.rectTransform.anchoredPosition + ", aMin: " + backgroundImage.rectTransform.anchorMin +
// ", aMax:" + backgroundImage.rectTransform.anchorMax + ", pivot: " + backgroundImage.rectTransform.pivot +
// ", size: " + backgroundImage.rectTransform.sizeDelta);
if (backgroundCamera != null)
{
// adjust image's size and position to match the stream aspect ratio
int colorImageWidth = sensorData.colorImageWidth; // kinectManager.GetColorImageWidth(sensorIndex);
int colorImageHeight = sensorData.colorImageHeight; // kinectManager.GetColorImageHeight(sensorIndex);
if (colorImageWidth == 0 || colorImageHeight == 0)
return;
RectTransform rectImage = backgroundImage.rectTransform;
float rectWidth = (rectImage.anchorMin.x != rectImage.anchorMax.x) ? cameraWidth * (rectImage.anchorMax.x - rectImage.anchorMin.x) : rectImage.sizeDelta.x;
float rectHeight = (rectImage.anchorMin.y != rectImage.anchorMax.y) ? cameraHeight * (rectImage.anchorMax.y - rectImage.anchorMin.y) : rectImage.sizeDelta.y;
if (colorImageWidth > colorImageHeight)
rectWidth = rectHeight * colorImageWidth / colorImageHeight;
else
rectHeight = rectWidth * colorImageHeight / colorImageWidth;
Vector2 pivotOffset = (rectImage.pivot - new Vector2(0.5f, 0.5f)) * 2f;
Vector2 imageScale = sensorData.colorImageScale; // (Vector2)kinectManager.GetColorImageScale(sensorIndex);
Vector2 anchorPos = rectImage.anchoredPosition + pivotOffset * imageScale * new Vector2(rectWidth, rectHeight);
if (rectImage.anchorMin.x != rectImage.anchorMax.x)
{
rectWidth = -(cameraWidth - rectWidth);
}
if (rectImage.anchorMin.y != rectImage.anchorMax.y)
{
rectHeight = -(cameraHeight - rectHeight);
}
rectImage.sizeDelta = new Vector2(rectWidth, rectHeight);
rectImage.anchoredPosition = initialAnchorPos = anchorPos;
}
}
//if(backgroundImage)
//{
// // update the anchor position, if needed
// if(sensorData != null && sensorData.sensorInterface != null)
// {
// Vector2 updatedAnchorPos = initialAnchorPos + sensorData.sensorInterface.GetBackgroundImageAnchorPos(sensorData);
// if(backgroundImage.rectTransform.anchoredPosition != updatedAnchorPos)
// {
// backgroundImage.rectTransform.anchoredPosition = updatedAnchorPos;
// }
// }
//}
}
else
{
// reset the background texture, if needed
if (backgroundImage && backgroundImage.texture != null)
{
backgroundImage.texture = null;
}
}
//RectTransform rectTransform = backgroundImage.rectTransform;
//Debug.Log("pivot: " + rectTransform.pivot + ", anchorPos: " + rectTransform.anchoredPosition + ", \nanchorMin: " + rectTransform.anchorMin + ", anchorMax: " + rectTransform.anchorMax);
}
}
}
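The rect adjustment in Update() above preserves the color stream's aspect ratio by letting the RawImage overflow the background camera's viewport along one axis. A worked example with assumed numbers (not taken from the code):

// color stream: 1920x1080, background camera viewport: 1024x768,
// RawImage anchored to stretch over the full viewport (anchorMin != anchorMax on both axes):
//   rectWidth  = 1024, rectHeight = 768
//   1920 > 1080        =>  rectWidth = 768 * 1920 / 1080 = 1365.33
//   stretched anchors  =>  sizeDelta.x = -(1024 - 1365.33) = +341.33, sizeDelta.y = 0
// the RawImage therefore ends up 1365.33 x 768: the full viewport height is used, the extra
// width extends off both sides (with a centered pivot the anchored position is unchanged),
// and the 16:9 stream is shown undistorted.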

12
Assets/Azure/KinectScripts/BackgroundColorImage.cs.meta

@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: 4c68913127bedcd4bbd001b610521305
timeCreated: 1481733120
licenseType: Store
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

146
Assets/Azure/KinectScripts/BackgroundDepthCamColorImage.cs

@ -0,0 +1,146 @@
using UnityEngine;
using System.Collections;
using com.rfilkov.kinect;
using System;
namespace com.rfilkov.components
{
/// <summary>
/// BackgroundDepthCamColorImage is a component that displays the depth-camera-aligned color image on a RawImage texture, usually the scene background.
/// </summary>
public class BackgroundDepthCamColorImage : MonoBehaviour
{
[Tooltip("Depth sensor index - 0 is the 1st one, 1 - the 2nd one, etc.")]
public int sensorIndex = 0;
[Tooltip("RawImage used to display the color camera feed.")]
public UnityEngine.UI.RawImage backgroundImage;
[Tooltip("Camera used to display the background image. Set it, if you'd like to allow background image to resize, to match the color image's aspect ratio.")]
public Camera backgroundCamera;
// last camera rect width & height
private float lastCamRectW = 0;
private float lastCamRectH = 0;
// reference to the kinectManager
private KinectManager kinectManager = null;
private KinectInterop.SensorData sensorData = null;
private Vector2 initialAnchorPos = Vector2.zero;
//// depth-camera aligned frames
//private ulong lastDepthCamColorFrameTime = 0;
void Start()
{
if (backgroundImage == null)
{
backgroundImage = GetComponent<UnityEngine.UI.RawImage>();
}
kinectManager = KinectManager.Instance;
sensorData = kinectManager != null ? kinectManager.GetSensorData(sensorIndex) : null;
if(sensorData != null)
{
// enable depth camera aligned color frames
sensorData.sensorInterface.EnableDepthCameraColorFrame(sensorData, true);
}
}
void OnDestroy()
{
if (sensorData != null)
{
// disable depth camera aligned color frames
sensorData.sensorInterface.EnableDepthCameraColorFrame(sensorData, false);
}
}
void Update()
{
if (kinectManager && kinectManager.IsInitialized())
{
float cameraWidth = backgroundCamera ? backgroundCamera.pixelRect.width : 0f;
float cameraHeight = backgroundCamera ? backgroundCamera.pixelRect.height : 0f;
if(sensorData.depthCamColorImageTexture == null)
{
// enable depth camera aligned color frames
sensorData = kinectManager.GetSensorData(sensorIndex); // sensor data may be re-created after sensor-int restart
sensorData.sensorInterface.EnableDepthCameraColorFrame(sensorData, true);
}
if (backgroundImage && sensorData.depthCamColorImageTexture != null && (backgroundImage.texture == null ||
backgroundImage.texture.width != sensorData.depthCamColorImageTexture.width || backgroundImage.texture.height != sensorData.depthCamColorImageTexture.height ||
lastCamRectW != cameraWidth || lastCamRectH != cameraHeight))
{
lastCamRectW = cameraWidth;
lastCamRectH = cameraHeight;
backgroundImage.texture = sensorData.depthCamColorImageTexture;
backgroundImage.rectTransform.localScale = sensorData.depthImageScale;
backgroundImage.color = Color.white;
if (backgroundCamera != null)
{
// adjust image's size and position to match the stream aspect ratio
int colorImageWidth = sensorData.depthImageWidth;
int colorImageHeight = sensorData.depthImageHeight;
if (colorImageWidth == 0 || colorImageHeight == 0)
return;
RectTransform rectImage = backgroundImage.rectTransform;
float rectWidth = (rectImage.anchorMin.x != rectImage.anchorMax.x) ? cameraWidth * (rectImage.anchorMax.x - rectImage.anchorMin.x) : rectImage.sizeDelta.x;
float rectHeight = (rectImage.anchorMin.y != rectImage.anchorMax.y) ? cameraHeight * (rectImage.anchorMax.y - rectImage.anchorMin.y) : rectImage.sizeDelta.y;
if (colorImageWidth > colorImageHeight)
rectWidth = rectHeight * colorImageWidth / colorImageHeight;
else
rectHeight = rectWidth * colorImageHeight / colorImageWidth;
Vector2 pivotOffset = (rectImage.pivot - new Vector2(0.5f, 0.5f)) * 2f;
Vector2 imageScale = sensorData.depthImageScale;
Vector2 anchorPos = rectImage.anchoredPosition + pivotOffset * imageScale * new Vector2(rectWidth, rectHeight);
if (rectImage.anchorMin.x != rectImage.anchorMax.x)
{
rectWidth = -(cameraWidth - rectWidth);
}
if (rectImage.anchorMin.y != rectImage.anchorMax.y)
{
rectHeight = -(cameraHeight - rectHeight);
}
rectImage.sizeDelta = new Vector2(rectWidth, rectHeight);
rectImage.anchoredPosition = initialAnchorPos = anchorPos;
}
}
}
else
{
// reset the background texture, if needed
if (backgroundImage && backgroundImage.texture != null)
{
backgroundImage.texture = null;
if (sensorData != null)
{
// disable depth camera aligned color frames
sensorData.sensorInterface.EnableDepthCameraColorFrame(sensorData, false);
}
}
}
//RectTransform rectTransform = backgroundImage.rectTransform;
//Debug.Log("pivot: " + rectTransform.pivot + ", anchorPos: " + rectTransform.anchoredPosition + ", \nanchorMin: " + rectTransform.anchorMin + ", anchorMax: " + rectTransform.anchorMax);
}
}
}

12
Assets/Azure/KinectScripts/BackgroundDepthCamColorImage.cs.meta

@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: 0cf0a53458b5548acb078c1bc6d443c8
timeCreated: 1481733120
licenseType: Store
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

132
Assets/Azure/KinectScripts/BackgroundDepthImage.cs

@ -0,0 +1,132 @@
using UnityEngine;
using System.Collections;
using com.rfilkov.kinect;
namespace com.rfilkov.components
{
/// <summary>
/// Background depth image is a component that displays the depth camera image on a RawImage texture, usually the scene background.
/// </summary>
public class BackgroundDepthImage : MonoBehaviour
{
[Tooltip("Depth sensor index - 0 is the 1st one, 1 - the 2nd one, etc.")]
public int sensorIndex = 0;
[Tooltip("RawImage used to display the depth image.")]
public UnityEngine.UI.RawImage backgroundImage;
[Tooltip("Camera used to display the background image. Set it, if you'd like to allow background image to resize, to match the depth image's aspect ratio.")]
public Camera backgroundCamera;
// last camera rect width & height
private float lastCamRectW = 0;
private float lastCamRectH = 0;
// references
private KinectManager kinectManager = null;
private KinectInterop.SensorData sensorData = null;
private Vector2 initialAnchorPos = Vector2.zero;
void Start()
{
if (backgroundImage == null)
{
backgroundImage = GetComponent<UnityEngine.UI.RawImage>();
}
kinectManager = KinectManager.Instance;
sensorData = kinectManager != null ? kinectManager.GetSensorData(sensorIndex) : null;
}
void Update()
{
if (kinectManager && kinectManager.IsInitialized())
{
float cameraWidth = backgroundCamera ? backgroundCamera.pixelRect.width : 0f;
float cameraHeight = backgroundCamera ? backgroundCamera.pixelRect.height : 0f;
Texture imageTex = kinectManager.GetDepthImageTex(sensorIndex);
if (backgroundImage && imageTex != null && (backgroundImage.texture == null ||
backgroundImage.texture.width != imageTex.width || backgroundImage.texture.height != imageTex.height ||
lastCamRectW != cameraWidth || lastCamRectH != cameraHeight))
{
lastCamRectW = cameraWidth;
lastCamRectH = cameraHeight;
backgroundImage.texture = imageTex;
backgroundImage.rectTransform.localScale = sensorData.depthImageScale; // kinectManager.GetDepthImageScale(sensorIndex);
backgroundImage.color = Color.white;
//Debug.Log("aPos: " + backgroundImage.rectTransform.anchoredPosition + ", aMin: " + backgroundImage.rectTransform.anchorMin +
// ", aMax:" + backgroundImage.rectTransform.anchorMax + ", pivot: " + backgroundImage.rectTransform.pivot +
// ", size: " + backgroundImage.rectTransform.sizeDelta);
if (backgroundCamera != null)
{
// adjust image's size and position to match the stream aspect ratio
int depthImageWidth = sensorData.depthImageWidth; // kinectManager.GetDepthImageWidth(sensorIndex);
int depthImageHeight = sensorData.depthImageHeight; // kinectManager.GetDepthImageHeight(sensorIndex);
if (depthImageWidth == 0 || depthImageHeight == 0)
return;
RectTransform rectImage = backgroundImage.rectTransform;
float rectWidth = (rectImage.anchorMin.x != rectImage.anchorMax.x) ? cameraWidth * (rectImage.anchorMax.x - rectImage.anchorMin.x) : rectImage.sizeDelta.x;
float rectHeight = (rectImage.anchorMin.y != rectImage.anchorMax.y) ? cameraHeight * (rectImage.anchorMax.y - rectImage.anchorMin.y) : rectImage.sizeDelta.y;
if (depthImageWidth > depthImageHeight)
rectWidth = rectHeight * depthImageWidth / depthImageHeight;
else
rectHeight = rectWidth * depthImageHeight / depthImageWidth;
Vector2 pivotOffset = (rectImage.pivot - new Vector2(0.5f, 0.5f)) * 2f;
Vector2 imageScale = sensorData.depthImageScale; // (Vector2)kinectManager.GetDepthImageScale(sensorIndex);
Vector2 anchorPos = rectImage.anchoredPosition + pivotOffset * imageScale * new Vector2(rectWidth, rectHeight);
if (rectImage.anchorMin.x != rectImage.anchorMax.x)
{
rectWidth = -(cameraWidth - rectWidth);
}
if (rectImage.anchorMin.y != rectImage.anchorMax.y)
{
rectHeight = -(cameraHeight - rectHeight);
}
rectImage.sizeDelta = new Vector2(rectWidth, rectHeight);
rectImage.anchoredPosition = initialAnchorPos = anchorPos;
}
}
//if (backgroundImage)
//{
// // update the anchor position, if needed
// if (sensorData != null && sensorData.sensorInterface != null)
// {
// Vector2 updatedAnchorPos = initialAnchorPos + sensorData.sensorInterface.GetBackgroundImageAnchorPos(sensorData);
// if (backgroundImage.rectTransform.anchoredPosition != updatedAnchorPos)
// {
// backgroundImage.rectTransform.anchoredPosition = updatedAnchorPos;
// }
// }
//}
}
else
{
// reset the background texture, if needed
if (backgroundImage && backgroundImage.texture != null)
{
backgroundImage.texture = null;
}
}
//RectTransform rectTransform = backgroundImage.rectTransform;
//Debug.Log("pivot: " + rectTransform.pivot + ", anchorPos: " + rectTransform.anchoredPosition + ", \nanchorMin: " + rectTransform.anchorMin + ", anchorMax: " + rectTransform.anchorMax);
}
}
}

12
Assets/Azure/KinectScripts/BackgroundDepthImage.cs.meta

@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: 65268b386677b334bbfd2f6ec3e726fb
timeCreated: 1483707326
licenseType: Store
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

128
Assets/Azure/KinectScripts/BackgroundInfraredImage.cs

@ -0,0 +1,128 @@
using UnityEngine;
using System.Collections;
using com.rfilkov.kinect;
namespace com.rfilkov.components
{
/// <summary>
/// Background infrared image is a component that displays the infrared camera image on a RawImage texture, usually the scene background.
/// </summary>
public class BackgroundInfraredImage : MonoBehaviour
{
[Tooltip("Depth sensor index - 0 is the 1st one, 1 - the 2nd one, etc.")]
public int sensorIndex = 0;
[Tooltip("RawImage used to display the depth image.")]
public UnityEngine.UI.RawImage backgroundImage;
[Tooltip("Camera used to display the background image. Set it, if you'd like to allow background image to resize, to match the depth image's aspect ratio.")]
public Camera backgroundCamera;
// last camera rect width & height
private float lastCamRectW = 0;
private float lastCamRectH = 0;
// references
private KinectManager kinectManager = null;
private KinectInterop.SensorData sensorData = null;
private Vector2 initialAnchorPos = Vector2.zero;
void Start()
{
if (backgroundImage == null)
{
backgroundImage = GetComponent<UnityEngine.UI.RawImage>();
}
kinectManager = KinectManager.Instance;
sensorData = kinectManager != null ? kinectManager.GetSensorData(sensorIndex) : null;
}
void Update()
{
if (kinectManager && kinectManager.IsInitialized())
{
float cameraWidth = backgroundCamera ? backgroundCamera.pixelRect.width : 0f;
float cameraHeight = backgroundCamera ? backgroundCamera.pixelRect.height : 0f;
Texture imageTex = kinectManager.GetInfraredImageTex(sensorIndex);
if (backgroundImage && imageTex != null && (backgroundImage.texture == null ||
backgroundImage.texture.width != imageTex.width || backgroundImage.texture.height != imageTex.height ||
lastCamRectW != cameraWidth || lastCamRectH != cameraHeight))
{
lastCamRectW = cameraWidth;
lastCamRectH = cameraHeight;
backgroundImage.texture = imageTex;
backgroundImage.rectTransform.localScale = sensorData.infraredImageScale; // kinectManager.GetInfraredImageScale(sensorIndex);
backgroundImage.color = Color.white;
if (backgroundCamera != null)
{
// adjust image's size and position to match the stream aspect ratio
int depthImageWidth = sensorData.depthImageWidth; // kinectManager.GetDepthImageWidth(sensorIndex);
int depthImageHeight = sensorData.depthImageHeight; // kinectManager.GetDepthImageHeight(sensorIndex);
if (depthImageWidth == 0 || depthImageHeight == 0)
return;
RectTransform rectImage = backgroundImage.rectTransform;
float rectWidth = (rectImage.anchorMin.x != rectImage.anchorMax.x) ? cameraWidth * (rectImage.anchorMax.x - rectImage.anchorMin.x) : rectImage.sizeDelta.x;
float rectHeight = (rectImage.anchorMin.y != rectImage.anchorMax.y) ? cameraHeight * (rectImage.anchorMax.y - rectImage.anchorMin.y) : rectImage.sizeDelta.y;
if (depthImageWidth > depthImageHeight)
rectWidth = rectHeight * depthImageWidth / depthImageHeight;
else
rectHeight = rectWidth * depthImageHeight / depthImageWidth;
Vector2 pivotOffset = (rectImage.pivot - new Vector2(0.5f, 0.5f)) * 2f;
Vector2 imageScale = sensorData.infraredImageScale; // (Vector2)kinectManager.GetDepthImageScale(sensorIndex);
Vector2 anchorPos = rectImage.anchoredPosition + pivotOffset * imageScale * new Vector2(rectWidth, rectHeight);
if (rectImage.anchorMin.x != rectImage.anchorMax.x)
{
rectWidth = -(cameraWidth - rectWidth);
}
if (rectImage.anchorMin.y != rectImage.anchorMax.y)
{
rectHeight = -(cameraHeight - rectHeight);
}
rectImage.sizeDelta = new Vector2(rectWidth, rectHeight);
rectImage.anchoredPosition = initialAnchorPos = anchorPos;
}
}
//if (backgroundImage)
//{
// // update the anchor position, if needed
// if (sensorData != null && sensorData.sensorInterface != null)
// {
// Vector2 updatedAnchorPos = initialAnchorPos + sensorData.sensorInterface.GetBackgroundImageAnchorPos(sensorData);
// if (backgroundImage.rectTransform.anchoredPosition != updatedAnchorPos)
// {
// backgroundImage.rectTransform.anchoredPosition = updatedAnchorPos;
// }
// }
//}
}
else
{
// reset the background texture, if needed
if (backgroundImage && backgroundImage.texture != null)
{
backgroundImage.texture = null;
}
}
//RectTransform rectTransform = backgroundImage.rectTransform;
//Debug.Log("pivot: " + rectTransform.pivot + ", anchorPos: " + rectTransform.anchoredPosition + ", \nanchorMin: " + rectTransform.anchorMin + ", anchorMax: " + rectTransform.anchorMax);
}
}
}

12
Assets/Azure/KinectScripts/BackgroundInfraredImage.cs.meta

@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: fd43548cc180bc7498e01c3b7606c18e
timeCreated: 1483707326
licenseType: Store
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

171
Assets/Azure/KinectScripts/BackgroundRemovalByBodyBounds.cs

@ -0,0 +1,171 @@
using com.rfilkov.components;
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
namespace com.rfilkov.kinect
{
/// <summary>
/// BackgroundRemovalByBodyBounds filters user silhouettes according to the bounds determined by the positions of the body joints.
/// </summary>
public class BackgroundRemovalByBodyBounds : MonoBehaviour
{
[Tooltip("Offset from the lowest body joint to the floor.")]
[Range(-0.1f, 0.4f)]
public float offsetToFloor = 0.05f;
[Tooltip("Offset from the highest body joint to the top of the body.")]
[Range(-0.1f, 0.4f)]
public float headOffset = 0.2f;
[Tooltip("Offset from the leftmost body joint to the left end of the body.")]
[Range(-0.1f, 0.4f)]
public float leftOffset = 0.2f;
[Tooltip("Offset from the rightmost body joint to the right end of the body.")]
[Range(-0.1f, 0.4f)]
public float rightOffset = 0.2f;
[Tooltip("Offset from the frontmost body joint to the front end of the body.")]
[Range(-0.1f, 0.4f)]
public float frontOffset = 0.2f;
[Tooltip("Offset from the backmost body joint to the back end of the body.")]
[Range(-0.1f, 0.4f)]
public float backOffset = 0.2f;
// foreground filter shader
private ComputeShader foregroundFilterShader = null;
private int foregroundFilterKernel = -1;
//private Vector4[] foregroundFilterPos = null;
private Vector4[] bodyPosMin = null;
private Vector4[] bodyPosMaxX = null;
private Vector4[] bodyPosMaxY = null;
private Vector4[] bodyPosMaxZ = null;
private Vector4[] bodyPosDot = null;
// initializes background removal with shaders
public bool InitBackgroundRemoval(KinectInterop.SensorData sensorData, int maxBodyCount)
{
foregroundFilterShader = Resources.Load("ForegroundFiltBodyShader") as ComputeShader;
foregroundFilterKernel = foregroundFilterShader != null ? foregroundFilterShader.FindKernel("FgFiltBody") : -1;
//foregroundFilterPos = new Vector4[KinectInterop.Constants.MaxBodyCount];
bodyPosMin = new Vector4[maxBodyCount];
bodyPosMaxX = new Vector4[maxBodyCount];
bodyPosMaxY = new Vector4[maxBodyCount];
bodyPosMaxZ = new Vector4[maxBodyCount];
bodyPosDot = new Vector4[maxBodyCount];
return true;
}
// releases background removal shader resources
public void FinishBackgroundRemoval(KinectInterop.SensorData sensorData)
{
if (foregroundFilterShader != null)
{
foregroundFilterShader = null;
}
//foregroundFilterPos = null;
bodyPosMin = null;
bodyPosMaxX = null;
bodyPosMaxY = null;
bodyPosMaxZ = null;
bodyPosDot = null;
}
/// <summary>
/// Applies foreground filter by body bounds.
/// </summary>
public void ApplyForegroundFilterByBody(Texture vertexTexture, RenderTexture alphaTexture, int playerIndex, int sensorIndex, int maxBodyCount,
Matrix4x4 matKinectWorld, KinectManager kinectManager, Camera foregroundCamera)
{
Matrix4x4 matWorldKinect = matKinectWorld.inverse;
if (kinectManager != null && kinectManager.userManager != null)
{
List<ulong> alUserIds = null;
if (playerIndex < 0)
{
alUserIds = kinectManager.userManager.alUserIds;
}
else
{
alUserIds = new List<ulong>();
ulong userId = kinectManager.GetUserIdByIndex(playerIndex);
if (userId != 0)
alUserIds.Add(userId);
}
int uCount = Mathf.Min(alUserIds.Count, maxBodyCount);
foregroundFilterShader.SetInt("_NumBodies", uCount);
//if (uCount > 0)
//{
// Debug.Log("playerIndex: " + playerIndex + ", uCount: " + uCount + ", userId: " + (uCount > 0 ? alUserIds[0] : 0));
//}
// get the background rectangle (use the portrait background, if available)
Rect backgroundRect = foregroundCamera.pixelRect;
PortraitBackground portraitBack = PortraitBackground.Instance;
if (portraitBack && portraitBack.enabled)
{
backgroundRect = portraitBack.GetBackgroundRect();
}
int jCount = kinectManager.GetJointCount();
for (int i = 0; i < uCount; i++)
{
ulong userId = alUserIds[i];
bool bSuccess = kinectManager.GetUserBoundingBox(userId, /**foregroundCamera*/ null, sensorIndex, backgroundRect,
out Vector3 pMin, out Vector3 pMax);
//Debug.Log("pMin: " + pMin + ", pMax: " + pMax);
if (bSuccess)
{
Vector3 posMin = new Vector3(pMin.x - leftOffset, pMin.y - offsetToFloor, pMin.z - frontOffset);
Vector3 posMaxX = new Vector3(pMax.x + rightOffset, posMin.y, posMin.z);
Vector3 posMaxY = new Vector3(posMin.x, pMax.y + headOffset, posMin.z);
Vector3 posMaxZ = new Vector3(posMin.x, posMin.y, pMax.z + backOffset);
//foregroundFilterDistXY[i] = new Vector4(xMin - 0.1f, xMax + 0.1f, yMin - offsetToFloor, yMax + 0.1f);
//foregroundFilterDistZ[i] = new Vector4(zMin - 0.2f, zMax + 0.0f, 0f, 0f);
bodyPosMin[i] = matWorldKinect.MultiplyPoint3x4(posMin);
bodyPosMaxX[i] = matWorldKinect.MultiplyPoint3x4(posMaxX) - (Vector3)bodyPosMin[i];
bodyPosMaxY[i] = matWorldKinect.MultiplyPoint3x4(posMaxY) - (Vector3)bodyPosMin[i];
bodyPosMaxZ[i] = matWorldKinect.MultiplyPoint3x4(posMaxZ) - (Vector3)bodyPosMin[i];
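// _BodyPosDot holds the squared lengths of the three kinect-space box edge vectors (_BodyPosMaxX/Y/Z are stored as offsets from _BodyPosMin); the compute shader presumably normalizes its edge-projection dot products with them for the point-in-box test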
bodyPosDot[i] = new Vector3(Vector3.Dot(bodyPosMaxX[i], bodyPosMaxX[i]), Vector3.Dot(bodyPosMaxY[i], bodyPosMaxY[i]), Vector3.Dot(bodyPosMaxZ[i], bodyPosMaxZ[i]));
//Debug.Log("pMin: " + (Vector3)posMin + ", pMaxX: " + (Vector3)bodyPosMaxX[i] + ", pMaxY: " + (Vector3)bodyPosMaxY[i] + ", pMaxZ: " + (Vector3)bodyPosMaxZ[i] + ", pDot: " + (Vector3)bodyPosDot[i]);
}
//string sMessage2 = string.Format("Xmin: {0:F1}; Xmax: {1:F1}", bodyPosMin[i].x, bodyPosMaxX[i].x);
//Debug.Log(sMessage2);
}
}
//foregroundFilterShader.SetVectorArray("BodyPos", foregroundFilterPos);
foregroundFilterShader.SetVectorArray("_BodyPosMin", bodyPosMin);
foregroundFilterShader.SetVectorArray("_BodyPosMaxX", bodyPosMaxX);
foregroundFilterShader.SetVectorArray("_BodyPosMaxY", bodyPosMaxY);
foregroundFilterShader.SetVectorArray("_BodyPosMaxZ", bodyPosMaxZ);
foregroundFilterShader.SetVectorArray("_BodyPosDot", bodyPosDot);
foregroundFilterShader.SetTexture(foregroundFilterKernel, "_VertexTex", vertexTexture);
foregroundFilterShader.SetTexture(foregroundFilterKernel, "_AlphaTex", alphaTexture);
foregroundFilterShader.Dispatch(foregroundFilterKernel, vertexTexture.width / 8, vertexTexture.height / 8, 1);
}
}
}
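The foreground filter components in this folder are not meant to run standalone - BackgroundRemovalManager (further below) looks for exactly one of them on its own GameObject at startup and falls back to body-index filtering when none is present. A minimal setup sketch, assuming a scene that already contains a configured KinectManager (the class and field names below are illustrative):

using UnityEngine;
using UnityEngine.UI;
using com.rfilkov.kinect;

// adds BackgroundRemovalManager plus one filter component to the same GameObject;
// the manager's Start() then picks the filter up automatically
public class BackgroundRemovalSetup : MonoBehaviour
{
    public RawImage foregroundImage;   // RawImage that will show the filtered silhouettes
    public Camera foregroundCamera;    // optional; the manager defaults to Camera.main

    void Awake()
    {
        var brManager = gameObject.AddComponent<BackgroundRemovalManager>();
        brManager.sensorIndex = 0;
        brManager.playerIndex = -1;               // -1 = all tracked players
        brManager.foregroundImage = foregroundImage;
        brManager.foregroundCamera = foregroundCamera;

        var bodyFilter = gameObject.AddComponent<BackgroundRemovalByBodyBounds>();
        bodyFilter.offsetToFloor = 0.05f;         // offsets as described in the tooltips above
    }
}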

11
Assets/Azure/KinectScripts/BackgroundRemovalByBodyBounds.cs.meta

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 314091ebf33d9fd47a58c6cfd6213f1d
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

115
Assets/Azure/KinectScripts/BackgroundRemovalByBodyIndex.cs

@ -0,0 +1,115 @@
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
namespace com.rfilkov.kinect
{
/// <summary>
/// BackgroundRemovalByBodyIndex filters user silhouettes according to the body index frames coming from the body tracking SDK.
/// </summary>
public class BackgroundRemovalByBodyIndex : MonoBehaviour
{
// whether the color-bi-buffer is created or not
private bool bColorBiBufferCreated = false;
// foreground filter shader
private ComputeShader foregroundFilterShader = null;
private int foregroundFilterKernel = -1;
// current body indices
private int[] userBodyIndex = null;
// initializes background removal with shaders
public bool InitBackgroundRemoval(KinectInterop.SensorData sensorData)
{
if (sensorData != null && sensorData.colorImageWidth > 0 && sensorData.colorImageHeight > 0)
{
if(sensorData.colorBodyIndexBuffer == null)
{
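// one byte per pixel in the color-camera body-index image, so four indices are packed into each uint element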
int bufferLength = sensorData.colorImageWidth * sensorData.colorImageHeight / 4;
sensorData.colorBodyIndexBuffer = new ComputeBuffer(bufferLength, sizeof(uint));
bColorBiBufferCreated = true;
}
foregroundFilterShader = Resources.Load("ForegroundFiltBodyIndexShader") as ComputeShader;
foregroundFilterKernel = foregroundFilterShader != null ? foregroundFilterShader.FindKernel("FgFiltBodyIndex") : -1;
return true;
}
return false;
}
// releases background removal shader resources
public void FinishBackgroundRemoval(KinectInterop.SensorData sensorData)
{
if (bColorBiBufferCreated && sensorData.colorBodyIndexBuffer != null)
{
sensorData.colorBodyIndexBuffer.Dispose();
sensorData.colorBodyIndexBuffer = null;
}
}
/// <summary>
/// Applies foreground filter by body index.
/// </summary>
public void ApplyForegroundFilterByBodyIndex(RenderTexture alphaTexture, KinectInterop.SensorData sensorData,
KinectManager kinectManager, int playerIndex, int maxBodyCount)
{
if (kinectManager != null && kinectManager.userManager != null && sensorData.colorBodyIndexBuffer != null)
{
List<ulong> alUserIds = null;
if (playerIndex < 0)
{
alUserIds = kinectManager.userManager.alUserIds; // new List<ulong>(); //
}
else
{
alUserIds = new List<ulong>();
ulong userId = kinectManager.GetUserIdByIndex(playerIndex);
if (userId != 0)
alUserIds.Add(userId);
}
maxBodyCount = 5; // limit to 5 body indices in the shader, because SetInts() doesn't work correctly
if (userBodyIndex == null)
{
userBodyIndex = new int[maxBodyCount];
}
int uCount = Mathf.Min(alUserIds.Count, maxBodyCount);
for (int i = 0; i < uCount; i++)
{
ulong userId = alUserIds[i];
userBodyIndex[i] = kinectManager.GetBodyIndexByUserId(userId);
}
foregroundFilterShader.SetInt("_TexResX", alphaTexture.width);
foregroundFilterShader.SetInt("_TexResY", alphaTexture.height);
//foregroundFilterShader.SetInt("_NumBodies", uCount);
//foregroundFilterShader.SetInts("_BodyIndices", userBodyIndex); // ComputeShader.SetInts() doesn't work correctly
foregroundFilterShader.SetInt("_BodyIndexAll", playerIndex < 0 ? 1 : 0);
foregroundFilterShader.SetInt("_BodyIndex0", uCount > 0 ? userBodyIndex[0] : -1);
foregroundFilterShader.SetInt("_BodyIndex1", uCount > 1 ? userBodyIndex[1] : -1);
foregroundFilterShader.SetInt("_BodyIndex2", uCount > 2 ? userBodyIndex[2] : -1);
foregroundFilterShader.SetInt("_BodyIndex3", uCount > 3 ? userBodyIndex[3] : -1);
foregroundFilterShader.SetInt("_BodyIndex4", uCount > 4 ? userBodyIndex[4] : -1);
foregroundFilterShader.SetBuffer(foregroundFilterKernel, "_BodyIndexMap", sensorData.colorBodyIndexBuffer);
foregroundFilterShader.SetTexture(foregroundFilterKernel, "_AlphaTex", alphaTexture);
foregroundFilterShader.Dispatch(foregroundFilterKernel, alphaTexture.width / 8, alphaTexture.height / 8, 1);
}
}
}
}

11
Assets/Azure/KinectScripts/BackgroundRemovalByBodyIndex.cs.meta

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: e4e45b4f33dd18f419753482798bf289
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

93
Assets/Azure/KinectScripts/BackgroundRemovalByDist.cs

@ -0,0 +1,93 @@
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
namespace com.rfilkov.kinect
{
/// <summary>
/// BackgroundRemovalByDist filters part of the real environment according to the given spatial limits.
/// </summary>
public class BackgroundRemovalByDist : MonoBehaviour
{
[Tooltip("Whether or not to apply the sensor pose to background removal estimation.")]
public bool applySensorPose = true;
[Space]
[Tooltip("Horizontal limit - minimum, in meters.")]
[Range(-5f, 5f)]
public float xMin = -1.5f;
[Tooltip("Horizontal limit - maximum, in meters.")]
[Range(-5f, 5f)]
public float xMax = 1.5f;
[Tooltip("Vertical limit - minimum, in meters.")]
[Range(-5f, 5f)]
public float yMin = 0f;
[Tooltip("Vertical limit - maximum, in meters.")]
[Range(-5f, 5f)]
public float yMax = 3f;
[Tooltip("Distance limit - minimum, in meters.")]
[Range(0.5f, 10f)]
public float zMin = 0.5f;
[Tooltip("Distance limit - maximum at the left end, in meters.")]
[Range(0.5f, 10f)]
public float zMaxLeft = 3f;
[Tooltip("Distance limit - maximum at the right end, in meters.")]
[Range(0.5f, 10f)]
public float zMaxRight = 3f;
// foreground filter shader
private ComputeShader foregroundFilterShader = null;
private int foregroundFilterKernel = -1;
void Start()
{
foregroundFilterShader = Resources.Load("ForegroundFiltDistShader") as ComputeShader;
foregroundFilterKernel = foregroundFilterShader != null ? foregroundFilterShader.FindKernel("FgFiltDist") : -1;
}
/// <summary>
/// Applies vertex filter by distance.
/// </summary>
/// <param name="vertexTexture">The vertex texture</param>
/// <param name="alphaTexture">The alpha texture</param>
public void ApplyVertexFilter(RenderTexture vertexTexture, RenderTexture alphaTexture, Matrix4x4 sensorWorldMatrix)
{
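// build a box in sensor space from the configured limits; the back plane may be slanted (zMaxLeft vs. zMaxRight), so both back corners are passed to the shader, and _PosDot carries the squared edge lengths, presumably for the shader's inside-box containment test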
foregroundFilterShader.SetMatrix("_Transform", applySensorPose ? sensorWorldMatrix : Matrix4x4.identity);
//Matrix4x4 matWorldKinect = sensorWorldMatrix.inverse;
Vector3 posMin = new Vector3(xMin, yMin, zMin); // matWorldKinect.MultiplyPoint3x4(new Vector3(xMin, yMin, zMin));
Vector3 posMaxX = new Vector3(xMax, yMin, zMin) - posMin; // matWorldKinect.MultiplyPoint3x4(new Vector3(xMax, yMin, zMin)) - posMin;
Vector3 posMaxY = new Vector3(xMin, yMax, zMin) - posMin; // matWorldKinect.MultiplyPoint3x4(new Vector3(xMin, yMax, zMin)) - posMin;
Vector3 posMaxZLeft = new Vector3(xMin, yMin, zMaxLeft) - posMin; // matWorldKinect.MultiplyPoint3x4(new Vector3(xMin, yMin, zMaxRight)) - posMin;
Vector3 posMaxZRight = new Vector3(xMin, yMin, zMaxRight) - posMin; // matWorldKinect.MultiplyPoint3x4(new Vector3(xMin, yMin, zMaxLeft)) - posMin;
Vector3 posMaxZ = (posMaxZLeft + posMaxZRight) / 2;
Vector3 posDot = new Vector3(Vector3.Dot(posMaxX, posMaxX), Vector3.Dot(posMaxY, posMaxY), Vector3.Dot(posMaxZ, posMaxZ));
foregroundFilterShader.SetVector("_PosMin", posMin);
foregroundFilterShader.SetVector("_PosMaxX", posMaxX);
foregroundFilterShader.SetVector("_PosMaxY", posMaxY);
//foregroundFilterShader.SetVector("_PosMaxZ", posMaxZ);
foregroundFilterShader.SetVector("_PosMaxZLeft", posMaxZLeft);
foregroundFilterShader.SetVector("_PosMaxZRight", posMaxZRight);
foregroundFilterShader.SetVector("_PosDot", posDot);
foregroundFilterShader.SetTexture(foregroundFilterKernel, "_VertexTex", vertexTexture);
foregroundFilterShader.SetTexture(foregroundFilterKernel, "_AlphaTex", alphaTexture);
foregroundFilterShader.Dispatch(foregroundFilterKernel, vertexTexture.width / 8, vertexTexture.height / 8, 1);
}
}
}

11
Assets/Azure/KinectScripts/BackgroundRemovalByDist.cs.meta

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 3fa7b39e46e63a24c9a12a224406a1f4
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

89
Assets/Azure/KinectScripts/BackgroundRemovalByGreenScreen.cs

@ -0,0 +1,89 @@
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
namespace com.rfilkov.kinect
{
/// <summary>
/// BackgroundRemovalByGreenScreen filters color camera data according to its similarity or difference to the color of the green screen.
/// </summary>
public class BackgroundRemovalByGreenScreen : MonoBehaviour
{
[Tooltip("The color of the 'green screen'.")]
public Color greenScreenColor = Color.green;
[Tooltip("Allowed similarity between the 'green screen' color and the texture color.")]
public float greenScreenColorRange = 0.5f;
[Tooltip("Alpha values below this value will be set to fully transparent.")]
[Range(0f, 1f)]
public float setAsTransparentBelow = 0f;
[Tooltip("Alpha values above this value will be set to fully opaque.")]
[Range(0f, 1f)]
public float setAsOpaqueAbove = 1f;
[Tooltip("Green screen rectangle in normalized coordinates (between 0 and 1).")]
public Rect greenScreenRect = new Rect(0, 0, 1, 1);
// foreground filter shader
private ComputeShader foregroundFilterShader = null;
private int foregroundFilterKernel = -1;
// initializes background removal shaders
public bool InitBackgroundRemoval(KinectInterop.SensorData sensorData)
{
if (sensorData != null && sensorData.colorImageWidth > 0 && sensorData.colorImageHeight > 0)
{
foregroundFilterShader = Resources.Load("ForegroundFiltGreenScreenShader") as ComputeShader;
foregroundFilterKernel = foregroundFilterShader != null ? foregroundFilterShader.FindKernel("FgFiltFreenScreen") : -1;
return true;
}
return false;
}
//// releases background removal shader resources
//public void FinishBackgroundRemoval(KinectInterop.SensorData sensorData)
//{
//}
/// <summary>
/// Applies foreground filter by green screen.
/// </summary>
public void ApplyForegroundFilterByGreenScreen(RenderTexture alphaTexture, KinectInterop.SensorData sensorData,
KinectManager kinectManager, RenderTexture colorTexture)
{
if (foregroundFilterShader != null && colorTexture != null && alphaTexture != null)
{
foregroundFilterShader.SetVector("_GreenScreenColor", greenScreenColor);
foregroundFilterShader.SetFloat("_GreenScreenColorRange", greenScreenColorRange);
foregroundFilterShader.SetTexture(foregroundFilterKernel, "_ColorTex", colorTexture);
foregroundFilterShader.SetTexture(foregroundFilterKernel, "_AlphaTex", alphaTexture);
foregroundFilterShader.SetFloat("_SetTranspBelow", setAsTransparentBelow);
foregroundFilterShader.SetFloat("_SetOpaqueAbove", setAsOpaqueAbove);
float xMin = sensorData.colorImageScale.x > 0 ? greenScreenRect.xMin * colorTexture.width : (1f - greenScreenRect.xMax) * colorTexture.width;
float yMin = sensorData.colorImageScale.y > 0 ? greenScreenRect.yMin * colorTexture.height : (1f - greenScreenRect.yMax) * colorTexture.height;
float xMax = sensorData.colorImageScale.x > 0 ? greenScreenRect.xMax * colorTexture.width : (1f - greenScreenRect.xMin) * colorTexture.width;
float yMax = sensorData.colorImageScale.y > 0 ? greenScreenRect.yMax * colorTexture.height : (1f - greenScreenRect.yMin) * colorTexture.height;
Vector4 vGreenScreenRect = new Vector4(xMin, yMin, xMax, yMax);
foregroundFilterShader.SetVector("_GreenScreenRect", vGreenScreenRect);
//Debug.Log(vGreenScreenRect);
foregroundFilterShader.Dispatch(foregroundFilterKernel, alphaTexture.width / 8, alphaTexture.height / 8, 1);
//Debug.Log("ApplyForegroundFilterByGreenScreen()");
}
}
}
}

11
Assets/Azure/KinectScripts/BackgroundRemovalByGreenScreen.cs.meta

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: db05d85cc2655464b934a2b1d7f5658c
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

623
Assets/Azure/KinectScripts/BackgroundRemovalManager.cs

@ -0,0 +1,623 @@
using UnityEngine;
using System;
using System.Collections;
using System.Collections.Generic;
using System.IO;
using com.rfilkov.components;
namespace com.rfilkov.kinect
{
/// <summary>
/// Background removal manager is the component that filters and renders user body silhouettes.
/// </summary>
public class BackgroundRemovalManager : MonoBehaviour
{
[Tooltip("Depth sensor index - 0 is the 1st one, 1 - the 2nd one, etc.")]
public int sensorIndex = 0;
[Tooltip("Index of the player, tracked by this component. -1 means all players, 0 - the 1st player only, 1 - the 2nd player only, etc.")]
public int playerIndex = -1;
[Tooltip("RawImage used for displaying the foreground image.")]
public UnityEngine.UI.RawImage foregroundImage;
[Tooltip("Camera used for alignment of bodies to color camera image.")]
public Camera foregroundCamera;
[Tooltip("Resolution of the generated foreground textures.")]
private DepthSensorBase.PointCloudResolution foregroundImageResolution = DepthSensorBase.PointCloudResolution.ColorCameraResolution;
[Tooltip("Whether only the alpha texture is needed.")]
public bool computeAlphaMaskOnly = false;
[Tooltip("Whether the alpha texture will be inverted or not..")]
public bool invertAlphaMask = false;
[Tooltip("(Advanced) Whether to apply the median filter before the other filters.")]
public bool applyMedianFilter = false;
[Tooltip("(Advanced) Number of iterations used by the alpha texture's erode filter 0.")]
[Range(0, 9)]
public int erodeIterations0 = 0; // 1
[Tooltip("(Advanced) Number of iterations used by the alpha texture's dilate filter 1.")]
[Range(0, 9)]
public int dilateIterations = 0; // 3;
[Tooltip("(Advanced) Whether to apply the gradient filter.")]
private bool applyGradientFilter = true;
[Tooltip("(Advanced) Number of iterations used by the alpha texture's erode filter 2.")]
[Range(0, 9)]
public int erodeIterations = 0; // 4;
[Tooltip("(Advanced) Whether to apply the blur filter after at the end.")]
public bool applyBlurFilter = true;
[Tooltip("(Advanced) Color applied to the body contour after the filters.")]
public Color bodyContourColor = Color.black;
[Tooltip("UI-Text to display the BR-Manager debug messages.")]
public UnityEngine.UI.Text debugText;
// max number of bodies to track
private const int MAX_BODY_COUNT = 10;
// primary sensor data structure
private KinectInterop.SensorData sensorData = null;
private KinectManager kinectManager = null;
// sensor interface
private DepthSensorBase sensorInt = null;
// render texture resolution
private Vector2Int textureRes;
// flag to keep track of whether the Kinect and the BR library have been initialized
private bool bBackgroundRemovalInited = false;
private int lastColorW = 0, lastColorH = 0;
// The single instance of BackgroundRemovalManager
//private static BackgroundRemovalManager instance;
// last point cloud frame time
private ulong lastDepth2SpaceFrameTime = 0;
private ulong lastColorBodyIndexBufferTime = 0;
// render textures used by the shaders
private RenderTexture colorTexture = null;
private RenderTexture vertexTexture = null;
private RenderTexture alphaTexture = null;
private RenderTexture foregroundTexture = null;
// Materials used to apply the shaders
private Material medianFilterMat = null;
private Material erodeFilterMat = null;
private Material dilateFilterMat = null;
private Material gradientFilterMat = null;
private Material blurFilterMat = null;
private Material invertAlphaMat = null;
private Material foregroundMat = null;
// references to the available background-removal filter components
private BackgroundRemovalByBodyBounds filterByBody = null;
private BackgroundRemovalByDist filterByDist = null;
private BackgroundRemovalByBodyIndex filterByBI = null;
private BackgroundRemovalByGreenScreen filterByGS = null;
// whether the textures are created or not
private bool bColorTextureCreated = false;
private bool bVertexTextureCreated = false;
///// <summary>
///// Gets the single BackgroundRemovalManager instance.
///// </summary>
///// <value>The BackgroundRemovalManager instance.</value>
//public static BackgroundRemovalManager Instance
//{
// get
// {
// return instance;
// }
//}
/// <summary>
/// Determines whether the BackgroundRemovalManager was successfully initialized.
/// </summary>
/// <returns><c>true</c> if the BackgroundRemovalManager was successfully initialized; otherwise, <c>false</c>.</returns>
public bool IsBackgroundRemovalInited()
{
return bBackgroundRemovalInited;
}
/// <summary>
/// Gets the foreground image texture.
/// </summary>
/// <returns>The foreground image texture.</returns>
public Texture GetForegroundTex()
{
return foregroundTexture;
}
/// <summary>
/// Gets the alpha texture.
/// </summary>
/// <returns>The alpha texture.</returns>
public Texture GetAlphaTex()
{
return alphaTexture;
}
/// <summary>
/// Gets the color texture.
/// </summary>
/// <returns>The color texture.</returns>
public Texture GetColorTex()
{
return colorTexture;
}
/// <summary>
/// Gets the last background removal frame time.
/// </summary>
/// <returns>The last background removal time.</returns>
public ulong GetLastBackgroundRemovalTime()
{
return lastDepth2SpaceFrameTime;
}
//----------------------------------- end of public functions --------------------------------------//
//void Awake()
//{
// instance = this;
//}
public void Start()
{
try
{
// get sensor data
kinectManager = KinectManager.Instance;
if (kinectManager && kinectManager.IsInitialized())
{
sensorData = kinectManager.GetSensorData(sensorIndex);
}
if (sensorData == null || sensorData.sensorInterface == null)
{
throw new Exception("Background removal cannot be started, because KinectManager is missing or not initialized.");
}
if(foregroundImage == null)
{
// look for a foreground image
foregroundImage = GetComponent<UnityEngine.UI.RawImage>();
}
if (!foregroundCamera)
{
// by default - the main camera
foregroundCamera = Camera.main;
}
// try to get reference to other filter components
filterByBody = GetComponent<BackgroundRemovalByBodyBounds>();
if(filterByBody == null)
filterByDist = GetComponent<BackgroundRemovalByDist>();
if (filterByBody == null && filterByDist == null)
filterByBI = GetComponent<BackgroundRemovalByBodyIndex>();
if (filterByBody == null && filterByDist == null && filterByBI == null)
filterByGS = GetComponent<BackgroundRemovalByGreenScreen>();
if (filterByBody == null && filterByDist == null && filterByBI == null && filterByGS == null)
filterByBI = gameObject.AddComponent<BackgroundRemovalByBodyIndex>(); // fallback
//// Initialize the background removal
//bBackgroundRemovalInited = InitBackgroundRemoval(sensorData);
//if (bBackgroundRemovalInited)
//{
// if (debugText != null)
// debugText.text = string.Empty;
//}
//else
//{
// throw new Exception("Background removal could not be initialized.");
//}
//bBackgroundRemovalInited = bSuccess;
}
catch (DllNotFoundException ex)
{
Debug.LogError(ex.ToString());
if (debugText != null)
debugText.text = "Please check the SDK installations.";
}
catch (Exception ex)
{
Debug.LogException(ex);
if (debugText != null)
debugText.text = ex.Message;
}
}
public void OnDestroy()
{
if (bBackgroundRemovalInited)
{
// finish background removal
FinishBackgroundRemoval(sensorData);
}
bBackgroundRemovalInited = false;
//instance = null;
}
void Update()
{
if (sensorData == null)
return;
if(!bBackgroundRemovalInited || lastColorW != sensorData.colorImageWidth || lastColorH != sensorData.colorImageHeight)
{
lastColorW = sensorData.colorImageWidth;
lastColorH = sensorData.colorImageHeight;
if (bBackgroundRemovalInited)
{
FinishBackgroundRemoval(sensorData);
if(foregroundImage != null)
foregroundImage.texture = null;
//bBackgroundRemovalInited = false;
// dispose the used shaders & buffers, as well
if(sensorInt != null)
sensorInt.UpdateTransformedFrameTextures(sensorData, kinectManager);
}
bBackgroundRemovalInited = InitBackgroundRemoval(sensorData);
if (bBackgroundRemovalInited)
{
if (debugText != null)
debugText.text = string.Empty;
}
}
if (bBackgroundRemovalInited)
{
// update the background removal
UpdateBackgroundRemoval(sensorData);
// check for valid foreground image texture
if(foregroundImage != null && foregroundImage.texture == null)
{
foregroundImage.texture = foregroundTexture;
foregroundImage.rectTransform.localScale = kinectManager.GetColorImageScale(sensorIndex);
foregroundImage.color = Color.white;
}
}
}
// initializes background removal with shaders
private bool InitBackgroundRemoval(KinectInterop.SensorData sensorData)
{
if (sensorData != null && sensorData.sensorInterface != null && KinectInterop.IsDirectX11Available())
{
if(filterByBody != null)
{
if (!filterByBody.InitBackgroundRemoval(sensorData, MAX_BODY_COUNT))
{
Debug.LogError("Could not init the background removal by body bounds!");
return false;
}
}
else if(filterByBI != null)
{
if(!filterByBI.InitBackgroundRemoval(sensorData))
{
Debug.LogError("Could not init the background removal by body index!");
return false;
}
}
else if (filterByGS != null)
{
if (!filterByGS.InitBackgroundRemoval(sensorData))
{
Debug.LogError("Could not init the background removal by green screen!");
return false;
}
}
sensorInt = (DepthSensorBase)sensorData.sensorInterface;
// set the texture resolution
if (sensorInt.pointCloudColorTexture == null && sensorInt.pointCloudVertexTexture == null)
{
sensorInt.pointCloudResolution = foregroundImageResolution;
}
textureRes = sensorInt.GetPointCloudTexResolution(sensorData);
if(sensorInt.pointCloudColorTexture == null)
{
colorTexture = KinectInterop.CreateRenderTexture(colorTexture, textureRes.x, textureRes.y, RenderTextureFormat.ARGB32);
sensorInt.pointCloudColorTexture = colorTexture;
bColorTextureCreated = true;
}
else
{
colorTexture = sensorInt.pointCloudColorTexture;
bColorTextureCreated = false;
}
if (filterByBody != null || filterByDist != null)
{
if(sensorInt.pointCloudVertexTexture == null)
{
vertexTexture = KinectInterop.CreateRenderTexture(vertexTexture, textureRes.x, textureRes.y, RenderTextureFormat.ARGBHalf);
sensorInt.pointCloudVertexTexture = vertexTexture;
bVertexTextureCreated = true;
}
else
{
vertexTexture = sensorInt.pointCloudVertexTexture;
bVertexTextureCreated = false;
}
}
alphaTexture = KinectInterop.CreateRenderTexture(alphaTexture, textureRes.x, textureRes.y, RenderTextureFormat.ARGB32);
foregroundTexture = KinectInterop.CreateRenderTexture(foregroundTexture, textureRes.x, textureRes.y, RenderTextureFormat.ARGB32);
Shader erodeShader = Shader.Find("Kinect/ErodeShader");
erodeFilterMat = new Material(erodeShader);
erodeFilterMat.SetFloat("_TexResX", (float)textureRes.x);
erodeFilterMat.SetFloat("_TexResY", (float)textureRes.y);
//sensorData.erodeBodyMaterial.SetTexture("_MainTex", sensorData.bodyIndexTexture);
Shader dilateShader = Shader.Find("Kinect/DilateShader");
dilateFilterMat = new Material(dilateShader);
dilateFilterMat.SetFloat("_TexResX", (float)textureRes.x);
dilateFilterMat.SetFloat("_TexResY", (float)textureRes.y);
//sensorData.dilateBodyMaterial.SetTexture("_MainTex", sensorData.bodyIndexTexture);
Shader gradientShader = Shader.Find("Kinect/GradientShader");
gradientFilterMat = new Material(gradientShader);
Shader medianShader = Shader.Find("Kinect/MedianShader");
medianFilterMat = new Material(medianShader);
//sensorData.medianBodyMaterial.SetFloat("_Amount", 1.0f);
Shader blurShader = Shader.Find("Kinect/BlurShader");
blurFilterMat = new Material(blurShader);
Shader invertShader = Shader.Find("Kinect/InvertShader");
invertAlphaMat = new Material(invertShader);
Shader foregroundShader = Shader.Find("Kinect/ForegroundShader");
foregroundMat = new Material(foregroundShader);
return true;
}
return false;
}
// releases background removal shader resources
private void FinishBackgroundRemoval(KinectInterop.SensorData sensorData)
{
if(filterByBody != null)
{
filterByBody.FinishBackgroundRemoval(sensorData);
}
else if(filterByBI != null)
{
filterByBI.FinishBackgroundRemoval(sensorData);
}
if (sensorInt)
{
sensorInt.pointCloudColorTexture = null;
sensorInt.pointCloudVertexTexture = null;
}
if (bColorTextureCreated && colorTexture)
{
colorTexture.Release();
colorTexture = null;
}
if (bVertexTextureCreated && vertexTexture)
{
vertexTexture.Release();
vertexTexture = null;
}
if (alphaTexture)
{
alphaTexture.Release();
alphaTexture = null;
}
if(foregroundTexture)
{
foregroundTexture.Release();
foregroundTexture = null;
}
erodeFilterMat = null;
dilateFilterMat = null;
medianFilterMat = null;
blurFilterMat = null;
invertAlphaMat = null;
foregroundMat = null;
}
// computes current background removal texture
private bool UpdateBackgroundRemoval(KinectInterop.SensorData sensorData)
{
if (bBackgroundRemovalInited && (lastDepth2SpaceFrameTime != sensorData.lastDepth2SpaceFrameTime ||
lastColorBodyIndexBufferTime != sensorData.lastColorBodyIndexBufferTime || filterByGS != null))
{
lastDepth2SpaceFrameTime = sensorData.lastDepth2SpaceFrameTime;
lastColorBodyIndexBufferTime = sensorData.lastColorBodyIndexBufferTime;
//Debug.Log("BR Depth2SpaceFrameTime: " + lastDepth2SpaceFrameTime + " ColorBodyIndexBufferTime: " + lastColorBodyIndexBufferTime);
RenderTexture[] tempTextures = new RenderTexture[2];
tempTextures[0] = RenderTexture.GetTemporary(textureRes.x, textureRes.y, 0);
tempTextures[1] = RenderTexture.GetTemporary(textureRes.x, textureRes.y, 0);
RenderTexture[] tempGradTextures = null;
if (applyGradientFilter)
{
tempGradTextures = new RenderTexture[2];
tempGradTextures[0] = RenderTexture.GetTemporary(textureRes.x, textureRes.y, 0);
tempGradTextures[1] = RenderTexture.GetTemporary(textureRes.x, textureRes.y, 0);
}
// filter
if(filterByBody != null && sensorInt != null)
{
filterByBody.ApplyForegroundFilterByBody(vertexTexture, alphaTexture, playerIndex, sensorIndex, MAX_BODY_COUNT,
sensorInt.GetSensorToWorldMatrix(), kinectManager, foregroundCamera);
}
else if(filterByDist != null && sensorInt != null)
{
// filter by distance
filterByDist.ApplyVertexFilter(vertexTexture, alphaTexture, sensorInt.GetSensorToWorldMatrix());
}
else if(filterByBI != null)
{
// filter by body index
filterByBI.ApplyForegroundFilterByBodyIndex(alphaTexture, sensorData, kinectManager, playerIndex, MAX_BODY_COUNT);
}
else if (filterByGS != null)
{
// filter by green screen
filterByGS.ApplyForegroundFilterByGreenScreen(alphaTexture, sensorData, kinectManager, colorTexture);
}
//if(filterByBI == null)
//{
// Graphics.Blit(vertexTexture, alphaTexture);
//}
// median
if (applyMedianFilter)
{
ApplySimpleFilter(alphaTexture, alphaTexture, medianFilterMat, tempTextures);
}
//else
//{
// Graphics.Blit(vertexTexture, alphaTexture);
//}
// erode0
ApplyIterableFilter(alphaTexture, alphaTexture, erodeFilterMat, erodeIterations0, tempTextures);
if(applyGradientFilter)
{
Graphics.CopyTexture(alphaTexture, tempGradTextures[0]);
}
// dilate
ApplyIterableFilter(alphaTexture, alphaTexture, dilateFilterMat, dilateIterations, tempTextures);
if (applyGradientFilter)
{
//Graphics.Blit(alphaTexture, tempGradTextures[1]);
gradientFilterMat.SetTexture("_ErodeTex", tempGradTextures[0]);
ApplySimpleFilter(alphaTexture, tempGradTextures[1], gradientFilterMat, tempTextures);
}
// erode
ApplyIterableFilter(alphaTexture, alphaTexture, erodeFilterMat, erodeIterations, tempTextures);
if (tempGradTextures != null)
{
Graphics.Blit(alphaTexture, tempGradTextures[0]);
}
// blur
if(applyBlurFilter)
{
ApplySimpleFilter(alphaTexture, alphaTexture, blurFilterMat, tempTextures);
}
if(invertAlphaMask)
{
ApplySimpleFilter(alphaTexture, alphaTexture, invertAlphaMat, tempTextures);
}
if(!computeAlphaMaskOnly)
{
foregroundMat.SetTexture("_ColorTex", colorTexture);
foregroundMat.SetTexture("_GradientTex", tempGradTextures[1]);
Color gradientColor = (erodeIterations0 != 0 || dilateIterations != 0 || erodeIterations != 0) ? bodyContourColor : Color.clear;
foregroundMat.SetColor("_GradientColor", gradientColor);
ApplySimpleFilter(alphaTexture, foregroundTexture, foregroundMat, tempTextures);
}
else
{
Graphics.CopyTexture(alphaTexture, foregroundTexture);
}
// cleanup
if (tempGradTextures != null)
{
RenderTexture.ReleaseTemporary(tempGradTextures[0]);
RenderTexture.ReleaseTemporary(tempGradTextures[1]);
}
RenderTexture.ReleaseTemporary(tempTextures[0]);
RenderTexture.ReleaseTemporary(tempTextures[1]);
//sensorData.usedColorBodyIndexBufferTime = sensorData.lastColorBodyIndexBufferTime;
}
return true;
}
// applies iterable filter to the source texture
private void ApplyIterableFilter(RenderTexture source, RenderTexture destination, Material filterMat, int numIterations, RenderTexture[] tempTextures)
{
if (!source || !destination || !filterMat || numIterations == 0)
return;
Graphics.Blit(source, tempTextures[0]);
for (int i = 0; i < numIterations; i++)
{
Graphics.Blit(tempTextures[i % 2], tempTextures[(i + 1) % 2], filterMat);
}
if ((numIterations % 2) == 0)
{
Graphics.Blit(tempTextures[0], destination);
}
else
{
Graphics.Blit(tempTextures[1], destination);
}
}
// applies simple filter to the source texture
private void ApplySimpleFilter(RenderTexture source, RenderTexture destination, Material filterMat, RenderTexture[] tempTextures)
{
if (!source || !destination || !filterMat)
return;
Graphics.Blit(source, tempTextures[0], filterMat);
Graphics.Blit(tempTextures[0], destination);
}
}
}
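For context, a minimal usage sketch (not part of this commit) showing how another component could consume the output of BackgroundRemovalManager. It relies only on the public getters defined above; the class and field names below are hypothetical, and it assumes a KinectManager and a BackgroundRemovalManager are present in the scene.

using UnityEngine;
using com.rfilkov.kinect;
// Hypothetical example component (not part of this commit): feeds the manager's
// foreground texture to a renderer once background removal has been initialized.
public class ForegroundTextureConsumer : MonoBehaviour
{
// reference to the BackgroundRemovalManager in the scene (assign in the inspector)
public BackgroundRemovalManager backgroundRemovalManager;
// renderer whose main texture should show the user silhouette
public Renderer targetRenderer;
void Update()
{
if (backgroundRemovalManager == null || targetRenderer == null ||
!backgroundRemovalManager.IsBackgroundRemovalInited())
return;
Texture foregroundTex = backgroundRemovalManager.GetForegroundTex();
if (foregroundTex != null && targetRenderer.material.mainTexture != foregroundTex)
{
// the render texture is updated in place every frame, so assigning it once is enough
targetRenderer.material.mainTexture = foregroundTex;
}
}
}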

12
Assets/Azure/KinectScripts/BackgroundRemovalManager.cs.meta

@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: a559e12c3f373eb4c8d0e1096fec8a67
timeCreated: 1426178739
licenseType: Store
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

107
Assets/Azure/KinectScripts/BackgroundStaticImage.cs

@@ -0,0 +1,107 @@
using UnityEngine;
using System.Collections;
using com.rfilkov.kinect;
namespace com.rfilkov.components
{
/// <summary>
/// Background static image is a component that displays a static image on a RawImage texture, usually the scene background.
/// </summary>
public class BackgroundStaticImage : MonoBehaviour
{
[Tooltip("Image dimensions in pixels.")]
public Vector2Int imageSize = Vector2Int.zero;
[Tooltip("Image scale in X and Y directions.")]
public Vector2 imageScale = Vector2.one;
[Tooltip("RawImage used to display the color camera feed.")]
public UnityEngine.UI.RawImage backgroundImage;
[Tooltip("Camera used to display the background image. Set it, if you'd like to allow background image to resize, to match the color image's aspect ratio.")]
public Camera backgroundCamera;
// last camera rect width & height
private float lastCamRectW = 0;
private float lastCamRectH = 0;
private Vector2 initialAnchorPos = Vector2.zero;
void Start()
{
if (backgroundImage == null)
{
backgroundImage = GetComponent<UnityEngine.UI.RawImage>();
}
if(imageSize == Vector2.zero && backgroundImage != null && backgroundImage.texture != null)
{
imageSize = new Vector2Int(backgroundImage.texture.width, backgroundImage.texture.height);
}
}
void Update()
{
float cameraWidth = backgroundCamera ? backgroundCamera.pixelRect.width : 0f;
float cameraHeight = backgroundCamera ? backgroundCamera.pixelRect.height : 0f;
if (backgroundImage && (lastCamRectW != cameraWidth || lastCamRectH != cameraHeight))
{
lastCamRectW = cameraWidth;
lastCamRectH = cameraHeight;
backgroundImage.rectTransform.localScale = new Vector3(imageScale.x, imageScale.y, 1f);
backgroundImage.color = Color.white;
//Debug.Log("aPos: " + backgroundImage.rectTransform.anchoredPosition + ", aMin: " + backgroundImage.rectTransform.anchorMin +
// ", aMax:" + backgroundImage.rectTransform.anchorMax + ", pivot: " + backgroundImage.rectTransform.pivot +
// ", size: " + backgroundImage.rectTransform.sizeDelta);
if (backgroundCamera != null)
{
// adjust image's size and position to match the stream aspect ratio
int imageWidth = imageSize.x;
int imageHeight = imageSize.y;
if (imageWidth == 0 || imageHeight == 0)
return;
RectTransform rectImage = backgroundImage.rectTransform;
float rectWidth = (rectImage.anchorMin.x != rectImage.anchorMax.x) ? cameraWidth * (rectImage.anchorMax.x - rectImage.anchorMin.x) : rectImage.sizeDelta.x;
float rectHeight = (rectImage.anchorMin.y != rectImage.anchorMax.y) ? cameraHeight * (rectImage.anchorMax.y - rectImage.anchorMin.y) : rectImage.sizeDelta.y;
if (imageWidth > imageHeight)
rectWidth = rectHeight * imageWidth / imageHeight;
else
rectHeight = rectWidth * imageHeight / imageWidth;
Vector2 pivotOffset = (rectImage.pivot - new Vector2(0.5f, 0.5f)) * 2f;
//Vector2 imageScale = this.imageScale;
Vector2 anchorPos = rectImage.anchoredPosition + pivotOffset * imageScale * new Vector2(rectWidth, rectHeight);
if (rectImage.anchorMin.x != rectImage.anchorMax.x)
{
rectWidth = -(cameraWidth - rectWidth);
}
if (rectImage.anchorMin.y != rectImage.anchorMax.y)
{
rectHeight = -(cameraHeight - rectHeight);
}
rectImage.sizeDelta = new Vector2(rectWidth, rectHeight);
rectImage.anchoredPosition = initialAnchorPos = anchorPos;
//Debug.Log("imgSize: " + imageSize + ", camW: " + cameraWidth + ", camH: " + cameraHeight + ", sizeDelta: " + rectImage.sizeDelta + ", anchoredPosition: " + rectImage.anchoredPosition);
}
}
//RectTransform rectTransform = backgroundImage.rectTransform;
//Debug.Log("pivot: " + rectTransform.pivot + ", anchorPos: " + rectTransform.anchoredPosition + ", \nanchorMin: " + rectTransform.anchorMin + ", anchorMax: " + rectTransform.anchorMax);
}
}
}
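The if/else in Update() above recomputes one side of the RawImage rect from the other, so the displayed image keeps the source aspect ratio inside the camera rect. A quick numeric walk-through with hypothetical values (not from this commit), assuming a full-screen-anchored RawImage:

// Hypothetical numbers: a 1920x1080 camera rect showing a 640x576 image.
float cameraW = 1920f, cameraH = 1080f;
float imageW = 640f, imageH = 576f;
float rectW = cameraW, rectH = cameraH;
if (imageW > imageH)
rectW = rectH * imageW / imageH;    // 1080 * 640 / 576 = 1200 -> image is shown 1200x1080, pillarboxed
else
rectH = rectW * imageH / imageW;    // (a portrait image would be letterboxed instead)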

12
Assets/Azure/KinectScripts/BackgroundStaticImage.cs.meta

@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: d93e8d33f3227485e88a407ecc3a14b0
timeCreated: 1481733120
licenseType: Store
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

300
Assets/Azure/KinectScripts/BackgroundUserBodyImage.cs

@@ -0,0 +1,300 @@
using UnityEngine;
using System.Collections;
using com.rfilkov.kinect;
using System;
namespace com.rfilkov.components
{
/// <summary>
/// Background user-body image is a component that displays the user-body image on a RawImage texture, usually the scene background.
/// </summary>
public class BackgroundUserBodyImage : MonoBehaviour
{
[Tooltip("Depth sensor index - 0 is the 1st one, 1 - the 2nd one, etc.")]
public int sensorIndex = 0;
[Tooltip("Index of the player, tracked by this component. -1 means all players, 0 - the 1st player, 1 - the 2nd one, 2 - the 3rd one, etc.")]
public int playerIndex = -1;
[Tooltip("RawImage used to display the user-body image.")]
public UnityEngine.UI.RawImage backgroundImage;
[Tooltip("Camera used to display the background image. Set it, if you'd like to allow background image to resize, to match the depth image's aspect ratio.")]
public Camera backgroundCamera;
// last camera rect width & height
private float lastCamRectW = 0;
private float lastCamRectH = 0;
// references
private KinectManager kinectManager = null;
private KinectInterop.SensorData sensorData = null;
private Vector2 initialAnchorPos = Vector2.zero;
// color-camera aligned frames
private ulong lastDepthFrameTime = 0;
private ulong lastBodyIndexFrameTime = 0;
// color-camera aligned texture and buffers
private RenderTexture bodyImageTexture = null;
private Material bodyImageMaterial = null;
private ComputeBuffer bodyIndexBuffer = null;
private ComputeBuffer depthImageBuffer = null;
private ComputeBuffer bodyHistBuffer = null;
// body image hist data
protected int[] depthBodyBufferData = null;
protected int[] equalBodyBufferData = null;
protected int bodyHistTotalPoints = 0;
void Start()
{
if (backgroundImage == null)
{
backgroundImage = GetComponent<UnityEngine.UI.RawImage>();
}
kinectManager = KinectManager.Instance;
sensorData = kinectManager != null ? kinectManager.GetSensorData(sensorIndex) : null;
if (sensorData != null)
{
// create the user texture and needed buffers
//bodyImageTexture = KinectInterop.CreateRenderTexture(bodyImageTexture, sensorData.depthImageWidth, sensorData.depthImageHeight);
bodyImageMaterial = new Material(Shader.Find("Kinect/UserHistImageShader"));
bodyHistBuffer = KinectInterop.CreateComputeBuffer(bodyHistBuffer, DepthSensorBase.MAX_DEPTH_DISTANCE_MM + 1, sizeof(int));
depthBodyBufferData = new int[DepthSensorBase.MAX_DEPTH_DISTANCE_MM + 1];
equalBodyBufferData = new int[DepthSensorBase.MAX_DEPTH_DISTANCE_MM + 1];
}
}
void OnDestroy()
{
if (bodyImageTexture)
{
bodyImageTexture.Release();
bodyImageTexture = null;
}
if (bodyIndexBuffer != null)
{
bodyIndexBuffer.Dispose();
bodyIndexBuffer = null;
}
if (depthImageBuffer != null)
{
depthImageBuffer.Dispose();
depthImageBuffer = null;
}
if (bodyHistBuffer != null)
{
bodyHistBuffer.Dispose();
bodyHistBuffer = null;
}
}
void Update()
{
if (kinectManager && kinectManager.IsInitialized() && sensorData != null)
{
float cameraWidth = backgroundCamera ? backgroundCamera.pixelRect.width : 0f;
float cameraHeight = backgroundCamera ? backgroundCamera.pixelRect.height : 0f;
// check for new depth and body-index frames
UpdateTextureWithNewFrame();
//Texture bodyImageTexture = kinectManager.GetUsersImageTex(sensorIndex);
if (backgroundImage && bodyImageTexture != null && (backgroundImage.texture == null ||
backgroundImage.texture.width != bodyImageTexture.width || backgroundImage.texture.height != bodyImageTexture.height ||
lastCamRectW != cameraWidth || lastCamRectH != cameraHeight))
{
lastCamRectW = cameraWidth;
lastCamRectH = cameraHeight;
backgroundImage.texture = bodyImageTexture;
backgroundImage.rectTransform.localScale = sensorData.depthImageScale; // kinectManager.GetDepthImageScale(sensorIndex);
backgroundImage.color = Color.white;
if (backgroundCamera != null)
{
// adjust image's size and position to match the stream aspect ratio
int depthImageWidth = sensorData.depthImageWidth; // kinectManager.GetDepthImageWidth(sensorIndex);
int depthImageHeight = sensorData.depthImageHeight; // kinectManager.GetDepthImageHeight(sensorIndex);
if (depthImageWidth == 0 || depthImageHeight == 0)
return;
RectTransform rectImage = backgroundImage.rectTransform;
float rectWidth = (rectImage.anchorMin.x != rectImage.anchorMax.x) ? cameraWidth * (rectImage.anchorMax.x - rectImage.anchorMin.x) : rectImage.sizeDelta.x;
float rectHeight = (rectImage.anchorMin.y != rectImage.anchorMax.y) ? cameraHeight * (rectImage.anchorMax.y - rectImage.anchorMin.y) : rectImage.sizeDelta.y;
if (depthImageWidth > depthImageHeight)
rectWidth = rectHeight * depthImageWidth / depthImageHeight;
else
rectHeight = rectWidth * depthImageHeight / depthImageWidth;
Vector2 pivotOffset = (rectImage.pivot - new Vector2(0.5f, 0.5f)) * 2f;
Vector2 imageScale = sensorData.depthImageScale; // (Vector2)kinectManager.GetDepthImageScale(sensorIndex);
Vector2 anchorPos = rectImage.anchoredPosition + pivotOffset * imageScale * new Vector2(rectWidth, rectHeight);
if (rectImage.anchorMin.x != rectImage.anchorMax.x)
{
rectWidth = -(cameraWidth - rectWidth);
}
if (rectImage.anchorMin.y != rectImage.anchorMax.y)
{
rectHeight = -(cameraHeight - rectHeight);
}
rectImage.sizeDelta = new Vector2(rectWidth, rectHeight);
rectImage.anchoredPosition = initialAnchorPos = anchorPos;
}
}
//if (backgroundImage)
//{
// // update the anchor position, if needed
// if (sensorData != null && sensorData.sensorInterface != null)
// {
// Vector2 updatedAnchorPos = initialAnchorPos + sensorData.sensorInterface.GetBackgroundImageAnchorPos(sensorData);
// if (backgroundImage.rectTransform.anchoredPosition != updatedAnchorPos)
// {
// backgroundImage.rectTransform.anchoredPosition = updatedAnchorPos;
// }
// }
//}
}
else
{
// reset the background texture, if needed
if (backgroundImage && backgroundImage.texture != null)
{
backgroundImage.texture = null;
}
}
//RectTransform rectTransform = backgroundImage.rectTransform;
//Debug.Log("pivot: " + rectTransform.pivot + ", anchorPos: " + rectTransform.anchoredPosition + ", \nanchorMin: " + rectTransform.anchorMin + ", anchorMax: " + rectTransform.anchorMax);
}
// checks for new color-camera aligned frames, and composes an updated body-index texture, if needed
private void UpdateTextureWithNewFrame()
{
if (sensorData == null || sensorData.sensorInterface == null || sensorData.bodyIndexImage == null || sensorData.depthImage == null)
return;
if (sensorData.depthImageWidth == 0 || sensorData.depthImageHeight == 0 || sensorData.lastDepthFrameTime == 0 || sensorData.lastBodyIndexFrameTime == 0)
return;
// get body index frame
if (lastDepthFrameTime != sensorData.lastDepthFrameTime || lastBodyIndexFrameTime != sensorData.lastBodyIndexFrameTime)
{
lastDepthFrameTime = sensorData.lastDepthFrameTime;
lastBodyIndexFrameTime = sensorData.lastBodyIndexFrameTime;
if (bodyImageTexture == null || bodyImageTexture.width != sensorData.depthImageWidth || bodyImageTexture.height != sensorData.depthImageHeight)
{
bodyImageTexture = KinectInterop.CreateRenderTexture(bodyImageTexture, sensorData.depthImageWidth, sensorData.depthImageHeight);
}
Array.Clear(depthBodyBufferData, 0, depthBodyBufferData.Length);
Array.Clear(equalBodyBufferData, 0, equalBodyBufferData.Length);
bodyHistTotalPoints = 0;
// get configured min & max distances
float minDistance = ((DepthSensorBase)sensorData.sensorInterface).minDepthDistance;
float maxDistance = ((DepthSensorBase)sensorData.sensorInterface).maxDepthDistance;
int depthMinDistance = (int)(minDistance * 1000f);
int depthMaxDistance = (int)(maxDistance * 1000f);
int frameLen = sensorData.depthImage.Length;
for (int i = 0; i < frameLen; i++)
{
int depth = sensorData.depthImage[i];
int limDepth = (depth >= depthMinDistance && depth <= depthMaxDistance) ? depth : 0;
if (/**rawBodyIndexImage[i] != 255 &&*/ limDepth > 0)
{
depthBodyBufferData[limDepth]++;
bodyHistTotalPoints++;
}
}
if (bodyHistTotalPoints > 0)
{
equalBodyBufferData[0] = depthBodyBufferData[0];
for (int i = 1; i < depthBodyBufferData.Length; i++)
{
equalBodyBufferData[i] = equalBodyBufferData[i - 1] + depthBodyBufferData[i];
}
}
int bodyIndexBufferLength = sensorData.bodyIndexImage.Length >> 2;
if (bodyIndexBuffer == null || bodyIndexBuffer.count != bodyIndexBufferLength)
{
bodyIndexBuffer = KinectInterop.CreateComputeBuffer(bodyIndexBuffer, bodyIndexBufferLength, sizeof(uint));
}
KinectInterop.SetComputeBufferData(bodyIndexBuffer, sensorData.bodyIndexImage, bodyIndexBufferLength, sizeof(uint));
int depthBufferLength = sensorData.depthImage.Length >> 1;
if (depthImageBuffer == null || depthImageBuffer.count != depthBufferLength)
{
depthImageBuffer = KinectInterop.CreateComputeBuffer(depthImageBuffer, depthBufferLength, sizeof(uint));
}
KinectInterop.SetComputeBufferData(depthImageBuffer, sensorData.depthImage, depthBufferLength, sizeof(uint));
if (bodyHistBuffer != null)
{
KinectInterop.SetComputeBufferData(bodyHistBuffer, equalBodyBufferData, equalBodyBufferData.Length, sizeof(int));
}
float minDist = minDistance; // kinectManager.minUserDistance != 0f ? kinectManager.minUserDistance : minDistance;
float maxDist = maxDistance; // kinectManager.maxUserDistance != 0f ? kinectManager.maxUserDistance : maxDistance;
bodyImageMaterial.SetInt("_TexResX", sensorData.depthImageWidth);
bodyImageMaterial.SetInt("_TexResY", sensorData.depthImageHeight);
bodyImageMaterial.SetInt("_MinDepth", (int)(minDist * 1000f));
bodyImageMaterial.SetInt("_MaxDepth", (int)(maxDist * 1000f));
bodyImageMaterial.SetBuffer("_BodyIndexMap", bodyIndexBuffer);
bodyImageMaterial.SetBuffer("_DepthMap", depthImageBuffer);
bodyImageMaterial.SetBuffer("_HistMap", bodyHistBuffer);
bodyImageMaterial.SetInt("_TotalPoints", bodyHistTotalPoints);
Color[] bodyIndexColors = kinectManager.GetBodyIndexColors();
if (playerIndex >= 0)
{
ulong userId = kinectManager.GetUserIdByIndex(playerIndex);
int bodyIndex = kinectManager.GetBodyIndexByUserId(userId);
int numBodyIndices = bodyIndexColors.Length;
Color clrNone = new Color(0f, 0f, 0f, 0f);
for (int i = 0; i < numBodyIndices; i++)
{
if (i != bodyIndex)
bodyIndexColors[i] = clrNone;
}
}
bodyImageMaterial.SetColorArray("_BodyIndexColors", bodyIndexColors);
Graphics.Blit(null, bodyImageTexture, bodyImageMaterial);
}
}
}
}
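The loop in UpdateTextureWithNewFrame() above builds a cumulative histogram (prefix sums) of the in-range depth values, which is then handed to the UserHistImageShader, presumably to equalize the gray levels of the body pixels. A tiny standalone illustration of that step, with made-up numbers (not part of this commit):

// Stand-alone illustration with made-up counts for depth values 0..4:
int[] hist = { 0, 3, 1, 0, 6 };
int[] cumulative = new int[hist.Length];
cumulative[0] = hist[0];
for (int i = 1; i < hist.Length; i++)
{
cumulative[i] = cumulative[i - 1] + hist[i];
}
// cumulative is now { 0, 3, 4, 4, 10 }; dividing each entry by the total (10) maps a
// depth value to its normalized rank, which the shader can use as the pixel's gray level.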

12
Assets/Azure/KinectScripts/BackgroundUserBodyImage.cs.meta

@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: df3491a43e1ff0f4d84b03fdfd277aad
timeCreated: 1483707326
licenseType: Store
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

408
Assets/Azure/KinectScripts/BodyDataRecorderPlayer.cs

@@ -0,0 +1,408 @@
using UnityEngine;
using System.Collections;
using System.IO;
using com.rfilkov.kinect;
namespace com.rfilkov.components
{
/// <summary>
/// BodyDataRecorderPlayer is the component that can be used for recording and replaying body-data files.
/// </summary>
public class BodyDataRecorderPlayer : MonoBehaviour
{
[Tooltip("Path to the file used to record or replay the recorded data.")]
public string filePath = "BodyRecording.txt";
[Tooltip("UI-Text to display information messages.")]
public UnityEngine.UI.Text infoText;
[Tooltip("Whether to start playing the recorded data, right after the scene start.")]
public bool playAtStart = false;
// singleton instance of the class
private static BodyDataRecorderPlayer instance = null;
// whether it is recording or playing saved data at the moment
private bool isRecording = false;
private bool isPlaying = false;
// reference to the KinectManager
private KinectManager kinectManager = null;
private KinectInterop.SensorData sensorData = null;
// time variables used for recording and playing
private ulong liRelTime = 0;
private float fStartTime = 0f;
private float fCurrentTime = 0f;
private int fCurrentFrame = 0;
// player variables
private StreamReader fileReader = null;
private float fPlayTime = 0f;
private string sPlayLine = string.Empty;
private Vector3 sensorSpaceScale = Vector3.one;
/// <summary>
/// Gets the singleton BodyDataRecorderPlayer instance.
/// </summary>
/// <value>The BodyDataRecorderPlayer instance.</value>
public static BodyDataRecorderPlayer Instance
{
get
{
return instance;
}
}
// starts recording
public void StartRecording()
{
if (isRecording)
return;
isRecording = true;
// avoid recording and playing at the same time
if (isPlaying && isRecording)
{
CloseFile();
isPlaying = false;
Debug.Log("Playing stopped.");
}
// stop recording if there is no file name specified
if (filePath.Length == 0)
{
isRecording = false;
Debug.LogError("No file to save.");
if (infoText != null)
{
infoText.text = "No file to save.";
}
}
else if(filePath.IndexOf('/') < 0 && filePath.IndexOf('\\') < 0)
{
#if UNITY_EDITOR || UNITY_STANDALONE
string saveFolder = ".";
#else
string saveFolder = Application.persistentDataPath;
#endif
if (saveFolder.Length > 0 && saveFolder[saveFolder.Length - 1] != '/' && saveFolder[saveFolder.Length - 1] != '\\')
{
saveFolder += "/";
}
filePath = saveFolder + filePath;
}
if (isRecording)
{
Debug.Log("Recording started. File: " + filePath);
if (infoText != null)
{
infoText.text = "Recording...";
}
// delete the old recording file
if (filePath.Length > 0 && File.Exists(filePath))
{
File.Delete(filePath);
}
// initialize times
fStartTime = fCurrentTime = Time.time;
fCurrentFrame = 0;
}
//return isRecording;
}
// starts playing
public void StartPlaying()
{
if (isPlaying)
return;
isPlaying = true;
// avoid recording and playing at the same time
if (isRecording && isPlaying)
{
isRecording = false;
Debug.Log("Recording stopped.");
}
if (filePath.Length > 0 && filePath.IndexOf('/') < 0 && filePath.IndexOf('\\') < 0)
{
#if UNITY_EDITOR || UNITY_STANDALONE
string saveFolder = ".";
#else
string saveFolder = Application.persistentDataPath;
#endif
if (saveFolder.Length > 0 && saveFolder[saveFolder.Length - 1] != '/' && saveFolder[saveFolder.Length - 1] != '\\')
{
saveFolder += "/";
}
filePath = saveFolder + filePath;
}
// stop playing if there is no file name specified
if (filePath.Length == 0 || !File.Exists(filePath))
{
isPlaying = false;
Debug.LogError("File not found: " + filePath);
if (infoText != null)
{
infoText.text = "File not found: " + filePath;
}
}
if (isPlaying)
{
Debug.Log("Playing started. File: " + filePath);
if (infoText != null)
{
infoText.text = "Playing...";
}
// initialize times
fStartTime = fCurrentTime = Time.time;
fCurrentFrame = -1;
// open the file and read a line
#if !UNITY_WSA
fileReader = new StreamReader(filePath);
#endif
ReadLineFromFile();
// enable the play mode
if (kinectManager)
{
kinectManager.EnablePlayMode(true);
}
}
//return isPlaying;
}
// stops recording or playing
public void StopRecordingOrPlaying()
{
if (isRecording)
{
isRecording = false;
string sSavedTimeAndFrames = string.Format("{0:F3}s., {1} frames.", (fCurrentTime - fStartTime), fCurrentFrame);
Debug.Log("Recording stopped @ " + sSavedTimeAndFrames);
if (infoText != null)
{
infoText.text = "Recording stopped @ " + sSavedTimeAndFrames;
}
}
if (isPlaying)
{
// restore the space scale
if(sensorData != null)
{
sensorData.sensorSpaceScale = sensorSpaceScale;
}
// close the file, if it is playing
CloseFile();
isPlaying = false;
Debug.Log("Playing stopped.");
if (infoText != null)
{
infoText.text = "Playing stopped.";
}
}
//if (infoText != null)
//{
// infoText.text = "Say: 'Record' to start the recorder, or 'Play' to start the player.";
//}
}
// returns if file recording is in progress at the moment
public bool IsRecording()
{
return isRecording;
}
// returns if file-play is in progress at the moment
public bool IsPlaying()
{
return isPlaying;
}
// ----- end of public functions -----
void Awake()
{
instance = this;
}
void Start()
{
//if (infoText != null)
//{
// infoText.text = "Say: 'Record' to start the recorder, or 'Play' to start the player.";
//}
kinectManager = KinectManager.Instance;
sensorData = kinectManager ? kinectManager.GetSensorData(0) : null;
sensorSpaceScale = sensorData != null ? sensorData.sensorSpaceScale : Vector3.one;
if (!kinectManager)
{
Debug.Log("KinectManager not found, probably not initialized.");
if (infoText != null)
{
infoText.text = "KinectManager not found, probably not initialized.";
}
}
if (playAtStart)
{
StartPlaying();
}
}
void Update()
{
if (isRecording)
{
// save the body frame, if any
if (kinectManager && kinectManager.IsInitialized() && liRelTime != kinectManager.GetBodyFrameTimestamp())
{
liRelTime = kinectManager.GetBodyFrameTimestamp();
string sBodyFrame = kinectManager.GetBodyFrameData(ref fCurrentTime, ';');
System.Globalization.CultureInfo invCulture = System.Globalization.CultureInfo.InvariantCulture;
if (sBodyFrame.Length > 0)
{
#if !UNITY_WSA
using (StreamWriter writer = File.AppendText(filePath))
{
string sRelTime = string.Format(invCulture, "{0:F3}", (fCurrentTime - fStartTime));
writer.WriteLine(sRelTime + "|" + sBodyFrame);
if (infoText != null)
{
infoText.text = string.Format("Recording @ {0}s., frame {1}.", sRelTime, fCurrentFrame);
}
fCurrentFrame++;
}
#else
string sRelTime = string.Format(invCulture, "{0:F3}", (fCurrentTime - fStartTime));
Debug.Log(sRelTime + "|" + sBodyFrame);
#endif
}
}
}
if (isPlaying)
{
// wait for the right time
fCurrentTime = Time.time;
float fRelTime = fCurrentTime - fStartTime;
if (sPlayLine != null && fRelTime >= fPlayTime)
{
// then play the line
if (kinectManager && sPlayLine.Length > 0)
{
kinectManager.SetBodyFrameData(sPlayLine);
}
// and read the next line
ReadLineFromFile();
}
if (sPlayLine == null)
{
// finish playing, if we reached the EOF
StopRecordingOrPlaying();
}
}
}
void OnDestroy()
{
// don't forget to release the resources
CloseFile();
isRecording = isPlaying = false;
}
// reads a line from the file
private bool ReadLineFromFile()
{
if (fileReader == null)
return false;
// read a line
sPlayLine = fileReader.ReadLine();
if (sPlayLine == null)
return false;
System.Globalization.CultureInfo invCulture = System.Globalization.CultureInfo.InvariantCulture;
System.Globalization.NumberStyles numFloat = System.Globalization.NumberStyles.Float;
// extract the unity time and the body frame
char[] delimiters = { '|' };
string[] sLineParts = sPlayLine.Split(delimiters);
if (sLineParts.Length >= 2)
{
float.TryParse(sLineParts[0], numFloat, invCulture, out fPlayTime);
sPlayLine = sLineParts[1];
fCurrentFrame++;
if (infoText != null)
{
infoText.text = string.Format("Playing @ {0:F3}s., frame {1}.", fPlayTime, fCurrentFrame);
}
return true;
}
return false;
}
// close the file and disable the play mode
private void CloseFile()
{
// close the file
if (fileReader != null)
{
fileReader.Dispose();
fileReader = null;
}
// disable the play mode
if (kinectManager)
{
kinectManager.EnablePlayMode(false);
}
}
}
}
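A minimal, hypothetical driver for this component (not part of this commit), wiring the public StartRecording/StartPlaying/StopRecordingOrPlaying methods to keyboard shortcuts; the class name and key bindings below are illustrative only:

using UnityEngine;
using com.rfilkov.components;
// Hypothetical helper (not part of this commit): drives the recorder/player with hotkeys.
public class BodyRecorderHotkeys : MonoBehaviour
{
void Update()
{
BodyDataRecorderPlayer recorder = BodyDataRecorderPlayer.Instance;
if (recorder == null)
return;
if (Input.GetKeyDown(KeyCode.R) && !recorder.IsRecording())
recorder.StartRecording();          // starts writing body frames to the configured file
else if (Input.GetKeyDown(KeyCode.P) && !recorder.IsPlaying())
recorder.StartPlaying();            // replays the previously recorded file
else if (Input.GetKeyDown(KeyCode.S))
recorder.StopRecordingOrPlaying();  // stops whichever mode is currently active
}
}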

12
Assets/Azure/KinectScripts/BodyDataRecorderPlayer.cs.meta

@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: d8a2276ae0c75c9439e36365c3e7554d
timeCreated: 1438963698
licenseType: Store
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

655
Assets/Azure/KinectScripts/BodySlicer.cs

@@ -0,0 +1,655 @@
using UnityEngine;
using com.rfilkov.kinect;
namespace com.rfilkov.components
{
/// <summary>
/// Body slice enum.
/// </summary>
public enum BodySlice
{
HEIGHT = 0,
WIDTH = 1,
TORSO_1 = 2,
TORSO_2 = 3,
TORSO_3 = 4,
TORSO_4 = 5,
TORSO_5 = 6,
COUNT = 7
}
/// <summary>
/// Data structure used by the body slicer.
/// </summary>
public struct BodySliceData
{
public BodySlice sliceType;
public bool isSliceValid;
public float diameter;
public int depthPointsLength;
public int colorPointsLength;
// public ushort[] depths;
public Vector2 startDepthPoint;
public Vector2 endDepthPoint;
public Vector2 startColorPoint;
public Vector2 endColorPoint;
public Vector3 startKinectPoint;
public Vector3 endKinectPoint;
}
/// <summary>
/// Body slicer is a component that estimates the user's height from the depth data, as well as several other body measurements.
/// </summary>
public class BodySlicer : MonoBehaviour
{
[Tooltip("Depth sensor index - 0 is the 1st one, 1 - the 2nd one, etc.")]
public int sensorIndex = 0;
[Tooltip("Index of the player, tracked by this component. 0 means the 1st player, 1 - the 2nd one, 2 - the 3rd one, etc.")]
public int playerIndex = 0;
[Tooltip("Camera used to estimate the overlay positions of 3D-objects over the background. By default it is the main camera.")]
public Camera foregroundCamera;
[Tooltip("Whether the sensor is in portrait mode or not.")]
public bool portraitMode = false;
[Tooltip("Whether the body height should estimated or not.")]
public bool estimateBodyHeight = true;
[Tooltip("Whether the body width should estimated or not.")]
public bool estimateBodyWidth = false;
[Tooltip("Whether the body slices should estimated or not.")]
public bool estimateBodySlices = false;
[Tooltip("Whether the slicing should be done on all updates, or only after the user calibration.")]
public bool continuousSlicing = false;
[Tooltip("Whether the detected body slices should be displayed on the screen.")]
public bool displayBodySlices = false;
private ulong calibratedUserId;
private byte userBodyIndex;
// The singleton instance of BodySlicer
//private static BodySlicer instance = null;
private KinectManager kinectManager;
private KinectInterop.SensorData sensorData;
private ulong lastDepthFrameTime;
// body slice data
private BodySliceData[] bodySlices = new BodySliceData[(int)BodySlice.COUNT];
// depth image resolution
private int depthImageWidth;
private int depthImageHeight;
// depth scale
private Vector3 depthScale = Vector3.one;
// screen rectangle taken by the foreground image (in pixels)
private Rect foregroundImgRect;
///// <summary>
///// Gets the singleton BodySlicer instance.
///// </summary>
///// <value>The singleton BodySlicer instance.</value>
//public static BodySlicer Instance
//{
// get
// {
// return instance;
// }
//}
/// <summary>
/// Gets the height of the user.
/// </summary>
/// <returns>The user height.</returns>
public float getUserHeight()
{
return getSliceWidth(BodySlice.HEIGHT);
}
/// <summary>
/// Gets the slice width.
/// </summary>
/// <returns>The slice width.</returns>
/// <param name="slice">Slice.</param>
public float getSliceWidth(BodySlice slice)
{
int iSlice = (int)slice;
if (bodySlices[iSlice].isSliceValid)
{
return bodySlices[iSlice].diameter;
}
return 0f;
}
/// <summary>
/// Gets the body slice count.
/// </summary>
/// <returns>The body slice count.</returns>
public int getBodySliceCount()
{
return bodySlices != null ? bodySlices.Length : 0;
}
/// <summary>
/// Gets the body slice data.
/// </summary>
/// <returns>The body slice data.</returns>
/// <param name="slice">Slice.</param>
public BodySliceData getBodySliceData(BodySlice slice)
{
return bodySlices[(int)slice];
}
/// <summary>
/// Gets the calibrated user ID.
/// </summary>
/// <returns>The calibrated user ID.</returns>
public ulong getCalibratedUserId()
{
return calibratedUserId;
}
/// <summary>
/// Gets the last frame time.
/// </summary>
/// <returns>The last frame time.</returns>
public ulong getLastFrameTime()
{
return lastDepthFrameTime;
}
////////////////////////////////////////////////////////////////////////
void Awake()
{
//instance = this;
}
void Start()
{
kinectManager = KinectManager.Instance;
sensorData = kinectManager ? kinectManager.GetSensorData(sensorIndex) : null;
if(kinectManager && kinectManager.IsInitialized())
{
depthImageWidth = kinectManager.GetDepthImageWidth(sensorIndex);
depthImageHeight = kinectManager.GetDepthImageHeight(sensorIndex);
depthScale = kinectManager.GetDepthImageScale(sensorIndex);
}
if (foregroundCamera == null)
{
// by default use the main camera
foregroundCamera = Camera.main;
}
}
void Update()
{
if (!kinectManager || !kinectManager.IsInitialized() || sensorData == null)
return;
// calculate the foreground rectangle
foregroundImgRect = kinectManager.GetForegroundRectDepth(sensorIndex, foregroundCamera);
// get required player
ulong userId = kinectManager.GetUserIdByIndex(playerIndex);
if (calibratedUserId == 0)
{
if (userId != 0)
{
OnCalibrationSuccess(userId);
}
}
else
{
if (calibratedUserId != userId)
{
OnUserLost(calibratedUserId);
}
else if (continuousSlicing)
{
EstimateBodySlices(calibratedUserId);
}
}
}
void OnRenderObject()
{
if (displayBodySlices)
{
DrawBodySlice(bodySlices[(int)BodySlice.HEIGHT]);
DrawBodySlice(bodySlices[(int)BodySlice.TORSO_1]);
DrawBodySlice(bodySlices[(int)BodySlice.TORSO_2]);
DrawBodySlice(bodySlices[(int)BodySlice.TORSO_3]);
DrawBodySlice(bodySlices[(int)BodySlice.TORSO_4]);
DrawBodySlice(bodySlices[(int)BodySlice.TORSO_5]);
}
}
// draws a body slice line
private void DrawBodySlice(BodySliceData bodySlice)
{
if (bodySlice.isSliceValid && bodySlice.startDepthPoint != Vector2.zero && bodySlice.endDepthPoint != Vector2.zero)
{
float rectX = foregroundImgRect.xMin;
float rectY = foregroundImgRect.yMin;
float scaleX = foregroundImgRect.width / depthImageWidth;
float scaleY = foregroundImgRect.height / depthImageHeight;
float x1 = rectX + (depthScale.x >= 0f ? bodySlice.startDepthPoint.x : depthImageWidth - bodySlice.startDepthPoint.x) * scaleX;
float y1 = rectY + (depthScale.y >= 0f ? bodySlice.startDepthPoint.y : depthImageHeight - bodySlice.startDepthPoint.y) * scaleY;
float x2 = rectX + (depthScale.x >= 0f ? bodySlice.endDepthPoint.x : depthImageWidth - bodySlice.endDepthPoint.x) * scaleX;
float y2 = rectY + (depthScale.y >= 0f ? bodySlice.endDepthPoint.y : depthImageHeight - bodySlice.endDepthPoint.y) * scaleY;
KinectInterop.DrawLine((int)x1, (int)y1, (int)x2, (int)y2, 2f, Color.red);
}
}
public void OnCalibrationSuccess(ulong userId)
{
calibratedUserId = userId;
// estimate body slices
EstimateBodySlices(calibratedUserId);
}
void OnUserLost(ulong userId)
{
calibratedUserId = 0;
// invalidate the body slice data
for (int i = 0; i < bodySlices.Length; i++)
{
bodySlices[i].isSliceValid = false;
}
}
// estimates the body slice data for the given user
public bool EstimateBodySlices(ulong userId)
{
if (userId <= 0)
userId = calibratedUserId;
if (!kinectManager || userId == 0)
return false;
userBodyIndex = (byte)kinectManager.GetBodyIndexByUserId(userId);
if (userBodyIndex == 255)
return false;
bool bSliceSuccess = false;
if (sensorData.bodyIndexImage != null && sensorData.depthImage != null &&
sensorData.lastDepthFrameTime != lastDepthFrameTime)
{
lastDepthFrameTime = sensorData.lastDepthFrameTime;
bSliceSuccess = true;
Vector2 pointPelvis = kinectManager.MapSpacePointToDepthCoords(sensorIndex, kinectManager.GetJointKinectPosition(userId, (int)KinectInterop.JointType.Pelvis, false));
if (estimateBodyHeight)
{
bodySlices[(int)BodySlice.HEIGHT] = !portraitMode ? GetUserHeightParams(pointPelvis) : GetUserWidthParams(pointPelvis);
}
if (estimateBodyWidth)
{
bodySlices[(int)BodySlice.WIDTH] = !portraitMode ? GetUserWidthParams(pointPelvis) : GetUserHeightParams(pointPelvis);
}
if (estimateBodySlices && kinectManager.IsJointTracked(userId, (int)KinectInterop.JointType.Pelvis) && kinectManager.IsJointTracked(userId, (int)KinectInterop.JointType.Neck))
{
Vector2 point1 = pointPelvis;
Vector2 point2 = kinectManager.MapSpacePointToDepthCoords(sensorIndex, kinectManager.GetJointKinectPosition(userId, (int)KinectInterop.JointType.Neck, false));
Vector2 sliceDir = (point2 - point1) / 4f;
bool sliceOnX = !portraitMode;
bool sliceOnY = portraitMode;
Vector2 vSlicePoint = point1;
bodySlices[(int)BodySlice.TORSO_1] = GetBodySliceParams(BodySlice.TORSO_1, vSlicePoint, sliceOnX, sliceOnY, -1);
vSlicePoint += sliceDir;
bodySlices[(int)BodySlice.TORSO_2] = GetBodySliceParams(BodySlice.TORSO_2, vSlicePoint, sliceOnX, sliceOnY, -1);
vSlicePoint += sliceDir;
bodySlices[(int)BodySlice.TORSO_3] = GetBodySliceParams(BodySlice.TORSO_3, vSlicePoint, sliceOnX, sliceOnY, -1);
vSlicePoint += sliceDir;
bodySlices[(int)BodySlice.TORSO_4] = GetBodySliceParams(BodySlice.TORSO_4, vSlicePoint, sliceOnX, sliceOnY, -1);
vSlicePoint = point2;
bodySlices[(int)BodySlice.TORSO_5] = GetBodySliceParams(BodySlice.TORSO_5, vSlicePoint, sliceOnX, sliceOnY, -1);
}
}
return bSliceSuccess;
}
// creates body slice data for user height
private BodySliceData GetUserHeightParams(Vector2 pointSpineBase)
{
int depthLength = sensorData.depthImage.Length;
int depthWidth = sensorData.depthImageWidth;
int depthHeight = sensorData.depthImageHeight;
Vector2 posTop = new Vector2(0, depthHeight);
for (int i = 0, x = 0, y = 0; i < depthLength; i++)
{
if (sensorData.bodyIndexImage[i] == userBodyIndex)
{
//if (posTop.y > y)
posTop = new Vector2(x, y);
break;
}
x++;
if (x >= depthWidth)
{
x = 0;
y++;
}
}
Vector2 posBottom = new Vector2(0, -1);
for (int i = depthLength - 1, x = depthWidth - 1, y = depthHeight - 1; i >= 0; i--)
{
if (sensorData.bodyIndexImage[i] == userBodyIndex)
{
//if (posBottom.y < y)
posBottom = new Vector2(x, y);
break;
}
x--;
if (x < 0)
{
x = depthWidth - 1;
y--;
}
}
BodySliceData sliceData = new BodySliceData();
sliceData.sliceType = BodySlice.HEIGHT;
sliceData.isSliceValid = false;
if (posBottom.y >= 0)
{
sliceData.startDepthPoint = posTop;
sliceData.endDepthPoint = posBottom;
sliceData.depthPointsLength = (int)posBottom.y - (int)posTop.y + 1;
int index1 = (int)posTop.y * depthWidth + (int)posTop.x;
ushort depth1 = sensorData.depthImage[index1];
sliceData.startKinectPoint = kinectManager.MapDepthPointToSpaceCoords(sensorIndex, sliceData.startDepthPoint, depth1, true);
int index2 = (int)posBottom.y * depthWidth + (int)posBottom.x;
ushort depth2 = sensorData.depthImage[index2];
sliceData.endKinectPoint = kinectManager.MapDepthPointToSpaceCoords(sensorIndex, sliceData.endDepthPoint, depth2, true);
sliceData.startColorPoint = kinectManager.MapDepthPointToColorCoords(sensorIndex, sliceData.startDepthPoint, depth1);
sliceData.endColorPoint = kinectManager.MapDepthPointToColorCoords(sensorIndex, sliceData.endDepthPoint, depth2);
if (sliceData.startColorPoint.y < 0)
sliceData.startColorPoint.y = 0;
if (sliceData.endColorPoint.y >= sensorData.colorImageHeight)
sliceData.endColorPoint.y = sensorData.colorImageHeight - 1;
sliceData.colorPointsLength = (int)sliceData.endColorPoint.y - (int)sliceData.startColorPoint.y + 1;
// correct x-positions of depth points
sliceData.startDepthPoint.x = pointSpineBase.x;
sliceData.endDepthPoint.x = pointSpineBase.x;
sliceData.diameter = (sliceData.endKinectPoint - sliceData.startKinectPoint).magnitude;
sliceData.isSliceValid = true;
}
return sliceData;
}
// creates body slice data for user width
private BodySliceData GetUserWidthParams(Vector2 pointSpineBase)
{
int depthLength = sensorData.depthImage.Length;
int depthWidth = sensorData.depthImageWidth;
//int depthHeight = sensorData.depthImageHeight;
Vector2 posLeft = new Vector2(depthWidth, 0);
Vector2 posRight = new Vector2(-1, 0);
for (int i = 0, x = 0, y = 0; i < depthLength; i++)
{
if (sensorData.bodyIndexImage[i] == userBodyIndex)
{
if (posLeft.x > x)
posLeft = new Vector2(x, y);
if (posRight.x < x)
posRight = new Vector2(x, y);
}
x++;
if (x >= depthWidth)
{
x = 0;
y++;
}
}
BodySliceData sliceData = new BodySliceData();
sliceData.sliceType = BodySlice.WIDTH;
sliceData.isSliceValid = false;
if (posRight.x >= 0)
{
sliceData.startDepthPoint = posLeft;
sliceData.endDepthPoint = posRight;
sliceData.depthPointsLength = (int)posRight.x - (int)posLeft.x + 1;
int index1 = (int)posLeft.y * depthWidth + (int)posLeft.x;
ushort depth1 = sensorData.depthImage[index1];
sliceData.startKinectPoint = kinectManager.MapDepthPointToSpaceCoords(sensorIndex, sliceData.startDepthPoint, depth1, true);
int index2 = (int)posRight.y * depthWidth + (int)posRight.x;
ushort depth2 = sensorData.depthImage[index2];
sliceData.endKinectPoint = kinectManager.MapDepthPointToSpaceCoords(sensorIndex, sliceData.endDepthPoint, depth2, true);
sliceData.startColorPoint = kinectManager.MapDepthPointToColorCoords(sensorIndex, sliceData.startDepthPoint, depth1);
sliceData.endColorPoint = kinectManager.MapDepthPointToColorCoords(sensorIndex, sliceData.endDepthPoint, depth2);
if (sliceData.startColorPoint.x < 0)
sliceData.startColorPoint.x = 0;
if (sliceData.endColorPoint.x >= sensorData.colorImageWidth)
sliceData.endColorPoint.x = sensorData.colorImageWidth - 1;
sliceData.colorPointsLength = (int)sliceData.endColorPoint.x - (int)sliceData.startColorPoint.x + 1;
// correct y-positions of depth points
sliceData.startDepthPoint.y = pointSpineBase.y;
sliceData.endDepthPoint.y = pointSpineBase.y;
sliceData.diameter = (sliceData.endKinectPoint - sliceData.startKinectPoint).magnitude;
sliceData.isSliceValid = true;
}
return sliceData;
}
// creates body slice data for the given body slice
private BodySliceData GetBodySliceParams(BodySlice sliceType, Vector2 middlePoint, bool bSliceOnX, bool bSliceOnY, int maxDepthLength)
{
BodySliceData sliceData = new BodySliceData();
sliceData.sliceType = sliceType;
sliceData.isSliceValid = false;
sliceData.depthPointsLength = 0;
if (!kinectManager || middlePoint == Vector2.zero)
return sliceData;
if (!bSliceOnX && !bSliceOnY)
return sliceData;
middlePoint.x = Mathf.FloorToInt(middlePoint.x + 0.5f);
middlePoint.y = Mathf.FloorToInt(middlePoint.y + 0.5f);
int depthWidth = sensorData.depthImageWidth;
int depthHeight = sensorData.depthImageHeight;
int indexMid = (int)middlePoint.y * depthWidth + (int)middlePoint.x;
byte userIndex = sensorData.bodyIndexImage[indexMid];
if (userIndex != userBodyIndex)
return sliceData;
sliceData.startDepthPoint = middlePoint;
sliceData.endDepthPoint = middlePoint;
int indexDiff1 = 0;
int indexDiff2 = 0;
if (bSliceOnX)
{
// min-max
int minIndex = (int)middlePoint.y * depthWidth;
int maxIndex = (int)(middlePoint.y + 1) * depthWidth;
// horizontal left
int stepIndex = -1;
indexDiff1 = TrackSliceInDirection(indexMid, stepIndex, minIndex, maxIndex, userIndex);
// horizontal right
stepIndex = 1;
indexDiff2 = TrackSliceInDirection(indexMid, stepIndex, minIndex, maxIndex, userIndex);
}
else if (bSliceOnY)
{
// min-max
int minIndex = 0;
int maxIndex = depthHeight * depthWidth;
// vertical up
int stepIndex = -depthWidth;
indexDiff1 = TrackSliceInDirection(indexMid, stepIndex, minIndex, maxIndex, userIndex);
// vertical down
stepIndex = depthWidth;
indexDiff2 = TrackSliceInDirection(indexMid, stepIndex, minIndex, maxIndex, userIndex);
}
// calculate depth length
sliceData.depthPointsLength = indexDiff1 + indexDiff2 + 1;
// check for max length (used by upper legs)
if (maxDepthLength > 0 && sliceData.depthPointsLength > maxDepthLength)
{
if (indexDiff1 > indexDiff2)
indexDiff1 = indexDiff2;
else
indexDiff2 = indexDiff1;
sliceData.depthPointsLength = indexDiff1 + indexDiff2 + 1;
}
// set start and end depth points
if (bSliceOnX)
{
sliceData.startDepthPoint.x -= indexDiff1;
sliceData.endDepthPoint.x += indexDiff2;
}
else if (bSliceOnY)
{
sliceData.startDepthPoint.y -= indexDiff1;
sliceData.endDepthPoint.y += indexDiff2;
}
// start point
int index1 = (int)sliceData.startDepthPoint.y * depthWidth + (int)sliceData.startDepthPoint.x;
ushort depth1 = sensorData.depthImage[index1];
sliceData.startKinectPoint = kinectManager.MapDepthPointToSpaceCoords(sensorIndex, sliceData.startDepthPoint, depth1, true);
// end point
int index2 = (int)sliceData.endDepthPoint.y * depthWidth + (int)sliceData.endDepthPoint.x;
ushort depth2 = sensorData.depthImage[index2];
sliceData.endKinectPoint = kinectManager.MapDepthPointToSpaceCoords(sensorIndex, sliceData.endDepthPoint, depth2, true);
sliceData.startColorPoint = kinectManager.MapDepthPointToColorCoords(sensorIndex, sliceData.startDepthPoint, depth1);
sliceData.endColorPoint = kinectManager.MapDepthPointToColorCoords(sensorIndex, sliceData.endDepthPoint, depth2);
if (sliceData.startColorPoint.x < 0)
sliceData.startColorPoint.x = 0;
if (sliceData.endColorPoint.x >= sensorData.colorImageWidth)
sliceData.endColorPoint.x = sensorData.colorImageWidth - 1;
sliceData.colorPointsLength = (int)sliceData.endColorPoint.x - (int)sliceData.startColorPoint.x + 1;
// diameter
sliceData.diameter = (sliceData.endKinectPoint - sliceData.startKinectPoint).magnitude;
sliceData.isSliceValid = true;
return sliceData;
}
// determines the number of points in the given direction
private int TrackSliceInDirection(int index, int stepIndex, int minIndex, int maxIndex, byte userIndex)
{
int indexDiff = 0;
int errCount = 0;
index += stepIndex;
while (index >= minIndex && index < maxIndex)
{
if (sensorData.bodyIndexImage[index] != userIndex)
{
errCount++;
if (errCount > 0) // allow 0 error(s)
break;
}
else
{
errCount = 0;
}
index += stepIndex;
indexDiff++;
}
return indexDiff;
}
}
}
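A small, hypothetical consumer of this component (not part of this commit) that polls the estimated user height via the public getters defined above; the class name and one-second logging interval are illustrative only:

using UnityEngine;
using com.rfilkov.components;
// Hypothetical consumer (not part of this commit): logs the estimated user height once per second.
public class UserHeightLogger : MonoBehaviour
{
public BodySlicer bodySlicer;   // assign the BodySlicer component in the inspector
private float nextLogTime = 0f;
void Update()
{
if (bodySlicer == null || Time.time < nextLogTime)
return;
nextLogTime = Time.time + 1f;
// getUserHeight() returns 0 while there is no valid HEIGHT slice yet
float height = bodySlicer.getUserHeight();
if (height > 0f)
{
// the diameter is measured in sensor-space units (meters for the Azure Kinect)
Debug.Log(string.Format("Estimated user height: {0:F2}", height));
}
}
}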

12
Assets/Azure/KinectScripts/BodySlicer.cs.meta

@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: ff0269bc02f81454bb8fbc41688637a4
timeCreated: 1468761285
licenseType: Store
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

328
Assets/Azure/KinectScripts/CubemanController.cs

@@ -0,0 +1,328 @@
using UnityEngine;
using System;
using System.Collections;
using com.rfilkov.kinect;
using static com.rfilkov.kinect.KinectInterop;
namespace com.rfilkov.components
{
/// <summary>
/// Cubeman controller transfers the captured user motion to a cubeman model.
/// </summary>
public class CubemanController : MonoBehaviour
{
[Tooltip("Index of the player, tracked by this component. 0 means the 1st player, 1 - the 2nd one, 2 - the 3rd one, etc.")]
public int playerIndex = 0;
[Tooltip("Whether the cubeman is allowed to move vertically or not.")]
public bool verticalMovement = true;
[Tooltip("Whether the cubeman is facing the player or not.")]
public bool mirroredMovement = false;
[Tooltip("Scene object that will be used to represent the sensor's position and rotation in the scene.")]
public Transform sensorTransform;
[Tooltip("Rate at which the cubeman will move through the scene.")]
public float moveRate = 1f;
public GameObject Pelvis;
public GameObject SpineNaval;
public GameObject SpineChest;
public GameObject Neck;
public GameObject Head;
public GameObject ClavicleLeft;
public GameObject ShoulderLeft;
public GameObject ElbowLeft;
public GameObject WristLeft;
public GameObject HandLeft;
public GameObject ClavicleRight;
public GameObject ShoulderRight;
public GameObject ElbowRight;
public GameObject WristRight;
public GameObject HandRight;
public GameObject HipLeft;
public GameObject KneeLeft;
public GameObject AnkleLeft;
public GameObject FootLeft;
public GameObject HipRight;
public GameObject KneeRight;
public GameObject AnkleRight;
public GameObject FootRight;
public GameObject Nose;
public GameObject EyeLeft;
public GameObject EarLeft;
public GameObject EyeRight;
public GameObject EarRight;
public GameObject HandtipLeft;
public GameObject ThumbLeft;
public GameObject HandtipRight;
public GameObject ThumbRight;
[Tooltip("Line renderer to draw the skeleton lines.")]
public LineRenderer skeletonLine;
//public LineRenderer debugLine;
[Tooltip("UI Text to display debug information.")]
public UnityEngine.UI.Text debugInfo;
private GameObject[] bones;
private LineRenderer[] lines;
private LineRenderer lineTLeft;
private LineRenderer lineTRight;
private LineRenderer lineFLeft;
private LineRenderer lineFRight;
private Vector3 initialPosition;
private Quaternion initialRotation;
private Vector3 initialPosUser = Vector3.zero;
private Vector3 initialPosOffset = Vector3.zero;
private ulong initialPosUserID = 0;
private ulong lastBodyTimestamp = 0;
void Start()
{
// store the bone objects in an array for easier access
bones = new GameObject[]
{
Pelvis,
SpineNaval,
SpineChest,
Neck,
Head,
ClavicleLeft,
ShoulderLeft,
ElbowLeft,
WristLeft,
HandLeft,
ClavicleRight,
ShoulderRight,
ElbowRight,
WristRight,
HandRight,
HipLeft,
KneeLeft,
AnkleLeft,
FootLeft,
HipRight,
KneeRight,
AnkleRight,
FootRight,
Nose,
EyeLeft,
EarLeft,
EyeRight,
EarRight,
HandtipLeft,
ThumbLeft,
HandtipRight,
ThumbRight
};
// array holding the skeleton lines
lines = new LineRenderer[bones.Length];
initialPosition = transform.position;
initialRotation = transform.rotation;
}
void Update()
{
KinectManager kinectManager = KinectManager.Instance;
// get the user tracked by this player index
ulong userID = kinectManager ? kinectManager.GetUserIdByIndex(playerIndex) : 0;
if (userID == 0)
{
initialPosUserID = 0;
initialPosOffset = Vector3.zero;
initialPosUser = Vector3.zero;
// reset the cubeman position and rotation
if (transform.position != initialPosition)
{
transform.position = initialPosition;
}
if (transform.rotation != initialRotation)
{
transform.rotation = initialRotation;
}
for (int i = 0; i < bones.Length; i++)
{
bones[i].gameObject.SetActive(true);
bones[i].transform.localPosition = Vector3.zero;
bones[i].transform.localRotation = Quaternion.identity;
if (lines[i] != null)
{
lines[i].gameObject.SetActive(false);
}
}
return;
}
// set the position in space
Vector3 posPointMan = !sensorTransform ? kinectManager.GetUserPosition(userID) : kinectManager.GetUserKinectPosition(userID, true);
if (sensorTransform)
{
posPointMan = sensorTransform.TransformPoint(posPointMan);
}
Vector3 posPointManMP = new Vector3(posPointMan.x, posPointMan.y, !mirroredMovement ? -posPointMan.z : posPointMan.z);
// store the initial position
if (initialPosUserID != userID)
{
initialPosUserID = userID;
//initialPosOffset = transform.position - (verticalMovement ? posPointMan * moveRate : new Vector3(posPointMan.x, 0, posPointMan.z) * moveRate);
initialPosOffset = posPointMan;
initialPosUser = initialPosition;
if (verticalMovement)
initialPosUser.y = 0f; // posPointManMP.y provides the vertical position in this case
}
Vector3 relPosUser = (posPointMan - initialPosOffset);
relPosUser.z = !mirroredMovement ? -relPosUser.z : relPosUser.z;
transform.position = verticalMovement ? initialPosUser + posPointManMP * moveRate :
initialPosUser + new Vector3(posPointManMP.x, 0, posPointManMP.z) * moveRate;
//Debug.Log (userID + ", pos: " + posPointMan + ", ipos: " + initialPosUser + ", rpos: " + posPointManMP + ", tpos: " + transform.position);
//Vector3 rotPelvis = kinectManager.GetJointOrientation(userID, (int)KinectInterop.JointType.Pelvis, true).eulerAngles;
//if(rotPelvis.y > 90 && rotPelvis.y < 270)
// Debug.Log($"Time: {DateTime.Now.ToString("HH.mm.ss.fff")} - pelRot: {rotPelvis}");
ulong bodyTimestamp = kinectManager.GetBodyFrameTime(0);
if (lastBodyTimestamp != bodyTimestamp)
{
BodyData bodyData = kinectManager.GetUserBodyData(userID);
JointData pelvis = bodyData.joint[(int)JointType.Pelvis];
JointData neck = bodyData.joint[(int)JointType.Neck];
if (debugInfo != null)
{
debugInfo.text = $"Pelvis: {GetJointDataString(pelvis)}, Neck: {GetJointDataString(neck)}";
}
lastBodyTimestamp = bodyTimestamp;
}
// update the local positions of the bones
for (int i = 0; i < bones.Length; i++)
{
if (bones[i] != null)
{
int joint = !mirroredMovement ? i : (int)KinectInterop.GetMirrorJoint((KinectInterop.JointType)i);
if (joint < 0)
continue;
if (kinectManager.IsJointTracked(userID, joint))
{
bones[i].gameObject.SetActive(true);
Vector3 posJoint = !sensorTransform ? kinectManager.GetJointPosition(userID, joint) : kinectManager.GetJointKinectPosition(userID, joint, true);
if (sensorTransform)
{
posJoint = sensorTransform.TransformPoint(posJoint);
}
posJoint.z = !mirroredMovement ? -posJoint.z : posJoint.z;
Quaternion rotJoint = kinectManager.GetJointOrientation(userID, joint, !mirroredMovement);
rotJoint = initialRotation * rotJoint;
posJoint -= posPointManMP;
if (mirroredMovement)
{
posJoint.x = -posJoint.x;
posJoint.z = -posJoint.z;
}
bones[i].transform.localPosition = posJoint;
bones[i].transform.rotation = rotJoint;
if (lines[i] == null && skeletonLine != null)
{
lines[i] = Instantiate(skeletonLine) as LineRenderer;
lines[i].transform.parent = transform;
}
int parJoint = (int)kinectManager.GetParentJoint((KinectInterop.JointType)joint);
if (lines[i] != null)
{
lines[i].gameObject.SetActive(true);
Vector3 posJoint2 = bones[i].transform.position;
parJoint = !mirroredMovement ? parJoint : (int)KinectInterop.GetMirrorJoint((KinectInterop.JointType)parJoint);
Vector3 posParent = bones[parJoint].transform.position;
//Vector3 dirFromParent = kinectManager.GetJointDirection(userID, joint, false, false);
//dirFromParent.z = !mirroredMovement ? -dirFromParent.z : dirFromParent.z;
//Vector3 posParent = posJoint2 - dirFromParent;
//Vector3 posParent = !sensorTransform ? kinectManager.GetJointPosition(userID, parJoint) : kinectManager.GetJointKinectPosition(userID, parJoint, true);
//if (sensorTransform)
//{
// posParent = sensorTransform.TransformPoint(posParent);
//}
//posParent.z = !mirroredMovement ? -posParent.z : posParent.z;
//lines[i].SetVertexCount(2);
lines[i].SetPosition(0, posParent);
lines[i].SetPosition(1, posJoint2);
}
}
else
{
bones[i].gameObject.SetActive(false);
if (lines[i] != null)
{
lines[i].gameObject.SetActive(false);
}
}
}
}
}
// returns the joint data string
private string GetJointDataString(JointData jd)
{
return $"{jd.trackingState.ToString()[0]} {jd.position.ToString("F2")}";
}
}
}
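As a usage note, the per-joint loop in Update boils down to: query KinectManager for the joint position, flip the Z coordinate when the movement is not mirrored, and apply the result relative to the tracked root. A minimal sketch of that pattern for a single joint, using only KinectManager calls already referenced in this file; the component and field names below are hypothetical:

using UnityEngine;
using com.rfilkov.kinect;

// Hypothetical helper: moves one object to a single tracked joint, mirroring the CubemanController logic.
public class SingleJointFollower : MonoBehaviour
{
    public int playerIndex = 0;
    public KinectInterop.JointType joint = KinectInterop.JointType.HandRight;
    public bool mirroredMovement = false;

    void Update()
    {
        KinectManager kinectManager = KinectManager.Instance;
        ulong userID = kinectManager ? kinectManager.GetUserIdByIndex(playerIndex) : 0;

        if (userID == 0 || !kinectManager.IsJointTracked(userID, (int)joint))
            return;

        // joint position in world space, Z flipped when not mirrored (same convention as above)
        Vector3 posJoint = kinectManager.GetJointPosition(userID, (int)joint);
        posJoint.z = !mirroredMovement ? -posJoint.z : posJoint.z;

        transform.position = posJoint;
    }
}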

8
Assets/Azure/KinectScripts/CubemanController.cs.meta

@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 9f828727d4cfb7a4b8984b4e0310ae67
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:

202
Assets/Azure/KinectScripts/DepthIrFilterImage.cs

@ -0,0 +1,202 @@
using UnityEngine;
using System.Collections;
using com.rfilkov.kinect;
namespace com.rfilkov.components
{
/// <summary>
/// DepthIrFilterImage filters the sensor's IR image with the raw depth. The resulting image is displayed on the given RawImage.
/// </summary>
public class DepthIrFilterImage : MonoBehaviour
{
[Tooltip("Index of the used depth sensor. 0 is the 1st one, 1 - the 2nd one, etc.")]
public int sensorIndex = 0;
[Tooltip("RawImage used to display the depth filtered IR image.")]
public UnityEngine.UI.RawImage backgroundImage;
[Tooltip("Camera used to display the background image. Set it, if you'd like to allow background image to resize, to match the depth image's aspect ratio.")]
public Camera backgroundCamera;
private KinectManager kinectManager = null;
private KinectInterop.SensorData sensorData = null;
private Material depthFilterMat = null;
private RenderTexture depthFilterTex = null;
private ComputeBuffer depthImageBuffer = null;
private ulong lastDepthFrameTime = 0;
// last camera rect width & height
private float lastCamRectW = 0;
private float lastCamRectH = 0;
/// <summary>
/// Gets depth filtered IR texture.
/// </summary>
/// <returns>Depth filtered IR texture.</returns>
public Texture GetDepthFilterIrTex()
{
return depthFilterTex;
}
/// <summary>
/// Gets depth filtered IR material.
/// </summary>
/// <returns>Depth filtered IR material.</returns>
public Material GetDepthFilterIrMat()
{
return depthFilterMat;
}
/// <summary>
/// Sets the maximum IR value, used to convert the raw IR values to texture.
/// </summary>
/// <param name="maxIrValue">Max IR value.</param>
public void SetMaxIrValue(float maxIrValue)
{
if(kinectManager)
{
kinectManager.SetSensorMinMaxIrValues(sensorIndex, 0, maxIrValue);
}
}
void Start()
{
kinectManager = KinectManager.Instance;
if (kinectManager && kinectManager.IsInitialized())
{
Shader depthFilterShader = Shader.Find("Kinect/DepthIrFilterShader");
sensorData = kinectManager.GetSensorData(sensorIndex);
if (depthFilterShader != null && sensorData != null)
{
depthFilterMat = new Material(depthFilterShader);
}
}
}
void OnDestroy()
{
if (depthImageBuffer != null)
{
depthImageBuffer.Dispose();
depthImageBuffer = null;
}
if(depthFilterTex != null)
{
depthFilterTex.Release();
depthFilterTex = null;
if (backgroundImage)
{
backgroundImage.texture = null;
}
}
}
void LateUpdate()
{
if (kinectManager && kinectManager.IsInitialized() && depthFilterMat != null)
{
if (sensorData != null && sensorData.infraredImageTexture != null && sensorData.depthImage != null &&
lastDepthFrameTime != sensorData.lastDepthFrameTime)
{
lastDepthFrameTime = sensorData.lastDepthFrameTime;
int depthBufferLength = (sensorData.depthImageWidth * sensorData.depthImageHeight) >> 1;
if (depthImageBuffer == null || depthImageBuffer.count != depthBufferLength)
{
depthImageBuffer = KinectInterop.CreateComputeBuffer(depthImageBuffer, depthBufferLength, sizeof(uint));
}
if (depthFilterTex == null || depthFilterTex.width != sensorData.depthImageWidth || depthFilterTex.height != sensorData.depthImageHeight)
{
depthFilterTex = KinectInterop.CreateRenderTexture(depthFilterTex, sensorData.depthImageWidth, sensorData.depthImageHeight);
if (backgroundImage)
{
backgroundImage.texture = depthFilterTex;
backgroundImage.rectTransform.localScale = kinectManager.GetDepthImageScale(sensorIndex);
backgroundImage.color = Color.white;
}
}
float minDistance = ((DepthSensorBase)sensorData.sensorInterface).minDepthDistance;
float maxDistance = ((DepthSensorBase)sensorData.sensorInterface).maxDepthDistance;
depthFilterMat.SetInt("_TexResX", sensorData.depthImageWidth);
depthFilterMat.SetInt("_TexResY", sensorData.depthImageHeight);
depthFilterMat.SetInt("_MinDepth", (int)(minDistance * 1000f));
depthFilterMat.SetInt("_MaxDepth", (int)(maxDistance * 1000f));
KinectInterop.SetComputeBufferData(depthImageBuffer, sensorData.depthImage, depthBufferLength, sizeof(uint));
depthFilterMat.SetBuffer("_DepthMap", depthImageBuffer);
depthFilterMat.SetTexture("_IrTex", sensorData.infraredImageTexture);
Graphics.Blit(null, depthFilterTex, depthFilterMat);
}
// check for resolution change
float cameraWidth = backgroundCamera ? backgroundCamera.pixelRect.width : 0f;
float cameraHeight = backgroundCamera ? backgroundCamera.pixelRect.height : 0f;
if (backgroundImage && (lastCamRectW != cameraWidth || lastCamRectH != cameraHeight))
{
SetImageResolution(cameraWidth, cameraHeight);
}
}
}
// sets new image resolution
private void SetImageResolution(float cameraWidth, float cameraHeight)
{
lastCamRectW = cameraWidth;
lastCamRectH = cameraHeight;
//Debug.Log("aPos: " + backgroundImage.rectTransform.anchoredPosition + ", aMin: " + backgroundImage.rectTransform.anchorMin +
// ", aMax:" + backgroundImage.rectTransform.anchorMax + ", pivot: " + backgroundImage.rectTransform.pivot +
// ", size: " + backgroundImage.rectTransform.sizeDelta);
if (backgroundCamera != null)
{
// adjust image's size and position to match the stream aspect ratio
int depthImageWidth = kinectManager.GetDepthImageWidth(sensorIndex);
int depthImageHeight = kinectManager.GetDepthImageHeight(sensorIndex);
RectTransform rectImage = backgroundImage.rectTransform;
float rectWidth = (rectImage.anchorMin.x != rectImage.anchorMax.x) ? cameraWidth * (rectImage.anchorMax.x - rectImage.anchorMin.x) : rectImage.sizeDelta.x;
float rectHeight = (rectImage.anchorMin.y != rectImage.anchorMax.y) ? cameraHeight * (rectImage.anchorMax.y - rectImage.anchorMin.y) : rectImage.sizeDelta.y;
if (depthImageWidth > depthImageHeight)
rectWidth = rectHeight * depthImageWidth / depthImageHeight;
else
rectHeight = rectWidth * depthImageHeight / depthImageWidth;
Vector2 pivotOffset = (rectImage.pivot - new Vector2(0.5f, 0.5f)) * 2f;
Vector2 imageScale = (Vector2)kinectManager.GetDepthImageScale(sensorIndex);
Vector2 anchorPos = rectImage.anchoredPosition + pivotOffset * imageScale * new Vector2(rectWidth, rectHeight);
if (rectImage.anchorMin.x != rectImage.anchorMax.x)
{
rectWidth = -(cameraWidth - rectWidth);
}
if (rectImage.anchorMin.y != rectImage.anchorMax.y)
{
rectHeight = -(cameraHeight - rectHeight);
}
rectImage.sizeDelta = new Vector2(rectWidth, rectHeight);
rectImage.anchoredPosition = anchorPos;
}
}
}
}
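A note on the buffer sizing above: depthBufferLength is (width * height) >> 1 because two 16-bit depth samples are packed into each 32-bit uint before being uploaded to the shader. A small standalone sketch of that packing and the matching unpack, assuming the first sample lands in the lower 16 bits (which is what a raw little-endian copy produces); the class and method names are illustrative:

using System;

public static class DepthPackingDemo
{
    // Packs two 16-bit depth samples into one uint, low sample first.
    public static uint[] Pack(ushort[] depth)
    {
        uint[] packed = new uint[(depth.Length + 1) >> 1];

        for (int i = 0; i < depth.Length; i++)
        {
            int slot = i >> 1;
            if ((i & 1) == 0)
                packed[slot] = depth[i];                 // lower 16 bits
            else
                packed[slot] |= (uint)depth[i] << 16;    // upper 16 bits
        }

        return packed;
    }

    // Recovers the depth value at pixel index i (the same bit shifts a shader would use).
    public static ushort Unpack(uint[] packed, int i)
    {
        uint word = packed[i >> 1];
        return (ushort)((i & 1) == 0 ? word & 0xFFFF : word >> 16);
    }

    public static void Main()
    {
        ushort[] depth = { 500, 1250, 3000 };
        uint[] packed = Pack(depth);
        Console.WriteLine(Unpack(packed, 1)); // 1250
    }
}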

12
Assets/Azure/KinectScripts/DepthIrFilterImage.cs.meta

@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: f44231b5677b1c3488a66da7d3bd995c
timeCreated: 1463267447
licenseType: Store
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

8
Assets/Azure/KinectScripts/Filters.meta

@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 50bc3aae16556ef4da4f40d878f1e934
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

593
Assets/Azure/KinectScripts/Filters/BodySpinFilter.cs

@ -0,0 +1,593 @@
using UnityEngine;
using System;
using System.Collections;
using System.Collections.Generic;
using UnityEngine.UIElements;
namespace com.rfilkov.kinect
{
/// <summary>
/// Type of body-spin filter.
/// </summary>
public enum BodySpinType : int { None = 0, FixBodySpinAndLegCross = 1, FixBodySpinOnly = 2, FixLegCrossOnly = 3 }
/// <summary>
/// Detects and corrects body spins caused by incorrect body-part recognition.
/// </summary>
public class BodySpinFilter
{
// criteria to block body spinning
public enum FACING_DIRECTION : int { FORWARD_AND_BACKWARD = 0, FORWARD_ONLY = 1, BACKWARD_ONLY = 2 };
public FACING_DIRECTION isForwardFacing = FACING_DIRECTION.FORWARD_ONLY; // whether the user is always facing towards or away from the camera; otherwise maxSpinTime is used
public bool tryRecoverPose = false; // whether to try to recover the correct pose, or restore the last correct pose
public float maxSpinTime = 0.5f; // in seconds, in case it's not set as always-forward/backward-facing
private const float MIN_ANGLE_COS = 0f; // cos(a) used for spin detection
private bool _fixBodySpin = true; // whether to fix the temporary body-spin issue
private bool _fixLegCross = true; // whether to fix the leg-cross issue
//private const bool FIX_JOINT_ANGLE = false; // whether to fix the incorrect angles at knee and ankle joints
// history data
private BodyHistoryData[] history = null;
private Dictionary<ulong, Vector3> userRightDir = new Dictionary<ulong, Vector3>();
// Initializes a new instance of the class.
public BodySpinFilter()
{
Reset();
}
// Initializes a new instance of the class.
public BodySpinFilter(BodySpinType bodySpinType)
{
switch(bodySpinType)
{
case BodySpinType.None:
_fixBodySpin = false;
_fixLegCross = false;
break;
case BodySpinType.FixBodySpinAndLegCross:
_fixBodySpin = true;
_fixLegCross = true;
break;
case BodySpinType.FixBodySpinOnly:
_fixBodySpin = true;
_fixLegCross = false;
break;
case BodySpinType.FixLegCrossOnly:
_fixBodySpin = false;
_fixLegCross = true;
break;
}
Reset();
}
// Resets the filter to default values.
public void Reset(ulong userId = 0)
{
KinectManager kinectManager = KinectManager.Instance;
int maxBodyCount = 10; // kinectManager.GetMaxBodyCount();
int jointCount = kinectManager.GetJointCount();
if(userId == 0)
{
// create the history data
history = new BodyHistoryData[maxBodyCount];
for (int i = 0; i < maxBodyCount; i++)
{
history[i] = new BodyHistoryData(jointCount);
}
userRightDir.Clear();
}
else
{
// clean the history of the given user only
if (userRightDir.ContainsKey(userId))
{
userRightDir.Remove(userId);
}
for (int i = 0; i < maxBodyCount; i++)
{
if (history[i].userId == userId)
{
history[i].userId = 0;
history[i].lastTimestamp = 0;
history[i].lastUpdateTime = 0;
history[i].frameCount = 0;
//Debug.Log("Removed history for userId " + userId + ", index: " + i);
}
}
}
}
// Update the filter with a new frame of data and smooth.
public void UpdateFilter(ref KinectInterop.BodyData bodyData, long bodyTimestamp, Matrix4x4 s2wMatrix, Vector3 spaceScale)
{
if (bodyData.bIsTracked)
{
// get body index
int bodyIndex = GetUserIndex(bodyData.liTrackingID);
if (bodyIndex < 0)
{
bodyIndex = GetFreeIndex();
if (bodyIndex >= 0)
history[bodyIndex].userId = bodyData.liTrackingID;
Vector3 lShPos = bodyData.joint[(int)KinectInterop.JointType.ShoulderLeft].position;
Vector3 rShPos = bodyData.joint[(int)KinectInterop.JointType.ShoulderRight].position;
Vector3 curDirLR = (rShPos - lShPos).normalized;
var dotRight = Vector3.Dot(curDirLR, Vector3.right);
userRightDir[bodyData.liTrackingID] = dotRight >= 0f ? Vector3.right : Vector3.left;
//Debug.Log($"Created history for userId: {history[bodyIndex].userId}, index: {bodyIndex}, time: {DateTime.UtcNow}, dotR: {dotRight:F3}, rDir: {userRightDir[bodyData.liTrackingID]}");
}
// filter
if (bodyIndex >= 0)
{
FilterBodyJoints(ref bodyData, bodyIndex, bodyTimestamp, s2wMatrix, spaceScale);
}
}
// freeing of unused history entries has been moved to the sensor interface
//CleanUpUserHistory();
}
// Update the filter for all body joints
private void FilterBodyJoints(ref KinectInterop.BodyData bodyData, int bodyIndex, long bodyTimestamp, Matrix4x4 s2wMatrix, Vector3 spaceScale)
{
//long nowTicks = DateTime.UtcNow.Ticks;
long deltaTicks = bodyTimestamp - history[bodyIndex].lastTimestamp;
float deltaTime = deltaTicks * 0.0000001f;
// w2s matrix
Matrix4x4 w2sMatrix = s2wMatrix.inverse;
if (_fixBodySpin)
{
bool isBodyOK = CheckJointPair(ref bodyData, bodyIndex, (int)KinectInterop.JointType.ShoulderLeft, (int)KinectInterop.JointType.ShoulderRight, deltaTime, bodyTimestamp);
if(isBodyOK)
{
SaveAllJoints(ref bodyData, bodyIndex, bodyTimestamp);
}
else
{
if ((!tryRecoverPose) && history[bodyIndex].frameCount > 0)
{
RestoreAllJoints(ref bodyData, bodyIndex);
}
else
{
SwapAllJoints(ref bodyData, bodyIndex, bodyTimestamp);
SwapAllJointsZpos(ref bodyData, s2wMatrix, spaceScale, bodyTimestamp);
}
}
}
Vector3 hipPosL = bodyData.joint[(int)KinectInterop.JointType.HipLeft].position;
Vector3 hipPosR = bodyData.joint[(int)KinectInterop.JointType.HipRight].position;
Vector3 hipsDir = hipPosR - hipPosL;
// check and fix leg-crossing issues
if (_fixLegCross)
{
// check for and fix invalid l-r directions between legs
CheckAndFixLegPair(ref bodyData, bodyIndex, (int)KinectInterop.JointType.HipLeft, (int)KinectInterop.JointType.HipRight, bodyTimestamp);
CheckAndFixLegPair(ref bodyData, bodyIndex, (int)KinectInterop.JointType.KneeLeft, (int)KinectInterop.JointType.KneeRight, bodyTimestamp);
CheckAndFixLegPair(ref bodyData, bodyIndex, (int)KinectInterop.JointType.AnkleLeft, (int)KinectInterop.JointType.AnkleRight, bodyTimestamp);
CheckAndFixLegPair(ref bodyData, bodyIndex, (int)KinectInterop.JointType.FootLeft, (int)KinectInterop.JointType.FootRight, bodyTimestamp);
CheckAndFixLegInwardDir(ref bodyData, (int)KinectInterop.JointType.KneeLeft, hipsDir, w2sMatrix, spaceScale, bodyTimestamp);
CheckAndFixLegInwardDir(ref bodyData, (int)KinectInterop.JointType.KneeRight, -hipsDir, w2sMatrix, spaceScale, bodyTimestamp);
//CheckAndFixLegInwardDir(ref bodyData, (int)KinectInterop.JointType.AnkleLeft, hipsDir, w2sMatrix, spaceScale, bodyTimestamp);
//CheckAndFixLegInwardDir(ref bodyData, (int)KinectInterop.JointType.AnkleRight, -hipsDir, w2sMatrix, spaceScale, bodyTimestamp);
}
//// check and fix knee & ankle angles
//if (FIX_JOINT_ANGLE)
//{
// CheckAndFixLegJointAngle(ref bodyData, (int)KinectInterop.JointType.KneeLeft, -hipsDir, 35f, 180f, w2sMatrix, spaceScale, bodyTimestamp);
// CheckAndFixLegJointAngle(ref bodyData, (int)KinectInterop.JointType.KneeRight, -hipsDir, 35f, 180f, w2sMatrix, spaceScale, bodyTimestamp);
// CheckAndFixLegJointAngle(ref bodyData, (int)KinectInterop.JointType.AnkleLeft, hipsDir, 45f, 135f, w2sMatrix, spaceScale, bodyTimestamp);
// CheckAndFixLegJointAngle(ref bodyData, (int)KinectInterop.JointType.AnkleRight, hipsDir, 45f, 135f, w2sMatrix, spaceScale, bodyTimestamp);
//}
// update body root positions
bodyData.position = bodyData.joint[0].position;
bodyData.kinectPos = bodyData.joint[0].kinectPos;
////if (!isBodyOK)
//{
// string sSwap = (!isBodyOK ? "1" : "0") + (isHipsSwap ? "1" : "0") +
// (isKneesSwap ? "1" : "0") + (isAnklesSwap ? "1" : "0") + (isFeetSwap ? "1" : "0");
// Vector3 shL = bodyData.joint[(int)KinectInterop.JointType.ShoulderLeft].position;
// Vector3 shR = bodyData.joint[(int)KinectInterop.JointType.ShoulderRight].position;
// Vector3 hipL = bodyData.joint[(int)KinectInterop.JointType.HipLeft].position;
// Vector3 hipR = bodyData.joint[(int)KinectInterop.JointType.HipRight].position;
// Vector3 kneeL = bodyData.joint[(int)KinectInterop.JointType.KneeLeft].position;
// Vector3 kneeR = bodyData.joint[(int)KinectInterop.JointType.KneeRight].position;
// Vector3 ankleL = bodyData.joint[(int)KinectInterop.JointType.AnkleLeft].position;
// Vector3 ankleR = bodyData.joint[(int)KinectInterop.JointType.AnkleRight].position;
// Vector3 footL = bodyData.joint[(int)KinectInterop.JointType.FootLeft].position;
// Vector3 footR = bodyData.joint[(int)KinectInterop.JointType.FootRight].position;
// Vector3 neck = bodyData.joint[(int)KinectInterop.JointType.Neck].position;
// Vector3 head = bodyData.joint[(int)KinectInterop.JointType.Head].position;
// Vector3 nose = bodyData.joint[(int)KinectInterop.JointType.Nose].position;
// Debug.Log($" ts: {bodyTimestamp}, dt: {deltaTime:F6}, swap: {sSwap}, shL: {shL}, shR: {shR}, hipL: {hipL}, hipR: {hipR}, kneeL: {kneeL}, kneeR: {kneeR}, ankleL: {ankleL}, ankleR: {ankleR}, footL: {footL}, footR: {footR}, neck: {neck}, head: {head}, nose: {nose}\n");
//}
}
// check the given joint pair for spinning rotation
private bool CheckJointPair(ref KinectInterop.BodyData bodyData, int bodyIndex, int jointL, int jointR, float deltaTime, long bodyTimestamp)
{
bool isPairOK = true;
Vector3 curPosL = bodyData.joint[jointL].position;
Vector3 curPosR = bodyData.joint[jointR].position;
Vector3 curDirLR = curPosR - curPosL;
//curDirLR.z = -curDirLR.z;
// update the saved right-dir, if needed
if (isForwardFacing == FACING_DIRECTION.FORWARD_AND_BACKWARD && deltaTime > maxSpinTime)
{
var dotRight = Vector3.Dot(curDirLR, Vector3.right);
userRightDir[bodyData.liTrackingID] = dotRight >= 0f ? Vector3.right : Vector3.left;
//Debug.Log($"Updated r-dir for userId: {bodyData.liTrackingID}, dotR: {dotRight:F3}, rDir: {userRightDir[bodyData.liTrackingID]}");
}
// right direction
Vector3 prevDirLR = Vector3.right;
switch (isForwardFacing)
{
case FACING_DIRECTION.FORWARD_AND_BACKWARD:
if(userRightDir.ContainsKey(bodyData.liTrackingID))
prevDirLR = userRightDir[bodyData.liTrackingID];
break;
case FACING_DIRECTION.FORWARD_ONLY:
prevDirLR = Vector3.right;
break;
case FACING_DIRECTION.BACKWARD_ONLY:
prevDirLR = Vector3.left;
break;
default:
Debug.LogWarning($"Unknown value for IS_FORWARD_FACING: {isForwardFacing}");
break;
}
// check for different directions
float dotPrevCur = Vector3.Dot(prevDirLR, curDirLR.normalized);
if (curDirLR != Vector3.zero && prevDirLR != Vector3.zero && dotPrevCur < MIN_ANGLE_COS &&
(isForwardFacing != FACING_DIRECTION.FORWARD_AND_BACKWARD || deltaTime <= maxSpinTime))
{
isPairOK = false;
}
//if(!isPairOK)
//{
// string curTime = DateTime.Now.ToString("HH:mm:ss.fff");
// Debug.Log($"check LR for uID: {bodyData.liTrackingID} - {isPairOK}, dot: {dotPrevCur:F3}, dt: {deltaTime:F3}, time: {curTime}, ts: {bodyTimestamp}, cpL: {curPosL}, cpR: {curPosR}, cDir: {curDirLR:F2}, pDir: {prevDirLR:F2}\n"); // System.IO.File.AppendAllText(logFilename,
//}
return isPairOK;
}
// saves all joints to history
private void SaveAllJoints(ref KinectInterop.BodyData bodyData, int bodyIndex, long bodyTimestamp)
{
int jointCount = bodyData.joint.Length;
for(int j = 0; j < jointCount; j++)
{
history[bodyIndex].jointHistory[j].lastPosition = bodyData.joint[j].position;
history[bodyIndex].jointHistory[j].lastKinectPos = bodyData.joint[j].kinectPos;
history[bodyIndex].jointHistory[j].lastTrackingState = bodyData.joint[j].trackingState;
}
history[bodyIndex].lastTimestamp = (long)bodyData.bodyTimestamp;
history[bodyIndex].lastUpdateTime = DateTime.UtcNow.Ticks;
history[bodyIndex].frameCount++;
//string curTime = DateTime.Now.ToString("HH:mm:ss.fff");
//Debug.Log($" saved joints - uID: {bodyData.liTrackingID} time: {curTime}, ts: {bodyTimestamp}\n"); // System.IO.File.AppendAllText(logFilename,
}
// restores all joints from history
private void RestoreAllJoints(ref KinectInterop.BodyData bodyData, int bodyIndex)
{
int jointCount = bodyData.joint.Length;
for (int j = 0; j < jointCount; j++)
{
bodyData.joint[j].position = history[bodyIndex].jointHistory[j].lastPosition;
bodyData.joint[j].kinectPos = history[bodyIndex].jointHistory[j].lastKinectPos;
bodyData.joint[j].trackingState = history[bodyIndex].jointHistory[j].lastTrackingState;
}
// restore body timestamp
bodyData.bodyTimestamp = (ulong)history[bodyIndex].lastTimestamp;
// prevent history clean ups
history[bodyIndex].lastUpdateTime = DateTime.UtcNow.Ticks;
//string curTime = DateTime.Now.ToString("HH:mm:ss.fff");
//Debug.Log($" restored joints - uID: {bodyData.liTrackingID}, ts: {history[bodyIndex].lastTimestamp}, time: {curTime}\n"); // System.IO.File.AppendAllText(logFilename,
}
// swaps all left & right joints
private void SwapAllJoints(ref KinectInterop.BodyData bodyData, int bodyIndex, long bodyTimestamp)
{
SwapJointsData(ref bodyData, (int)KinectInterop.JointType.ClavicleLeft, (int)KinectInterop.JointType.ClavicleRight, bodyIndex);
SwapJointsData(ref bodyData, (int)KinectInterop.JointType.ShoulderLeft, (int)KinectInterop.JointType.ShoulderRight, bodyIndex);
SwapJointsData(ref bodyData, (int)KinectInterop.JointType.ElbowLeft, (int)KinectInterop.JointType.ElbowRight, bodyIndex);
SwapJointsData(ref bodyData, (int)KinectInterop.JointType.WristLeft, (int)KinectInterop.JointType.WristRight, bodyIndex);
SwapJointsData(ref bodyData, (int)KinectInterop.JointType.HandLeft, (int)KinectInterop.JointType.HandRight, bodyIndex);
SwapJointsData(ref bodyData, (int)KinectInterop.JointType.HandtipLeft, (int)KinectInterop.JointType.HandtipRight, bodyIndex);
SwapJointsData(ref bodyData, (int)KinectInterop.JointType.ThumbLeft, (int)KinectInterop.JointType.ThumbRight, bodyIndex);
SwapJointsData(ref bodyData, (int)KinectInterop.JointType.HipLeft, (int)KinectInterop.JointType.HipRight, bodyIndex);
SwapJointsData(ref bodyData, (int)KinectInterop.JointType.KneeLeft, (int)KinectInterop.JointType.KneeRight, bodyIndex);
SwapJointsData(ref bodyData, (int)KinectInterop.JointType.AnkleLeft, (int)KinectInterop.JointType.AnkleRight, bodyIndex);
SwapJointsData(ref bodyData, (int)KinectInterop.JointType.FootLeft, (int)KinectInterop.JointType.FootRight, bodyIndex);
SwapJointsData(ref bodyData, (int)KinectInterop.JointType.EyeLeft, (int)KinectInterop.JointType.EyeRight, bodyIndex);
SwapJointsData(ref bodyData, (int)KinectInterop.JointType.EarLeft, (int)KinectInterop.JointType.EarRight, bodyIndex);
//string curTime = DateTime.Now.ToString("HH:mm:ss.fff");
//Debug.Log($" swapped joints - uID: {bodyData.liTrackingID}, ts: {bodyData.bodyTimestamp}, time: {curTime}\n"); // System.IO.File.AppendAllText(logFilename,
}
// mirrors the z-positions of all joints around the pelvis, to compensate for a detected body spin
private void SwapAllJointsZpos(ref KinectInterop.BodyData bodyData, Matrix4x4 s2wMatrix, Vector3 spaceScale, long bodyTimestamp)
{
float pelPosZ = bodyData.joint[(int)KinectInterop.JointType.Pelvis].kinectPos.z;
int jointCount = bodyData.joint.Length;
for (int j = 1; j < jointCount; j++)
{
int joint = j;
Vector3 kinectPos = bodyData.joint[joint].kinectPos;
float jointDiffZ = kinectPos.z - pelPosZ;
kinectPos.z -= 2 * jointDiffZ;
bodyData.joint[joint].kinectPos = kinectPos;
bodyData.joint[joint].position = s2wMatrix.MultiplyPoint3x4(new Vector3(kinectPos.x * spaceScale.x, kinectPos.y * spaceScale.y, kinectPos.z));
}
//string curTime = DateTime.Now.ToString("HH:mm:ss.fff");
//Debug.Log($" swapZpos joints - uID: {bodyData.liTrackingID}, ts: {bodyTimestamp}, time: {curTime}\n"); // System.IO.File.AppendAllText(logFilename,
}
// checks the given leg pair for incorrect direction, and fixes it if needed
private void CheckAndFixLegPair(ref KinectInterop.BodyData bodyData, int bodyIndex, int jointL, int jointR, long bodyTimestamp)
{
bool isPairOK = true;
Vector3 legPosL = bodyData.joint[jointL].position;
Vector3 legPosR = bodyData.joint[jointR].position;
Vector3 legDirLR = legPosR - legPosL;
legDirLR.z = -legDirLR.z;
Vector3 shPosL = bodyData.joint[(int)KinectInterop.JointType.ShoulderLeft].position;
Vector3 shPosR = bodyData.joint[(int)KinectInterop.JointType.ShoulderRight].position;
Vector3 shDirLR = shPosR - shPosL;
shDirLR.z = -shDirLR.z;
// check for different directions
float dotShLeg = Vector3.Dot(shDirLR.normalized, legDirLR.normalized);
if (legDirLR != Vector3.zero && shDirLR != Vector3.zero && dotShLeg < 0f)
{
isPairOK = false;
}
//if (jointL == (int)KinectInterop.JointType.KneeLeft)
//{
// string curTime = DateTime.Now.ToString("HH:mm:ss.fff");
// Debug.Log($"time: {curTime}, dot: {dotPrevCur:F3}, lpL: {legPosL}, lpR: {legPosR}, lDir: {legDirLR}, pDir: {hipDirLR}\n"); // System.IO.File.AppendAllText(logFilename,
//}
if (!isPairOK)
{
// fix the issue
SwapJointsData(ref bodyData, jointL, jointR, bodyIndex);
//Debug.Log($" swapping {(KinectInterop.JointType)jointL}-{(KinectInterop.JointType)jointR} for uID: {bodyData.liTrackingID}, ts: {bodyTimestamp}, shDir: {shDirLR}, legDir: {legDirLR}, dot: {dotShLeg:F3}\n"); // System.IO.File.AppendAllText(logFilename,
}
}
// swaps the positional data of two joints
private void SwapJointsData(ref KinectInterop.BodyData bodyData, int jointL, int jointR, int bodyIndex)
{
KinectInterop.TrackingState trackingStateL = bodyData.joint[jointL].trackingState;
Vector3 kinectPosL = bodyData.joint[jointL].kinectPos;
Vector3 positionL = bodyData.joint[jointL].position;
KinectInterop.TrackingState trackingStateR = bodyData.joint[jointR].trackingState;
Vector3 kinectPosR = bodyData.joint[jointR].kinectPos;
Vector3 positionR = bodyData.joint[jointR].position;
bodyData.joint[jointL].trackingState = trackingStateR;
bodyData.joint[jointL].kinectPos = kinectPosR;
bodyData.joint[jointL].position = positionR;
bodyData.joint[jointR].trackingState = trackingStateL;
bodyData.joint[jointR].kinectPos = kinectPosL;
bodyData.joint[jointR].position = positionL;
}
//// checks the given leg joint for incorrect angle, and fixes it if needed
//private void CheckAndFixLegJointAngle(ref KinectInterop.BodyData bodyData, int midJoint, Vector3 hipsDir, float minAngle, float maxAngle,
// Matrix4x4 w2sMatrix, Vector3 spaceScale, long bodyTimestamp)
//{
// int parJoint = (int)KinectInterop.GetParentJoint((KinectInterop.JointType)midJoint);
// int nextJoint = (int)KinectInterop.GetNextJoint((KinectInterop.JointType)midJoint);
// if(bodyData.joint[midJoint].trackingState == KinectInterop.TrackingState.NotTracked ||
// bodyData.joint[parJoint].trackingState == KinectInterop.TrackingState.NotTracked ||
// bodyData.joint[nextJoint].trackingState == KinectInterop.TrackingState.NotTracked)
// {
// return;
// }
// Vector3 midJointPos = bodyData.joint[midJoint].position;
// Vector3 parJointPos = bodyData.joint[parJoint].position;
// Vector3 nextJointPos = bodyData.joint[nextJoint].position;
// Vector3 parJointDir = parJointPos - midJointPos;
// Vector3 nextJointDir = nextJointPos - midJointPos;
// // check the angle
// float dirAngle = Vector3.SignedAngle(parJointDir.normalized, nextJointDir.normalized, hipsDir.normalized);
// //Debug.Log($" {(KinectInterop.JointType)midJoint} for uID {bodyData.liTrackingID} - dirs-angle: {dirAngle:F1}, parDir: {parJointDir}, nextDir: {nextJointDir}, hipsDir: {hipsDir}, min: {minAngle}, max: {maxAngle}");
// if (parJointDir != Vector3.zero && nextJointDir != Vector3.zero && hipsDir != Vector3.zero &&
// (dirAngle < minAngle || dirAngle > maxAngle))
// {
// Vector3 crossDir = Vector3.Cross(parJointDir.normalized, nextJointDir.normalized);
// float turnAngle = Mathf.Abs(Mathf.DeltaAngle(dirAngle, minAngle)) < Mathf.Abs(Mathf.DeltaAngle(dirAngle, maxAngle)) ? minAngle : maxAngle;
// Quaternion turnRotation = Quaternion.AngleAxis(turnAngle, crossDir.normalized);
// Vector3 newJointDir = turnRotation * parJointDir;
// newJointDir *= nextJointDir.magnitude / parJointDir.magnitude; // scale
// Vector3 newJointPos = midJointPos + newJointDir;
// bodyData.joint[nextJoint].position = newJointPos;
// Vector3 newKinectPos = w2sMatrix.MultiplyPoint3x4(newJointPos);
// bodyData.joint[nextJoint].kinectPos = new Vector3(newKinectPos.x * spaceScale.x, newKinectPos.y * spaceScale.y, newKinectPos.z);
// Debug.Log($" fix angle @ {(KinectInterop.JointType)midJoint} for uID {bodyData.liTrackingID} - old: {dirAngle:F1} new: {turnAngle:F1}, ts: {bodyTimestamp}, newDir: {newJointDir}, newPos: {newJointPos}\n"); // System.IO.File.AppendAllText(logFilename,
// }
//}
// checks the given leg joint for incorrect (inward) direction, and fixes it if needed
private void CheckAndFixLegInwardDir(ref KinectInterop.BodyData bodyData, int midJoint, Vector3 hipsDir, Matrix4x4 w2sMatrix, Vector3 spaceScale, long bodyTimestamp)
{
int parJoint = (int)KinectInterop.GetParentJoint((KinectInterop.JointType)midJoint);
if (bodyData.joint[midJoint].trackingState == KinectInterop.TrackingState.NotTracked ||
bodyData.joint[parJoint].trackingState == KinectInterop.TrackingState.NotTracked)
{
return;
}
Vector3 midJointPos = bodyData.joint[midJoint].position;
Vector3 parJointPos = bodyData.joint[parJoint].position;
Vector3 parJointDir = midJointPos - parJointPos;
Vector3 hipsBackDir = Vector3.Cross(hipsDir.normalized, parJointDir.normalized);
float dotJointDir = Vector3.Dot(hipsDir.normalized, parJointDir.normalized);
if(dotJointDir > 0f)
{
// fix the joint position
Vector3 newJointDir = Vector3.Cross(hipsBackDir.normalized, hipsDir.normalized);
newJointDir *= parJointDir.magnitude;
Vector3 newJointPos = parJointPos + newJointDir;
bodyData.joint[midJoint].position = newJointPos;
Vector3 newKinectPos = w2sMatrix.MultiplyPoint3x4(newJointPos);
bodyData.joint[midJoint].kinectPos = new Vector3(newKinectPos.x * spaceScale.x, newKinectPos.y * spaceScale.y, newKinectPos.z);
//Debug.Log($" fix inward @ {(KinectInterop.JointType)midJoint} for uID {bodyData.liTrackingID} - dot: {dotJointDir:F3} ts: {bodyTimestamp}, oldDir: {parJointDir}, newDir: {newJointDir}, newPos: {newJointPos}\n"); // System.IO.File.AppendAllText(logFilename,
}
}
// returns the history index for the given user, or -1 if not found
private int GetUserIndex(ulong userId)
{
for (int i = 0; i < history.Length; i++)
{
if (history[i].userId == userId)
return i;
}
return -1;
}
// returns the 1st free history index, or -1 if not found
private int GetFreeIndex()
{
for (int i = 0; i < history.Length; i++)
{
if (history[i].userId == 0)
return i;
}
return -1;
}
// frees history indices that have been unused for a long time
public void CleanUpUserHistory()
{
DateTime dtNow = DateTime.UtcNow;
long timeNow = dtNow.Ticks;
for (int i = 0; i < history.Length; i++)
{
if (history[i].userId != 0 && history[i].lastUpdateTime != 0 && (timeNow - history[i].lastUpdateTime) >= 10000000)
{
//Debug.Log("Removing history for userId " + history[i].userId + ", index: " + i + ", time: " + dtNow + ", not used since: " + (timeNow - history[i].lastUpdateTime) + " ticks");
history[i].userId = 0;
history[i].lastTimestamp = 0;
history[i].lastUpdateTime = 0;
history[i].frameCount = 0;
}
}
}
// body history data used by the filter
private struct BodyHistoryData
{
public ulong userId;
public long lastTimestamp;
public long lastUpdateTime;
public JointHistoryData[] jointHistory;
public uint frameCount;
public BodyHistoryData(int jointCount)
{
userId = 0;
lastTimestamp = 0;
lastUpdateTime = 0;
jointHistory = new JointHistoryData[jointCount];
frameCount = 0;
}
}
// joint history data used by the filter
private struct JointHistoryData
{
// last joint position
public Vector3 lastPosition;
// last sensor position
public Vector3 lastKinectPos;
// last tracking state
public KinectInterop.TrackingState lastTrackingState;
}
}
}
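The core spin test in CheckJointPair reduces to a dot product: if the current left-to-right shoulder direction points against the expected right direction (cosine below MIN_ANGLE_COS, i.e. below 0), the skeleton is treated as flipped. A minimal illustration of that criterion, stripped of the filter's history bookkeeping; the class and method names are illustrative:

using UnityEngine;

public static class SpinCheckDemo
{
    // Returns true if the shoulder line still points roughly along the expected right direction.
    public static bool IsFacingOK(Vector3 shoulderLeft, Vector3 shoulderRight, Vector3 expectedRightDir, float minAngleCos = 0f)
    {
        Vector3 curDirLR = (shoulderRight - shoulderLeft).normalized;
        return Vector3.Dot(expectedRightDir.normalized, curDirLR) >= minAngleCos;
    }

    public static void Demo()
    {
        Vector3 shL = new Vector3(-0.2f, 1.4f, 2.0f);
        Vector3 shR = new Vector3(0.2f, 1.4f, 2.0f);

        Debug.Log(IsFacingOK(shL, shR, Vector3.right));  // true  - facing the camera
        Debug.Log(IsFacingOK(shR, shL, Vector3.right));  // false - left/right swapped, i.e. a spin
    }
}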

11
Assets/Azure/KinectScripts/Filters/BodySpinFilter.cs.meta

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 017113e2f305e944e9954ff2c290c874
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

336
Assets/Azure/KinectScripts/Filters/BoneOrientationConstraints.cs

@ -0,0 +1,336 @@
using UnityEngine;
using System;
using System.Collections;
using System.Collections.Generic;
namespace com.rfilkov.kinect
{
/// <summary>
/// Filter to correct the joint orientations, constraining them to the range of plausible human motion.
/// </summary>
public class BoneOrientationConstraints
{
// constraint types
public enum CT { None = 0, LimA = 1, LimST = 2, LimH = 3 }
// list of joint constraints
private readonly List<BoneOrientationConstraint> jointConstraints = new List<BoneOrientationConstraint>();
private UnityEngine.UI.Text debugText;
private long frameNum = 0;
//private float currentTime = 0f;
// Initializes a new instance of the BoneOrientationConstraints class.
public BoneOrientationConstraints()
{
}
public void SetDebugText(UnityEngine.UI.Text debugText)
{
this.debugText = debugText;
}
// AddDefaultConstraints - Adds a set of default joint constraints for normal human poses.
// This is a reasonable set of constraints for plausible human bio-mechanics.
public void AddDefaultConstraints()
{
// SpineNaval
AddBoneOrientationConstraint((int)KinectInterop.JointType.SpineNaval, CT.LimST, Vector3.up, 5f, 0f);
// SpineChest
AddBoneOrientationConstraint((int)KinectInterop.JointType.SpineChest, CT.LimST, Vector3.up, 5f, 0f);
// Neck
AddBoneOrientationConstraint((int)KinectInterop.JointType.Neck, CT.LimST, Vector3.up, 10f, 10f);
// Head
AddBoneOrientationConstraint((int)KinectInterop.JointType.Head, CT.LimST, Vector3.up, 50f, 80f);
// ShoulderLeft, ShoulderRight
AddBoneOrientationConstraint((int)KinectInterop.JointType.ShoulderLeft, CT.LimST, Vector3.left, 180f, 180f);
AddBoneOrientationConstraint((int)KinectInterop.JointType.ShoulderRight, CT.LimST, Vector3.right, 180f, 180f);
// ElbowLeft, ElbowRight
AddBoneOrientationConstraint((int)KinectInterop.JointType.ElbowLeft, CT.LimST, Vector3.left, 180f, 180f);
AddBoneOrientationConstraint((int)KinectInterop.JointType.ElbowRight, CT.LimST, Vector3.right, 180f, 180f);
// WristLeft, WristRight
AddBoneOrientationConstraint((int)KinectInterop.JointType.WristLeft, CT.LimST, Vector3.left, 60f, 40f);
AddBoneOrientationConstraint((int)KinectInterop.JointType.WristRight, CT.LimST, Vector3.right, 60f, 40f);
// HandLeft, HandRight
AddBoneOrientationConstraint((int)KinectInterop.JointType.HandLeft, CT.LimH, Vector3.forward, -5f, -80f);
AddBoneOrientationConstraint((int)KinectInterop.JointType.HandRight, CT.LimH, Vector3.forward, 5f, 80f);
// HipLeft, HipRight
AddBoneOrientationConstraint((int)KinectInterop.JointType.HipLeft, CT.LimST, Vector3.down, 120f, 0f);
AddBoneOrientationConstraint((int)KinectInterop.JointType.HipRight, CT.LimST, Vector3.down, 120f, 0f);
// KneeLeft, KneeRight
AddBoneOrientationConstraint((int)KinectInterop.JointType.KneeLeft, CT.LimH, Vector3.right, 0f, 150f);
AddBoneOrientationConstraint((int)KinectInterop.JointType.KneeRight, CT.LimH, Vector3.right, 0f, 150f);
//// AnkleLeft, AnkleRight
////AddBoneOrientationConstraint((int)KinectInterop.JointType.AnkleLeft, CT.LimST, Vector3.forward, 30f, 0f);
////AddBoneOrientationConstraint((int)KinectInterop.JointType.AnkleRight, CT.LimST, Vector3.forward, 30f, 0f);
//AddBoneOrientationConstraint((int)KinectInterop.JointType.AnkleLeft, CT.LimA, Vector3.forward, -5f, 5f); // lat
//AddBoneOrientationConstraint((int)KinectInterop.JointType.AnkleLeft, CT.LimA, Vector3.right, -10f, 10f); // sag
//AddBoneOrientationConstraint((int)KinectInterop.JointType.AnkleLeft, CT.LimA, Vector3.up, -30f, 30f); // rot
//AddBoneOrientationConstraint((int)KinectInterop.JointType.AnkleRight, CT.LimA, Vector3.forward, -5f, 5f); // lat
//AddBoneOrientationConstraint((int)KinectInterop.JointType.AnkleRight, CT.LimA, Vector3.right, -10f, 10f); // sag
//AddBoneOrientationConstraint((int)KinectInterop.JointType.AnkleRight, CT.LimA, Vector3.up, -30f, 30f); // rot
}
// Apply the orientation constraints
public void Constrain(ref KinectInterop.BodyData bodyData)
{
KinectManager kinectManager = KinectManager.Instance;
frameNum++;
for (int i = 0; i < jointConstraints.Count; i++)
{
BoneOrientationConstraint jc = this.jointConstraints[i];
if (jc.thisJoint == (int)KinectInterop.JointType.Pelvis || bodyData.joint[jc.thisJoint].normalRotation == Quaternion.identity)
continue;
if (kinectManager.ignoreZCoordinates && (jc.thisJoint == (int)KinectInterop.JointType.KneeLeft || jc.thisJoint == (int)KinectInterop.JointType.KneeRight))
continue;
if (bodyData.joint[jc.thisJoint].trackingState == KinectInterop.TrackingState.NotTracked)
continue;
int prevJoint = (int)KinectInterop.GetParentJoint((KinectInterop.JointType)jc.thisJoint);
if (bodyData.joint[prevJoint].trackingState == KinectInterop.TrackingState.NotTracked)
continue;
Quaternion rotParentN = bodyData.joint[prevJoint].normalRotation;
//Quaternion rotDefaultN = Quaternion.identity; // Quaternion.FromToRotation(KinectInterop.JointBaseDir[prevJoint], KinectInterop.JointBaseDir[jc.thisJoint]);
//rotParentN = rotParentN * rotDefaultN;
Quaternion rotJointN = bodyData.joint[jc.thisJoint].normalRotation;
Quaternion rotLocalN = Quaternion.Inverse(rotParentN) * rotJointN;
Vector3 eulerAnglesN = rotLocalN.eulerAngles;
bool isConstrained = false;
//string sDebug = string.Empty;
for (int a = 0; a < jc.axisConstrainrs.Count; a++)
{
AxisOrientationConstraint ac = jc.axisConstrainrs[a];
Quaternion rotLimited = rotLocalN;
switch (ac.consType)
{
case CT.None:
break;
case CT.LimA:
eulerAnglesN = LimitAngles(eulerAnglesN, ac.axis, ac.angleMin, ac.angleMax);
rotLimited = Quaternion.Euler(eulerAnglesN);
break;
case CT.LimST:
rotLimited = LimitSwing(rotLocalN, ac.axis, ac.angleMin);
rotLimited = LimitTwist(rotLimited, ac.axis, ac.angleMax);
break;
case CT.LimH:
float lastAngle = bodyData.joint[jc.thisJoint].lastAngle;
rotLimited = LimitHinge(rotLocalN, ac.axis, ac.angleMin, ac.angleMax, ref lastAngle);
bodyData.joint[jc.thisJoint].lastAngle = lastAngle;
break;
default:
throw new Exception("Undefined constraint type found: " + (int)ac.consType);
}
if (rotLimited != rotLocalN)
{
rotLocalN = rotLimited;
isConstrained = true;
}
}
//if (sDebug.Length > 0)
//{
// if (debugText != null && jc.thisJoint == (int)KinectInterop.JointType.ElbowLeft)
// {
// // debugText.text = sDebug;
// }
// Debug.Log(sDebug);
//}
if (isConstrained)
{
rotJointN = rotParentN * rotLocalN;
Vector3 eulerJoint = rotJointN.eulerAngles;
Vector3 eulerJointM = new Vector3(eulerJoint.x, -eulerJoint.y, -eulerJoint.z);
Quaternion rotJointM = Quaternion.Euler(eulerJointM);
// put it back into the bone orientations
bodyData.joint[jc.thisJoint].normalRotation = rotJointN;
bodyData.joint[jc.thisJoint].mirroredRotation = rotJointM;
}
}
}
// find the bone constraint structure for given joint
// returns the structure index in the list, or -1 if the bone structure is not found
private int FindBoneOrientationConstraint(int thisJoint)
{
for (int i = 0; i < jointConstraints.Count; i++)
{
if (jointConstraints[i].thisJoint == thisJoint)
return i;
}
return -1;
}
// AddBoneOrientationConstraint - Adds a joint constraint to the system.
private void AddBoneOrientationConstraint(int thisJoint, CT consType, Vector3 axis, float angleMin, float angleMax)
{
int index = FindBoneOrientationConstraint(thisJoint);
BoneOrientationConstraint jc = index >= 0 ? jointConstraints[index] : new BoneOrientationConstraint(thisJoint);
if (index < 0)
{
index = jointConstraints.Count;
jointConstraints.Add(jc);
}
AxisOrientationConstraint constraint = new AxisOrientationConstraint(consType, axis, angleMin, angleMax);
jc.axisConstrainrs.Add(constraint);
jointConstraints[index] = jc;
}
private Vector3 LimitAngles(Vector3 eulerAngles, Vector3 axis, float limitMin, float limitMax)
{
int iAxis = (axis.x != 0f) ? 0 : (axis.y != 0f) ? 1 : (axis.z != 0f) ? 2 : -1;
if (iAxis >= 0)
{
float angle = eulerAngles[iAxis];
if (angle > 180f)
{
angle = angle - 360f;
}
float newAngle = Mathf.Clamp(angle, limitMin, limitMax);
if (newAngle < 0f)
{
newAngle += 360f;
}
eulerAngles[iAxis] = newAngle;
}
return eulerAngles;
}
private Quaternion LimitSwing(Quaternion rotation, Vector3 axis, float limit)
{
if (rotation == Quaternion.identity)
return rotation;
if (limit >= 180f)
return rotation;
Vector3 swingAxis = rotation * axis;
Quaternion swingRot = Quaternion.FromToRotation(axis, swingAxis);
Quaternion limSwingRot = Quaternion.RotateTowards(Quaternion.identity, swingRot, limit);
Quaternion backRot = Quaternion.FromToRotation(swingAxis, limSwingRot * axis);
return backRot * rotation;
}
private Quaternion LimitTwist(Quaternion rotation, Vector3 axis, float limit)
{
limit = Mathf.Clamp(limit, 0f, 180f);
if (limit >= 180f)
return rotation;
Vector3 orthoAxis = new Vector3(axis.y, axis.z, axis.x);
Vector3 orthoTangent = orthoAxis;
Vector3 normal = rotation * axis;
Vector3.OrthoNormalize(ref normal, ref orthoTangent);
Vector3 rotOrthoTangent = rotation * orthoAxis;
Vector3.OrthoNormalize(ref normal, ref rotOrthoTangent);
Quaternion fixedRot = Quaternion.FromToRotation(rotOrthoTangent, orthoTangent) * rotation;
if (limit <= 0f)
return fixedRot;
return Quaternion.RotateTowards(fixedRot, rotation, limit);
}
private Quaternion LimitHinge(Quaternion rotation, Vector3 axis, float limitMin, float limitMax, ref float lastAngle)
{
if (limitMin == 0f && limitMax == 0f)
return Quaternion.AngleAxis(0, axis);
Quaternion rotOnAxis = Quaternion.FromToRotation(rotation * axis, axis) * rotation; // limit-1
Quaternion lastRotation = Quaternion.AngleAxis(lastAngle, axis);
Quaternion rotAdded = rotOnAxis * Quaternion.Inverse(lastRotation);
float rotAngle = Quaternion.Angle(Quaternion.identity, rotAdded);
Vector3 secAxis = new Vector3(axis.z, axis.x, axis.y);
Vector3 cross = Vector3.Cross(secAxis, axis);
if (Vector3.Dot(rotAdded * secAxis, cross) > 0f)
{
rotAngle = -rotAngle;
}
rotAngle = Mathf.Clamp(lastAngle + rotAngle, limitMin, limitMax);
return Quaternion.AngleAxis(rotAngle, axis);
}
private struct BoneOrientationConstraint
{
public int thisJoint;
public List<AxisOrientationConstraint> axisConstrainrs;
public BoneOrientationConstraint(int thisJoint)
{
this.thisJoint = thisJoint;
axisConstrainrs = new List<AxisOrientationConstraint>();
}
}
private struct AxisOrientationConstraint
{
public CT consType;
public Vector3 axis;
public float angleMin;
public float angleMax;
public AxisOrientationConstraint(CT consType, Vector3 axis, float angleMin, float angleMax)
{
this.consType = consType;
this.axis = axis;
// Set the min and max rotations in degrees
this.angleMin = angleMin;
this.angleMax = angleMax;
}
}
}
}
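The constraint loop above works in the parent joint's local frame: rotLocal = Inverse(rotParent) * rotJoint, the limit is applied to rotLocal, and the constrained result is pushed back with rotJoint = rotParent * rotLocal. A minimal sketch of that round trip, reusing the swing-limiting idea from LimitSwing; the class and method names are illustrative, and the early-out checks of the original are omitted:

using UnityEngine;

public static class ConstraintDemo
{
    // Clamps a child's world rotation so that its local swing around 'axis' stays within 'swingLimit' degrees.
    public static Quaternion ClampSwing(Quaternion rotParent, Quaternion rotJoint, Vector3 axis, float swingLimit)
    {
        // move into the parent's local frame
        Quaternion rotLocal = Quaternion.Inverse(rotParent) * rotJoint;

        // limit the swing of 'axis' (same approach as LimitSwing above)
        Vector3 swungAxis = rotLocal * axis;
        Quaternion swing = Quaternion.FromToRotation(axis, swungAxis);
        Quaternion limitedSwing = Quaternion.RotateTowards(Quaternion.identity, swing, swingLimit);
        Quaternion back = Quaternion.FromToRotation(swungAxis, limitedSwing * axis);
        rotLocal = back * rotLocal;

        // push the constrained local rotation back to world space
        return rotParent * rotLocal;
    }
}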

8
Assets/Azure/KinectScripts/Filters/BoneOrientationConstraints.cs.meta

@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 24a68b3ab03e9e24d8fb673f4f8d107a
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:

447
Assets/Azure/KinectScripts/Filters/JointPositionsFilter.cs

@ -0,0 +1,447 @@
using UnityEngine;
using System;
using System.Collections;
using System.Collections.Generic;
namespace com.rfilkov.kinect
{
// predefined smoothing types
public enum SmoothingType : int { None, Default, Light, Medium, Aggressive }
/// <summary>
/// Parameters used for smoothing of the body-joint positions between frames.
/// </summary>
public class SmoothParameters
{
public float smoothing;
public float correction;
public float prediction;
public float jitterRadius;
public float maxDeviationRadius;
}
/// <summary>
/// Implementation of a Holt Double Exponential Smoothing filter. The double exponential
/// filter smooths the curve and predicts future values. It also removes noise jitter and
/// bounds the maximum prediction. The parameters are commented in the Init function.
/// </summary>
public class JointPositionsFilter
{
// The history data.
//private JointHistoryData[,] history;
private BodyHistoryData[] history;
// The smoothing parameters for this filter.
private SmoothParameters smoothParameters;
private SmoothingType smoothingType = SmoothingType.Default;
// True when the filter parameters are initialized.
private bool init;
// userId to index
private Dictionary<ulong, int> dictUserIdToIndex = new Dictionary<ulong, int>();
/// Initializes a new instance of the class.
public JointPositionsFilter()
{
init = false;
}
// Initialize the filter with a default set of TransformSmoothParameters.
public void Init()
{
// Specify some defaults
Init(0.5f, 0.5f, 0.5f, 0.05f, 0.04f);
}
/// <summary>
/// Initialize the filter with a set of manually specified TransformSmoothParameters.
/// </summary>
/// <param name="smoothingValue">Smoothing = [0..1], lower values is closer to the raw data and more noisy.</param>
/// <param name="correctionValue">Correction = [0..1], higher values correct faster and feel more responsive.</param>
/// <param name="predictionValue">Prediction = [0..n], how many frames into the future we want to predict.</param>
/// <param name="jitterRadiusValue">JitterRadius = The deviation distance in m that defines jitter.</param>
/// <param name="maxDeviationRadiusValue">MaxDeviation = The maximum distance in m that filtered positions are allowed to deviate from raw data.</param>
public void Init(float smoothingValue, float correctionValue, float predictionValue, float jitterRadiusValue, float maxDeviationRadiusValue)
{
this.smoothingType = SmoothingType.Default;
smoothParameters = new SmoothParameters();
smoothParameters.smoothing = smoothingValue; // How much smoothing will occur. Will lag when too high
smoothParameters.correction = correctionValue; // How much to correct back from prediction. Can make things springy
smoothParameters.prediction = predictionValue; // Amount of prediction into the future to use. Can overshoot when too high
smoothParameters.jitterRadius = jitterRadiusValue; // Size of the radius where jitter is removed. Can do too much smoothing when too high
smoothParameters.maxDeviationRadius = maxDeviationRadiusValue; // Size of the max prediction radius. Can snap back to noisy data when too high
// Check for divide by zero. Use an epsilon of a 10th of a millimeter
smoothParameters.jitterRadius = Math.Max(0.0001f, this.smoothParameters.jitterRadius);
Reset();
init = true;
}
// Initialize the filter with a set of SmoothParameters.
public void Init(SmoothParameters smoothParameters)
{
this.smoothingType = SmoothingType.Default;
this.smoothParameters = smoothParameters;
Reset();
init = true;
}
// Initialize the filter with a set of SmoothParameters.
public void Init(SmoothingType smoothingType)
{
this.smoothingType = smoothingType;
smoothParameters = new SmoothParameters();
switch (smoothingType)
{
case SmoothingType.Light:
smoothParameters.smoothing = 0.3f;
smoothParameters.correction = 0.35f;
smoothParameters.prediction = 0.35f;
smoothParameters.jitterRadius = 0.15f;
smoothParameters.maxDeviationRadius = 0.15f;
break;
case SmoothingType.Medium:
smoothParameters.smoothing = 0.5f;
smoothParameters.correction = 0.1f;
smoothParameters.prediction = 0.5f;
smoothParameters.jitterRadius = 0.1f;
smoothParameters.maxDeviationRadius = 0.1f;
break;
case SmoothingType.Aggressive:
smoothParameters.smoothing = 0.7f;
smoothParameters.correction = 0.3f;
smoothParameters.prediction = 1.0f;
smoothParameters.jitterRadius = 1.0f;
smoothParameters.maxDeviationRadius = 1.0f;
break;
//case SmoothingType.Default:
default:
smoothParameters.smoothing = 0.5f;
smoothParameters.correction = 0.5f;
smoothParameters.prediction = 0.5f;
smoothParameters.jitterRadius = 0.05f;
smoothParameters.maxDeviationRadius = 0.04f;
break;
}
Reset();
init = true;
}
// Resets the filter to default values.
public void Reset(ulong userId = 0)
{
KinectManager kinectManager = KinectManager.Instance;
int maxBodyCount = 10; // kinectManager.GetMaxBodyCount();
int jointCount = kinectManager.GetJointCount();
if (userId == 0)
{
//history = new JointHistoryData[kinectManager.GetMaxBodyCount(), kinectManager.GetJointCount()];
history = new BodyHistoryData[maxBodyCount];
for (int i = 0; i < maxBodyCount; i++)
{
history[i] = new BodyHistoryData(jointCount);
}
}
else
{
// clean the history of the given user only
for (int i = 0; i < maxBodyCount; i++)
{
if (history[i].userId == userId)
{
history[i].userId = 0;
history[i].lastUpdateTime = 0;
for (int j = 0; j < history[i].jointHistory.Length; j++)
{
history[i].jointHistory[j].frameCount = 0;
}
//Debug.Log("Removed pos history for userId " + userId + ", index: " + i);
}
}
}
//Debug.Log("BodyCount: " + kinectManager.GetMaxBodyCount() + ", JointCount: " + kinectManager.GetJointCount());
}
//// Update the filter with a new frame of data and smooth.
//public void UpdateFilter(ref KinectInterop.BodyData[] alTrackedBodies)
//{
// if (!init)
// {
// // initialize with by-default parameters
// Init();
// }
// if (smoothingType == SmoothingType.None)
// return;
// SmoothParameters tempSmoothingParams = new SmoothParameters();
// tempSmoothingParams.smoothing = this.smoothParameters.smoothing;
// tempSmoothingParams.correction = this.smoothParameters.correction;
// tempSmoothingParams.prediction = this.smoothParameters.prediction;
// int bodyCount = alTrackedBodies != null ? alTrackedBodies.Length : 0;
// for (int bodyIndex = 0; bodyIndex < bodyCount; bodyIndex++)
// {
// if (alTrackedBodies[bodyIndex].bIsTracked)
// {
// FilterBodyJoints(ref alTrackedBodies[bodyIndex], /**bodyIndex*/ alTrackedBodies[bodyIndex].iBodyIndex, tempSmoothingParams);
// }
// }
//}
// Update the filter with a new frame of data and smooth.
public void UpdateFilter(ref KinectInterop.BodyData bodyData)
{
if (!init)
{
// initialize with the default parameters
Init();
}
if (smoothingType == SmoothingType.None)
return;
SmoothParameters tempSmoothingParams = new SmoothParameters();
tempSmoothingParams.smoothing = smoothParameters.smoothing;
tempSmoothingParams.correction = smoothParameters.correction;
tempSmoothingParams.prediction = smoothParameters.prediction;
if (bodyData.bIsTracked)
{
// get body index
int bodyIndex = GetUserIndex(bodyData.liTrackingID);
if (bodyIndex < 0)
{
bodyIndex = GetFreeIndex();
if (bodyIndex >= 0)
history[bodyIndex].userId = bodyData.liTrackingID;
//Debug.Log("Created history for userId: " + history[bodyIndex].userId + ", index: " + bodyIndex + ", time: " + DateTime.UtcNow);
}
// filter
if (bodyIndex >= 0)
{
FilterBodyJoints(ref bodyData, bodyIndex, tempSmoothingParams);
}
}
// free unused history
//CleanUpUserHistory();
}
// Update the filter for all body joints
private void FilterBodyJoints(ref KinectInterop.BodyData bodyData, int bodyIndex, SmoothParameters tempSmoothingParams)
{
KinectManager kinectManager = KinectManager.Instance;
int jointCount = kinectManager.GetJointCount();
long lastUpdateTime = history[bodyIndex].lastUpdateTime;
for (int jointIndex = 0; jointIndex < jointCount; jointIndex++)
{
//// If not tracked, we smooth a bit more by using a bigger jitter radius
//// Always filter end joints highly as they are more noisy
//if (bodyData.joint[jointIndex].trackingState != KinectInterop.TrackingState.Tracked ||
// jointIndex == (int)KinectInterop.JointType.FootLeft || jointIndex == (int)KinectInterop.JointType.FootRight ||
// jointIndex == (int)KinectInterop.JointType.HandLeft || jointIndex == (int)KinectInterop.JointType.HandRight ||
// jointIndex == (int)KinectInterop.JointType.HandtipLeft || jointIndex == (int)KinectInterop.JointType.HandtipRight ||
// jointIndex == (int)KinectInterop.JointType.ThumbLeft || jointIndex == (int)KinectInterop.JointType.ThumbRight)
////|| jointIndex == (int)KinectInterop.JointType.Head)
//{
// tempSmoothingParams.jitterRadius = smoothParameters.jitterRadius * 2.0f;
// tempSmoothingParams.maxDeviationRadius = smoothParameters.maxDeviationRadius * 2.0f;
//}
//else
{
tempSmoothingParams.jitterRadius = smoothParameters.jitterRadius;
tempSmoothingParams.maxDeviationRadius = smoothParameters.maxDeviationRadius;
}
bodyData.joint[jointIndex].position = FilterJoint(bodyData.joint[jointIndex].position, bodyIndex, jointIndex, tempSmoothingParams);
}
bodyData.position = bodyData.joint[0].position;
//Debug.Log(" updated pos history for userId: " + history[bodyIndex].userId + ", index: " + bodyIndex + ", time: " + history[bodyIndex].lastUpdateTime + " (" + lastUpdateTime + ")");
}
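// The FilterJoint method below implements the standard Holt double-exponential smoothing recurrences
// (a summary of the code that follows, with s = smoothing, c = correction, p = prediction):
//   filtered[t] = (1 - s) * jitterFiltered[t] + s * (filtered[t-1] + trend[t-1])
//   trend[t]    = c * (filtered[t] - filtered[t-1]) + (1 - c) * trend[t-1]
//   output[t]   = filtered[t] + p * trend[t]
// plus an initial blend inside jitterRadius and a final clamp to maxDeviationRadius around the raw position.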
// Update the filter for one joint
private Vector3 FilterJoint(Vector3 rawPosition, int bodyIndex, int jointIndex, SmoothParameters smoothingParameters)
{
Vector3 filteredPosition;
Vector3 diffVec;
Vector3 trend;
float diffVal;
float diffFactor;
Vector3 prevFilteredPosition = history[bodyIndex].jointHistory[jointIndex].filteredPosition;
Vector3 prevTrend = history[bodyIndex].jointHistory[jointIndex].trend;
Vector3 prevRawPosition = history[bodyIndex].jointHistory[jointIndex].rawPosition;
bool jointIsValid = (rawPosition != Vector3.zero);
// If joint is invalid, reset the filter
if (!jointIsValid)
{
history[bodyIndex].jointHistory[jointIndex].frameCount = 0;
}
// Initial start values
if (history[bodyIndex].jointHistory[jointIndex].frameCount == 0)
{
filteredPosition = rawPosition;
trend = Vector3.zero;
}
else if (history[bodyIndex].jointHistory[jointIndex].frameCount == 1)
{
filteredPosition = (rawPosition + prevRawPosition) * 0.5f;
diffVec = filteredPosition - prevFilteredPosition;
trend = (diffVec * smoothingParameters.correction) + (prevTrend * (1.0f - smoothingParameters.correction));
}
else
{
// First apply jitter filter
diffVec = rawPosition - prevFilteredPosition;
diffVal = Math.Abs(diffVec.magnitude);
diffFactor = diffVal / smoothingParameters.jitterRadius;
if (diffVal <= smoothingParameters.jitterRadius)
{
filteredPosition = (rawPosition * diffFactor) + (prevFilteredPosition * (1.0f - diffFactor));
}
else
{
filteredPosition = rawPosition;
}
// Now the double exponential smoothing filter
filteredPosition = (filteredPosition * (1.0f - smoothingParameters.smoothing)) + ((prevFilteredPosition + prevTrend) * smoothingParameters.smoothing);
diffVec = filteredPosition - prevFilteredPosition;
trend = (diffVec * smoothingParameters.correction) + (prevTrend * (1.0f - smoothingParameters.correction));
}
// Predict into the future to reduce latency
Vector3 predictedPosition = filteredPosition + (trend * smoothingParameters.prediction);
// Check that we are not too far away from raw data
diffVec = predictedPosition - rawPosition;
diffVal = Mathf.Abs(diffVec.magnitude);
diffFactor = smoothingParameters.maxDeviationRadius / diffVal;
if (diffVal > smoothingParameters.maxDeviationRadius)
{
predictedPosition = (predictedPosition * diffFactor) + (rawPosition * (1.0f - diffFactor));
}
// Save the data from this frame
history[bodyIndex].jointHistory[jointIndex].rawPosition = rawPosition;
history[bodyIndex].jointHistory[jointIndex].filteredPosition = filteredPosition;
history[bodyIndex].jointHistory[jointIndex].trend = trend;
history[bodyIndex].jointHistory[jointIndex].frameCount++;
DateTime dtNow = DateTime.UtcNow;
history[bodyIndex].lastUpdateTime = dtNow.Ticks;
return predictedPosition;
}
// returns the history index for the given user, or -1 if not found
private int GetUserIndex(ulong userId)
{
for (int i = 0; i < history.Length; i++)
{
if (history[i].userId == userId)
return i;
}
return -1;
}
// returns the 1st free history index, or -1 if not found
private int GetFreeIndex()
{
for (int i = 0; i < history.Length; i++)
{
if (history[i].userId == 0)
return i;
}
return -1;
}
// frees history indices that have been unused for a long time
public void CleanUpUserHistory()
{
DateTime dtNow = DateTime.UtcNow;
long timeNow = dtNow.Ticks;
for (int i = 0; i < history.Length; i++)
{
if (history[i].userId != 0 && (timeNow - history[i].lastUpdateTime) >= 10000000)  // 10,000,000 ticks = 1 second
{
//Debug.Log("Removed pos history for userId " + history[i].userId + ", index: " + i + ", time: " + dtNow + ", not used since: " + (timeNow - history[i].lastUpdateTime) + " ticks");
history[i].userId = 0;
history[i].lastUpdateTime = 0;
for (int j = 0; j < history[i].jointHistory.Length; j++)
{
history[i].jointHistory[j].frameCount = 0;
}
}
}
}
// body history data used by the filter
private struct BodyHistoryData
{
public ulong userId;
public long lastUpdateTime;
public JointHistoryData[] jointHistory;
public BodyHistoryData(int jointCount)
{
userId = 0;
lastUpdateTime = 0;
jointHistory = new JointHistoryData[jointCount];
}
}
// joint history data used by the filter
private struct JointHistoryData
{
// Gets or sets Historical Position.
public Vector3 rawPosition;
// Gets or sets Historical Filtered Position.
public Vector3 filteredPosition;
// Gets or sets Historical Trend.
public Vector3 trend;
// Gets or sets Historical FrameCount.
public uint frameCount;
}
}
}

11
Assets/Azure/KinectScripts/Filters/JointPositionsFilter.cs.meta

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 31507b0aadcf2ce48bbb4b902f62ada7
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

398
Assets/Azure/KinectScripts/Filters/JointVelocitiesFilter.cs

@ -0,0 +1,398 @@
using UnityEngine;
using System;
using System.Collections;
using System.Collections.Generic;
namespace com.rfilkov.kinect
{
/// <summary>
/// Implementation of a Holt Double Exponential Smoothing filter. The double exponential
/// smooths the curve and predicts. There is also noise jitter removal and maximum
/// prediction bounds. The parameters are commented in the Init function.
/// </summary>
public class JointVelocitiesFilter
{
// The history data.
//private VelocityHistoryData[,] history;
private BodyHistoryData[] history;
// The smoothing parameters for this filter.
private SmoothParameters smoothParameters;
private SmoothingType smoothingType = SmoothingType.Default;
// True when the filter parameters are initialized.
private bool init;
// userId to index
private Dictionary<ulong, int> dictUserIdToIndex = new Dictionary<ulong, int>();
/// Initializes a new instance of the class.
public JointVelocitiesFilter()
{
init = false;
}
// Initialize the filter with a default set of smoothing parameters.
public void Init()
{
// Specify some defaults
Init(0.5f, 0.5f, 0.5f, 0.05f, 0.04f);
}
/// <summary>
/// Initialize the filter with a set of manually specified TransformSmoothParameters.
/// </summary>
/// <param name="smoothingValue">Smoothing = [0..1], lower values is closer to the raw data and more noisy.</param>
/// <param name="correctionValue">Correction = [0..1], higher values correct faster and feel more responsive.</param>
/// <param name="predictionValue">Prediction = [0..n], how many frames into the future we want to predict.</param>
/// <param name="jitterRadiusValue">JitterRadius = The deviation distance in m that defines jitter.</param>
/// <param name="maxDeviationRadiusValue">MaxDeviation = The maximum distance in m that filtered positions are allowed to deviate from raw data.</param>
public void Init(float smoothingValue, float correctionValue, float predictionValue, float jitterRadiusValue, float maxDeviationRadiusValue)
{
smoothParameters = new SmoothParameters();
smoothParameters.smoothing = smoothingValue; // How much smoothing will occur. Will lag when too high
smoothParameters.correction = correctionValue; // How much to correct back from prediction. Can make things springy
smoothParameters.prediction = predictionValue; // Amount of prediction into the future to use. Can overshoot when too high
smoothParameters.jitterRadius = jitterRadiusValue; // Size of the radius where jitter is removed. Can do too much smoothing when too high
smoothParameters.maxDeviationRadius = maxDeviationRadiusValue; // Size of the max prediction radius. Can snap back to noisy data when too high
// Check for divide by zero. Use an epsilon of a 10th of a millimeter
smoothParameters.jitterRadius = Math.Max(0.0001f, smoothParameters.jitterRadius);
Reset();
init = true;
}
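// Illustrative usage sketch (assumes an initialized KinectManager and a tracked KinectInterop.BodyData named bodyData):
//   JointVelocitiesFilter velFilter = new JointVelocitiesFilter();
//   velFilter.Init(SmoothingType.Medium);       // or Init() / Init(customSmoothParameters)
//   velFilter.UpdateFilter(ref bodyData);       // smooths bodyData.joint[i].posVel in place
//   velFilter.CleanUpUserHistory();             // optionally frees history slots unused for over a second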
// Initialize the filter with the given set of smoothing parameters.
public void Init(SmoothParameters smoothParameters)
{
this.smoothingType = SmoothingType.Default;
this.smoothParameters = smoothParameters;
Reset();
init = true;
}
// Initialize the filter with the given smoothing type.
public void Init(SmoothingType smoothingType)
{
this.smoothingType = smoothingType;
smoothParameters = new SmoothParameters();
switch (smoothingType)
{
case SmoothingType.Light:
smoothParameters.smoothing = 0.3f;
smoothParameters.correction = 0.35f;
smoothParameters.prediction = 0.35f;
smoothParameters.jitterRadius = 0.15f;
smoothParameters.maxDeviationRadius = 0.15f;
break;
case SmoothingType.Medium:
smoothParameters.smoothing = 0.5f;
smoothParameters.correction = 0.1f;
smoothParameters.prediction = 0.5f;
smoothParameters.jitterRadius = 0.1f;
smoothParameters.maxDeviationRadius = 0.1f;
break;
case SmoothingType.Aggressive:
smoothParameters.smoothing = 0.7f;
smoothParameters.correction = 0.3f;
smoothParameters.prediction = 1.0f;
smoothParameters.jitterRadius = 1.0f;
smoothParameters.maxDeviationRadius = 1.0f;
break;
//case SmoothingType.Default:
default:
smoothParameters.smoothing = 0.5f;
smoothParameters.correction = 0.5f;
smoothParameters.prediction = 0.5f;
smoothParameters.jitterRadius = 0.05f;
smoothParameters.maxDeviationRadius = 0.04f;
break;
}
Reset();
init = true;
}
// Resets the filter to default values.
public void Reset()
{
KinectManager kinectManager = KinectManager.Instance;
int maxBodyCount = 10; // kinectManager.GetMaxBodyCount();
int jointCount = kinectManager.GetJointCount();
//history = new JointHistoryData[kinectManager.GetMaxBodyCount(), kinectManager.GetJointCount()];
history = new BodyHistoryData[maxBodyCount];
for (int i = 0; i < maxBodyCount; i++)
{
history[i] = new BodyHistoryData(jointCount);
}
//Debug.Log("BodyCount: " + kinectManager.GetMaxBodyCount() + ", JointCount: " + kinectManager.GetJointCount());
}
//// Update the filter with a new frame of data and smooth.
//public void UpdateFilter(ref KinectInterop.BodyData[] alTrackedBodies)
//{
// if (init == false)
// {
// Init(); // initialize with default parameters
// }
// SmoothParameters tempSmoothingParams = new SmoothParameters();
// tempSmoothingParams.smoothing = smoothParameters.smoothing;
// tempSmoothingParams.correction = smoothParameters.correction;
// tempSmoothingParams.prediction = smoothParameters.prediction;
// int bodyCount = alTrackedBodies != null ? alTrackedBodies.Length : 0;
// for (int bodyIndex = 0; bodyIndex < bodyCount; bodyIndex++)
// {
// if (alTrackedBodies[bodyIndex].bIsTracked)
// {
// FilterBodyJoints(ref alTrackedBodies[bodyIndex], /**bodyIndex*/ alTrackedBodies[bodyIndex].iBodyIndex, tempSmoothingParams);
// }
// }
//}
// Update the filter with a new frame of data and smooth.
public void UpdateFilter(ref KinectInterop.BodyData bodyData)
{
if (!init)
{
// initialize with default parameters
Init();
}
if (smoothingType == SmoothingType.None)
return;
SmoothParameters tempSmoothingParams = new SmoothParameters();
tempSmoothingParams.smoothing = smoothParameters.smoothing;
tempSmoothingParams.correction = smoothParameters.correction;
tempSmoothingParams.prediction = smoothParameters.prediction;
if (bodyData.bIsTracked)
{
// get body index
int bodyIndex = GetUserIndex(bodyData.liTrackingID);
if (bodyIndex < 0)
{
bodyIndex = GetFreeIndex();
if (bodyIndex >= 0)
history[bodyIndex].userId = bodyData.liTrackingID;
//Debug.Log("Created history for userId: " + history[bodyIndex].userId + ", index: " + bodyIndex + ", time: " + DateTime.UtcNow);
}
// filter
if (bodyIndex >= 0)
{
FilterBodyJoints(ref bodyData, bodyIndex, tempSmoothingParams);
}
}
// free unused history
//CleanUpUserHistory();
}
// Update the filter for all body joints
private void FilterBodyJoints(ref KinectInterop.BodyData bodyData, int bodyIndex, SmoothParameters tempSmoothingParams)
{
KinectManager kinectManager = KinectManager.Instance;
int jointCount = kinectManager.GetJointCount();
long lastUpdateTime = history[bodyIndex].lastUpdateTime;
for (int jointIndex = 0; jointIndex < jointCount; jointIndex++)
{
//// If not tracked, we smooth a bit more by using a bigger jitter radius
//// Always filter end joints highly as they are more noisy
//if (bodyData.joint[jointIndex].trackingState != KinectInterop.TrackingState.Tracked ||
// jointIndex == (int)KinectInterop.JointType.FootLeft || jointIndex == (int)KinectInterop.JointType.FootRight ||
// jointIndex == (int)KinectInterop.JointType.HandLeft || jointIndex == (int)KinectInterop.JointType.HandRight ||
// jointIndex == (int)KinectInterop.JointType.HandtipLeft || jointIndex == (int)KinectInterop.JointType.HandtipRight ||
// jointIndex == (int)KinectInterop.JointType.ThumbLeft || jointIndex == (int)KinectInterop.JointType.ThumbRight)
////|| jointIndex == (int)KinectInterop.JointType.Head)
//{
// tempSmoothingParams.jitterRadius = smoothParameters.jitterRadius * 2.0f;
// tempSmoothingParams.maxDeviationRadius = smoothParameters.maxDeviationRadius * 2.0f;
//}
//else
{
tempSmoothingParams.jitterRadius = smoothParameters.jitterRadius;
tempSmoothingParams.maxDeviationRadius = smoothParameters.maxDeviationRadius;
}
bodyData.joint[jointIndex].posVel = FilterJoint(bodyData.joint[jointIndex].posVel, bodyIndex, jointIndex, tempSmoothingParams);
}
//Debug.Log(" updated vel history for userId: " + history[bodyIndex].userId + ", index: " + bodyIndex + ", time: " + history[bodyIndex].lastUpdateTime + " (" + lastUpdateTime + ")");
}
// Update the filter for one joint
private Vector3 FilterJoint(Vector3 rawVelocity, int bodyIndex, int jointIndex, SmoothParameters smoothingParameters)
{
Vector3 filteredVelocity;
Vector3 diffVec;
Vector3 trend;
float diffVal;
Vector3 prevFilteredVelocity = history[bodyIndex].jointHistory[jointIndex].filteredVelocity;
Vector3 prevTrend = history[bodyIndex].jointHistory[jointIndex].trend;
Vector3 prevRawVelocity = history[bodyIndex].jointHistory[jointIndex].rawVelocity;
bool jointIsValid = (rawVelocity != Vector3.zero);
// If joint is invalid, reset the filter
if (!jointIsValid)
{
history[bodyIndex].jointHistory[jointIndex].frameCount = 0;
}
// Initial start values
if (history[bodyIndex].jointHistory[jointIndex].frameCount == 0)
{
filteredVelocity = rawVelocity;
trend = Vector3.zero;
}
else if (history[bodyIndex].jointHistory[jointIndex].frameCount == 1)
{
filteredVelocity = (rawVelocity + prevRawVelocity) * 0.5f;
diffVec = filteredVelocity - prevFilteredVelocity;
trend = (diffVec * smoothingParameters.correction) + (prevTrend * (1.0f - smoothingParameters.correction));
}
else
{
// First apply jitter filter
diffVec = rawVelocity - prevFilteredVelocity;
diffVal = Math.Abs(diffVec.magnitude);
if (diffVal <= smoothingParameters.jitterRadius)
{
filteredVelocity = (rawVelocity * (diffVal / smoothingParameters.jitterRadius)) + (prevFilteredVelocity * (1.0f - (diffVal / smoothingParameters.jitterRadius)));
}
else
{
filteredVelocity = rawVelocity;
}
// Now the double exponential smoothing filter
filteredVelocity = (filteredVelocity * (1.0f - smoothingParameters.smoothing)) + ((prevFilteredVelocity + prevTrend) * smoothingParameters.smoothing);
diffVec = filteredVelocity - prevFilteredVelocity;
trend = (diffVec * smoothingParameters.correction) + (prevTrend * (1.0f - smoothingParameters.correction));
}
// Predict into the future to reduce latency
Vector3 predictedVelocity = filteredVelocity + (trend * smoothingParameters.prediction);
// Check that we are not too far away from raw data
diffVec = predictedVelocity - rawVelocity;
diffVal = Mathf.Abs(diffVec.magnitude);
if (diffVal > smoothingParameters.maxDeviationRadius)
{
predictedVelocity = (predictedVelocity * (smoothingParameters.maxDeviationRadius / diffVal)) + (rawVelocity * (1.0f - (smoothingParameters.maxDeviationRadius / diffVal)));
}
// Save the data from this frame
history[bodyIndex].jointHistory[jointIndex].rawVelocity = rawVelocity;
history[bodyIndex].jointHistory[jointIndex].filteredVelocity = filteredVelocity;
history[bodyIndex].jointHistory[jointIndex].trend = trend;
history[bodyIndex].jointHistory[jointIndex].frameCount++;
DateTime dtNow = DateTime.UtcNow;
history[bodyIndex].lastUpdateTime = dtNow.Ticks;
return predictedVelocity;
}
// returns the history index for the given user, or -1 if not found
private int GetUserIndex(ulong userId)
{
for (int i = 0; i < history.Length; i++)
{
if (history[i].userId == userId)
return i;
}
return -1;
}
// returns the 1st free history index, or -1 if not found
private int GetFreeIndex()
{
for (int i = 0; i < history.Length; i++)
{
if (history[i].userId == 0)
return i;
}
return -1;
}
// frees history indices that have been unused for a long time
public void CleanUpUserHistory()
{
DateTime dtNow = DateTime.UtcNow;
long timeNow = dtNow.Ticks;
for (int i = 0; i < history.Length; i++)
{
if (history[i].userId != 0 && (timeNow - history[i].lastUpdateTime) >= 10000000)  // 10,000,000 ticks = 1 second
{
//Debug.Log("Removed vel history for userId " + history[i].userId + ", index: " + i + ", time: " + dtNow + ", not used since: " + (timeNow - history[i].lastUpdateTime) + " ticks");
history[i].userId = 0;
history[i].lastUpdateTime = 0;
for (int j = 0; j < history[i].jointHistory.Length; j++)
{
history[i].jointHistory[j].frameCount = 0;
}
}
}
}
// body history data used by the filter
private struct BodyHistoryData
{
public ulong userId;
public long lastUpdateTime;
public VelocityHistoryData[] jointHistory;
public BodyHistoryData(int jointCount)
{
userId = 0;
lastUpdateTime = 0;
jointHistory = new VelocityHistoryData[jointCount];
}
}
// velocity history data used by the filter
private struct VelocityHistoryData
{
// Gets or sets Historical Velocity.
public Vector3 rawVelocity;
// Gets or sets Historical Filtered Velocity.
public Vector3 filteredVelocity;
// Gets or sets Historical Trend.
public Vector3 trend;
// Gets or sets Historical FrameCount.
public uint frameCount;
}
}
}

12
Assets/Azure/KinectScripts/Filters/JointVelocitiesFilter.cs.meta

@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: 732d87a0d677e4845a26771e56b5c7d6
timeCreated: 1491826911
licenseType: Store
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

302
Assets/Azure/KinectScripts/Filters/MahonyAHRS.cs

@ -0,0 +1,302 @@
using System;
namespace AHRS
{
/// <summary>
/// MahonyAHRS class. Madgwick's implementation of Mahony's AHRS algorithm.
/// </summary>
/// <remarks>
/// See: http://www.x-io.co.uk/node/8#open_source_ahrs_and_imu_algorithms
/// </remarks>
public class MahonyAHRS
{
/// <summary>
/// Gets or sets the sample period.
/// </summary>
public float SamplePeriod { get; set; }
/// <summary>
/// Gets or sets the algorithm proportional gain.
/// </summary>
public float Kp { get; set; }
/// <summary>
/// Gets or sets the algorithm integral gain.
/// </summary>
public float Ki { get; set; }
/// <summary>
/// Gets or sets the Quaternion output.
/// </summary>
public float[] Quaternion { get; set; }
/// <summary>
/// Error squared.
/// </summary>
public float E2 { get; set; }
/// <summary>
/// Gets or sets the integral error.
/// </summary>
private float[] eInt { get; set; }
/// <summary>
/// Initializes a new instance of the <see cref="MahonyAHRS"/> class.
/// </summary>
/// <param name="samplePeriod">
/// Sample period.
/// </param>
public MahonyAHRS(float samplePeriod)
: this(samplePeriod, 1f, 0f)
{
}
/// <summary>
/// Initializes a new instance of the <see cref="MahonyAHRS"/> class.
/// </summary>
/// <param name="samplePeriod">
/// Sample period.
/// </param>
/// <param name="kp">
/// Algorithm proportional gain.
/// </param>
public MahonyAHRS(float samplePeriod, float kp)
: this(samplePeriod, kp, 0f)
{
}
/// <summary>
/// Initializes a new instance of the <see cref="MahonyAHRS"/> class.
/// </summary>
/// <param name="samplePeriod">
/// Sample period.
/// </param>
/// <param name="kp">
/// Algorithm proportional gain.
/// </param>
/// <param name="ki">
/// Algorithm integral gain.
/// </param>
public MahonyAHRS(float samplePeriod, float kp, float ki)
{
SamplePeriod = samplePeriod;
Kp = kp;
Ki = ki;
Quaternion = new float[] { 1f, 0f, 0f, 0f };
eInt = new float[] { 0f, 0f, 0f };
}
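// Illustrative usage sketch (the sample rate and gain below are example values):
//   MahonyAHRS ahrs = new MahonyAHRS(1f / 30f, 1f);   // samplePeriod = 1/30 s, Kp = 1
//   ahrs.Update(gx, gy, gz, ax, ay, az);              // IMU update, called once per sample period
//   float[] q = ahrs.Quaternion;                      // orientation quaternion, element 0 is the scalar part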
/// <summary>
/// Algorithm AHRS update method. Requires gyroscope, accelerometer and magnetometer data.
/// </summary>
/// <param name="gx">
/// Gyroscope x axis measurement in radians/s.
/// </param>
/// <param name="gy">
/// Gyroscope y axis measurement in radians/s.
/// </param>
/// <param name="gz">
/// Gyroscope z axis measurement in radians/s.
/// </param>
/// <param name="ax">
/// Accelerometer x axis measurement in any calibrated units.
/// </param>
/// <param name="ay">
/// Accelerometer y axis measurement in any calibrated units.
/// </param>
/// <param name="az">
/// Accelerometer z axis measurement in any calibrated units.
/// </param>
/// <param name="mx">
/// Magnetometer x axis measurement in any calibrated units.
/// </param>
/// <param name="my">
/// Magnetometer y axis measurement in any calibrated units.
/// </param>
/// <param name="mz">
/// Magnetometer z axis measurement in any calibrated units.
/// </param>
/// <remarks>
/// Optimised for minimal arithmetic.
/// </remarks>
public void Update(float gx, float gy, float gz, float ax, float ay, float az, float mx, float my, float mz)
{
float q1 = Quaternion[0], q2 = Quaternion[1], q3 = Quaternion[2], q4 = Quaternion[3]; // short name local variable for readability
float norm;
float hx, hy, bx, bz;
float vx, vy, vz, wx, wy, wz;
float ex, ey, ez;
float pa, pb, pc;
// Auxiliary variables to avoid repeated arithmetic
float q1q1 = q1 * q1;
float q1q2 = q1 * q2;
float q1q3 = q1 * q3;
float q1q4 = q1 * q4;
float q2q2 = q2 * q2;
float q2q3 = q2 * q3;
float q2q4 = q2 * q4;
float q3q3 = q3 * q3;
float q3q4 = q3 * q4;
float q4q4 = q4 * q4;
// Normalise accelerometer measurement
norm = (float)Math.Sqrt(ax * ax + ay * ay + az * az);
if (norm == 0f) return; // handle NaN
norm = 1 / norm; // use reciprocal for division
ax *= norm;
ay *= norm;
az *= norm;
// Normalise magnetometer measurement
norm = (float)Math.Sqrt(mx * mx + my * my + mz * mz);
if (norm == 0f) return; // handle NaN
norm = 1 / norm; // use reciprocal for division
mx *= norm;
my *= norm;
mz *= norm;
// Reference direction of Earth's magnetic field
hx = 2f * mx * (0.5f - q3q3 - q4q4) + 2f * my * (q2q3 - q1q4) + 2f * mz * (q2q4 + q1q3);
hy = 2f * mx * (q2q3 + q1q4) + 2f * my * (0.5f - q2q2 - q4q4) + 2f * mz * (q3q4 - q1q2);
bx = (float)Math.Sqrt((hx * hx) + (hy * hy));
bz = 2f * mx * (q2q4 - q1q3) + 2f * my * (q3q4 + q1q2) + 2f * mz * (0.5f - q2q2 - q3q3);
// Estimated direction of gravity and magnetic field
vx = 2f * (q2q4 - q1q3);
vy = 2f * (q1q2 + q3q4);
vz = q1q1 - q2q2 - q3q3 + q4q4;
wx = 2f * bx * (0.5f - q3q3 - q4q4) + 2f * bz * (q2q4 - q1q3);
wy = 2f * bx * (q2q3 - q1q4) + 2f * bz * (q1q2 + q3q4);
wz = 2f * bx * (q1q3 + q2q4) + 2f * bz * (0.5f - q2q2 - q3q3);
// Error is cross product between estimated direction and measured direction of gravity
ex = (ay * vz - az * vy) + (my * wz - mz * wy);
ey = (az * vx - ax * vz) + (mz * wx - mx * wz);
ez = (ax * vy - ay * vx) + (mx * wy - my * wx);
if (Ki > 0f)
{
eInt[0] += ex; // accumulate integral error
eInt[1] += ey;
eInt[2] += ez;
}
else
{
eInt[0] = 0.0f; // prevent integral wind up
eInt[1] = 0.0f;
eInt[2] = 0.0f;
}
// error squared
E2 = ex * ex + ey * ey + ez * ez;
// Apply feedback terms
gx = gx + Kp * ex + Ki * eInt[0];
gy = gy + Kp * ey + Ki * eInt[1];
gz = gz + Kp * ez + Ki * eInt[2];
// Integrate rate of change of quaternion
pa = q2;
pb = q3;
pc = q4;
q1 = q1 + (-q2 * gx - q3 * gy - q4 * gz) * (0.5f * SamplePeriod);
q2 = pa + (q1 * gx + pb * gz - pc * gy) * (0.5f * SamplePeriod);
q3 = pb + (q1 * gy - pa * gz + pc * gx) * (0.5f * SamplePeriod);
q4 = pc + (q1 * gz + pa * gy - pb * gx) * (0.5f * SamplePeriod);
// Normalise quaternion
norm = (float)Math.Sqrt(q1 * q1 + q2 * q2 + q3 * q3 + q4 * q4);
norm = 1.0f / norm;
Quaternion[0] = q1 * norm;
Quaternion[1] = q2 * norm;
Quaternion[2] = q3 * norm;
Quaternion[3] = q4 * norm;
}
/// <summary>
/// Algorithm IMU update method. Requires only gyroscope and accelerometer data.
/// </summary>
/// <param name="gx">
/// Gyroscope x axis measurement in radians/s.
/// </param>
/// <param name="gy">
/// Gyroscope y axis measurement in radians/s.
/// </param>
/// <param name="gz">
/// Gyroscope z axis measurement in radians/s.
/// </param>
/// <param name="ax">
/// Accelerometer x axis measurement in any calibrated units.
/// </param>
/// <param name="ay">
/// Accelerometer y axis measurement in any calibrated units.
/// </param>
/// <param name="az">
/// Accelerometer z axis measurement in any calibrated units.
/// </param>
public void Update(float gx, float gy, float gz, float ax, float ay, float az)
{
float q1 = Quaternion[0], q2 = Quaternion[1], q3 = Quaternion[2], q4 = Quaternion[3]; // short name local variable for readability
float norm;
float vx, vy, vz;
float ex, ey, ez;
float pa, pb, pc;
// Normalise accelerometer measurement
norm = (float)Math.Sqrt(ax * ax + ay * ay + az * az);
if (norm == 0f) return; // handle NaN
norm = 1 / norm; // use reciprocal for division
ax *= norm;
ay *= norm;
az *= norm;
// Estimated direction of gravity
vx = 2.0f * (q2 * q4 - q1 * q3);
vy = 2.0f * (q1 * q2 + q3 * q4);
vz = q1 * q1 - q2 * q2 - q3 * q3 + q4 * q4;
// Error is cross product between estimated direction and measured direction of gravity
ex = (ay * vz - az * vy);
ey = (az * vx - ax * vz);
ez = (ax * vy - ay * vx);
if (Ki > 0f)
{
eInt[0] += ex; // accumulate integral error
eInt[1] += ey;
eInt[2] += ez;
}
else
{
eInt[0] = 0.0f; // prevent integral wind up
eInt[1] = 0.0f;
eInt[2] = 0.0f;
}
// error squared
E2 = ex * ex + ey * ey + ez * ez;
// Apply feedback terms
gx = gx + Kp * ex + Ki * eInt[0];
gy = gy + Kp * ey + Ki * eInt[1];
gz = gz + Kp * ez + Ki * eInt[2];
// Integrate rate of change of quaternion
pa = q2;
pb = q3;
pc = q4;
q1 = q1 + (-q2 * gx - q3 * gy - q4 * gz) * (0.5f * SamplePeriod);
q2 = pa + (q1 * gx + pb * gz - pc * gy) * (0.5f * SamplePeriod);
q3 = pb + (q1 * gy - pa * gz + pc * gx) * (0.5f * SamplePeriod);
q4 = pc + (q1 * gz + pa * gy - pb * gx) * (0.5f * SamplePeriod);
// Normalise quaternion
norm = (float)Math.Sqrt(q1 * q1 + q2 * q2 + q3 * q3 + q4 * q4);
norm = 1.0f / norm;
Quaternion[0] = q1 * norm;
Quaternion[1] = q2 * norm;
Quaternion[2] = q3 * norm;
Quaternion[3] = q4 * norm;
}
}
}

11
Assets/Azure/KinectScripts/Filters/MahonyAHRS.cs.meta

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 75a75a12bb874514596c7fc355cd0c3f
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

80
Assets/Azure/KinectScripts/FollowSensorTransform.cs

@ -0,0 +1,80 @@
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using com.rfilkov.kinect;
namespace com.rfilkov.components
{
/// <summary>
/// This component makes the game object follow the position and rotation of the sensor.
/// </summary>
public class FollowSensorTransform : MonoBehaviour
{
[Tooltip("Depth sensor index - 0 is the 1st one, 1 - the 2nd one, etc.")]
public int sensorIndex = 0;
[Tooltip("Smooth factor used for the game object movement and rotation.")]
public float smoothFactor = 0f;
[Tooltip("Whether to follow the sensor's depth or color camera pose.")]
public ReferencePose referencePose = ReferencePose.DepthCameraPose;
public enum ReferencePose : int { DepthCameraPose = 0, ColorCameraPose = 1 };
// reference to the KinectManager
private KinectManager kinectManager = null;
private KinectInterop.SensorData sensorData = null;
// sensor position and rotation
Vector3 sensorWorldPos = Vector3.zero;
Quaternion sensorWorldRot = Quaternion.identity;
void Start()
{
// get reference to KinectManager
kinectManager = KinectManager.Instance;
sensorData = kinectManager ? kinectManager.GetSensorData(sensorIndex) : null;
}
void Update()
{
if(kinectManager && kinectManager.IsInitialized())
{
Transform sensorTrans = kinectManager.GetSensorTransform(sensorIndex);
if(sensorTrans != null)
{
sensorWorldPos = sensorTrans.position;
sensorWorldRot = sensorTrans.rotation;
if (referencePose != ReferencePose.DepthCameraPose && sensorData != null && sensorData.sensorInterface != null)
{
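// compose the sensor's world pose with the depth-to-color extrinsics,
// so the object follows the color-camera pose instead of the depth-camera pose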
Matrix4x4 sensorTransMat = Matrix4x4.identity;
sensorTransMat.SetTRS(sensorTrans.position, sensorTrans.rotation, Vector3.one);
Matrix4x4 sensorToRefMat = sensorData.sensorInterface.GetDepthToColorCameraMatrix();
sensorTransMat = sensorTransMat * sensorToRefMat;
sensorWorldPos = sensorTransMat.GetColumn(3);
sensorWorldRot = sensorTransMat.rotation;
}
}
if (smoothFactor != 0f)
{
transform.position = Vector3.Lerp(transform.position, sensorWorldPos, smoothFactor * Time.deltaTime);
transform.rotation = Quaternion.Slerp(transform.rotation, sensorWorldRot, smoothFactor * Time.deltaTime);
}
else
{
transform.position = sensorWorldPos;
transform.rotation = sensorWorldRot;
}
}
}
}
}

11
Assets/Azure/KinectScripts/FollowSensorTransform.cs.meta

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 9a17e9eb7ba629243bfc955c820d04a1
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

140
Assets/Azure/KinectScripts/FollowUserJointPose.cs

@ -0,0 +1,140 @@
using UnityEngine;
using System.Collections;
using com.rfilkov.kinect;
namespace com.rfilkov.components
{
/// <summary>
/// FollowUserJointPose makes the game object's transform follow the given user's joint pose.
/// </summary>
public class FollowUserJointPose : MonoBehaviour
{
[Tooltip("Depth sensor index - 0 is the 1st one, 1 - the 2nd one, etc. -1 means the sensor doesn't matter")]
private int sensorIndex = -1;
[Tooltip("Index of the player, tracked by this component. 0 means the 1st player, 1 - the 2nd one, 2 - the 3rd one, etc.")]
public int playerIndex = 0;
[Tooltip("The sensor's joint we want to follow.")]
public KinectInterop.JointType followJoint = KinectInterop.JointType.Head;
[Tooltip("Whether the joint view is mirrored or not.")]
public bool mirroredView = false;
[Tooltip("Whether to move the object's transform.")]
public bool moveTransform = true;
[Tooltip("Whether to rotate the object's transform.")]
public bool rotateTransform = true;
[Tooltip("Scene object that will be used to represent the sensor's position and rotation in the scene.")]
public Transform sensorTransform;
[Tooltip("Offset of the object to the joint's position.")]
public Vector3 positionOffset = Vector3.zero;
[Tooltip("Scale factor of the joint position.")]
public Vector3 motionScale = Vector3.one;
[Tooltip("Scale factor of the joint rotation.")]
private Vector3 rotationFactor = Vector3.zero;
[Tooltip("Smooth factor used for object's position and rotation smoothing.")]
public float smoothFactor = 10f;
private KinectManager kinectManager = null;
private Quaternion initialRotation = Quaternion.identity;
private Vector3 vPosJoint = Vector3.zero;
private Quaternion qRotJoint = Quaternion.identity;
void Start()
{
kinectManager = KinectManager.Instance;
initialRotation = transform.rotation;
//initialRotation = mirroredView ? Quaternion.Euler(0f, 180f, 0f) : Quaternion.identity;
}
void Update()
{
if (kinectManager && kinectManager.IsInitialized())
{
if (sensorIndex >= 0 || kinectManager.IsUserDetected(playerIndex))
{
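// when sensorIndex is set (>= 0), playerIndex is used directly as the body index for that sensor;
// otherwise it is resolved to a user ID via the KinectManager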
ulong userId = sensorIndex < 0 ? kinectManager.GetUserIdByIndex(playerIndex) : (ulong)playerIndex;
if (sensorIndex >= 0 || kinectManager.IsJointTracked(userId, followJoint))
{
if (sensorTransform != null)
{
if (sensorIndex < 0)
vPosJoint = kinectManager.GetJointKinectPosition(userId, followJoint, true);
else
vPosJoint = kinectManager.GetSensorJointKinectPosition(sensorIndex, (int)userId, followJoint, true);
}
else
{
if (sensorIndex < 0)
vPosJoint = kinectManager.GetJointPosition(userId, followJoint);
else
vPosJoint = kinectManager.GetSensorJointPosition(sensorIndex, (int)userId, followJoint);
}
if (positionOffset != Vector3.zero)
{
vPosJoint += positionOffset;
}
if (sensorTransform)
{
vPosJoint = sensorTransform.TransformPoint(vPosJoint);
}
if(motionScale != Vector3.one)
{
vPosJoint = new Vector3(vPosJoint.x * motionScale.x, vPosJoint.y * motionScale.y, vPosJoint.z * motionScale.z);
}
if (sensorIndex < 0)
qRotJoint = kinectManager.GetJointOrientation(userId, followJoint, !mirroredView);
else
qRotJoint = kinectManager.GetSensorJointOrientation(sensorIndex, (int)userId, followJoint, !mirroredView);
qRotJoint = initialRotation * qRotJoint;
if(rotationFactor != Vector3.zero)
{
qRotJoint = Quaternion.Euler(rotationFactor) * qRotJoint;
}
if (moveTransform || rotateTransform)
{
if (smoothFactor != 0f)
{
if(moveTransform)
transform.position = Vector3.Lerp(transform.position, vPosJoint, smoothFactor * Time.deltaTime);
if(rotateTransform)
transform.rotation = Quaternion.Slerp(transform.rotation, qRotJoint, smoothFactor * Time.deltaTime);
}
else
{
if(moveTransform)
transform.position = vPosJoint;
if(rotateTransform)
transform.rotation = qRotJoint;
}
}
}
}
}
}
}
}

8
Assets/Azure/KinectScripts/FollowUserJointPose.cs.meta

@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: c98460a12114096448a40a69ef6b30e8
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:

311
Assets/Azure/KinectScripts/ForegroundBlendRenderer.cs

@ -0,0 +1,311 @@
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using com.rfilkov.kinect;
namespace com.rfilkov.components
{
/// <summary>
/// ForegroundBlendRenderer provides volumetric rendering and lighting of the real environment, filtered by the background-removal manager.
/// </summary>
public class ForegroundBlendRenderer : MonoBehaviour
{
[Tooltip("Reference to background removal manager. If left to None, it looks up the first available BR-manager in the scene.")]
public BackgroundRemovalManager backgroundRemovalManager = null;
[Tooltip("Depth value in meters, used for invalid depth points.")]
public float invalidDepthValue = 0f;
[Tooltip("Whether to maximize the rendered object on the screen, or not.")]
public bool maximizeOnScreen = true;
[Tooltip("Whether to apply per-pixel lighting on the foreground, or not.")]
public bool applyLighting = false;
[Tooltip("Camera used to scale the mesh, to fill the camera's background. If left empty, it will default to the main camera in the scene.")]
public Camera foregroundCamera;
[Tooltip("Background image (if any) that needs to be overlayed by this blend renderer.")]
public UnityEngine.UI.RawImage backgroundImage;
// references to KM and data
private KinectManager kinectManager = null;
private KinectInterop.SensorData sensorData = null;
private DepthSensorBase sensorInt = null;
private Material matRenderer = null;
// depth image buffer (in depth camera resolution)
private ComputeBuffer depthImageBuffer = null;
// textures
private Texture alphaTex = null;
private Texture colorTex = null;
// lighting
private FragmentLighting lighting = new FragmentLighting();
// saved screen width & height
private int lastScreenW = 0;
private int lastScreenH = 0;
private int lastColorW = 0;
private int lastColorH = 0;
private float lastAnchorPos = 0f;
private Vector3 initialScale = Vector3.one;
// distances
private float distToBackImage = 0f;
private float distToTransform = 0f;
void Start()
{
kinectManager = KinectManager.Instance;
initialScale = transform.localScale;
if (backgroundRemovalManager == null)
{
backgroundRemovalManager = FindObjectOfType<BackgroundRemovalManager>();
}
// get distance to back image
if(backgroundImage)
{
Canvas canvas = backgroundImage.canvas;
if (canvas.renderMode == RenderMode.ScreenSpaceCamera)
distToBackImage = canvas.planeDistance;
else
distToBackImage = 0f;
}
// get distance to transform
distToTransform = transform.localPosition.z;
// set renderer material
Renderer meshRenderer = GetComponent<Renderer>();
if (meshRenderer)
{
Shader blendShader = Shader.Find("Kinect/ForegroundBlendShader");
if(blendShader != null)
{
matRenderer = new Material(blendShader);
meshRenderer.material = matRenderer;
}
}
// get sensor data
if (kinectManager && kinectManager.IsInitialized() && backgroundRemovalManager && backgroundRemovalManager.enabled)
{
sensorData = kinectManager.GetSensorData(backgroundRemovalManager.sensorIndex);
sensorInt = sensorData != null ? (DepthSensorBase)sensorData.sensorInterface : null;
}
if (foregroundCamera == null)
{
foregroundCamera = Camera.main;
}
// find scene lights
Light[] sceneLights = GameObject.FindObjectsOfType<Light>();
lighting.SetLightsAndBounds(sceneLights, transform.position, new Vector3(20f, 20f, 20f));
//Debug.Log("sceneLights: " + sceneLights.Length);
//for(int i = 0; i < sceneLights.Length; i++)
//{
// Debug.Log(i.ToString() + " - " + sceneLights[i].name + " - " + sceneLights[i].type);
//}
}
void OnDestroy()
{
if (sensorData != null && sensorData.colorDepthBuffer != null)
{
sensorData.colorDepthBuffer.Release();
sensorData.colorDepthBuffer = null;
}
if (depthImageBuffer != null)
{
//depthImageCopy = null;
depthImageBuffer.Release();
depthImageBuffer = null;
}
// release lighting resources
lighting.ReleaseResources();
}
void Update()
{
if (matRenderer == null || sensorData == null || sensorInt == null)
return;
if(alphaTex == null || alphaTex.width != sensorData.colorImageWidth || alphaTex.height != sensorData.colorImageHeight)
{
// alpha texture
alphaTex = backgroundRemovalManager.GetAlphaTex();
if(alphaTex != null)
{
matRenderer.SetTexture("_AlphaTex", alphaTex);
}
}
if(colorTex == null || colorTex.width != sensorData.colorImageWidth || colorTex.height != sensorData.colorImageHeight)
{
// color texture
colorTex = !backgroundRemovalManager.computeAlphaMaskOnly ? backgroundRemovalManager.GetForegroundTex() : alphaTex; // sensorInt.pointCloudColorTexture
if (colorTex != null)
{
matRenderer.SetInt("_TexResX", colorTex.width);
matRenderer.SetInt("_TexResY", colorTex.height);
matRenderer.SetTexture("_ColorTex", colorTex);
}
}
if (colorTex == null || alphaTex == null /**|| foregroundCamera == null*/)
return;
if (sensorInt.pointCloudResolution == DepthSensorBase.PointCloudResolution.DepthCameraResolution)
{
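// the 16-bit depth samples are packed two per uint, hence width * height / 2 buffer elements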
int depthBufferLength = sensorData.depthImageWidth * sensorData.depthImageHeight / 2;
if (depthImageBuffer == null || depthImageBuffer.count != depthBufferLength)
{
//int depthImageLength = sensorData.depthImageWidth * sensorData.depthImageHeight;
//depthImageCopy = new ushort[depthImageLength];
depthImageBuffer = KinectInterop.CreateComputeBuffer(depthImageBuffer, depthBufferLength, sizeof(uint));
matRenderer.SetBuffer("_DepthMap", depthImageBuffer);
//Debug.Log("Created depthImageBuffer with len: " + depthBufferLength);
}
if (depthImageBuffer != null && sensorData.depthImage != null)
{
//KinectInterop.CopyBytes(sensorData.depthImage, sizeof(ushort), depthImageCopy, sizeof(ushort));
KinectInterop.SetComputeBufferData(depthImageBuffer, sensorData.depthImage, depthBufferLength, sizeof(uint));
}
//Debug.Log("ForegroundBlendRenderer DepthFrameTime: " + lastDepthFrameTime);
}
else
{
int bufferLength = sensorData.colorImageWidth * sensorData.colorImageHeight / 2;
if (sensorData.colorDepthBuffer == null || sensorData.colorDepthBuffer.count != bufferLength)
{
sensorData.colorDepthBuffer = new ComputeBuffer(bufferLength, sizeof(uint));
matRenderer.SetBuffer("_DepthMap", sensorData.colorDepthBuffer);
//Debug.Log("Created colorDepthBuffer with len: " + bufferLength);
}
//Debug.Log("ForegroundBlendRenderer ColorDepthBufferTime: " + sensorData.lastColorDepthBufferTime);
}
matRenderer.SetFloat("_DepthDistance", 0f);
matRenderer.SetFloat("_InvDepthVal", invalidDepthValue);
int curScreenW = foregroundCamera ? foregroundCamera.pixelWidth : Screen.width;
int curScreenH = foregroundCamera ? foregroundCamera.pixelHeight : Screen.height;
if (lastScreenW != curScreenW || lastScreenH != curScreenH || lastColorW != sensorData.colorImageWidth || lastColorH != sensorData.colorImageHeight)
{
ScaleRendererTransform(curScreenW, curScreenH);
}
Vector2 anchorPos = backgroundImage ? backgroundImage.rectTransform.anchoredPosition : Vector2.zero;
float curAnchorPos = anchorPos.x + anchorPos.y; // Mathf.Abs(anchorPos.x) + Mathf.Abs(anchorPos.y);
if (Mathf.Abs(curAnchorPos - lastAnchorPos) >= 20f)
{
//Debug.Log("anchorPos: " + anchorPos + ", curAnchorPos: " + curAnchorPos + ", lastAnchorPos: " + lastAnchorPos + ", diff: " + Mathf.Abs(curAnchorPos - lastAnchorPos));
CenterRendererTransform(anchorPos, curAnchorPos);
}
// update lighting parameters
lighting.UpdateLighting(matRenderer, applyLighting);
}
// scales the renderer's transform properly
private void ScaleRendererTransform(int curScreenW, int curScreenH)
{
lastScreenW = curScreenW;
lastScreenH = curScreenH;
lastColorW = sensorData.colorImageWidth;
lastColorH = sensorData.colorImageHeight;
Vector3 localScale = Vector3.one; // transform.localScale;
if (maximizeOnScreen && foregroundCamera)
{
float objectZ = distToTransform; // transform.localPosition.z; // the transform should be a child of the camera
float screenW = foregroundCamera.pixelWidth;
float screenH = foregroundCamera.pixelHeight;
if (backgroundImage)
{
PortraitBackground portraitBack = backgroundImage.gameObject.GetComponent<PortraitBackground>();
if (portraitBack != null)
{
Rect backRect = portraitBack.GetBackgroundRect();
screenW = backRect.width;
screenH = backRect.height;
}
}
Vector3 vLeft = foregroundCamera.ScreenToWorldPoint(new Vector3(0f, screenH / 2f, objectZ));
Vector3 vRight = foregroundCamera.ScreenToWorldPoint(new Vector3(screenW, screenH / 2f, objectZ));
float distLeftRight = (vRight - vLeft).magnitude;
Vector3 vBottom = foregroundCamera.ScreenToWorldPoint(new Vector3(screenW / 2f, 0f, objectZ));
Vector3 vTop = foregroundCamera.ScreenToWorldPoint(new Vector3(screenW / 2f, screenH, objectZ));
float distBottomTop = (vTop - vBottom).magnitude;
localScale.x = distLeftRight / initialScale.x;
localScale.y = distBottomTop / initialScale.y;
//Debug.Log("ForegroundRenderer scale: " + localScale + ", screenW: " + screenW + ", screenH: " + screenH + ", objZ: " + objectZ +
// "\nleft: " + vLeft + ", right: " + vRight + ", bottom: " + vBottom + ", vTop: " + vTop +
// "\ndH: " + distLeftRight + ", dV: " + distBottomTop + ", initialScale: " + initialScale);
}
// scale according to color-tex resolution
//localScale.y = localScale.x * colorTex.height / colorTex.width;
// apply color image scale
Vector3 colorImageScale = kinectManager.GetColorImageScale(backgroundRemovalManager.sensorIndex);
if (colorImageScale.x < 0f)
localScale.x = -localScale.x;
if (colorImageScale.y < 0f)
localScale.y = -localScale.y;
transform.localScale = localScale;
}
// centers the renderer's transform, according to the background image
private void CenterRendererTransform(Vector2 anchorPos, float curAnchorPos)
{
lastAnchorPos = curAnchorPos;
if (foregroundCamera && distToBackImage > 0f)
{
float objectZ = distToTransform; // transform.localPosition.z; // the transform should be a child of the camera
float screenW = sensorData.colorImageWidth; // foregroundCamera.pixelWidth;
float screenH = sensorData.colorImageHeight; // foregroundCamera.pixelHeight;
Vector2 screenCenter = new Vector2(screenW / 2f, screenH / 2f);
Vector2 anchorScaled = new Vector2(anchorPos.x * distToTransform / distToBackImage, anchorPos.y * distToTransform / distToBackImage);
Vector3 vCenter = foregroundCamera.ScreenToWorldPoint(new Vector3(screenCenter.x + anchorScaled.x, screenCenter.y + anchorScaled.y, objectZ));
transform.position = vCenter;
//Vector3 vLocalPos = transform.localPosition;
//string sLocalPos = string.Format("({0:F3}, {1:F3}, {2:F3})", vLocalPos.x, vLocalPos.y, vLocalPos.z);
//Debug.Log("ForegroundRenderer anchor: " + anchorPos + ", screenW: " + screenW + ", screenH: " + screenH + ", objZ: " + objectZ + ", localPos: " + sLocalPos);
}
}
}
}

11
Assets/Azure/KinectScripts/ForegroundBlendRenderer.cs.meta

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: fd5b5453b69e67b43b1c57c4d95ccada
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

62
Assets/Azure/KinectScripts/ForegroundToRawImage.cs

@ -0,0 +1,62 @@
using UnityEngine;
using System.Collections;
using UnityEngine.UI;
using com.rfilkov.kinect;
namespace com.rfilkov.components
{
/// <summary>
/// ForegroundToRawImage sets the texture of the RawImage-component to be the BRM's foreground texture.
/// </summary>
public class ForegroundToRawImage : MonoBehaviour
{
private RawImage rawImage;
private KinectManager kinectManager = null;
private BackgroundRemovalManager backManager = null;
void Start()
{
rawImage = GetComponent<RawImage>();
kinectManager = KinectManager.Instance;
backManager = FindObjectOfType<BackgroundRemovalManager>();
}
void Update()
{
if (rawImage && rawImage.texture == null)
{
if (kinectManager && backManager && backManager.enabled /**&& backManager.IsBackgroundRemovalInitialized()*/)
{
rawImage.texture = backManager.GetForegroundTex(); // user's foreground texture
rawImage.rectTransform.localScale = kinectManager.GetColorImageScale(backManager.sensorIndex);
rawImage.color = Color.white;
}
}
//else if (rawImage && rawImage.texture != null)
//{
// if (KinectManager.Instance == null)
// {
// rawImage.texture = null;
// rawImage.color = Color.clear;
// }
//}
}
void OnApplicationPause(bool isPaused)
{
// fix for app pause & restore (UWP)
if (isPaused && rawImage && rawImage.texture != null)
{
rawImage.texture = null;
rawImage.color = Color.clear;
}
}
}
}

12
Assets/Azure/KinectScripts/ForegroundToRawImage.cs.meta

@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: 0ca948b232bd1ce48b254bb84048cbab
timeCreated: 1505645515
licenseType: Store
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

80
Assets/Azure/KinectScripts/ForegroundToRenderer.cs

@ -0,0 +1,80 @@
using UnityEngine;
using System.Collections;
using com.rfilkov.kinect;
namespace com.rfilkov.components
{
/// <summary>
/// ForegroundToRenderer sets the texture of the Renderer-component to be the BRM's foreground texture.
/// </summary>
public class ForegroundToRenderer : MonoBehaviour
{
[Tooltip("Reference to background removal manager. If left to None, it looks up the first available BR-manager in the scene.")]
public BackgroundRemovalManager backgroundRemovalManager = null;
// component references
private Renderer thisRenderer = null;
private KinectManager kinectManager = null;
void Start()
{
thisRenderer = GetComponent<Renderer>();
kinectManager = KinectManager.Instance;
if(backgroundRemovalManager == null)
{
backgroundRemovalManager = FindObjectOfType<BackgroundRemovalManager>();
}
if (kinectManager && kinectManager.IsInitialized() && backgroundRemovalManager && backgroundRemovalManager.enabled)
{
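// match the renderer's aspect ratio to the color image aspect ratio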
Vector3 localScale = transform.localScale;
localScale.z = localScale.x * kinectManager.GetColorImageHeight(backgroundRemovalManager.sensorIndex) / kinectManager.GetColorImageWidth(backgroundRemovalManager.sensorIndex);
//localScale.x = -localScale.x;
// apply color image scale
Vector3 colorImageScale = kinectManager.GetColorImageScale(backgroundRemovalManager.sensorIndex);
if (colorImageScale.x > 0f)
localScale.x = -localScale.x;
if (colorImageScale.y > 0f)
localScale.z = -localScale.z;
transform.localScale = localScale;
}
}
void Update()
{
if (thisRenderer && thisRenderer.material.mainTexture == null)
{
if (kinectManager && backgroundRemovalManager && backgroundRemovalManager.enabled /**&& backManager.IsBackgroundRemovalInitialized()*/)
{
thisRenderer.material.mainTexture = backgroundRemovalManager.GetForegroundTex();
//Debug.Log("BR-manager: " + backroundRemovalManager + ", user index: " + backroundRemovalManager.playerIndex);
}
}
//else if (thisRenderer && thisRenderer.material.mainTexture != null)
//{
// if (KinectManager.Instance == null)
// {
// thisRenderer.sharedMaterial.mainTexture = null;
// }
//}
}
void OnApplicationPause(bool isPaused)
{
// fix for app pause & restore (UWP)
if (isPaused && thisRenderer && thisRenderer.material.mainTexture != null)
{
thisRenderer.sharedMaterial.mainTexture = null;
}
}
}
}

12
Assets/Azure/KinectScripts/ForegroundToRenderer.cs.meta

@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: 1516a5d945302e546a218c0c7a3f048c
timeCreated: 1478884175
licenseType: Store
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

184
Assets/Azure/KinectScripts/FragmentLighting.cs

@ -0,0 +1,184 @@
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
namespace com.rfilkov.components
{
public class FragmentLighting
{
// lighting structures
private Light[] sceneLights = null;
private Bounds lightBounds;
private Vector4[] dirLightData = new Vector4[2];
[SerializeField]
public struct PointLight
{
public Vector4 color;
public float range;
public Vector3 pos;
}
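// stride of one PointLight element in bytes: Vector4 color (16) + float range (4) + Vector3 pos (12)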
private const int SIZE_POINT_LIGHT = 32;
private const int MAX_POINT_LIGHTS = 8;
[SerializeField]
private PointLight[] pointLights = new PointLight[MAX_POINT_LIGHTS];
private ComputeBuffer pointLightsBuffer = null;
private int pointLightsNumber = 0;
[SerializeField]
public struct SpotLight
{
public Vector4 color;
public Vector3 pos;
public Vector4 dir;
public Vector4 pars;
}
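// stride of one SpotLight element in bytes: Vector4 color (16) + Vector3 pos (12) + Vector4 dir (16) + Vector4 pars (16)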
private const int SIZE_SPOT_LIGHT = 60;
private const int MAX_SPOT_LIGHTS = 8;
[SerializeField]
public SpotLight[] spotLights = new SpotLight[MAX_SPOT_LIGHTS];
private ComputeBuffer spotLightsBuffer = null;
private int spotLightsNumber = 0;
const int NUMBER_LIGHTS_MAX = MAX_POINT_LIGHTS / 2 + MAX_SPOT_LIGHTS / 2;
/// <summary>
/// Sets the scene lights and lighted volume bounds.
/// </summary>
public void SetLightsAndBounds(Light[] sceneLights, Vector3 centerPos, Vector3 sizeBounds)
{
this.sceneLights = sceneLights;
this.lightBounds = new Bounds(centerPos, sizeBounds);
}
/// <summary>
/// Releases the used native resources.
/// </summary>
public void ReleaseResources()
{
if (pointLightsBuffer != null)
{
pointLightsBuffer.Release();
pointLightsBuffer = null;
}
if (spotLightsBuffer != null)
{
spotLightsBuffer.Release();
spotLightsBuffer = null;
}
}
/// <summary>
/// Updates the lighting parameters of the material.
/// </summary>
public void UpdateLighting(Material matRenderer, bool bApplyLighting)
{
matRenderer.SetInt("_ApplyLights", bApplyLighting ? 1 : 0);
matRenderer.SetInt("_ApplyShadows", 0);
matRenderer.SetFloat("_Metallic", 0);
ApplyLighting(matRenderer, bApplyLighting);
}
// applies the current lights
private void ApplyLighting(Material matRenderer, bool bApplyLighting)
{
const float interiorCone = 0.1f; // interior cone of the spotlight
dirLightData[1] = Vector4.zero;
int pi = 0;
int si = 0;
if(bApplyLighting)
{
foreach (Light light in sceneLights)
{
if (!light.gameObject.activeInHierarchy || !light.enabled)
continue;
if (light.type == LightType.Directional || Vector3.Distance(lightBounds.center, light.transform.position) < (light.range + lightBounds.extents.x))
{
if (light.type != LightType.Directional && light.shadows != LightShadows.None)
{
light.shadows = LightShadows.None;
}
if (light.type == LightType.Point)
{
if (pi < MAX_POINT_LIGHTS)
{
pointLights[pi].color = light.color * light.intensity;
pointLights[pi].pos = light.gameObject.transform.position;
pointLights[pi].range = light.range;
pi++;
}
}
else if (light.type == LightType.Spot)
{
if (si < MAX_SPOT_LIGHTS)
{
Vector3 vLightFwd = light.gameObject.transform.forward.normalized;
spotLights[si].color = light.color * light.intensity;
spotLights[si].pos = light.gameObject.transform.position;
spotLights[si].dir = new Vector4(vLightFwd.x, vLightFwd.y, vLightFwd.z, Mathf.Cos((light.spotAngle / 2.0f) * Mathf.Deg2Rad));
spotLights[si].pars = new Vector4(light.spotAngle, light.intensity, 1.0f / light.range, interiorCone);
si++;
}
}
else if (light.type == LightType.Directional)
{
Vector3 vLightFwd = light.gameObject.transform.forward.normalized;
dirLightData[0] = new Vector4(vLightFwd.x, vLightFwd.y, vLightFwd.z, 0);
dirLightData[1] = light.color * light.intensity;
}
}
}
}
if (pointLightsBuffer == null)
{
pointLightsBuffer = new ComputeBuffer(MAX_POINT_LIGHTS, SIZE_POINT_LIGHT);
pointLightsBuffer.SetData(pointLights);
matRenderer.SetBuffer("_PointLights", pointLightsBuffer);
}
else
{
pointLightsBuffer.SetData(pointLights);
}
if (spotLightsBuffer == null)
{
spotLightsBuffer = new ComputeBuffer(MAX_SPOT_LIGHTS, SIZE_SPOT_LIGHT);
spotLightsBuffer.SetData(spotLights);
matRenderer.SetBuffer("_SpotLights", spotLightsBuffer);
}
else
{
spotLightsBuffer.SetData(spotLights);
}
pointLightsNumber = pi;
spotLightsNumber = si;
matRenderer.SetInt("_PointLightsNumber", pointLightsNumber);
matRenderer.SetInt("_SpotLightsNumber", spotLightsNumber);
matRenderer.SetVectorArray("_DirectionalLights", dirLightData);
}
}
}

11
Assets/Azure/KinectScripts/FragmentLighting.cs.meta

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: fe7f7748efd77d2498b7b6f26646756d
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

184
Assets/Azure/KinectScripts/HmdHeadMover.cs

@ -0,0 +1,184 @@
using UnityEngine;
using System.Collections;
namespace com.rfilkov.components
{
/// <summary>
/// HmdHeadMover moves the avatar model according to the camera position reported by the HMD tracker.
/// Don't forget to enable the 'External root motion'-setting of the AvatarController-component in this case.
/// </summary>
public class HmdHeadMover : MonoBehaviour
{
[Tooltip("The transform that needs to be followed by the avatar's head, usually the eye-camera position reported by the HMD tracker. When left empty, it defaults to the main camera's position.")]
public Transform targetTransform;
[Tooltip("The transform of the avatar's head. When left empty, it defaults to the head position, as reported by Animator-component.")]
private Transform headTransform;
[Tooltip("Whether the avatar's feet must stick to the ground.")]
public bool groundedFeet = false;
[Tooltip("The transform of the avatar's left toes, if grounding is enabled.")]
private Transform leftToes;
[Tooltip("The transform of the avatar's right toes, if grounding is enabled.")]
private Transform rightToes;
// grounder constants and variables
//private const int raycastLayers = ~2; // Ignore Raycast
private const float maxFootDistanceGround = 0.02f; // maximum distance from lower foot to the ground
private const float maxFootDistanceTime = 0.02f; // 0.2f; // maximum allowed time for the lower foot to stay distant from the ground
//private Transform leftFoot, rightFoot;
private float fFootDistanceInitial = 0f;
private float fFootDistance = 0f;
private float fFootDistanceTime = 0f;
void Start()
{
// if the target transform is not set, use the camera transform
if (targetTransform == null && Camera.main != null)
{
targetTransform = Camera.main.transform;
}
}
void LateUpdate()
{
// move the head and body to the target
MoveHeadToTarget();
}
// moves the avatar's head to the target, and the rest of its body too
private void MoveHeadToTarget()
{
if (headTransform == null)
{
Animator animatorComponent = GetComponent<Animator>();
headTransform = animatorComponent ? animatorComponent.GetBoneTransform(HumanBodyBones.Head) : null;
}
if (!targetTransform || !headTransform)
return;
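// walk up the hierarchy from the head bone, accumulating each parent's offset,
// so that placing the root at the resulting position puts the head at the target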
Transform trans = headTransform.transform;
Vector3 posTrans = targetTransform.position;
while (trans.parent != null)
{
Transform transParent = trans.parent;
Vector3 dirParent = transParent.position - trans.position;
posTrans += dirParent;
trans = transParent;
}
if (groundedFeet)
{
// keep the current correction
float fLastTgtY = posTrans.y;
posTrans.y += fFootDistance;
float fNewDistance = GetDistanceToGround();
float fNewDistanceTime = Time.time;
// Debug.Log(string.Format("PosY: {0:F2}, LastY: {1:F2}, TgrY: {2:F2}, NewDist: {3:F2}, Corr: {4:F2}, Time: {5:F2}", bodyRoot != null ? bodyRoot.position.y : transform.position.y,
// fLastTgtY, targetPos.y, fNewDistance, fFootDistance, fNewDistanceTime));
if (Mathf.Abs(fNewDistance) >= 0.01f && Mathf.Abs(fNewDistance - fFootDistanceInitial) >= maxFootDistanceGround)
{
if ((fNewDistanceTime - fFootDistanceTime) >= maxFootDistanceTime)
{
fFootDistance += (fNewDistance - fFootDistanceInitial);
fFootDistanceTime = fNewDistanceTime;
posTrans.y = fLastTgtY + fFootDistance;
// Debug.Log(string.Format(" >> change({0:F2})! - Corr: {1:F2}, LastY: {2:F2}, TgrY: {3:F2} at time {4:F2}",
// (fNewDistance - fFootDistanceInitial), fFootDistance, fLastTgtY, targetPos.y, fFootDistanceTime));
}
}
else
{
fFootDistanceTime = fNewDistanceTime;
}
}
// set root transform position
if (trans)
{
trans.position = posTrans;
}
// Vector3 posDiff = targetTransform.position - headTransform.position;
// transform.position += posDiff;
//Debug.Log("PosTrans: " + posTrans + ", Transofrm: " + transform.position);
}
// returns the smaller distance from the left or right foot to the ground, or 1000f if neither the left nor the right toes transform is found
private float GetDistanceToGround()
{
if (leftToes == null && rightToes == null)
{
Animator animatorComponent = GetComponent<Animator>();
if (animatorComponent)
{
leftToes = animatorComponent.GetBoneTransform(HumanBodyBones.LeftToes);
rightToes = animatorComponent.GetBoneTransform(HumanBodyBones.RightToes);
}
}
float fDistMin = 1000f;
float fDistLeft = leftToes ? GetTransformDistanceToGround(leftToes) : fDistMin;
float fDistRight = rightToes ? GetTransformDistanceToGround(rightToes) : fDistMin;
fDistMin = Mathf.Abs(fDistLeft) < Mathf.Abs(fDistRight) ? fDistLeft : fDistRight;
if (fDistMin == 1000f)
{
fDistMin = 0f; // fFootDistanceInitial;
}
// Debug.Log (string.Format ("LFootY: {0:F2}, Dist: {1:F2}, RFootY: {2:F2}, Dist: {3:F2}, Min: {4:F2}", leftToes ? leftToes.position.y : 0f, fDistLeft,
// rightToes ? rightToes.position.y : 0f, fDistRight, fDistMin));
return fDistMin;
}
// returns distance from the given transform to the underlying object.
private float GetTransformDistanceToGround(Transform trans)
{
if (!trans)
return 0f;
// RaycastHit hit;
// if(Physics.Raycast(trans.position, Vector3.down, out hit, 2f, raycastLayers))
// {
// return -hit.distance;
// }
// else if(Physics.Raycast(trans.position, Vector3.up, out hit, 2f, raycastLayers))
// {
// return hit.distance;
// }
// else
// {
// if (trans.position.y < 0)
// return -trans.position.y;
// else
// return 1000f;
// }
return -trans.position.y;  // fallback without raycasting: assume the ground is at Y = 0
}
}
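// Illustrative usage sketch (assumptions: the scene contains an avatar with an Animator and an
// AvatarController whose 'External root motion' option is enabled, and the HMD eye camera is the
// scene's main camera; the class name below is hypothetical). The example component simply
// attaches HmdHeadMover at runtime and points it at the main camera.
public class HmdHeadMoverExample : MonoBehaviour
{
    void Start()
    {
        // add the mover to the avatar object this component sits on
        HmdHeadMover headMover = gameObject.AddComponent<HmdHeadMover>();
        // follow the main (HMD eye) camera; HmdHeadMover would also fall back to it by default
        if (Camera.main != null)
            headMover.targetTransform = Camera.main.transform;
        // optionally keep the avatar's feet on the ground
        headMover.groundedFeet = true;
    }
}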
}

12
Assets/Azure/KinectScripts/HmdHeadMover.cs.meta

@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: 7dcb1ccbe48396140817168afcabd300
timeCreated: 1505201861
licenseType: Store
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

390
Assets/Azure/KinectScripts/InteractionInputModule.cs

@@ -0,0 +1,390 @@
using System;
using System.Collections;
using UnityEngine;
using UnityEngine.Serialization;
using UnityEngine.EventSystems;
namespace com.rfilkov.components
{
/// <summary>
/// InteractionInputModule is the input module that can be used as component of the Unity-UI EventSystem.
/// </summary>
public class InteractionInputModule : PointerInputModule, InteractionListenerInterface
{
[Tooltip("Index of the player, tracked by the respective InteractionManager. 0 means the 1st player, 1 - the 2nd one, 2 - the 3rd one, etc.")]
public int playerIndex = 0;
[Tooltip("Whether the left hand interaction is allowed by the respective InteractionManager.")]
public bool leftHandInteraction = true;
[Tooltip("Whether the right hand interaction is allowed by the respective InteractionManager.")]
public bool rightHandInteraction = true;
[Tooltip("Whether to process the hand cursor movements (i.e for hovering ui-elements), or not.")]
public bool processCursorMovement = false;
//private bool m_isLeftHand = false;
private bool m_leftHandGrip = false;
private bool m_rightHandGrip = false;
private Vector3 m_handCursorPos = Vector3.zero;
private Vector2 m_lastCursorPos = Vector2.zero;
private PointerEventData.FramePressState m_framePressState = PointerEventData.FramePressState.NotChanged;
private readonly MouseState m_MouseState = new MouseState();
// interaction manager for the same player
private InteractionManager intManager;
// The single instance of InteractionInputModule
//private static InteractionInputModule instance;
///// <summary>
///// Gets the single InteractionInputModule instance.
///// </summary>
///// <value>The InteractionInputModule instance.</value>
//public static InteractionInputModule Instance
//{
// get
// {
// return instance;
// }
//}
//protected InteractionInputModule()
//{
// instance = this;
//}
protected override void Awake()
{
base.Awake();
intManager = InteractionManager.GetInstance(playerIndex, leftHandInteraction, rightHandInteraction);
}
[SerializeField]
[FormerlySerializedAs("m_AllowActivationOnMobileDevice")]
private bool m_ForceModuleActive;
public bool forceModuleActive
{
get { return m_ForceModuleActive; }
set { m_ForceModuleActive = value; }
}
public override bool IsModuleSupported()
{
return m_ForceModuleActive || intManager != null;
}
public override bool ShouldActivateModule()
{
if (!base.ShouldActivateModule())
return false;
if (intManager == null)
{
intManager = InteractionManager.GetInstance(playerIndex, leftHandInteraction, rightHandInteraction);
}
//bool shouldActivate |= (InteractionManager.Instance != null && InteractionManager.Instance.IsInteractionInited());
bool shouldActivate = m_ForceModuleActive || (m_framePressState != PointerEventData.FramePressState.NotChanged);
if (!shouldActivate && processCursorMovement && intManager &&
(intManager.IsLeftHandPrimary() || intManager.IsRightHandPrimary()))
{
bool bIsLeftHand = intManager.IsLeftHandPrimary();
// check for cursor pos change
Vector2 handCursorPos = bIsLeftHand ? intManager.GetLeftHandScreenPos() : intManager.GetRightHandScreenPos();
if (handCursorPos != m_lastCursorPos)
{
m_lastCursorPos = handCursorPos;
shouldActivate = true;
}
}
return shouldActivate;
}
// public override void ActivateModule()
// {
// base.ActivateModule();
//
// var toSelect = eventSystem.currentSelectedGameObject;
// if (toSelect == null)
// toSelect = eventSystem.firstSelectedGameObject;
//
// eventSystem.SetSelectedGameObject(toSelect, GetBaseEventData());
// }
// public override void DeactivateModule()
// {
// base.DeactivateModule();
// ClearSelection();
// }
public override void Process()
{
if (intManager == null)
{
intManager = InteractionManager.GetInstance(playerIndex, leftHandInteraction, rightHandInteraction);
}
CheckGrippedCursorPosition();
ProcessInteractionEvent();
}
private void CheckGrippedCursorPosition()
{
if (intManager)
{
bool bIsLeftHand = intManager.IsLeftHandPrimary();
// check for gripped hand
bool bHandGrip = bIsLeftHand ? m_leftHandGrip : m_rightHandGrip;
// check for cursor pos change
Vector2 handCursorPos = bIsLeftHand ? intManager.GetLeftHandScreenPos() : intManager.GetRightHandScreenPos();
if (bHandGrip && handCursorPos != (Vector2)m_handCursorPos)
{
// emulate new press
m_framePressState = PointerEventData.FramePressState.Pressed;
m_handCursorPos = handCursorPos;
}
else if (processCursorMovement)
{
m_handCursorPos = handCursorPos;
}
}
}
protected void ProcessInteractionEvent()
{
// Emulate mouse data
var mouseData = GetMousePointerEventData(0);
var leftButtonData = mouseData.GetButtonState(PointerEventData.InputButton.Left).eventData;
// Process the interaction data
ProcessHandPressRelease(leftButtonData);
ProcessMove(leftButtonData.buttonData);
ProcessDrag(leftButtonData.buttonData);
}
protected override MouseState GetMousePointerEventData(int id)
{
// Populate the left button...
PointerEventData leftData;
var created = GetPointerData(kMouseLeftId, out leftData, true);
leftData.Reset();
Vector2 handPos = new Vector2(m_handCursorPos.x * Screen.width, m_handCursorPos.y * Screen.height);
if (created)
{
leftData.position = handPos;
}
leftData.delta = handPos - leftData.position;
leftData.position = handPos;
//leftData.scrollDelta = 0f;
leftData.button = PointerEventData.InputButton.Left;
eventSystem.RaycastAll(leftData, m_RaycastResultCache);
var raycast = FindFirstRaycast(m_RaycastResultCache);
leftData.pointerCurrentRaycast = raycast;
m_RaycastResultCache.Clear();
m_MouseState.SetButtonState(PointerEventData.InputButton.Left, m_framePressState, leftData);
m_framePressState = PointerEventData.FramePressState.NotChanged;
return m_MouseState;
}
/// <summary>
/// Process the current hand press or release event.
/// </summary>
protected void ProcessHandPressRelease(MouseButtonEventData data)
{
var pointerEvent = data.buttonData;
var currentOverGo = pointerEvent.pointerCurrentRaycast.gameObject;
// PointerDown notification
if (data.PressedThisFrame())
{
pointerEvent.eligibleForClick = true;
pointerEvent.delta = Vector2.zero;
pointerEvent.dragging = false;
pointerEvent.useDragThreshold = true;
pointerEvent.pressPosition = pointerEvent.position;
pointerEvent.pointerPressRaycast = pointerEvent.pointerCurrentRaycast;
DeselectIfSelectionChanged(currentOverGo, pointerEvent);
// search for the control that will receive the press
// if we can't find a press handler set the press
// handler to be what would receive a click.
var newPressed = ExecuteEvents.ExecuteHierarchy(currentOverGo, pointerEvent, ExecuteEvents.pointerDownHandler);
// didn't find a press handler... search for a click handler
if (newPressed == null)
newPressed = ExecuteEvents.GetEventHandler<IPointerClickHandler>(currentOverGo);
//Debug.Log("Pressed: " + newPressed);
float time = Time.unscaledTime;
if (newPressed == pointerEvent.lastPress)
{
var diffTime = time - pointerEvent.clickTime;
if (diffTime < 0.3f)
++pointerEvent.clickCount;
else
pointerEvent.clickCount = 1;
pointerEvent.clickTime = time;
}
else
{
pointerEvent.clickCount = 1;
}
pointerEvent.pointerPress = newPressed;
pointerEvent.rawPointerPress = currentOverGo;
pointerEvent.clickTime = time;
// Save the drag handler as well
pointerEvent.pointerDrag = ExecuteEvents.GetEventHandler<IDragHandler>(currentOverGo);
if (pointerEvent.pointerDrag != null)
ExecuteEvents.Execute(pointerEvent.pointerDrag, pointerEvent, ExecuteEvents.initializePotentialDrag);
}
// PointerUp notification
if (data.ReleasedThisFrame())
{
// Debug.Log("Executing pressup on: " + pointer.pointerPress);
ExecuteEvents.Execute(pointerEvent.pointerPress, pointerEvent, ExecuteEvents.pointerUpHandler);
// see if we mouse up on the same element that we clicked on...
var pointerUpHandler = ExecuteEvents.GetEventHandler<IPointerClickHandler>(currentOverGo);
// PointerClick and Drop events
if (pointerEvent.pointerPress != null && pointerEvent.pointerPress == pointerUpHandler && pointerEvent.eligibleForClick)
{
ExecuteEvents.Execute(pointerEvent.pointerPress, pointerEvent, ExecuteEvents.pointerClickHandler);
}
else if (pointerEvent.pointerDrag != null && pointerEvent.dragging)
{
ExecuteEvents.ExecuteHierarchy(currentOverGo, pointerEvent, ExecuteEvents.dropHandler);
}
pointerEvent.eligibleForClick = false;
pointerEvent.pointerPress = null;
pointerEvent.rawPointerPress = null;
if (pointerEvent.pointerDrag != null && pointerEvent.dragging)
ExecuteEvents.Execute(pointerEvent.pointerDrag, pointerEvent, ExecuteEvents.endDragHandler);
pointerEvent.dragging = false;
pointerEvent.pointerDrag = null;
// redo pointer enter / exit to refresh state
// so that if we moused over something that ignored it before
// due to having pressed on something else
// it now gets it.
if (currentOverGo != pointerEvent.pointerEnter)
{
HandlePointerExitAndEnter(pointerEvent, null);
HandlePointerExitAndEnter(pointerEvent, currentOverGo);
}
}
}
public void HandGripDetected(ulong userId, int userIndex, bool isRightHand, bool isHandInteracting, Vector3 handScreenPos)
{
if (userIndex != playerIndex || !isHandInteracting)
return;
bool bHandValid = (leftHandInteraction && !isRightHand) || (rightHandInteraction && isRightHand);
if (!bHandValid)
return;
//Debug.Log("HandGripDetected");
m_framePressState = PointerEventData.FramePressState.Pressed;
//m_isLeftHand = !isRightHand;
m_handCursorPos = handScreenPos;
if (!isRightHand)
m_leftHandGrip = true;
else
m_rightHandGrip = true;
}
public void HandReleaseDetected(ulong userId, int userIndex, bool isRightHand, bool isHandInteracting, Vector3 handScreenPos)
{
if (userIndex != playerIndex || !isHandInteracting)
return;
bool bHandValid = (leftHandInteraction && !isRightHand) || (rightHandInteraction && isRightHand);
if (!bHandValid)
return;
//Debug.Log("HandReleaseDetected");
m_framePressState = PointerEventData.FramePressState.Released;
//m_isLeftHand = !isRightHand;
m_handCursorPos = handScreenPos;
if (!isRightHand)
m_leftHandGrip = false;
else
m_rightHandGrip = false;
}
public bool HandClickDetected(ulong userId, int userIndex, bool isRightHand, Vector3 handScreenPos)
{
if (userIndex != playerIndex)
return false;
bool bHandValid = (leftHandInteraction && !isRightHand) || (rightHandInteraction && isRightHand);
if (!bHandValid)
return false;
//Debug.Log("HandClickDetected");
StartCoroutine(EmulateMouseClick(isRightHand, handScreenPos));
return true;
}
private IEnumerator EmulateMouseClick(bool isRightHand, Vector3 handScreenPos)
{
m_framePressState = PointerEventData.FramePressState.Pressed;
//m_isLeftHand = !isRightHand;
m_handCursorPos = handScreenPos;
yield return new WaitForSeconds(0.2f);
m_framePressState = PointerEventData.FramePressState.Released;
//m_isLeftHand = !isRightHand;
m_handCursorPos = handScreenPos;
yield return null;
}
}
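// Illustrative setup sketch (assumptions: an InteractionManager for the same player exists in the
// scene, so the module can forward hand grips/releases/clicks as pointer events; the class name
// below is hypothetical). Attach it to the EventSystem object to add and configure the
// InteractionInputModule at runtime.
public class InteractionInputModuleSetupExample : MonoBehaviour
{
    void Start()
    {
        // make sure the EventSystem object carries the interaction input module
        InteractionInputModule inputModule = GetComponent<InteractionInputModule>();
        if (inputModule == null)
            inputModule = gameObject.AddComponent<InteractionInputModule>();
        // track the 1st player and allow both hands to interact with the UI
        inputModule.playerIndex = 0;
        inputModule.leftHandInteraction = true;
        inputModule.rightHandInteraction = true;
        // also process plain cursor movement, so UI elements get hover events
        inputModule.processCursorMovement = true;
    }
}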
}

12
Assets/Azure/KinectScripts/InteractionInputModule.cs.meta

@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: 243c3bcf7560250429841bbe09055f45
timeCreated: 1475044533
licenseType: Store
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

1123
Assets/Azure/KinectScripts/InteractionManager.cs

File diff suppressed because it is too large

8
Assets/Azure/KinectScripts/InteractionManager.cs.meta

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: a669d15f0035bbf4889c8092b5bcd201
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:

8
Assets/Azure/KinectScripts/Interfaces.meta

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 9170a0cec4179764c9b1bf2a9a9c38f6
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

3689
Assets/Azure/KinectScripts/Interfaces/DepthSensorBase.cs

File diff suppressed because it is too large

11
Assets/Azure/KinectScripts/Interfaces/DepthSensorBase.cs.meta

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 73b91e3d1fd43b147958460a9f767c75
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

59
Assets/Azure/KinectScripts/Interfaces/DepthSensorDescriptor.cs

@@ -0,0 +1,59 @@
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
namespace com.rfilkov.kinect
{
/// <summary>
/// Descriptor of the sensor interface.
/// </summary>
[System.Serializable]
public class DepthSensorDescriptor
{
/// <summary>
/// Sensor type.
/// </summary>
public string sensorType;
/// <summary>
/// Full path to the sensor interface.
/// </summary>
public string sensorInterface;
/// <summary>
/// Settings of the sensor interface.
/// </summary>
public string sensorIntSettings;
/// <summary>
/// Sensor interface version.
/// </summary>
public string sensorIntVersion;
/// <summary>
/// Transform position.
/// </summary>
public Vector3 transformPos;
/// <summary>
/// Transform rotation.
/// </summary>
public Vector3 transformRot;
/// <summary>
/// Full class path to the depth predictor.
/// </summary>
public string depthPredictor;
/// <summary>
/// Full class path to the body tracking predictor.
/// </summary>
public string bodyTrackingPredictor;
/// <summary>
/// Full class path to the body segmentation predictor.
/// </summary>
public string bodySegmentationPredictor;
}
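// Illustrative sketch: because DepthSensorDescriptor is [System.Serializable], it can be
// round-tripped through Unity's JsonUtility, e.g. to persist the sensor setup between runs.
// The helper class name and the sample field values below are made up for illustration only.
public static class DepthSensorDescriptorExample
{
    // serializes the descriptor to pretty-printed JSON
    public static string ToJson(DepthSensorDescriptor descriptor)
    {
        return JsonUtility.ToJson(descriptor, true);
    }
    // restores a descriptor from its JSON representation
    public static DepthSensorDescriptor FromJson(string json)
    {
        return JsonUtility.FromJson<DepthSensorDescriptor>(json);
    }
    // creates a descriptor filled with sample (not real) values
    public static DepthSensorDescriptor CreateSample()
    {
        DepthSensorDescriptor descriptor = new DepthSensorDescriptor();
        descriptor.sensorType = "Kinect4Azure";  // sample value
        descriptor.sensorInterface = "com.rfilkov.kinect.Kinect4AzureInterface";  // sample value
        descriptor.transformPos = Vector3.zero;
        descriptor.transformRot = Vector3.zero;
        return descriptor;
    }
}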
}

11
Assets/Azure/KinectScripts/Interfaces/DepthSensorDescriptor.cs.meta

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: f862c1359c67d47e4a77a4b5c762744b
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

168
Assets/Azure/KinectScripts/Interfaces/DepthSensorInterface.cs

@@ -0,0 +1,168 @@
using UnityEngine;
using System.Collections;
using System.Collections.Generic;
namespace com.rfilkov.kinect
{
/// <summary>
/// DepthSensorInterface is the template for all sensor-interface implementations.
/// </summary>
public interface DepthSensorInterface
{
// returns the depth sensor platform
KinectInterop.DepthSensorPlatform GetSensorPlatform();
// returns the device-id of the currently opened sensor
string GetSensorDeviceId();
// returns the type of sensor interface settings
System.Type GetSensorSettingsType();
// returns sensor interface settings
DepthSensorBase.BaseSensorSettings GetSensorSettings(DepthSensorBase.BaseSensorSettings settings);
// sets sensor interface settings
void SetSensorSettings(DepthSensorBase.BaseSensorSettings settings);
// returns the list of available sensors, controlled by this sensor interface
List<KinectInterop.SensorDeviceInfo> GetAvailableSensors();
// opens the given sensor and inits needed resources. returns new sensor-data object
KinectInterop.SensorData OpenSensor(KinectManager kinectManager, KinectInterop.FrameSource dwFlags, bool bSyncDepthAndColor, bool bSyncBodyAndDepth);
// closes the sensor and frees used resources
void CloseSensor(KinectInterop.SensorData sensorData);
// enables or disables the pose stream
void EnablePoseStream(KinectInterop.SensorData sensorData, bool bEnable);
// enables or disables synchronization of frames between the master & sub sensors
bool EnableSensorSync(KinectInterop.SensorData sensorData, bool bEnable);
// returns true if the sensor is master, false if it's standalone or subordinate
bool IsSensorMaster();
// checks if the given sensor frame timestamp is synched with the master or not
bool IsSensorFrameSynched(ulong frameTime, ulong masterTime);
// set minimum & maximum infrared values, used in IR texture generation
void SetMinMaxInfraredValues(float minValue, float maxValue);
// initializes the secondary sensor data, after sensor initialization
void InitSensorData(KinectInterop.SensorData sensorData, KinectManager kinectManager);
// checks whether the sensor data is valid. can wait for valid data, as in case of the net-interface
bool IsSensorDataValid();
// returns the body tracker orientation angle (Z-angle), in degrees
float GetBodyTrackerOrientationAngle();
// polls data frames in the sensor-specific thread
void PollSensorFrames(KinectInterop.SensorData sensorData);
// polls coordinate transformation frames and data in the sensor-specific thread
void PollCoordTransformFrames(KinectInterop.SensorData sensorData);
// post-processes the sensor data after polling
void PollSensorFrameTimes(KinectInterop.SensorData sensorData);
// updates sensor data, if needed
// returns true if update is successful, false otherwise
bool UpdateSensorData(KinectInterop.SensorData sensorData, KinectManager kinectManager, bool isPlayMode);
// updates transformed frame textures, if needed
// returns true if update is successful, false otherwise
bool UpdateTransformedFrameTextures(KinectInterop.SensorData sensorData, KinectManager kinectManager);
// updates the selected sensor textures, if needed
// returns true if update is successful, false otherwise
bool UpdateSensorTextures(KinectInterop.SensorData sensorData, KinectManager kinectManager, ulong prevDepthFrameTime, ulong prevIrFrameTime);
// returns sensor transform. Please note transform updates depend on the getPoseFrames-KM setting.
Transform GetSensorTransform();
// returns depth-to-color-camera matrix
Matrix4x4 GetDepthToColorCameraMatrix();
// returns sensor-to-world matrix
Matrix4x4 GetSensorToWorldMatrix();
// sets sensor-to-world matrix
void SetSensorToWorldMatrix(Vector3 sensorWorldPosition, Quaternion sensorWorldRotation, bool isUpdateTransform);
// sets sensor-to-world matrix
void SetSensorToWorldMatrix(Matrix4x4 mSensor2World, bool isUpdateTransform);
// returns the depth camera space table
Vector3[] GetDepthCameraSpaceTable(KinectInterop.SensorData sensorData);
// returns the color camera space table
Vector3[] GetColorCameraSpaceTable(KinectInterop.SensorData sensorData);
// returns depth camera space coordinates for the given depth image point
Vector3 MapDepthPointToSpaceCoords(KinectInterop.SensorData sensorData, Vector2 depthPos, ushort depthVal);
// returns depth image coordinates for the given depth camera space point
Vector2 MapSpacePointToDepthCoords(KinectInterop.SensorData sensorData, Vector3 spacePos);
// returns color camera space coordinates for the given color image point
Vector3 MapColorPointToSpaceCoords(KinectInterop.SensorData sensorData, Vector2 colorPos, ushort depthVal);
// returns color image coordinates for the given color camera space point
Vector2 MapSpacePointToColorCoords(KinectInterop.SensorData sensorData, Vector3 spacePos);
// returns color image coordinates for the given depth image point
Vector2 MapDepthPointToColorCoords(KinectInterop.SensorData sensorData, Vector2 depthPos, ushort depthVal);
// returns depth image coordinates for the given color image point
Vector2 MapColorPointToDepthCoords(KinectInterop.SensorData sensorData, Vector2 colorPos, int minDist, int maxDist);
// returns the anchor position of the background raw image
Vector2 GetBackgroundImageAnchorPos(KinectInterop.SensorData sensorData);
// returns the resolution in pixels of the point-cloud textures
Vector2Int GetPointCloudTexResolution(KinectInterop.SensorData sensorData);
// returns the net-sensor-data for network exchange
KinectInterop.NetSensorData GetNetSensorData(KinectInterop.SensorData sensorData);
// sets the local sensor data from the network exchange data
void SetNetSensorData(KinectInterop.NetSensorData netSensorData, KinectInterop.SensorData sensorData, KinectManager kinectManager);
// returns the sensor pose data for network exchange
KinectInterop.NetPoseData GetSensorNetPoseData(KinectInterop.SensorData sensorData);
// sets the local sensor pose data from the network exchange data
void SetSensorNetPoseData(KinectInterop.NetPoseData netPoseData, KinectInterop.SensorData sensorData, KinectManager kinectManager);
// enables or disables depth camera color frame processing
void EnableDepthCameraColorFrame(KinectInterop.SensorData sensorData, bool isEnable);
// returns the latest depth camera color frame texture along with the last frame time
Texture GetDepthCameraColorFrameTexture(KinectInterop.SensorData sensorData, ref Texture2D copyToTex2D, ref ulong frameTime);
// enables or disables color camera depth frame processing
void EnableColorCameraDepthFrame(KinectInterop.SensorData sensorData, bool isEnable);
// returns the latest color camera depth frame along with the last frame time. the returned data is ushort array.
ushort[] GetColorCameraDepthFrame(KinectInterop.SensorData sensorData, ref ushort[] copyToFrame, ref ulong frameTime);
// returns the latest color camera depth frame along with the last frame time. the returned data frame is byte array.
byte[] GetColorCameraDepthFrameBytes(KinectInterop.SensorData sensorData, ref byte[] copyToFrame, ref ulong frameTime);
// enables or disables color camera infrared frame processing
void EnableColorCameraInfraredFrame(KinectInterop.SensorData sensorData, bool isEnableRawData, bool isEnableTexture);
// returns the latest color camera infrared frame along with the last frame time. the returned data is ushort array.
ushort[] GetColorCameraInfraredFrame(KinectInterop.SensorData sensorData, ref ushort[] copyToFrame, ref ulong frameTime);
// returns the latest color camera infrared frame texture along with the last frame time
Texture GetColorCameraInfraredFrameTexture(KinectInterop.SensorData sensorData, ref Texture2D copyToTex2D, ref ulong frameTime);
// enables or disables color camera body-index frame processing
void EnableColorCameraBodyIndexFrame(KinectInterop.SensorData sensorData, bool isEnable);
// returns the latest color camera body-index frame along with the last frame time
byte[] GetColorCameraBodyIndexFrame(KinectInterop.SensorData sensorData, ref byte[] copyToFrame, ref ulong frameTime);
}
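// Illustrative helper sketch (the class name is hypothetical): the enumeration part of the
// interface can be exercised without opening a device, and works with any implementation
// (e.g. the DummyK4AInterface further below, or a real sensor interface).
public static class DepthSensorInterfaceExample
{
    // logs the platform and all sensors reported by the given sensor interface
    public static void LogAvailableSensors(DepthSensorInterface sensorInt)
    {
        if (sensorInt == null)
            return;
        Debug.Log("Sensor platform: " + sensorInt.GetSensorPlatform());
        List<KinectInterop.SensorDeviceInfo> sensors = sensorInt.GetAvailableSensors();
        for (int i = 0; i < sensors.Count; i++)
        {
            Debug.Log("D" + i + ": " + sensors[i].sensorName + " (id: " + sensors[i].sensorId + ")");
        }
    }
}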
}

11
Assets/Azure/KinectScripts/Interfaces/DepthSensorInterface.cs.meta

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 87d17d44f68821041bda50d9aa091b2a
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

112
Assets/Azure/KinectScripts/Interfaces/DummyK4AInterface.cs

@@ -0,0 +1,112 @@
using UnityEngine;
using System.Collections;
using System.Collections.Generic;
namespace com.rfilkov.kinect
{
/// <summary>
/// DummyK4AInterface is a dummy sensor interface for the Azure Kinect sensors.
/// </summary>
public class DummyK4AInterface : DepthSensorBase
{
public override KinectInterop.DepthSensorPlatform GetSensorPlatform()
{
return KinectInterop.DepthSensorPlatform.DummyK4A;
}
public override List<KinectInterop.SensorDeviceInfo> GetAvailableSensors()
{
List<KinectInterop.SensorDeviceInfo> alSensorInfo = new List<KinectInterop.SensorDeviceInfo>();
KinectInterop.SensorDeviceInfo sensorInfo = new KinectInterop.SensorDeviceInfo();
sensorInfo.sensorId = "DummyK4A";
sensorInfo.sensorName = "Dummy Kinect-for-Azure";
sensorInfo.sensorCaps = KinectInterop.FrameSource.TypeAll;
alSensorInfo.Add(sensorInfo);
return alSensorInfo;
}
public override KinectInterop.SensorData OpenSensor(KinectManager kinectManager, KinectInterop.FrameSource dwFlags, bool bSyncDepthAndColor, bool bSyncBodyAndDepth)
{
// save initial parameters
base.OpenSensor(kinectManager, dwFlags, bSyncDepthAndColor, bSyncBodyAndDepth);
List<KinectInterop.SensorDeviceInfo> alSensors = GetAvailableSensors();
if (deviceIndex < 0 || deviceIndex >= alSensors.Count)
return null;
sensorDeviceId = alSensors[deviceIndex].sensorId;
sensorPlatform = KinectInterop.DepthSensorPlatform.DummyK4A;
KinectInterop.SensorData sensorData = new KinectInterop.SensorData();
sensorData.sensorIntPlatform = sensorPlatform;
sensorData.sensorId = alSensors[deviceIndex].sensorId;
sensorData.sensorName = alSensors[deviceIndex].sensorName;
sensorData.sensorCaps = alSensors[deviceIndex].sensorCaps;
sensorData.colorImageWidth = 1920; // 1080p
sensorData.colorImageHeight = 1080;
sensorData.depthImageWidth = 640; // NFOV Unbinned
sensorData.depthImageHeight = 576;
sensorData.depthCamIntr = JsonUtility.FromJson<KinectInterop.CameraIntrinsics>(jsonDepthCamIntr);
sensorData.colorCamIntr = JsonUtility.FromJson<KinectInterop.CameraIntrinsics>(jsonColorCamIntr);
sensorData.depth2ColorExtr = JsonUtility.FromJson<KinectInterop.CameraExtrinsics>(jsonDepth2ColorExtr);
sensorData.color2DepthExtr = JsonUtility.FromJson<KinectInterop.CameraExtrinsics>(jsonColor2DepthExtr);
float[] r = sensorData.depth2ColorExtr.rotation;
float[] t = sensorData.depth2ColorExtr.translation;
// build the depth-to-color camera matrix: the 3x3 row-major rotation fills the upper-left block
// (passed column by column), and the translation is converted from millimeters to meters
depth2colorCamMat = new Matrix4x4(new Vector4(r[0], r[3], r[6], 0), new Vector4(r[1], r[4], r[7], 0), new Vector4(r[2], r[5], r[8], 0), new Vector4(t[0] * 0.001f, t[1] * 0.001f, t[2] * 0.001f, 1));
//Debug.Log("Depth2colorCamMat Pos: " + (Vector3)depth2colorCamMat.GetColumn(3) + ", Rot: " + depth2colorCamMat.rotation.eulerAngles);
// flip color & depth image vertically
sensorData.colorImageScale = new Vector3(-1f, -1f, 1f);
sensorData.depthImageScale = new Vector3(-1f, -1f, 1f);
sensorData.infraredImageScale = new Vector3(-1f, -1f, 1f);
sensorData.sensorSpaceScale = new Vector3(-1f, -1f, 1f);
sensorData.unitToMeterFactor = 0.001f;
// depth camera offset & matrix z-flip
sensorRotOffset = Vector3.zero; // new Vector3(6f, 0f, 0f); // the depth camera is tilted 6 degrees downwards
sensorRotFlipZ = true;
sensorRotIgnoreY = true;
// color camera data & intrinsics
sensorData.colorImageFormat = TextureFormat.BGRA32;
sensorData.colorImageStride = 4; // 4 bytes per pixel
if(consoleLogMessages)
Debug.Log("D" + deviceIndex + " DummyK4A-sensor opened");
return sensorData;
}
public override void CloseSensor(KinectInterop.SensorData sensorData)
{
if (consoleLogMessages)
Debug.Log("D" + deviceIndex + " DummyK4A-sensor closed");
}
private const string jsonDepthCamIntr = "{ \"cameraType\": 0, \"width\": 640, \"height\": 576, \"ppx\": 319.3891296386719, \"ppy\": 339.0096435546875," +
"\"fx\": 505.0830078125, \"fy\": 505.2060546875, \"distType\": 4, \"distCoeffs\": [0.45811858773231509,-0.09587264806032181,-0.008291528560221196,0.7999407649040222,-0.01724848523736,-0.03864333778619766]," +
"\"codx\": 0.0, \"cody\": 0.0, \"p2\": -0.00007324512989725918, \"p1\": -0.00015797713422216475, \"maxRadius\": 0.0, \"hFOV\": 64.7133560180664, \"vFOV\": 59.371849060058597 }";
private const string jsonColorCamIntr = "{ \"cameraType\": 1, \"width\": 1920, \"height\": 1080, \"ppx\": 953.6868286132813, \"ppy\": 553.8844604492188," +
"\"fx\": 903.1810913085938, \"fy\": 903.4053955078125, \"distType\": 4, \"distCoeffs\": [0.8302328586578369,-2.98026442527771,1.6583690643310547,0.7071738839149475,-2.815004825592041,1.5919547080993653]," +
"\"codx\": 0.0, \"cody\": 0.0, \"p2\": -0.0001697207917459309, \"p1\": 0.0007688929326832295, \"maxRadius\": 0.0, \"hFOV\": 93.49346160888672, \"vFOV\": 61.73675537109375 }";
private const string jsonDepth2ColorExtr = "{ \"rotation\": [0.9999944567680359,0.003319731680676341,-0.00013891232083551586,-0.0032980330288410188," +
"0.9968001842498779,0.07986554503440857,0.00040359998820349574,-0.07986464351415634,0.9968056082725525]," +
"\"translation\": [-31.988178253173829,-2.296376943588257,4.040627956390381] }";
private const string jsonColor2DepthExtr = "{ \"rotation\": [1.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0]," +
"\"translation\": [0.0,0.0,0.0] }";
}
}

11
Assets/Azure/KinectScripts/Interfaces/DummyK4AInterface.cs.meta

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 71599eb36be4254469c6650b3f455914
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

1846
Assets/Azure/KinectScripts/Interfaces/Kinect2Interface.cs

File diff suppressed because it is too large

11
Assets/Azure/KinectScripts/Interfaces/Kinect2Interface.cs.meta

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 3187c833c6106e54facb6071b57475a2
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

2881
Assets/Azure/KinectScripts/Interfaces/Kinect4AzureInterface.cs

File diff suppressed because it is too large

11
Assets/Azure/KinectScripts/Interfaces/Kinect4AzureInterface.cs.meta

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: e8f864d895d4d9c4cb2a9b96bb86c08b
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

426
Assets/Azure/KinectScripts/Interfaces/Kinect4AzureSyncher.cs

@@ -0,0 +1,426 @@
using Microsoft.Azure.Kinect.Sensor;
using System;
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
namespace com.rfilkov.kinect
{
/// <summary>
/// Kinect4AzureSyncher synchronizes the captures received from the master and subordinate k4a-devices.
/// </summary>
public class Kinect4AzureSyncher
{
// data for each sensor
private class SyncherSensorData
{
public long expDelay;
public long capTimestamp;
public Capture capture;
public long pushCapTimestamp;
public Capture pushCapture;
public long btTimestamp;
}
// max allowed timestamp error
private const long MAX_TIME_ERROR = 100000; // 10000; // 1000;
// available sensor interfaces
private List<Kinect4AzureInterface> sensorInts = new List<Kinect4AzureInterface>();
private List<KinectInterop.SensorData> sensorDatas = new List<KinectInterop.SensorData>();
private List<long> expectedDelays = new List<long>();
// number of sensors and index of the master
private int numSensors = 0;
private int iMaster = -1; // index of the master interface
// master play time
private long masterPlayTime = 0;
private object masterPlayLock = new object();
// syncher sensor data
private SyncherSensorData[] syncherData = null;
// syncher lock object
private object syncherLock = new object();
public Kinect4AzureSyncher()
{
}
// initializes the syncher for the given sensor interface
public int StartSyncherForSensor(Kinect4AzureInterface sensorInt, KinectInterop.SensorData sensorData, bool isMaster, long expectedDelay)
{
if (sensorInt == null)
return -1;
int sensorIndex = numSensors;
sensorInts.Add(sensorInt);
sensorDatas.Add(sensorData);
expectedDelays.Add(expectedDelay);
numSensors++;
if(isMaster)
{
if (iMaster >= 0)
throw new Exception("Master index already set at " + iMaster + ". Current interface index is " + sensorIndex + ". Multiple masters are not supported.");
iMaster = sensorIndex;
}
//Debug.Log("Started syncher for sensor D" + sensorInt.deviceIndex + ", delay: " + expectedDelay + ", index: " + sensorIndex + ", master: " + iMaster);
return sensorIndex;
}
// releases the resources taken by the syncher data
public void StopSyncher()
{
lock(syncherLock)
{
for (int i = numSensors - 1; i >= 0; i--)
{
if (syncherData != null && syncherData[i] != null)
{
if (syncherData[i].capture != null)
{
syncherData[i].capture.Dispose();
syncherData[i].capture = null;
}
if (syncherData[i].pushCapture != null)
{
syncherData[i].pushCapture.Dispose();
syncherData[i].pushCapture = null;
}
}
}
}
//Debug.Log("Stopped syncher for " + numSensors + " sensors.");
}
// sets master device play time
public void SetMasterPlayTime(long playTime)
{
lock(masterPlayLock)
{
masterPlayTime = playTime;
}
}
// returns the latest master play time
public long GetMasterPlayTime()
{
long playTime = 0;
lock(masterPlayLock)
{
playTime = masterPlayTime;
}
return playTime;
}
// returns the master index, or -1 if no master is set
public int GetMasterIndex()
{
return iMaster;
}
// checks if the given frame time is synched or not
public bool IsSensorFrameSynched(int sensorIndex, ulong frameTime, ulong masterTime)
{
if(syncherData != null && sensorIndex >= 0 && sensorIndex < syncherData.Length)
{
long expTime = (long)masterTime + syncherData[sensorIndex].expDelay;
long subError = (long)frameTime - expTime;
if (frameTime != 0 && subError >= -MAX_TIME_ERROR && subError <= MAX_TIME_ERROR)
{
return true;
}
}
return false;
}
// updates the sensor capture
public void UpdateCapture(int sensorIndex, long capTimestamp, Capture capture)
{
if (capture == null)
return;
if (capTimestamp == 0)
{
//Debug.Log("Ignoring capture for syncher index " + sensorIndex + ". Timestamp: " + capTimestamp);
capture.Dispose();
return;
}
lock (syncherLock)
{
if (syncherData == null || numSensors != syncherData.Length || syncherData[sensorIndex] == null)
{
CreateSyncherData(sensorIndex);
}
// dispose current capture
if (syncherData[sensorIndex].capture != null)
{
//Debug.Log("Disposing capture for syncher index " + sensorIndex + ". Timestamp: " + syncherData[sensorIndex].capTimestamp);
syncherData[sensorIndex].capture.Dispose();
syncherData[sensorIndex].capture = null;
}
// set new capture
//Debug.Log("Setting capture for syncher index " + sensorIndex + ". Timestamp: " + capTimestamp);
syncherData[sensorIndex].capTimestamp = capTimestamp;
syncherData[sensorIndex].capture = capture;
// check for synched captures
bool bAllSynched = numSensors > 1 && iMaster >= 0 && syncherData[iMaster] != null && syncherData[iMaster].capTimestamp != 0;
if (bAllSynched)
{
long masterTime = syncherData[iMaster].capTimestamp;
for (int i = 0; i < numSensors; i++)
{
if (syncherData[i] == null || syncherData[i].capTimestamp == 0)
{
bAllSynched = false;
break;
}
long subTime = syncherData[i].capTimestamp;
long expTime = masterTime + syncherData[i].expDelay;
long subError = subTime - expTime;
if (i != iMaster && (subTime == 0 || subError < -MAX_TIME_ERROR || subError > MAX_TIME_ERROR))
{
bAllSynched = false;
break;
}
}
}
if (bAllSynched)
{
//Debug.Log("Synched captures. Index: " + sensorIndex + " MasterTime: " + syncherData[iMaster].capTimestamp);
// process synched sensor captures
for (int i = 0; i < numSensors; i++)
{
Kinect4AzureInterface sensorInt = sensorInts[i];
KinectInterop.SensorData sensorData = sensorDatas[i];
//Debug.Log(" Processing capture " + i + ". Timestamp: " + syncherData[i].capTimestamp);
sensorInt.ProcessSensorCapture(sensorData, syncherData[i].capture);
syncherData[i].capture = null;
}
}
else
{
//Debug.Log("Captures not synched. Index: " + sensorIndex + " ThisTime: " + syncherData[sensorIndex].capTimestamp +
// ", MasterTime: " + syncherData[iMaster].capTimestamp + ", diff: " + (syncherData[iMaster].capTimestamp - syncherData[sensorIndex].capTimestamp));
}
}
}
// updates the push bt-capture
public void UpdatePushBtCapture(int sensorIndex, long capTimestamp, Capture capture)
{
if (capture == null || capture.Depth == null)
return;
if (capTimestamp == 0)
{
//Debug.Log("Ignoring push-capture for syncher index " + sensorIndex + ". Timestamp: " + capTimestamp);
capture.Dispose();
return;
}
lock (syncherLock)
{
if (syncherData == null || numSensors != syncherData.Length || syncherData[sensorIndex] == null)
{
CreateSyncherData(sensorIndex);
}
// dispose current capture
if (syncherData[sensorIndex].pushCapture != null)
{
//Debug.Log("Disposing push-capture for syncher index " + sensorIndex + ". Timestamp: " + syncherData[sensorIndex].pushCapTimestamp);
syncherData[sensorIndex].pushCapture.Dispose();
syncherData[sensorIndex].pushCapture = null;
}
// set new capture
//Debug.Log("Setting push-capture for syncher index " + sensorIndex + ". Timestamp: " + capTimestamp);
syncherData[sensorIndex].pushCapTimestamp = capTimestamp;
syncherData[sensorIndex].pushCapture = capture;
// check for synched captures
bool bAllSynched = numSensors > 1 && iMaster >= 0 && syncherData[iMaster] != null && syncherData[iMaster].pushCapTimestamp != 0;
if (bAllSynched)
{
long masterTime = syncherData[iMaster].pushCapTimestamp;
for (int i = 0; i < numSensors; i++)
{
if (syncherData[i] == null || syncherData[i].pushCapTimestamp == 0)
{
bAllSynched = false;
break;
}
long subTime = syncherData[i].pushCapTimestamp;
long expTime = masterTime + syncherData[i].expDelay;
long subError = subTime - expTime;
if (i != iMaster && (subTime == 0 || subError < -MAX_TIME_ERROR || subError > MAX_TIME_ERROR))
{
bAllSynched = false;
break;
}
}
}
if (bAllSynched)
{
//Debug.Log("Synched push-captures. Index: " + sensorIndex + " MasterTime: " + syncherData[iMaster].pushCapTimestamp);
// process synched sensor captures
for (int i = 0; i < numSensors; i++)
{
Kinect4AzureInterface sensorInt = sensorInts[i];
KinectInterop.SensorData sensorData = sensorDatas[i];
//Debug.Log(" Processing push capture " + i + ". Timestamp: " + syncherData[i].pushCapTimestamp);
sensorInt.PushBodyFrame(sensorData, syncherData[i].pushCapture, true);
syncherData[i].pushCapture = null;
}
}
else
{
//Debug.Log("Push-captures not synched. Index: " + sensorIndex + " ThisTime: " + syncherData[sensorIndex].pushCapTimestamp +
// ", MasterTime: " + syncherData[iMaster].pushCapTimestamp + ", diff: " + (syncherData[iMaster].pushCapTimestamp - syncherData[sensorIndex].pushCapTimestamp));
}
}
}
// updates the body tracking frame
public void UpdateBtFrame(int sensorIndex, long frameTimestamp)
{
if(frameTimestamp == 0)
{
//Debug.Log("Ignoring bt-frame for syncher index " + sensorIndex + ". Timestamp: " + frameTimestamp);
return;
}
lock (syncherLock)
{
if(syncherData == null || numSensors != syncherData.Length || syncherData[sensorIndex] == null)
{
CreateSyncherData(sensorIndex);
}
// set new frame
//Debug.Log("Setting bt-frame for syncher index " + sensorIndex + ". Timestamp: " + frameTimestamp);
syncherData[sensorIndex].btTimestamp = frameTimestamp;
// check for synched body frames
bool bAllSynched = numSensors > 1 && iMaster >= 0 && syncherData[iMaster] != null && syncherData[iMaster].btTimestamp != 0;
if (bAllSynched)
{
long masterTime = syncherData[iMaster].btTimestamp;
for (int i = 0; i < numSensors; i++)
{
if(syncherData[i] == null || syncherData[i].btTimestamp == 0)
{
bAllSynched = false;
break;
}
long subTime = syncherData[i].btTimestamp;
long expTime = masterTime + syncherData[i].expDelay;
long subError = subTime - expTime;
if (i != iMaster && (subTime == 0 || subError < -MAX_TIME_ERROR || subError > MAX_TIME_ERROR))
{
bAllSynched = false;
break;
}
}
}
if (bAllSynched)
{
//Debug.Log("Synched bt-frames. Index: " + sensorIndex + " MasterTime: " + syncherData[iMaster].btTimestamp);
// process synched body frames
for (int i = 0; i < numSensors; i++)
{
Kinect4AzureInterface sensorInt = sensorInts[i];
KinectInterop.SensorData sensorData = sensorDatas[i];
//Debug.Log(" Processing bt-frame " + i + ". Timestamp: " + syncherData[i].btTimestamp);
sensorInt.ProcessBodyFrame(sensorData, IntPtr.Zero, true);
sensorInt.ProcessBtSensorCapture(sensorData);
}
}
else
{
//Debug.Log("Bt-frames not synched. Index: " + sensorIndex + " ThisTime: " + syncherData[sensorIndex].btTimestamp +
// ", MasterTime: " + syncherData[iMaster].btTimestamp + ", diff: " + (syncherData[iMaster].btTimestamp - syncherData[sensorIndex].btTimestamp));
}
}
}
// creates and returns syncher data, as needed
private SyncherSensorData CreateSyncherData(int sensorIndex)
{
if(syncherData == null || numSensors != syncherData.Length)
{
syncherData = new SyncherSensorData[numSensors];
}
if(syncherData[sensorIndex] == null)
{
syncherData[sensorIndex] = new SyncherSensorData();
syncherData[sensorIndex].expDelay = expectedDelays[sensorIndex];
syncherData[sensorIndex].capture = null;
syncherData[sensorIndex].pushCapture = null;
}
return syncherData[sensorIndex];
}
}
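// Illustrative sketch of how a caller might register multiple devices with the syncher
// (assumptions: the Kinect4AzureInterface instances are already created and their SensorData
// objects opened elsewhere; the expected delays use the same time units as the capture
// timestamps; the class name is hypothetical).
public static class Kinect4AzureSyncherExample
{
    public static Kinect4AzureSyncher CreateSyncher(List<Kinect4AzureInterface> sensorInts,
        List<KinectInterop.SensorData> sensorDatas, int masterIndex, long subDelay)
    {
        Kinect4AzureSyncher syncher = new Kinect4AzureSyncher();
        for (int i = 0; i < sensorInts.Count; i++)
        {
            bool isMaster = (i == masterIndex);
            long expectedDelay = isMaster ? 0 : subDelay;
            // each sensor gets its own index in the syncher; exactly one may be the master
            syncher.StartSyncherForSensor(sensorInts[i], sensorDatas[i], isMaster, expectedDelay);
        }
        // the sensor-specific capture threads then call UpdateCapture(), UpdatePushBtCapture() and
        // UpdateBtFrame() with their timestamps, and the syncher dispatches the frames once all of
        // them fall within MAX_TIME_ERROR of the master's timestamp plus the expected delay
        return syncher;
    }
}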
}

11
Assets/Azure/KinectScripts/Interfaces/Kinect4AzureSyncher.cs.meta

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: a4978d3a33592e54f9bcb609ec842a27
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

628
Assets/Azure/KinectScripts/Interfaces/KinectFloorDetector.cs

@@ -0,0 +1,628 @@
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using com.rfilkov.kinect;
namespace com.rfilkov.kinect
{
/// <summary>
/// KinectFloorDetector is based on the 'Azure Kinect Floor Plane Detection Sample', which demonstrates one way to estimate the floor plane.
/// </summary>
public class KinectFloorDetector
{
/// <summary>
/// Smoothing factor used for sensor position and rotation update.
/// </summary>
public float smoothFactor = 5f;
// reference to the sensor data
private KinectInterop.SensorData sensorData = null;
private Vector3 spaceScale = Vector3.one;
private ulong lastDepthFrameTime = 0;
// IMU data
private Vector3 imuUpVector = Vector3.zero;
//private Transform imuVectorTrans = null;
// data buffers
private Vector3[] depth2SpaceTable = null;
private int depth2SpaceWidth = 0, depth2SpaceHeight = 0;
private float[] histMinMax = null;
private float[] planePosNorm = null;
private int binAggregation = 6;
public int minFloorPointCount = 1024;
public float planeMaxTiltInDeg = 5f;
private float histBinSize = 0f;
private int histBufferLength = 0;
// compute buffers
private ComputeBuffer pointCloudSpaceBuffer = null;
private ComputeBuffer pointCloudDepthBuffer = null;
private ComputeBuffer pointCloudPosBuffer = null;
private ComputeBuffer pointCloudOfsBuffer = null;
private ComputeBuffer pointCloudMaskBuffer = null;
private ComputeBuffer ofsHistMinMaxBuffer = null;
private ComputeBuffer ofsHistBinLeftBuffer = null;
private ComputeBuffer ofsHistBinCountBuffer = null;
private ComputeBuffer histCumulativeCountBuffer = null;
private ComputeBuffer planeIndicesBuffer = null;
private ComputeBuffer planePosNormBuffer = null;
// compute shaders
private ComputeShader floorDetOffsetEstShader = null;
private int floorDetOffsetEstKernel = -1;
private ComputeShader floorDetOffsetMinMaxShader = null;
private int floorDetOffsetMinMaxKernel = -1;
private ComputeShader floorDetOffsetHistShader = null;
private int floorDetOffsetHistKernel = -1;
private ComputeShader floorDetPlaneEstShader = null;
private int floorDetPlaneEstKernel = -1;
// results
private bool bPlaneValid = false;
private Vector3 vPlanePos = Vector3.zero;
private Vector3 vPlaneNorm = Vector3.up;
private Quaternion qSensorRot = Quaternion.identity;
private Plane floorPlane = new Plane();
private float fSensorHeight = 1f;
// time
private const float SMOOTH_TIME_THRESHOLD = 1f;
private float fLastTimeSecs = 0f;
// routine params
private const int WAIT_FRAMES_BEFORE_GPUGET = 2;
private int minDepthDistance = 0;
private int maxDepthDistance = 10000;
//private bool isImuVectorSet = false;
private bool isDepthFrameSet = false;
private MonoBehaviour callerInstance = null;
private IEnumerator floorRoutine = null;
private bool isRoutineRunning = false;
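// Typical lifecycle (illustrative): a MonoBehaviour calls InitFloorDetector(this, sensorData, maxDepthMm)
// once, then feeds each new depth frame to UpdateFloorDetector(); when UpdateFloorDetector() returns true,
// the results can be read via GetFloorPlane(), GetSensorPosition() and GetSensorRotation().
// FinishFloorDetector() releases the compute buffers and stops the internal coroutine on shutdown.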
/// <summary>
/// Checks if a floor plane has been detected or not.
/// </summary>
/// <returns>true if the floor plane is valid, false otherwise</returns>
public bool IsFloorValid()
{
return bPlaneValid;
}
/// <summary>
/// Gets the timestamp of the depth frame used for floor plane detection.
/// </summary>
/// <returns>Depth frame timestamp</returns>
public ulong GetDepthTimestamp()
{
return lastDepthFrameTime;
}
/// <summary>
/// Gets the floor plane position.
/// </summary>
/// <returns>Floor plane position</returns>
public Vector3 GetFloorPosition()
{
return vPlanePos;
}
/// <summary>
/// Gets the floor plane normal.
/// </summary>
/// <returns>Floor plane normal</returns>
public Vector3 GetFloorNormal()
{
return vPlaneNorm;
}
/// <summary>
/// Gets the detected floor plane.
/// </summary>
/// <returns>Detected floor plane</returns>
public Plane GetFloorPlane()
{
return floorPlane;
}
/// <summary>
/// Gets the estimated sensor position, in meters.
/// </summary>
/// <returns>Sensor position, in meters</returns>
public Vector3 GetSensorPosition()
{
return new Vector3(0f, fSensorHeight, 0f);
}
/// <summary>
/// Gets the estimated sensor rotation.
/// </summary>
/// <returns>Sensor rotation</returns>
public Quaternion GetSensorRotation()
{
return qSensorRot;
}
/// <summary>
/// Initializes the buffers and shaders used by the floor detector.
/// </summary>
/// <param name="sensorData">Sensor data</param>
/// <param name="maxDepthMm">Max depth distance in mm</param>
public void InitFloorDetector(MonoBehaviour caller, KinectInterop.SensorData sensorData, int maxDepthMm)
{
this.callerInstance = caller;
this.sensorData = sensorData;
if (sensorData == null || sensorData.depthImageWidth == 0 || sensorData.depthImageHeight == 0)
return;
if (floorDetOffsetEstShader == null)
{
floorDetOffsetEstShader = Resources.Load("FloorDetectionOffsetEstShader") as ComputeShader;
floorDetOffsetEstKernel = floorDetOffsetEstShader != null ? floorDetOffsetEstShader.FindKernel("EstimatePointCloudPosOfs") : -1;
}
if (floorDetOffsetMinMaxShader == null)
{
floorDetOffsetMinMaxShader = Resources.Load("FloorDetectionOffsetMinMaxShader") as ComputeShader;
floorDetOffsetMinMaxKernel = floorDetOffsetMinMaxShader != null ? floorDetOffsetMinMaxShader.FindKernel("EstimateOffsetMinMax") : -1;
}
if (floorDetOffsetHistShader == null)
{
floorDetOffsetHistShader = Resources.Load("FloorDetectionOffsetHistShader") as ComputeShader;
floorDetOffsetHistKernel = floorDetOffsetHistShader != null ? floorDetOffsetHistShader.FindKernel("EstimateOffsetHist") : -1;
}
if (floorDetPlaneEstShader == null)
{
floorDetPlaneEstShader = Resources.Load("FloorDetectionPlanePointsShader") as ComputeShader;
floorDetPlaneEstKernel = floorDetPlaneEstShader != null ? floorDetPlaneEstShader.FindKernel("EstimatePlanePoints") : -1;
}
if (pointCloudSpaceBuffer == null)
{
int spaceBufferLength = sensorData.depthImageWidth * sensorData.depthImageHeight * 3;
pointCloudSpaceBuffer = new ComputeBuffer(spaceBufferLength, sizeof(float));
}
if(pointCloudDepthBuffer == null)
{
int depthBufferLength = (sensorData.depthImageWidth * sensorData.depthImageHeight) >> 1;  // two ushort depth values are packed into each uint
pointCloudDepthBuffer = new ComputeBuffer(depthBufferLength, sizeof(uint));
}
if (pointCloudPosBuffer == null)
{
int posBufferLength = sensorData.depthImageWidth * sensorData.depthImageHeight * 3;
pointCloudPosBuffer = new ComputeBuffer(posBufferLength, sizeof(float));
}
if (pointCloudOfsBuffer == null)
{
int ofsBufferLength = sensorData.depthImageWidth * sensorData.depthImageHeight;
pointCloudOfsBuffer = new ComputeBuffer(ofsBufferLength, sizeof(float));
}
if (pointCloudMaskBuffer == null)
{
int maskBufferLength = sensorData.depthImageWidth * sensorData.depthImageHeight;
pointCloudMaskBuffer = new ComputeBuffer(maskBufferLength, sizeof(int));
}
if (ofsHistMinMaxBuffer == null)
{
histMinMax = new float[2];
ofsHistMinMaxBuffer = new ComputeBuffer(histMinMax.Length, sizeof(float));
}
// hist bin size
float planeDisplacementRangeInMeters = 0.050f; // 5 cm in meters
//binAggregation = 6;
histBinSize = planeDisplacementRangeInMeters / binAggregation;
float fMaxDepth = (float)maxDepthMm / 1000f;
histBufferLength = Mathf.FloorToInt(2 * fMaxDepth / histBinSize) + 1;
//Debug.Log("histBinSize: " + histBinSize + ", histBufferLength: " + histBufferLength);
if (ofsHistBinLeftBuffer == null)
{
ofsHistBinLeftBuffer = new ComputeBuffer(histBufferLength, sizeof(float));
}
if (ofsHistBinCountBuffer == null)
{
ofsHistBinCountBuffer = new ComputeBuffer(histBufferLength, sizeof(uint));
}
if (histCumulativeCountBuffer == null)
{
histCumulativeCountBuffer = new ComputeBuffer(histBufferLength, sizeof(uint));
}
if(planeIndicesBuffer == null)
{
int planeIndicesLength = sensorData.depthImageWidth * sensorData.depthImageHeight;
planeIndicesBuffer = new ComputeBuffer(planeIndicesLength, sizeof(uint));
}
if(planePosNormBuffer == null)
{
planePosNorm = new float[4 * 3]; // pos & norm are v3
planePosNormBuffer = new ComputeBuffer(planePosNorm.Length, sizeof(float));
}
spaceScale = sensorData.sensorSpaceScale;
//minFloorPointCount = 1024;
//planeMaxTiltInDeg = 5f;
imuUpVector = Vector3.up;
bPlaneValid = false;
if(callerInstance != null)
{
isRoutineRunning = true;
floorRoutine = UpdateFloorAsync();
callerInstance.StartCoroutine(floorRoutine);
}
}
/// <summary>
/// Releases the buffers and shaders used by the floor detector.
/// </summary>
public void FinishFloorDetector()
{
if(isRoutineRunning)
{
isRoutineRunning = false;
callerInstance.StopCoroutine(floorRoutine);
floorRoutine = null;
}
if (pointCloudSpaceBuffer != null)
{
pointCloudSpaceBuffer.Dispose();
pointCloudSpaceBuffer = null;
}
if (pointCloudDepthBuffer != null)
{
pointCloudDepthBuffer.Dispose();
pointCloudDepthBuffer = null;
}
if (pointCloudPosBuffer != null)
{
pointCloudPosBuffer.Dispose();
pointCloudPosBuffer = null;
}
if (pointCloudOfsBuffer != null)
{
pointCloudOfsBuffer.Dispose();
pointCloudOfsBuffer = null;
}
if (pointCloudMaskBuffer != null)
{
pointCloudMaskBuffer.Dispose();
pointCloudMaskBuffer = null;
}
if (ofsHistMinMaxBuffer != null)
{
ofsHistMinMaxBuffer.Dispose();
ofsHistMinMaxBuffer = null;
}
if (ofsHistBinLeftBuffer != null)
{
ofsHistBinLeftBuffer.Dispose();
ofsHistBinLeftBuffer = null;
}
if (ofsHistBinCountBuffer != null)
{
ofsHistBinCountBuffer.Dispose();
ofsHistBinCountBuffer = null;
}
if (histCumulativeCountBuffer != null)
{
histCumulativeCountBuffer.Dispose();
histCumulativeCountBuffer = null;
}
if(planeIndicesBuffer != null)
{
planeIndicesBuffer.Dispose();
planeIndicesBuffer = null;
}
if(planePosNormBuffer != null)
{
planePosNormBuffer.Dispose();
planePosNormBuffer = null;
}
if (floorDetOffsetEstShader != null)
{
floorDetOffsetEstShader = null;
}
if (floorDetOffsetMinMaxShader != null)
{
floorDetOffsetMinMaxShader = null;
}
if (floorDetOffsetHistShader != null)
{
floorDetOffsetHistShader = null;
}
if(floorDetPlaneEstShader != null)
{
floorDetPlaneEstShader = null;
}
}
///// <summary>
///// Updates the IMU up vector from the sample.
///// </summary>
///// <param name="imuAcc">IMU accelerometer sample</param>
///// <param name="accDepthRot">Extrinsics rotation between the accelerometer and depth sensor</param>
//public void UpdateImuUpVector(Vector3 imuAcc, float[] accDepthRot)
//{
// Vector3 Rx = new Vector3(accDepthRot[0], accDepthRot[1], accDepthRot[2]);
// Vector3 Ry = new Vector3(accDepthRot[3], accDepthRot[4], accDepthRot[5]);
// Vector3 Rz = new Vector3(accDepthRot[6], accDepthRot[7], accDepthRot[8]);
// Vector3 depthAcc = new Vector3( Vector3.Dot(Rx, imuAcc), Vector3.Dot(Ry, imuAcc), Vector3.Dot(Rz, imuAcc));
// //Vector3 depthGravity = depthAcc * -1f;
// //imuUpVector = (depthGravity * -1f).normalized;
// imuUpVector = depthAcc.normalized;
// //isImuVectorSet = true;
// //Debug.Log("imuUpVector: " + imuUpVector);
//}
/// <summary>
/// Updates the IMU up vector.
/// </summary>
/// <param name="imuUpVector">IMU up vector</param>
public void UpdateImuUpVector(Vector3 imuUpVector)
{
this.imuUpVector = imuUpVector.normalized;
}
/// <summary>
/// Executes the floor detector shaders with the current depth frame data.
/// </summary>
/// <param name="depthFrame">Depth frame data</param>
/// <param name="depthFrameTime">Depth frame time</param>
/// <param name="depthFrameLock">Depth frame lock object</param>
/// <param name="minDistance">Min depth distance, in meters</param>
/// <param name="maxDistance">Max depth distance, in meters</param>
/// <returns>true if the floor plane is detected, false otherwise</returns>
public bool UpdateFloorDetector(ushort[] depthFrame, ulong depthFrameTime, ref object depthFrameLock, float minDistance, float maxDistance)
{
if (sensorData == null || depthFrame == null || sensorData.depthImageWidth == 0 || sensorData.depthImageHeight == 0)
return false;
if (lastDepthFrameTime == depthFrameTime)
return false;
lastDepthFrameTime = depthFrameTime;
minDepthDistance = (int)(minDistance * 1000f);
maxDepthDistance = (int)(maxDistance * 1000f);
if (depth2SpaceWidth != sensorData.depthImageWidth || depth2SpaceHeight != sensorData.depthImageHeight)
{
depth2SpaceTable = sensorData.sensorInterface.GetDepthCameraSpaceTable(sensorData);
depth2SpaceWidth = sensorData.depthImageWidth;
depth2SpaceHeight = sensorData.depthImageHeight;
pointCloudSpaceBuffer.SetData(depth2SpaceTable);
depth2SpaceTable = null;
//Debug.Log("Set space table for width: " + depth2SpaceWidth + ", height: " + depth2SpaceHeight);
}
// FloorDetectionOffsetEstShader
//lock(depthFrameLock)
{
KinectInterop.SetComputeBufferData(pointCloudDepthBuffer, depthFrame, depthFrame.Length >> 1, sizeof(uint));
}
isDepthFrameSet = true;
////Debug.Log("imuUpVector: " + imuUpVector);
//if(imuVectorTrans == null)
//{
// GameObject imuVectorObj = GameObject.CreatePrimitive(PrimitiveType.Cube);
// imuVectorObj.name = "ImuVectorObj";
// imuVectorTrans = imuVectorObj.transform;
// imuVectorTrans.localScale = new Vector3(0.1f, 0.2f, 0.5f);
// imuVectorTrans.position = new Vector3(0, 1f, 1f);
//}
//imuVectorTrans.rotation = Quaternion.LookRotation(imuUpVector.normalized);
if(bPlaneValid)
{
bPlaneValid = false;
return true;
}
return false;
}
// updates the floor parameters async
private IEnumerator UpdateFloorAsync()
{
while(isRoutineRunning)
{
// wait for imu vector & depth frame
while (/**!isImuVectorSet ||*/ !isDepthFrameSet)
{
yield return null;
}
//isImuVectorSet = false;
isDepthFrameSet = false;
KinectInterop.SetComputeShaderInt2(floorDetOffsetEstShader, "PointCloudRes", sensorData.depthImageWidth, sensorData.depthImageHeight);
//KinectInterop.SetComputeShaderFloat2(floorDetOffsetEstShader, "SpaceScale", sensorData.sensorSpaceScale.x, sensorData.sensorSpaceScale.y);
KinectInterop.SetComputeShaderFloat3(floorDetOffsetEstShader, "ImuUpVector", imuUpVector);
floorDetOffsetEstShader.SetInt("MinDepth", minDepthDistance);
floorDetOffsetEstShader.SetInt("MaxDepth", maxDepthDistance);
floorDetOffsetEstShader.SetBuffer(floorDetOffsetEstKernel, "SpaceTable", pointCloudSpaceBuffer);
floorDetOffsetEstShader.SetBuffer(floorDetOffsetEstKernel, "DepthMap", pointCloudDepthBuffer);
floorDetOffsetEstShader.SetBuffer(floorDetOffsetEstKernel, "PointCloudPos", pointCloudPosBuffer);
floorDetOffsetEstShader.SetBuffer(floorDetOffsetEstKernel, "PointCloudOfs", pointCloudOfsBuffer);
floorDetOffsetEstShader.SetBuffer(floorDetOffsetEstKernel, "PointCloudMask", pointCloudMaskBuffer);
floorDetOffsetEstShader.Dispatch(floorDetOffsetEstKernel, sensorData.depthImageWidth / 8, sensorData.depthImageHeight / 8, 1);
// FloorDetectionOffsetMinMaxShader
KinectInterop.SetComputeShaderInt2(floorDetOffsetMinMaxShader, "PointCloudRes", sensorData.depthImageWidth, sensorData.depthImageHeight);
floorDetOffsetMinMaxShader.SetInt("OfsHistBinLength", histBufferLength);
floorDetOffsetMinMaxShader.SetBuffer(floorDetOffsetMinMaxKernel, "PointCloudOfs", pointCloudOfsBuffer);
floorDetOffsetMinMaxShader.SetBuffer(floorDetOffsetMinMaxKernel, "PointCloudMask", pointCloudMaskBuffer);
floorDetOffsetMinMaxShader.SetBuffer(floorDetOffsetMinMaxKernel, "OfsMinMax", ofsHistMinMaxBuffer);
floorDetOffsetMinMaxShader.SetBuffer(floorDetOffsetMinMaxKernel, "OfsHistBinCount", ofsHistBinCountBuffer);
floorDetOffsetMinMaxShader.Dispatch(floorDetOffsetMinMaxKernel, 1, 1, 1);
//ofsHistMinMaxBuffer.GetData(histMinMax);
//Debug.Log("Hist min: " + histMinMax[0] + ", max: " + histMinMax[1]);
                // FloorDetectionOffsetHistShader
                KinectInterop.SetComputeShaderInt2(floorDetOffsetHistShader, "PointCloudRes", sensorData.depthImageWidth, sensorData.depthImageHeight);
                //floorDetOffsetHistShader.SetInt("PointCloudOfsLength", sensorData.depthImageWidth * sensorData.depthImageHeight);
                floorDetOffsetHistShader.SetInt("OfsHistBinLength", histBufferLength);
                floorDetOffsetHistShader.SetFloat("BinSize", histBinSize);

                floorDetOffsetHistShader.SetBuffer(floorDetOffsetHistKernel, "PointCloudOfs", pointCloudOfsBuffer);
                floorDetOffsetHistShader.SetBuffer(floorDetOffsetHistKernel, "PointCloudMask", pointCloudMaskBuffer);
                floorDetOffsetHistShader.SetBuffer(floorDetOffsetHistKernel, "OfsMinMax", ofsHistMinMaxBuffer);
                floorDetOffsetHistShader.SetBuffer(floorDetOffsetHistKernel, "OfsHistBinCount", ofsHistBinCountBuffer);
                //floorDetOffsetHistShader.SetBuffer(floorDetOffsetHistKernel, "OfsHistBinLeft", ofsHistBinLeftBuffer);

                floorDetOffsetHistShader.Dispatch(floorDetOffsetHistKernel, sensorData.depthImageWidth / 1, sensorData.depthImageHeight / 1, 1);
                //floorDetOffsetHistShader.Dispatch(floorDetOffsetHistKernel, 1, 1, 1);
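
                // 4th pass (inferred): aggregate the lowest populated bins (BinAggregation at a time),
                // collect the inlier points near that offset and fit the floor plane,
                // writing its position and normal into PlanePosNorm.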
                // FloorDetectionPlanePointsShader
                floorDetPlaneEstShader.SetInt("OfsHistBinLength", histBufferLength);
                floorDetPlaneEstShader.SetInt("PointCloudOfsLength", sensorData.depthImageWidth * sensorData.depthImageHeight);
                floorDetPlaneEstShader.SetFloat("BinSize", histBinSize);
                floorDetPlaneEstShader.SetInt("BinAggregation", binAggregation);
                floorDetPlaneEstShader.SetInt("MinimumFloorPointCount", minFloorPointCount / 4);

                floorDetPlaneEstShader.SetBuffer(floorDetPlaneEstKernel, "OfsHistBinCount", ofsHistBinCountBuffer);
                floorDetPlaneEstShader.SetBuffer(floorDetPlaneEstKernel, "PointCloudPos", pointCloudPosBuffer);
                floorDetPlaneEstShader.SetBuffer(floorDetPlaneEstKernel, "PointCloudOfs", pointCloudOfsBuffer);
                floorDetPlaneEstShader.SetBuffer(floorDetPlaneEstKernel, "PointCloudMask", pointCloudMaskBuffer);
                floorDetPlaneEstShader.SetBuffer(floorDetPlaneEstKernel, "OfsMinMax", ofsHistMinMaxBuffer);
                floorDetPlaneEstShader.SetBuffer(floorDetPlaneEstKernel, "OfsHistBinLeft", ofsHistBinLeftBuffer);
                floorDetPlaneEstShader.SetBuffer(floorDetPlaneEstKernel, "HistCumulativeCount", histCumulativeCountBuffer);
                floorDetPlaneEstShader.SetBuffer(floorDetPlaneEstKernel, "InlierIndices", planeIndicesBuffer);
                floorDetPlaneEstShader.SetBuffer(floorDetPlaneEstKernel, "PlanePosNorm", planePosNormBuffer);

                floorDetPlaneEstShader.Dispatch(floorDetPlaneEstKernel, 1, 1, 1);
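
                // Waiting a few frames gives the GPU time to finish before the blocking GetData() readback below;
                // ComputeBuffer.GetData() stalls the CPU until the results are available.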
                // wait some frames before GetData()
                for (int i = 0; i < WAIT_FRAMES_BEFORE_GPUGET; i++)
                {
                    yield return null;
                }
                //uint[] histCumCount = new uint[histBufferLength];
                //histCumulativeCountBuffer.GetData(histCumCount);
                //uint maxDiffCount = 0;
                //System.Text.StringBuilder sbCumCount = new System.Text.StringBuilder();
                //for(int i = 1; (i + binAggregation) < histCumCount.Length; i++)  // i += binAggregation
                //{
                //    uint diffCount = histCumCount[i + binAggregation - 1] - histCumCount[i - 1];
                //    if (maxDiffCount < diffCount)
                //        maxDiffCount = diffCount;
                //    if (diffCount > 0)
                //        sbCumCount.Append(i).Append('-').Append(diffCount).Append(" ");
                //}
                //Debug.Log("histCumCount(" + maxDiffCount + "): " + sbCumCount);
                planePosNormBuffer.GetData(planePosNorm);

                vPlanePos = new Vector3(planePosNorm[0], planePosNorm[1], planePosNorm[2]);
                vPlaneNorm = new Vector3(planePosNorm[3], planePosNorm[4], planePosNorm[5]);
                //Vector3 vPlaneOfs = new Vector3(planePosNorm[6], planePosNorm[7], planePosNorm[8]);
                //Vector3 vPlaneOfs2 = new Vector3(planePosNorm[9], planePosNorm[10], planePosNorm[11]);

                bPlaneValid = (vPlaneNorm != Vector3.zero);
                if (bPlaneValid)
                {
                    //Debug.Log("Plane pos: " + vPlanePos + ", norm: " + vPlaneNorm.normalized + ", rot: " + qSensorRot.eulerAngles + ", ofs: " + vPlaneOfs + ", ofs2: " + vPlaneOfs2);
                    vPlaneNorm = vPlaneNorm.normalized;

                    if (Vector3.Dot(vPlaneNorm, imuUpVector) < 0f)
                    {
                        vPlaneNorm = -vPlaneNorm;
                        //Debug.Log("Inverted plane normal: " + vPlaneNorm);
                    }

                    float floorTiltInDeg = Mathf.Acos(Vector3.Dot(vPlaneNorm, imuUpVector)) * Mathf.Rad2Deg;
                    if (floorTiltInDeg < planeMaxTiltInDeg)
                    {
                        // For reduced jitter, use gravity for floor normal.
                        vPlaneNorm = imuUpVector;
                        //Debug.Log("Used gravity for normal: " + vPlaneNorm + ", tiltAngle: " + floorTiltInDeg);
                    }
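
                    // Smoothing (as implemented below): when the previous estimate is recent, i.e. within
                    // SMOOTH_TIME_THRESHOLD, the new rotation and height are blended in via Slerp/Lerp;
                    // otherwise the fresh values replace the old ones outright.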
                    // get results
                    float fCurTimeSecs = Time.time;
                    bool bSmoothResult = (fCurTimeSecs - fLastTimeSecs) < SMOOTH_TIME_THRESHOLD;
                    //Debug.Log("SmoothResult: " + bSmoothResult);
                    fLastTimeSecs = fCurTimeSecs;

                    vPlanePos = new Vector3(vPlanePos.x * spaceScale.x, vPlanePos.y * spaceScale.y, vPlanePos.z * spaceScale.z);
                    vPlaneNorm = new Vector3(vPlaneNorm.x * spaceScale.x, vPlaneNorm.y * spaceScale.y, vPlaneNorm.z * spaceScale.z);

                    Quaternion curSensorRot = Quaternion.FromToRotation(vPlaneNorm, Vector3.up);
                    qSensorRot = bSmoothResult ? Quaternion.Slerp(qSensorRot, curSensorRot, smoothFactor * Time.deltaTime) : curSensorRot;

                    floorPlane = new Plane(vPlaneNorm, vPlanePos);
                    float curSensorHeight = floorPlane.GetDistanceToPoint(Vector3.zero);
                    fSensorHeight = bSmoothResult ? Mathf.Lerp(fSensorHeight, curSensorHeight, smoothFactor * Time.deltaTime) : curSensorHeight;

                    //Debug.Log("Floor pos: " + vPlanePos + ", norm: " + vPlaneNorm + ", rot: " + qSensorRot.eulerAngles + ", height: " + curSensorHeight + ", smoothed: " + fSensorHeight);
                }
            }
        }
    }
}
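
For reference, a minimal standalone sketch of the same floor-alignment math used at the end of UpdateFloorAsync (illustrative only, not part of this commit; it uses only standard Unity Plane/Quaternion calls and makes no assumptions about the detector's API):

// Illustrative sketch: given a detected floor plane (position + normal), derive the sensor height and the
// rotation that levels the sensor, optionally smoothing toward the previous estimate.
using UnityEngine;

public static class FloorAlignMathSketch
{
    public static void Estimate(
        Vector3 planePos, Vector3 planeNorm,              // detected floor point and normalized normal
        ref Quaternion sensorRot, ref float sensorHeight, // previous estimates, updated in place
        bool smooth, float smoothFactor, float deltaTime)
    {
        // rotation that maps the floor normal onto the world up-vector
        Quaternion curRot = Quaternion.FromToRotation(planeNorm, Vector3.up);

        // distance from the sensor origin (0,0,0) to the floor plane = sensor height above the floor
        Plane floorPlane = new Plane(planeNorm, planePos);
        float curHeight = floorPlane.GetDistanceToPoint(Vector3.zero);

        // blend toward the new estimate when a recent one exists, otherwise snap to it
        sensorRot = smooth ? Quaternion.Slerp(sensorRot, curRot, smoothFactor * deltaTime) : curRot;
        sensorHeight = smooth ? Mathf.Lerp(sensorHeight, curHeight, smoothFactor * deltaTime) : curHeight;
    }
}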

11
Assets/Azure/KinectScripts/Interfaces/KinectFloorDetector.cs.meta

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 11fddd48107804c42827afb3bbf5533a
MonoImporter:
  externalObjects: {}
  serializedVersion: 2
  defaultReferences: []
  executionOrder: 0
  icon: {instanceID: 0}
  userData:
  assetBundleName:
  assetBundleVariant:

2388
Assets/Azure/KinectScripts/Interfaces/NetClientInterface.cs

File diff suppressed because it is too large

11
Assets/Azure/KinectScripts/Interfaces/NetClientInterface.cs.meta

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: eb612f05db283eb42a3801236d897315
MonoImporter:
  externalObjects: {}
  serializedVersion: 2
  defaultReferences: []
  executionOrder: 0
  icon: {instanceID: 0}
  userData:
  assetBundleName:
  assetBundleVariant:

1093
Assets/Azure/KinectScripts/Interfaces/RealSenseInterface.cs

File diff suppressed because it is too large

Some files were not shown because too many files changed in this diff
