Reputation: 311
I'm working on a Unity project that uses the Azure Kinect SDK to track a user's body movements and apply them to clothing objects. However, the clothing objects move in the opposite direction to the user's movement.
I've tried adjusting the position calculations and applying rotation offsets, but the problem persists. I'm using the Kinect body-tracking data to update the positions and rotations of the top and bottom clothing objects based on the spine chest and pelvis joints.
Here's the relevant code snippet:
using Microsoft.Azure.Kinect.Sensor;
using Microsoft.Azure.Kinect.BodyTracking;
using UnityEngine;
using UnityEngine.UI;

public class KinectManager2 : MonoBehaviour
{
    public GameObject topClothingObject;
    public GameObject bottomClothingObject;
    public RawImage backgroundImage;

    private Device kinectDevice;
    private Tracker bodyTracker;
    private Texture2D colorTexture;

    // Currently unused in this script.
    private Vector3 initialPelvisPosition;
    private Vector3 initialSpineChestPosition;

    void Start()
    {
        InitializeKinect();
        InitializeBodyTracker();
    }
    void Update()
    {
        if (kinectDevice != null)
        {
            // GetCapture blocks until the next synchronized capture is available.
            using (Capture capture = kinectDevice.GetCapture())
            {
                if (capture != null)
                {
                    if (capture.Color != null)
                    {
                        ProcessColorFrame(capture.Color);
                    }
                    if (bodyTracker != null)
                    {
                        ProcessBodyFrame(capture);
                    }
                }
            }
        }
    }
    void OnDestroy()
    {
        // Release the tracker before the device it was created from.
        if (bodyTracker != null)
        {
            bodyTracker.Dispose();
        }
        if (kinectDevice != null)
        {
            kinectDevice.StopCameras();
            kinectDevice.Dispose();
        }
    }
    private void InitializeKinect()
    {
        try
        {
            // Device.Open throws (rather than returning null) when no device is available.
            kinectDevice = Device.Open();
            kinectDevice.StartCameras(new DeviceConfiguration
            {
                ColorFormat = ImageFormat.ColorBGRA32,
                ColorResolution = ColorResolution.R1080p,
                DepthMode = DepthMode.WFOV_Unbinned, // WFOV unbinned supports at most 15 FPS
                CameraFPS = FPS.FPS15,
                SynchronizedImagesOnly = true
            });
            colorTexture = new Texture2D(1920, 1080, TextureFormat.BGRA32, false);
        }
        catch (AzureKinectException e)
        {
            Debug.LogError("Failed to open Kinect device: " + e.Message);
        }
    }
    private void InitializeBodyTracker()
    {
        if (kinectDevice != null)
        {
            bodyTracker = Tracker.Create(kinectDevice.GetCalibration(), new TrackerConfiguration
            {
                ProcessingMode = TrackerProcessingMode.Gpu,
                SensorOrientation = SensorOrientation.Default
            });
            if (bodyTracker == null)
            {
                Debug.LogError("Failed to create body tracker.");
            }
        }
    }
    private void ProcessColorFrame(Microsoft.Azure.Kinect.Sensor.Image colorImage)
    {
        if (colorImage.Format == ImageFormat.ColorBGRA32)
        {
            int width = colorImage.WidthPixels;
            int height = colorImage.HeightPixels;
            byte[] colorData = new byte[width * height * 4];
            colorImage.Memory.CopyTo(colorData);

            // Unity textures have their origin at the bottom-left, so flip the rows vertically.
            byte[] flippedColorData = new byte[width * height * 4];
            for (int y = 0; y < height; y++)
            {
                int srcIndex = y * width * 4;
                int dstIndex = (height - 1 - y) * width * 4;
                System.Buffer.BlockCopy(colorData, srcIndex, flippedColorData, dstIndex, width * 4);
            }

            colorTexture.LoadRawTextureData(flippedColorData);
            colorTexture.Apply();
            backgroundImage.texture = colorTexture;
        }
        else
        {
            Debug.LogError("Unsupported color image format: " + colorImage.Format);
        }
    }
    private void ProcessBodyFrame(Capture capture)
    {
        // Enqueue the capture and block until the tracker produces a result.
        bodyTracker.EnqueueCapture(capture);
        using (Frame frame = bodyTracker.PopResult())
        {
            if (frame != null && frame.NumberOfBodies > 0)
            {
                Body body = frame.GetBody(0);
                UpdateClothing(body);
                long latency = (long)(frame.DeviceTimestamp.TotalMilliseconds - capture.Depth.DeviceTimestamp.TotalMilliseconds);
                Debug.Log($"Latency: {latency} ms");
            }
        }
    }
    private void UpdateClothing(Body body)
    {
        if (topClothingObject != null && bottomClothingObject != null)
        {
            Vector3 pelvisPosition = GetVector3FromJoint(body.Skeleton.GetJoint(JointId.Pelvis));
            Vector3 spineChestPosition = GetVector3FromJoint(body.Skeleton.GetJoint(JointId.SpineChest));

            // Scale the clothing based on the user's distance from the camera.
            float distanceToCamera = pelvisPosition.z;
            float scaleFactor = Mathf.Clamp(1f / (distanceToCamera * 0.01f), 0.5f, 2f);

            // Assuming the Kinect camera is at the world origin.
            Vector3 cameraPosition = Vector3.zero;

            Vector3 topClothingPosition = spineChestPosition - cameraPosition;
            topClothingPosition.y += 0.2f;
            topClothingObject.transform.position = topClothingPosition;

            Vector3 bottomClothingPosition = pelvisPosition - cameraPosition;
            bottomClothingPosition.y -= 0.1f;
            bottomClothingObject.transform.position = bottomClothingPosition;

            // Apply the joint rotations with fixed offsets to align the FBX meshes.
            Quaternion spineChestRotation = GetQuaternionFromJoint(body.Skeleton.GetJoint(JointId.SpineChest));
            Quaternion topRotationOffset = Quaternion.Euler(90f, 90f, 0f);
            topClothingObject.transform.rotation = spineChestRotation * topRotationOffset;

            Quaternion pelvisRotation = GetQuaternionFromJoint(body.Skeleton.GetJoint(JointId.Pelvis));
            Quaternion bottomRotationOffset = Quaternion.Euler(0f, 120f, 10f);
            bottomClothingObject.transform.rotation = pelvisRotation * bottomRotationOffset;

            topClothingObject.transform.localScale = Vector3.one * scaleFactor;
            bottomClothingObject.transform.localScale = Vector3.one * scaleFactor;
        }
        else
        {
            Debug.LogWarning("Clothing objects are null. Please assign FBX objects to the topClothingObject and bottomClothingObject variables.");
        }
    }
    private Quaternion GetQuaternionFromJoint(Microsoft.Azure.Kinect.BodyTracking.Joint joint)
    {
        return new Quaternion(joint.Quaternion.X, joint.Quaternion.Y, joint.Quaternion.Z, joint.Quaternion.W);
    }

    private Vector3 GetVector3FromJoint(Microsoft.Azure.Kinect.BodyTracking.Joint joint)
    {
        // Kinect reports joint positions in millimeters; convert to Unity meters.
        const float scaleFactor = 0.001f;
        return new Vector3(joint.Position.X * scaleFactor, joint.Position.Y * scaleFactor, joint.Position.Z * scaleFactor);
    }
}
I've also tried calculating the positions relative to the Kinect camera's position, but it didn't solve the issue.
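For reference, the camera-relative variant I experimented with looked roughly like this (a simplified sketch; kinectOrigin is a hypothetical Transform I added to mark where I assume the sensor sits in the scene):

public Transform kinectOrigin; // hypothetical marker for the sensor's pose in the scene

private Vector3 GetCameraRelativeJointPosition(Microsoft.Azure.Kinect.BodyTracking.Joint joint)
{
    // Convert the joint position from millimeters (Kinect) to meters (Unity).
    Vector3 local = new Vector3(
        joint.Position.X * 0.001f,
        joint.Position.Y * 0.001f,
        joint.Position.Z * 0.001f);

    // Express the joint position relative to where the sensor is placed,
    // instead of treating the raw Kinect coordinates as world coordinates.
    return kinectOrigin != null ? kinectOrigin.TransformPoint(local) : local;
}

This variant still moved the clothing opposite to the user.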
I would greatly appreciate any suggestions or insights on how to fix this problem and make the clothing objects move in the same direction as the user's movement.
Thank you in advance for your help!
Upvotes: 1
Views: 24