microsoft / Azure-Kinect-Sensor-SDK

A cross platform (Linux and Windows) user mode SDK to read data from your Azure Kinect device.
https://Azure.com/Kinect
MIT License
1.49k stars 619 forks source link

Azure Kinect BodyTrackingSamples & Unity | multiple skeletons #1731

Open leGITplease opened 2 years ago

leGITplease commented 2 years ago

Hi guys,

Currently I have one skeleton tracking myself as I stand in front of the Azure Kinect; what I want to achieve is having multiple skeletons, each representing one user in Unity. Right now I have copied the KinectAzureTracker — I suppose there is a way to use the copy for the 2nd user?

afbeelding

For instance, if i stand with another person in front of the camera i want the 2nd skeleton to track their body so you have 2 different skeletons with each their own movements linked to each single user.

Im using this repo: https://github.com/microsoft/Azure-Kinect-Samples/tree/master/body-tracking-samples

Any ideas on how to do that?

KanGeQiu commented 2 years ago

Hi, have you solved this problem? I have the same problem now; if you have solved it, could you tell me how?

Thaina commented 1 year ago

You can modify SkeletalTrackingProvider.cs directly. It just gives you only one skeleton, even though the device actually provides many skeleton bodies.

hemmkim commented 11 months ago

You can modify SkeletalTrackingProvider.cs directly. It just gives you only one skeleton, even though the device actually provides many skeleton bodies.

Hello, I was trying to modify SkeletalTrackingProvider.cs but could not solve how to fix it. Could you please help me how to modify the script?

I'd very much appreciate it if you can help!

Thaina commented 11 months ago

@hemmkim It has been too long and I have mostly forgotten what I did. There are many files I have modified from the original project.

But if it might be useful, this is the current SkeletalTrackingProvider.cs in my project

using Microsoft.Azure.Kinect.BodyTracking;
using Microsoft.Azure.Kinect.Sensor;
using System;
using System.IO;
using System.Runtime.InteropServices;
using System.Threading;
using System.Threading.Tasks;
using Unity.Mathematics;
using UnityEngine;
using UnityEngine.SceneManagement;

/// <summary>
/// Background worker that opens an Azure Kinect device, runs the body tracker,
/// and publishes per-frame skeleton and depth data through
/// <see cref="BackgroundDataProvider"/>. Also samples the IMU accelerometer on
/// a side task, exposed via <see cref="accelSample"/>.
/// </summary>
public class SkeletalTrackingProvider : BackgroundDataProvider
{
    // True once the first depth frame has been seen; used to anchor timestamps.
    bool readFirstFrame = false;
    // Device timestamp of the first depth frame; all later timestamps are
    // reported relative to this value.
    TimeSpan initialTimestamp;

    // Latest accelerometer sample, remapped from Kinect axes to Unity axes.
    // NOTE(review): written from a background task and presumably read from the
    // main thread without synchronization — float3 assignment is not atomic;
    // confirm occasional torn reads are acceptable for this use.
    public float3 accelSample;

    public SkeletalTrackingProvider(int id) : base(id)
    {
        Debug.Log("in the skeleton provider constructor");
    }

    // NOTE(review): BinaryFormatter is insecure and deprecated (removed in
    // .NET 9). It is only used here to serialize trusted local debug data, but
    // never deserialize untrusted streams with it; consider migrating to a
    // custom or JSON-based serializer long-term.
    System.Runtime.Serialization.Formatters.Binary.BinaryFormatter binaryFormatter { get; set; } = new System.Runtime.Serialization.Formatters.Binary.BinaryFormatter();

    // Optional stream for raw frame logging; closed when the worker exits.
    public Stream RawDataLoggingFile = null;
    // Desired tracker settings; the worker rebuilds the tracker when either of
    // these no longer matches the configuration it was created with.
    public SensorOrientation sensorOrientation = SensorOrientation.Default;
    public TrackerProcessingMode trackerProcessingMode = TrackerProcessingMode.Cuda;

    /// <summary>
    /// Worker loop: opens device <paramref name="id"/>, starts the depth camera
    /// and IMU, creates a body tracker, and pumps capture -> tracker -> frame
    /// data until <paramref name="token"/> is cancelled. The tracker is
    /// re-created whenever <see cref="sensorOrientation"/> or
    /// <see cref="trackerProcessingMode"/> changes between iterations.
    /// </summary>
    /// <param name="id">Index of the Azure Kinect device to open.</param>
    /// <param name="token">Cancellation token that ends the loop.</param>
    protected override void RunBackgroundThreadAsync(int id, CancellationToken token)
    {
        try
        {
            UnityEngine.Debug.Log("Starting body tracker background thread.");
            DebugTime.isDetectCamera = true;

            // Single reusable buffer; refilled every frame and handed to the UI thread.
            var currentFrameData = new BackgroundData();

            // Open device.
            using (var device = Device.Open(id))
            {
                device.StartCameras(new DeviceConfiguration() {
                    CameraFPS = FPS.FPS15,
                    ColorResolution = ColorResolution.Off,
                    DepthMode = DepthMode.NFOV_Unbinned,
                    WiredSyncMode = WiredSyncMode.Standalone,
                });

                // IMU sampling runs on its own task at roughly 10 Hz until cancellation.
                // NOTE(review): this lambda captures `device`, which is disposed when the
                // enclosing using exits; a late GetImuSample/StopImu may then throw and be
                // logged below — confirm that shutdown ordering is acceptable.
                Task.Run(() => {
                    device.StartImu();
                    try
                    {
                        while (!token.IsCancellationRequested)
                        {
                            try
                            {
                                Thread.Sleep(100);
                                var imu = device.GetImuSample(TimeSpan.FromMilliseconds(100));
                                // Remap Kinect axes into Unity's coordinate convention.
                                accelSample = new float3(imu.AccelerometerSample.FromKinectToUnity()).yzx;
                            }
                            catch (Exception e)
                            {
                                // Best-effort: a single failed IMU read should not
                                // stop the sampling loop.
                                Debug.LogException(e);
                            }
                        }
                    }
                    catch (Exception e)
                    {
                        Debug.LogException(e);
                    }
                    finally
                    {
                        device.StopImu();
                    }
                });

                // FIX: added missing space before "sn:" in the log message.
                UnityEngine.Debug.Log("Open K4A device successful. id " + id + " sn:" + device.SerialNum);

                while (!token.IsCancellationRequested)
                {
                    IsRunning = false;
                    var deviceCalibration = device.GetCalibration();
                    var config = new TrackerConfiguration() { ProcessingMode = trackerProcessingMode, SensorOrientation = sensorOrientation };
                    using (var tracker = Tracker.Create(deviceCalibration, config))
                    {
                        UnityEngine.Debug.Log("Body tracker created.");
                        // Inner loop exits (and the tracker is rebuilt by the outer loop)
                        // when the requested orientation/processing mode changes.
                        while (!token.IsCancellationRequested && config.SensorOrientation == sensorOrientation && config.ProcessingMode == trackerProcessingMode)
                        {
                            using (var sensorCapture = device.GetCapture())
                            {
                                // Queue latest frame from the sensor.
                                tracker.EnqueueCapture(sensorCapture);
                            }

                            // Try getting the latest tracker frame without blocking.
                            using (var frame = tracker.PopResult(TimeSpan.Zero, throwOnTimeout: false))
                            {
                                if (frame == null)
                                {
                                    UnityEngine.Debug.Log("Pop result from tracker timeout!");
                                }
                                else
                                {
                                    IsRunning = true;

                                    // Copy every tracked body for this frame.
                                    // NOTE(review): assumes currentFrameData.Bodies has capacity
                                    // for NumberOfBodies entries — verify BackgroundData's array
                                    // size or clamp here before indexing.
                                    currentFrameData.NumOfBodies = frame.NumberOfBodies;
                                    for (uint i = 0; i < currentFrameData.NumOfBodies; i++)
                                    {
                                        currentFrameData.Bodies[i].CopyFromBodyTrackingSdk(frame.GetBody(i), deviceCalibration);
                                    }

                                    // Store depth image metadata; timestamps are reported
                                    // relative to the very first frame seen.
                                    Capture bodyFrameCapture = frame.Capture;
                                    Image depthImage = bodyFrameCapture.Depth;
                                    if (!readFirstFrame)
                                    {
                                        readFirstFrame = true;
                                        initialTimestamp = depthImage.DeviceTimestamp;
                                    }
                                    currentFrameData.TimestampInMs = (float)(depthImage.DeviceTimestamp - initialTimestamp).TotalMilliseconds;
                                    currentFrameData.DepthImageWidth = depthImage.WidthPixels;
                                    currentFrameData.DepthImageHeight = depthImage.HeightPixels;

                                    // Reinterpret the raw depth buffer as 16-bit values (mm).
                                    var depthFrame = MemoryMarshal.Cast<byte, ushort>(depthImage.Memory.Span);

                                    // Repack depth into a (reversed) greyscale RGB image.
                                    int byteCounter = 0;
                                    int pixelCount = currentFrameData.DepthImageWidth * currentFrameData.DepthImageHeight;
                                    currentFrameData.DepthImageSize = pixelCount * 3;

                                    // Hoisted loop invariant: configured maximum displayed depth.
                                    var maxDepthMm = ConfigLoader.Instance.Configs.SkeletalTracking.MaximumDisplayedDepthInMillimeters;

                                    // BUG FIX: the loop previously stopped at `it > 0`, skipping
                                    // pixel 0 and leaving the final 3 bytes of DepthImage stale
                                    // (DepthImageSize covers the full pixelCount * 3 bytes).
                                    for (int it = pixelCount - 1; it >= 0; it--)
                                    {
                                        var normalized = depthFrame[it] / maxDepthMm * 255;
                                        // BUG FIX: saturate depths beyond maxDepthMm instead of
                                        // letting the narrowing cast wrap to an arbitrary byte.
                                        byte b = normalized > 255 ? (byte)255 : (byte)normalized;
                                        currentFrameData.DepthImage[byteCounter++] = b;
                                        currentFrameData.DepthImage[byteCounter++] = b;
                                        currentFrameData.DepthImage[byteCounter++] = b;
                                    }

                                    if (RawDataLoggingFile != null && RawDataLoggingFile.CanWrite)
                                    {
                                        binaryFormatter.Serialize(RawDataLoggingFile, currentFrameData);
                                    }

                                    // Update data variable that is being read in the UI thread.
                                    SetCurrentFrameData(ref currentFrameData);
                                }
                            }
                        }

                        Debug.Log("dispose of tracker now!!!!!");
                    }
                }
            }
        }
        catch (Exception e)
        {
            DebugTime.isDetectCamera = false;
            Debug.LogException(e);
            token.ThrowIfCancellationRequested();
        }
        finally
        {
            // BUG FIX: previously the logging stream was only closed on a clean
            // exit, leaking it whenever an exception escaped the loop above.
            RawDataLoggingFile?.Close();
        }
    }
}
hemmkim commented 11 months ago

Hello,

Thank you so much for your reply. I very much appreciate it.

I'll try this on my project again soon.

Best wishes, HyeMin


김혜민 (HyeMin Kim) +82) 10-7471-7343

@fruityhye @higosjardin @orange.planet

-----Original Message----- From: "Thaina @.> To: @.>; Cc: @.>; @.>; Sent: 2023-10-11 (수) 15:55:33 (GMT+09:00) Subject: Re: [microsoft/Azure-Kinect-Sensor-SDK] Azure Kinect BodyTrackingSamples & Unity | multiple skeletons (Issue #1731)

@hemmkim This is too long and I mostly forget what I have done. There are many file I have modified from the original project But if it might be useful, this is the current SkeletalTrackingProvider.cs in my project using Microsoft.Azure.Kinect.BodyTracking; using Microsoft.Azure.Kinect.Sensor; using System; using System.IO; using System.Runtime.InteropServices; using System.Threading; using System.Threading.Tasks; using Unity.Mathematics; using UnityEngine; using UnityEngine.SceneManagement; public class SkeletalTrackingProvider : BackgroundDataProvider { bool readFirstFrame = false; TimeSpan initialTimestamp; public float3 accelSample; public SkeletalTrackingProvider(int id) : base(id) { Debug.Log("in the skeleton provider constructor"); } System.Runtime.Serialization.Formatters.Binary.BinaryFormatter binaryFormatter { get; set; } = new System.Runtime.Serialization.Formatters.Binary.BinaryFormatter(); public Stream RawDataLoggingFile = null; public SensorOrientation sensorOrientation = SensorOrientation.Default; public TrackerProcessingMode trackerProcessingMode = TrackerProcessingMode.Cuda; protected override void RunBackgroundThreadAsync(int id, CancellationToken token) { try { UnityEngine.Debug.Log("Starting body tracker background thread."); DebugTime.isDetectCamera = true; // Buffer allocations. var currentFrameData = new BackgroundData(); // Open device. 
using (var device = Device.Open(id)) { device.StartCameras(new DeviceConfiguration() { CameraFPS = FPS.FPS15, ColorResolution = ColorResolution.Off, DepthMode = DepthMode.NFOV_Unbinned, WiredSyncMode = WiredSyncMode.Standalone, }); Task.Run(() => { device.StartImu(); try { while(!token.IsCancellationRequested) { try { Thread.Sleep(100); var imu = device.GetImuSample(TimeSpan.FromMilliseconds(100)); accelSample = new float3(imu.AccelerometerSample.FromKinectToUnity()).yzx; } catch(Exception e) { Debug.LogException(e); } } } catch(Exception e) { Debug.LogException(e); } finally { device.StopImu(); } }); UnityEngine.Debug.Log("Open K4A device successful. id " + id + "sn:" + device.SerialNum); while (!token.IsCancellationRequested) { IsRunning = false; var deviceCalibration = device.GetCalibration(); var config = new TrackerConfiguration() { ProcessingMode = trackerProcessingMode, SensorOrientation = sensorOrientation }; using (var tracker = Tracker.Create(deviceCalibration,config)) { UnityEngine.Debug.Log("Body tracker created."); while (!token.IsCancellationRequested && config.SensorOrientation == sensorOrientation && config.ProcessingMode == trackerProcessingMode) { using (var sensorCapture = device.GetCapture()) { // Queue latest frame from the sensor. tracker.EnqueueCapture(sensorCapture); } // Try getting latest tracker frame. using (var frame = tracker.PopResult(TimeSpan.Zero, throwOnTimeout: false)) { if (frame == null) { UnityEngine.Debug.Log("Pop result from tracker timeout!"); } else { IsRunning = true; // Get number of bodies in the current frame. currentFrameData.NumOfBodies = frame.NumberOfBodies; // Copy bodies. for (uint i = 0; i < currentFrameData.NumOfBodies; i++) { currentFrameData.Bodies[i].CopyFromBodyTrackingSdk(frame.GetBody(i), deviceCalibration); } // Store depth image. 
Capture bodyFrameCapture = frame.Capture; Image depthImage = bodyFrameCapture.Depth; if (!readFirstFrame) { readFirstFrame = true; initialTimestamp = depthImage.DeviceTimestamp; } currentFrameData.TimestampInMs = (float)(depthImage.DeviceTimestamp - initialTimestamp).TotalMilliseconds; currentFrameData.DepthImageWidth = depthImage.WidthPixels; currentFrameData.DepthImageHeight = depthImage.HeightPixels; // Read image data from the SDK. var depthFrame = MemoryMarshal.Cast<byte, ushort>(depthImage.Memory.Span); // Repack data and store image data. int byteCounter = 0; currentFrameData.DepthImageSize = currentFrameData.DepthImageWidth currentFrameData.DepthImageHeight 3; for (int it = currentFrameData.DepthImageWidth currentFrameData.DepthImageHeight - 1; it > 0; it--) { byte b = (byte)(depthFrame[it] / (ConfigLoader.Instance.Configs.SkeletalTracking.MaximumDisplayedDepthInMillimeters) 255); currentFrameData.DepthImage[byteCounter++] = b; currentFrameData.DepthImage[byteCounter++] = b; currentFrameData.DepthImage[byteCounter++] = b; } if (RawDataLoggingFile != null && RawDataLoggingFile.CanWrite) { binaryFormatter.Serialize(RawDataLoggingFile, currentFrameData); } // Update data variable that is being read in the UI thread. SetCurrentFrameData(ref currentFrameData); } } } Debug.Log("dispose of tracker now!!!!!"); } } } if (RawDataLoggingFile != null) { RawDataLoggingFile.Close(); } } catch (Exception e) { DebugTime.isDetectCamera = false; Debug.LogException(e); token.ThrowIfCancellationRequested(); } } } — Reply to this email directly, view it on GitHub, or unsubscribe. You are receiving this because you were mentioned.Message ID: @.***>