using System;
using System.Collections.Generic;
using System.IO;
using JetBrains.Annotations;
using Newtonsoft.Json.Linq;
using Unity.Collections;
using Unity.Simulation;
using UnityEngine;
#pragma warning disable 649
namespace UnityEngine.Perception.GroundTruth
{
/// <summary>
/// Global manager for frame scheduling and output capture for simulations.
/// Data capture follows the schema defined in *TODO: Expose schema publicly*
/// </summary>
public static class DatasetCapture
{
static readonly Guid k_DatasetGuid = Guid.NewGuid();
internal static SimulationState SimulationState { get; private set; } = CreateSimulationData();
internal static string OutputDirectory => SimulationState.GetOutputDirectoryNoCreate();
/// <summary>
/// The json metadata schema version the DatasetCapture's output conforms to.
/// </summary>
public static string SchemaVersion => "0.0.1";
/// <summary>
/// Called when the simulation ends. The simulation ends on playmode exit, application exit, or when <see cref="ResetSimulation"/> is called.
/// </summary>
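/// <example>
/// A minimal usage sketch (the log message is illustrative, not part of this API):
/// <code>
/// DatasetCapture.SimulationEnding += () => Debug.Log("Dataset capture is ending; pending data will be written.");
/// </code>
/// </example>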
public static event Action SimulationEnding;
/// <summary>
/// Register a new ego. Used along with <see cref="RegisterSensor"/> to organize sensors under a top-level ego container.
/// </summary>
/// <param name="description">A human-readable description for the ego.</param>
/// <returns>An <see cref="EgoHandle"/>, which can be used to organize sensors under a common ego.</returns>
public static EgoHandle RegisterEgo(string description)
{
var ego = new EgoHandle(Guid.NewGuid(), description);
SimulationState.AddEgo(ego);
return ego;
}
/// <summary>
/// Register a new sensor under the given ego.
/// </summary>
/// <param name="egoHandle">The ego container for the sensor. Sensor orientation will be reported in the context of the given ego.</param>
/// <param name="modality">The kind of the sensor (ex. "camera", "lidar").</param>
/// <param name="description">A human-readable description of the sensor (ex. "front-left rgb camera").</param>
/// <param name="firstCaptureFrame">The time, in seconds, from the start of the sequence at which this sensor should first be scheduled.</param>
/// <param name="captureTriggerMode">The method of triggering captures for this sensor.</param>
/// <param name="simulationDeltaTime">The simulation frame time, in seconds, requested by this sensor.</param>
/// <param name="framesBetweenCaptures">The number of frames to simulate and render between the camera's scheduled captures. Setting this to 0 makes the camera capture every frame.</param>
/// <param name="manualSensorAffectSimulationTiming">Have this unscheduled (manual capture) camera affect simulation timings (similar to a scheduled camera) by requesting a specific frame delta time.</param>
/// <returns>A <see cref="SensorHandle"/>, which should be used to check <see cref="SensorHandle.ShouldCaptureThisFrame"/> each frame to determine whether to capture (or render) that frame.
/// It is also used to report captures, annotations, and metrics on the sensor.</returns>
/// <exception cref="ArgumentException">Thrown if ego is invalid.</exception>
public static SensorHandle RegisterSensor(EgoHandle egoHandle, string modality, string description, float firstCaptureFrame, CaptureTriggerMode captureTriggerMode, float simulationDeltaTime, int framesBetweenCaptures, bool manualSensorAffectSimulationTiming = false)
{
if (!SimulationState.Contains(egoHandle.Id))
throw new ArgumentException("Supplied ego is not part of the simulation.", nameof(egoHandle));
var sensor = new SensorHandle(Guid.NewGuid());
SimulationState.AddSensor(egoHandle, modality, description, firstCaptureFrame, captureTriggerMode, simulationDeltaTime, framesBetweenCaptures, manualSensorAffectSimulationTiming, sensor);
return sensor;
}
/// <summary>
/// Creates a metric type, which can be used to produce metrics during the simulation.
/// See ReportMetric and ReportMetricAsync on <see cref="DatasetCapture"/>, <see cref="SensorHandle"/>, and <see cref="Annotation"/>.
/// </summary>
/// <param name="name">Human readable annotation spec name (e.g. semantic_segmentation, instance_segmentation, etc.)</param>
/// <param name="description">Description of the annotation.</param>
/// <param name="id">The ID for this metric. This allows metric types to be shared across simulations and sequences.</param>
/// <returns>A MetricDefinition, which can be used during this simulation to report metrics.</returns>
public static MetricDefinition RegisterMetricDefinition(string name, string description = null, Guid id = default)
{
return RegisterMetricDefinition<object>(name, null, description, id);
}
/// <summary>
/// Creates a metric type, which can be used to produce metrics during the simulation.
/// See ReportMetric and ReportMetricAsync on <see cref="DatasetCapture"/>, <see cref="SensorHandle"/>, and <see cref="Annotation"/>.
/// </summary>
/// <param name="name">Human readable annotation spec name (e.g. semantic_segmentation, instance_segmentation, etc.)</param>
/// <param name="description">Description of the annotation.</param>
/// <param name="specValues">Format-specific specification for the metric values. Will be converted to json automatically.</param>
/// <param name="id">The ID for this metric. This allows metric types to be shared across simulations and sequences.</param>
/// <typeparam name="TSpec">The type of the struct to write.</typeparam>
/// <returns>A MetricDefinition, which can be used during this simulation to report metrics.</returns>
public static MetricDefinition RegisterMetricDefinition<TSpec>(string name, TSpec[] specValues, string description = null, Guid id = default)
{
return SimulationState.RegisterMetricDefinition(name, specValues, description, id);
}
/// <summary>
/// Creates an annotation type, which can be used to produce annotations during the simulation.
/// See <see cref="SensorHandle.ReportAnnotationFile"/>, <see cref="SensorHandle.ReportAnnotationValues{T}"/> and <see cref="SensorHandle.ReportAnnotationAsync"/>.
/// </summary>
/// <param name="name">Human readable annotation spec name (e.g. semantic_segmentation, instance_segmentation, etc.)</param>
/// <param name="description">Description of the annotation.</param>
/// <param name="format">Optional format name.</param>
/// <param name="id">The ID for this annotation type. This allows annotation types to be shared across simulations and sequences.</param>
/// <returns>An AnnotationDefinition. If the given <paramref name="id"/> has already been defined, its AnnotationDefinition is returned.</returns>
public static AnnotationDefinition RegisterAnnotationDefinition(string name, string description = null, string format = "json", Guid id = default)
{
return RegisterAnnotationDefinition<object>(name, null, description, format, id);
}
/// <summary>
/// Creates an annotation type, which can be used to produce annotations during the simulation.
/// See <see cref="SensorHandle.ReportAnnotationFile"/>, <see cref="SensorHandle.ReportAnnotationValues{T}"/> and <see cref="SensorHandle.ReportAnnotationAsync"/>.
/// </summary>
/// <param name="name">Human readable annotation spec name (e.g. semantic_segmentation, instance_segmentation, etc.)</param>
/// <param name="description">Description of the annotation.</param>
/// <param name="format">Optional format name.</param>
/// <param name="specValues">Format-specific specification for the annotation values (ex. label-value mappings for semantic segmentation images).</param>
/// <param name="id">The ID for this annotation type. This allows annotation types to be shared across simulations and sequences.</param>
/// <typeparam name="TSpec">The type of the values for the spec array in the resulting json.</typeparam>
/// <returns>An AnnotationDefinition. If the given <paramref name="id"/> has already been defined, its AnnotationDefinition is returned.</returns>
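/// <example>
/// A minimal sketch of registering an annotation definition with spec values. The SemanticSegmentationSpec type, its field names, and the "PNG" format string are illustrative assumptions, not part of this API:
/// <code>
/// // Declared at class scope; the spec entries are converted to json automatically.
/// struct SemanticSegmentationSpec
/// {
///     public string label_name;
///     public int pixel_value;
/// }
///
/// var specs = new[]
/// {
///     new SemanticSegmentationSpec { label_name = "car", pixel_value = 25 },
///     new SemanticSegmentationSpec { label_name = "pedestrian", pixel_value = 50 }
/// };
/// var annotationDefinition = DatasetCapture.RegisterAnnotationDefinition(
///     "semantic segmentation", specs, "pixel-wise semantic labels", "PNG");
/// </code>
/// </example>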
public static AnnotationDefinition RegisterAnnotationDefinition<TSpec>(string name, TSpec[] specValues, string description = null, string format = "json", Guid id = default)
{
return SimulationState.RegisterAnnotationDefinition(name, specValues, description, format, id);
}
/// <summary>
/// Report a metric not associated with any sensor or annotation.
/// </summary>
/// <param name="metricDefinition">The MetricDefinition associated with this metric.</param>
/// <param name="values">An array to be converted to json and put in the "values" field of the metric.</param>
/// <typeparam name="T">The type of the array.</typeparam>
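/// <example>
/// A minimal sketch of defining and reporting a simulation-level metric (the metric name and values are illustrative):
/// <code>
/// var objectCountMetric = DatasetCapture.RegisterMetricDefinition(
///     "object count", "The number of objects in the scene");
/// DatasetCapture.ReportMetric(objectCountMetric, new[] { 42 });
/// </code>
/// </example>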
public static void ReportMetric<T>(MetricDefinition metricDefinition, T[] values)
{
SimulationState.ReportMetric(metricDefinition, values, default, default);
}
/// <summary>
/// Report a metric not associated with any sensor or annotation.
/// </summary>
/// <param name="metricDefinition">The MetricDefinition associated with this metric.</param>
/// <param name="valuesJsonArray">A string-based JSON array to be placed in the "values" field of the metric.</param>
public static void ReportMetric(MetricDefinition metricDefinition, string valuesJsonArray)
{
SimulationState.ReportMetric(metricDefinition, new JRaw(valuesJsonArray), default, default);
}
/// <summary>
/// Report a metric not associated with any sensor or annotation.
/// </summary>
/// <param name="metricDefinition">The metric definition of the metric being reported.</param>
/// <returns>An <see cref="AsyncMetric"/>, which should be used to report the metric values, potentially in a later frame.</returns>
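/// <example>
/// A minimal sketch of the async metric pattern: create the handle in the frame the metric applies to, then report values once they are ready, possibly in a later frame. The myMetricDefinition variable and the reported values are illustrative:
/// <code>
/// var asyncMetric = DatasetCapture.ReportMetricAsync(myMetricDefinition); // myMetricDefinition: previously registered
/// // ... later, once the values have been computed ...
/// asyncMetric.ReportValues(new[] { 0.25f, 0.75f });
/// </code>
/// </example>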
public static AsyncMetric ReportMetricAsync(MetricDefinition metricDefinition) => SimulationState.CreateAsyncMetric(metricDefinition);
/// <summary>
/// Starts a new sequence in the capture.
/// </summary>
public static void StartNewSequence() => SimulationState.StartNewSequence();
internal static bool IsValid(Guid id) => SimulationState.Contains(id);
static SimulationState CreateSimulationData()
{
//TODO: Remove the Guid path when we have proper dataset merging in Unity Simulation and Thea
return new SimulationState($"Dataset{k_DatasetGuid}");
}
[RuntimeInitializeOnLoadMethod]
static void OnInitializeOnLoad()
{
Manager.Instance.ShutdownNotification += ResetSimulation;
}
/// <summary>
/// Stop the current simulation and start a new one. All pending data is written to disk before returning.
/// </summary>
public static void ResetSimulation()
{
//this order ensures that exceptions thrown by End() do not prevent the state from being reset
SimulationEnding?.Invoke();
var oldSimulationState = SimulationState;
SimulationState = CreateSimulationData();
oldSimulationState.End();
}
}
/// <summary>
/// Capture trigger modes for sensors.
/// </summary>
public enum CaptureTriggerMode
{
/// <summary>
/// Captures happen automatically based on a start frame and frame delta time.
/// </summary>
Scheduled,
/// <summary>
/// Captures should be triggered manually through calling the manual capture method of the sensor using this trigger mode.
/// </summary>
Manual
}
/// <summary>
/// A handle to a sensor managed by the <see cref="DatasetCapture"/>. It can be used to check whether the sensor
/// is expected to capture this frame and report captures, annotations, and metrics regarding the sensor.
/// </summary>
public struct SensorHandle : IDisposable, IEquatable<SensorHandle>
{
/// <summary>
/// The unique ID of the sensor. This ID is used to refer to this sensor in the json metadata.
/// </summary>
public Guid Id { get; }
internal SensorHandle(Guid id)
{
Id = id;
}
/// <summary>
/// Whether the sensor is currently enabled. When disabled, the DatasetCapture will no longer schedule frames for running captures on this sensor.
/// </summary>
public bool Enabled
{
get => DatasetCapture.SimulationState.IsEnabled(this);
set
{
CheckValid();
DatasetCapture.SimulationState.SetEnabled(this, value);
}
}
/// <summary>
/// Report a file-based annotation related to this sensor in this frame.
/// </summary>
/// <param name="annotationDefinition">The AnnotationDefinition of this annotation.</param>
/// <param name="filename">The path to the file containing the annotation data.</param>
/// <returns>A handle to the reported annotation for reporting annotation-based metrics.</returns>
/// <exception cref="InvalidOperationException">Thrown if this method is called during a frame where <see cref="ShouldCaptureThisFrame"/> is false.</exception>
/// <exception cref="ArgumentException">Thrown if the given AnnotationDefinition is invalid.</exception>
public Annotation ReportAnnotationFile(AnnotationDefinition annotationDefinition, string filename)
{
if (!ShouldCaptureThisFrame)
throw new InvalidOperationException("Annotation reported on SensorHandle in frame when its ShouldCaptureThisFrame is false.");
if (!annotationDefinition.IsValid)
throw new ArgumentException("The given annotationDefinition is invalid", nameof(annotationDefinition));
return DatasetCapture.SimulationState.ReportAnnotationFile(annotationDefinition, this, filename);
}
/// <summary>
/// Report a value-based annotation related to this sensor in this frame.
/// </summary>
/// <param name="annotationDefinition">The AnnotationDefinition of this annotation.</param>
/// <param name="values">The annotation data, which will be automatically converted to json.</param>
/// <typeparam name="T">The type of the values array.</typeparam>
/// <returns>Returns a handle to the reported annotation for reporting annotation-based metrics.</returns>
/// <exception cref="InvalidOperationException">Thrown if this method is called during a frame where <see cref="ShouldCaptureThisFrame"/> is false.</exception>
/// <exception cref="ArgumentException">Thrown if the given AnnotationDefinition is invalid.</exception>
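/// <example>
/// A minimal sketch, assuming a previously registered annotation definition and an illustrative BoundingBoxValue struct (neither boundingBoxDefinition nor BoundingBoxValue is part of this API):
/// <code>
/// if (sensorHandle.ShouldCaptureThisFrame)
/// {
///     // BoundingBoxValue is an illustrative struct with label_id, x, y, width, height fields.
///     var boxes = new[] { new BoundingBoxValue { label_id = 1, x = 10, y = 20, width = 30, height = 40 } };
///     sensorHandle.ReportAnnotationValues(boundingBoxDefinition, boxes);
/// }
/// </code>
/// </example>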
public Annotation ReportAnnotationValues<T>(AnnotationDefinition annotationDefinition, T[] values)
{
if (!ShouldCaptureThisFrame)
throw new InvalidOperationException("Annotation reported on SensorHandle in frame when its ShouldCaptureThisFrame is false.");
if (!annotationDefinition.IsValid)
throw new ArgumentException("The given annotationDefinition is invalid", nameof(annotationDefinition));
return DatasetCapture.SimulationState.ReportAnnotationValues(annotationDefinition, this, values);
}
/// <summary>
/// Creates an async annotation for reporting the values for an annotation during a future frame.
/// </summary>
/// <param name="annotationDefinition">The AnnotationDefinition of this annotation.</param>
/// <returns>Returns a handle to the <see cref="AsyncAnnotation"/>, which can be used to report annotation data during a subsequent frame.</returns>
/// <exception cref="InvalidOperationException">Thrown if this method is called during a frame where <see cref="ShouldCaptureThisFrame"/> is false.</exception>
/// <exception cref="ArgumentException">Thrown if the given AnnotationDefinition is invalid.</exception>
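/// <example>
/// A minimal sketch of the async annotation pattern. The annotationDefinition and computedValues variables are illustrative and assumed to come from earlier registration and later computation (for example, after a GPU readback completes):
/// <code>
/// var asyncAnnotation = sensorHandle.ReportAnnotationAsync(annotationDefinition);
/// // ... in a later frame, when the annotation data is available ...
/// asyncAnnotation.ReportValues(computedValues);
/// </code>
/// </example>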
public AsyncAnnotation ReportAnnotationAsync(AnnotationDefinition annotationDefinition)
{
if (!ShouldCaptureThisFrame)
throw new InvalidOperationException("Annotation reported on SensorHandle in frame when its ShouldCaptureThisFrame is false.");
if (!annotationDefinition.IsValid)
throw new ArgumentException("The given annotationDefinition is invalid", nameof(annotationDefinition));
return DatasetCapture.SimulationState.ReportAnnotationAsync(annotationDefinition, this);
}
/// <summary>
/// Report a sensor capture recorded to disk. This should be called on the same frame as the capture is taken, and may be called before the file is written to disk.
/// </summary>
/// <param name="filename">The path to the capture data.</param>
/// <param name="sensorSpatialData">Spatial data describing the sensor and the ego containing it.</param>
/// <param name="additionalSensorValues">Additional values to be emitted as json name/value pairs on the sensor object under the capture.</param>
/// <exception cref="InvalidOperationException">Thrown if ReportCapture is being called when ShouldCaptureThisFrame is false or it has already been called this frame.</exception>
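/// <example>
/// A minimal sketch of reporting a capture each frame the sensor is scheduled. The file path and the egoObject/cameraObject references are illustrative assumptions:
/// <code>
/// if (sensorHandle.ShouldCaptureThisFrame)
/// {
///     var spatialData = SensorSpatialData.FromGameObjects(egoObject, cameraObject);
///     sensorHandle.ReportCapture("captures/rgb_" + Time.frameCount + ".png", spatialData);
/// }
/// </code>
/// </example>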
public void ReportCapture(string filename, SensorSpatialData sensorSpatialData, params(string, object)[] additionalSensorValues)
{
if (!ShouldCaptureThisFrame)
{
throw new InvalidOperationException("Capture reported in frame when ShouldCaptureThisFrame is false.");
}
DatasetCapture.SimulationState.ReportCapture(this, filename, sensorSpatialData, additionalSensorValues);
}
/// <summary>
/// Whether the sensor should capture this frame. Sensors are expected to call this method each frame to determine whether
/// they should capture during the frame. Captures should only be reported when this is true.
/// </summary>
public bool ShouldCaptureThisFrame => DatasetCapture.SimulationState.ShouldCaptureThisFrame(this);
/// <summary>
/// Requests a capture from this sensor on the next rendered frame. Can only be used with manual capture mode (<see cref="CaptureTriggerMode.Manual"/>).
/// </summary>
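/// <example>
/// A minimal sketch, assuming manualSensorHandle was registered with <see cref="CaptureTriggerMode.Manual"/> (the handle and trigger condition are illustrative):
/// <code>
/// if (pedestrianEnteredCrosswalk)   // illustrative trigger condition
///     manualSensorHandle.RequestCapture();
/// </code>
/// </example>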
public void RequestCapture()
{
DatasetCapture.SimulationState.SetNextCaptureTimeToNowForSensor(this);
}
/// <summary>
/// Report a metric regarding this sensor in the current frame.
/// </summary>
/// <param name="metricDefinition">The <see cref="MetricDefinition"/> of the metric.</param>
/// <param name="values">An array to be converted to json and put in the "values" field of the metric.</param>
/// <typeparam name="T">The value type.</typeparam>
/// <exception cref="ArgumentNullException">Thrown if values is null.</exception>
/// <exception cref="InvalidOperationException">Thrown if <see cref="ShouldCaptureThisFrame"/> is false.</exception>
public void ReportMetric<T>(MetricDefinition metricDefinition, [NotNull] T[] values)
{
if (values == null)
throw new ArgumentNullException(nameof(values));
if (!ShouldCaptureThisFrame)
throw new InvalidOperationException($"Sensor-based metrics may only be reported when SensorHandle.ShouldCaptureThisFrame is true");
DatasetCapture.SimulationState.ReportMetric(metricDefinition, values, this, default);
}
/// <summary>
/// Report a metric regarding this sensor in the current frame.
/// </summary>
/// <param name="metricDefinition">The <see cref="MetricDefinition"/> of the metric.</param>
/// <param name="valuesJsonArray">A string-based JSON array to be placed in the "values" field of the metric.</param>
/// <exception cref="ArgumentNullException">Thrown if valuesJsonArray is null.</exception>
/// <exception cref="InvalidOperationException">Thrown if <see cref="ShouldCaptureThisFrame"/> is false.</exception>
public void ReportMetric(MetricDefinition metricDefinition, [NotNull] string valuesJsonArray)
{
if (!ShouldCaptureThisFrame)
throw new InvalidOperationException($"Sensor-based metrics may only be reported when SensorHandle.ShouldCaptureThisFrame is true");
DatasetCapture.SimulationState.ReportMetric(metricDefinition, new JRaw(valuesJsonArray), this, default);
}
/// <summary>
/// Start an async metric for reporting metric values for this frame in a subsequent frame.
/// </summary>
/// <param name="metricDefinition">The <see cref="MetricDefinition"/> of the metric.</param>
/// <exception cref="InvalidOperationException">Thrown if <see cref="ShouldCaptureThisFrame"/> is false.</exception>
/// <returns>An <see cref="AsyncMetric"/>, which should be used to report the metric values, potentially in a later frame.</returns>
public AsyncMetric ReportMetricAsync(MetricDefinition metricDefinition)
{
if (!ShouldCaptureThisFrame)
throw new InvalidOperationException($"Sensor-based metrics may only be reported when SensorHandle.ShouldCaptureThisFrame is true");
return DatasetCapture.SimulationState.CreateAsyncMetric(metricDefinition, this);
}
/// <summary>
/// Dispose this SensorHandle.
/// </summary>
public void Dispose()
{
this.Enabled = false;
}
/// <summary>
/// Returns whether this SensorHandle is valid in the current simulation. Nil SensorHandles are never valid.
/// </summary>
public bool IsValid => DatasetCapture.IsValid(this.Id);
/// <summary>
/// Returns true if this SensorHandle was default-instantiated.
/// </summary>
public bool IsNil => this == default;
void CheckValid()
{
if (!DatasetCapture.IsValid(this.Id))
throw new InvalidOperationException("SensorHandle has been disposed or its simulation has ended");
}
/// <inheritdoc/>
public bool Equals(SensorHandle other)
{
return Id.Equals(other.Id);
}
/// <inheritdoc/>
public override bool Equals(object obj)
{
return obj is SensorHandle other && Equals(other);
}
/// <inheritdoc/>
public override int GetHashCode()
{
return Id.GetHashCode();
}
/// <summary>
/// Compares two instances for equality.
/// </summary>
/// <param name="left">The first SensorHandle.</param>
/// <param name="right">The second SensorHandle.</param>
/// <returns>Returns true if the two SensorHandles refer to the same sensor.</returns>
public static bool operator==(SensorHandle left, SensorHandle right)
{
return left.Equals(right);
}
/// <summary>
/// Compares two instances for inequality.
/// </summary>
/// <param name="left">The first SensorHandle.</param>
/// <param name="right">The second SensorHandle.</param>
/// <returns>Returns false if the two SensorHandles refer to the same sensor.</returns>
public static bool operator!=(SensorHandle left, SensorHandle right)
{
return !left.Equals(right);
}
}
/// <summary>
/// Handle to a metric whose values may be reported in a subsequent frame.
/// </summary>
public struct AsyncMetric
{
internal readonly int Id;
readonly SimulationState m_SimulationState;
internal AsyncMetric(MetricDefinition metricDefinition, int id, SimulationState simulationState)
{
this.Id = id;
MetricDefinition = metricDefinition;
m_SimulationState = simulationState;
}
/// <summary>
/// The MetricDefinition associated with this AsyncMetric.
/// </summary>
public readonly MetricDefinition MetricDefinition;
/// <summary>
/// True if the simulation is still running.
/// </summary>
public bool IsValid => !IsNil && m_SimulationState.IsRunning;
/// <summary>
/// True if ReportValues has not been called yet.
/// </summary>
public bool IsPending => !IsNil && m_SimulationState.IsPending(ref this);
/// <summary>
/// Returns true if the AsyncMetric is its default value.
/// </summary>
public bool IsNil => m_SimulationState == null && Id == default;
/// <summary>
/// Report the values for this AsyncMetric. Calling this method will transition <see cref="IsPending"/> to false.
/// ReportValues may only be called once per AsyncMetric.
/// </summary>
/// <param name="values">The values to report for the metric. These values will be converted to json.</param>
/// <typeparam name="T">The type of the values.</typeparam>
/// <exception cref="ArgumentNullException">Thrown if values is null.</exception>
public void ReportValues<T>(T[] values)
{
if (values == null)
throw new ArgumentNullException(nameof(values));
m_SimulationState.ReportAsyncMetricResult(this, values: values);
}
/// <summary>
/// Report the values for this AsyncMetric. Calling this method will transition <see cref="IsPending"/> to false.
/// ReportValues may only be called once per AsyncMetric.
/// </summary>
/// <param name="valuesJsonArray">A JSON array in string form.</param>
/// <exception cref="ArgumentNullException">Thrown if valuesJsonArray is null.</exception>
public void ReportValues(string valuesJsonArray)
{
if (valuesJsonArray == null)
throw new ArgumentNullException(nameof(valuesJsonArray));
m_SimulationState.ReportAsyncMetricResult(this, valuesJsonArray);
}
}
/// <summary>
/// A handle to an async annotation, used to report values for an annotation after the frame for the annotation has passed.
/// See <see cref="SensorHandle.ReportAnnotationAsync"/>.
/// </summary>
public struct AsyncAnnotation
{
internal AsyncAnnotation(Annotation annotation, SimulationState simulationState)
{
Annotation = annotation;
m_SimulationState = simulationState;
}
/// <summary>
/// The annotation associated with this AsyncAnnotation. Can be used to report metrics on the annotation.
/// </summary>
public readonly Annotation Annotation;
readonly SimulationState m_SimulationState;
/// <summary>
/// True if the annotation is nil (was created using default instantiation).
/// </summary>
internal bool IsNil => m_SimulationState == null && Annotation.IsNil;
/// <summary>
/// True if the annotation is generated by the currently running simulation.
/// </summary>
public bool IsValid => !IsNil && m_SimulationState.IsRunning;
/// <summary>
/// True if neither ReportValues nor ReportFile has been called.
/// </summary>
public bool IsPending => !IsNil && m_SimulationState.IsPending(Annotation);
/// <summary>
/// Report file-based data for this annotation.
/// </summary>
/// <param name="path">The path to the file containing the annotation data.</param>
/// <exception cref="ArgumentNullException">Thrown if path is null.</exception>
public void ReportFile(string path)
{
if (path == null)
throw new ArgumentNullException(nameof(path));
m_SimulationState.ReportAsyncAnnotationResult(this, path);
}
/// <summary>
/// Report file-based and value-based data for this annotation.
/// </summary>
/// <param name="path">The path to the file containing the annotation data.</param>
/// <param name="values">The annotation data.</param>
/// <typeparam name="T">The type of the data.</typeparam>
/// <exception cref="ArgumentNullException">Thrown if path or values is null.</exception>
public void ReportFileAndValues<T>(string path, IEnumerable<T> values)
{
if (path == null)
throw new ArgumentNullException(nameof(path));
if (values == null)
throw new ArgumentNullException(nameof(values));
m_SimulationState.ReportAsyncAnnotationResult(this, path, values);
}
/// <summary>
/// Report value-based data for this annotation.
/// </summary>
/// <param name="values">The annotation data.</param>
/// <typeparam name="T">The type of the data.</typeparam>
/// <exception cref="ArgumentNullException">Thrown if values is null.</exception>
public void ReportValues<T>(IEnumerable<T> values)
{
if (values == null)
throw new ArgumentNullException(nameof(values));
m_SimulationState.ReportAsyncAnnotationResult(this, values: values);
}
/// <summary>
/// Report value-based data for this annotation.
/// </summary>
/// <param name="values">The annotation data.</param>
/// <typeparam name="T">The type of the data.</typeparam>
/// <exception cref="ArgumentNullException">Thrown if values is null.</exception>
public void ReportValues<T>(NativeSlice<T> values) where T : struct
{
if (values == null)
throw new ArgumentNullException(nameof(values));
m_SimulationState.ReportAsyncAnnotationResult(this, values: values);
}
}
/// <summary>
/// A handle to an annotation. Can be used to report metrics on the annotation.
/// </summary>
public struct Annotation : IEquatable<Annotation>
{
/// <summary>
/// The ID of the annotation which will be used in the json metadata.
/// </summary>
public readonly Guid Id;
/// <summary>
/// The step on which the annotation was reported.
/// </summary>
public readonly int Step;
/// <summary>
/// The SensorHandle on which the annotation was reported.
/// </summary>
public readonly SensorHandle SensorHandle;
internal Annotation(SensorHandle sensorHandle, int step)
{
Id = Guid.NewGuid();
SensorHandle = sensorHandle;
Step = step;
}
/// <summary>
/// Returns true if the annotation is nil (created using default instantiation).
/// </summary>
public bool IsNil => Id == Guid.Empty;
/// <summary>
/// Reports a metric on this annotation. May only be called in the same frame as the annotation was reported.
/// </summary>
/// <param name="metricDefinition">The MetricDefinition of the metric being reported.</param>
/// <param name="values">An array to be converted to json and put in the "values" field of the metric.</param>
/// <typeparam name="T">The type of the values array.</typeparam>
/// <exception cref="ArgumentNullException">Thrown if values is null.</exception>
/// <exception cref="InvalidOperationException">Thrown if <see cref="SensorHandle"/> reports false for <see cref="SensorHandle.ShouldCaptureThisFrame"/>.</exception>
public void ReportMetric<T>(MetricDefinition metricDefinition, [NotNull] T[] values)
{
if (values == null)
throw new ArgumentNullException(nameof(values));
if (!SensorHandle.ShouldCaptureThisFrame)
throw new InvalidOperationException($"Sensor-based metrics may only be reported when SensorHandle.ShouldCaptureThisFrame is true");
DatasetCapture.SimulationState.ReportMetric(metricDefinition, values, SensorHandle, this);
}
/// <summary>
/// Reports a metric on this annotation. May only be called in the same frame as the annotation was reported.
/// </summary>
/// <param name="metricDefinition">The MetricDefinition of the metric being reported.</param>
/// <param name="valuesJsonArray">A string-based JSON array to be placed in the "values" field of the metric.</param>
/// <exception cref="ArgumentNullException">Thrown if valuesJsonArray is null.</exception>
/// <exception cref="InvalidOperationException">Thrown if <see cref="SensorHandle"/> reports false for <see cref="SensorHandle.ShouldCaptureThisFrame"/>.</exception>
public void ReportMetric(MetricDefinition metricDefinition, [NotNull] string valuesJsonArray)
{
if (valuesJsonArray == null)
throw new ArgumentNullException(nameof(valuesJsonArray));
if (!SensorHandle.ShouldCaptureThisFrame)
throw new InvalidOperationException($"Sensor-based metrics may only be reported when SensorHandle.ShouldCaptureThisFrame is true");
DatasetCapture.SimulationState.ReportMetric(metricDefinition, new JRaw(valuesJsonArray), SensorHandle, this);
}
/// <summary>
/// Report a metric whose values will be supplied in a later frame.
/// </summary>
/// <param name="metricDefinition">The type of the metric.</param>
/// <returns>A handle to an AsyncMetric, which can be used to report values for this metric in future frames.</returns>
public AsyncMetric ReportMetricAsync(MetricDefinition metricDefinition) => DatasetCapture.SimulationState.CreateAsyncMetric(metricDefinition, SensorHandle, this);
/// <inheritdoc/>
public bool Equals(Annotation other)
{
return Id.Equals(other.Id);
}
/// <inheritdoc/>
public override bool Equals(object obj)
{
return obj is Annotation other && Equals(other);
}
/// <inheritdoc/>
public override int GetHashCode()
{
return Id.GetHashCode();
}
}
/// <summary>
/// An ego, which is used to group multiple sensors under a single frame of reference.
/// </summary>
public struct EgoHandle : IEquatable<EgoHandle>
{
/// <summary>
/// The ID for this ego. This ID will be used to refer to this ego in the json metadata.
/// </summary>
public readonly Guid Id;
/// <summary>
/// A human-readable description of this ego.
/// </summary>
public readonly string Description;
internal EgoHandle(Guid id, string description)
{
this.Id = id;
this.Description = description;
}
/// <inheritdoc/>
public bool Equals(EgoHandle other)
{
return Id.Equals(other.Id);
}
/// <inheritdoc/>
public override bool Equals(object obj)
{
return obj is EgoHandle other && Equals(other);
}
/// <inheritdoc/>
public override int GetHashCode()
{
return Id.GetHashCode();
}
/// <summary>
/// Compares two instances for equality.
/// </summary>
/// <param name="left">The first EgoHandle.</param>
/// <param name="right">The second EgoHandle.</param>
/// <returns>Returns true if the two EgoHandles refer to the same ego.</returns>
public static bool operator==(EgoHandle left, EgoHandle right)
{
return left.Equals(right);
}
/// <summary>
/// Compares two instances for inequality.
/// </summary>
/// <param name="left">The first EgoHandle.</param>
/// <param name="right">The second EgoHandle.</param>
/// <returns>Returns false if the two EgoHandles refer to the same ego.</returns>
public static bool operator!=(EgoHandle left, EgoHandle right)
{
return !left.Equals(right);
}
}
/// <summary>
/// A metric type, used to define a kind of metric. See <see cref="DatasetCapture.RegisterMetricDefinition"/>.
/// </summary>
public struct MetricDefinition : IEquatable<MetricDefinition>
{
/// <summary>
/// The ID of the metric.
/// </summary>
public readonly Guid Id;
internal MetricDefinition(Guid id)
{
Id = id;
}
/// <inheritdoc/>
public bool Equals(MetricDefinition other)
{
return Id.Equals(other.Id);
}
/// <inheritdoc/>
public override bool Equals(object obj)
{
return obj is MetricDefinition other && Equals(other);
}
/// <inheritdoc/>
public override int GetHashCode()
{
return Id.GetHashCode();
}
}
/// <summary>
/// An annotation type, used to define a kind of annotation. See <see cref="DatasetCapture.RegisterAnnotationDefinition"/>.
/// </summary>
public struct AnnotationDefinition : IEquatable<AnnotationDefinition>
{
/// <inheritdoc/>
public bool Equals(AnnotationDefinition other)
{
return Id.Equals(other.Id);
}
/// <inheritdoc/>
public override bool Equals(object obj)
{
return obj is AnnotationDefinition other && Equals(other);
}
/// <inheritdoc/>
public override int GetHashCode()
{
return Id.GetHashCode();
}
/// <summary>
/// The ID of the annotation type. Used in the json metadata to associate annotations with the type.
/// </summary>
public readonly Guid Id;
internal bool IsValid => DatasetCapture.IsValid(Id);
internal AnnotationDefinition(Guid id)
{
Id = id;
}
}
/// <summary>
/// Container holding the poses of the ego and sensor. Also optionally contains the ego velocity and acceleration.
/// </summary>
public struct SensorSpatialData
{
/// <summary>
/// The pose of the ego.
/// </summary>
public Pose EgoPose;
/// <summary>
/// The pose of the sensor relative to the ego.
/// </summary>
public Pose SensorPose;
/// <summary>
/// The velocity of the ego (optional).
/// </summary>
public Vector3? EgoVelocity;
/// <summary>
/// The acceleration of the ego (optional).
/// </summary>
public Vector3? EgoAcceleration;
/// <summary>
/// Create a new SensorSpatialData with the given values.
/// </summary>
/// <param name="egoPose">The pose of the ego.</param>
/// <param name="sensorPose">The pose of the sensor relative to the ego.</param>
/// <param name="egoVelocity">The velocity of the ego.</param>
/// <param name="egoAcceleration">The acceleration of the ego.</param>
public SensorSpatialData(Pose egoPose, Pose sensorPose, Vector3? egoVelocity, Vector3? egoAcceleration)
{
EgoPose = egoPose;
SensorPose = sensorPose;
EgoVelocity = egoVelocity;
EgoAcceleration = egoAcceleration;
}
/// <summary>
/// Create a SensorSpatialData from two <see cref="GameObject"/>s, one representing the ego and the other representing the sensor.
/// </summary>
/// <param name="ego">The ego GameObject.</param>
/// <param name="sensor">The sensor GameObject.</param>
/// <returns>Returns a SensorSpatialData filled out with EgoPose and SensorPose based on the given objects.</returns>
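/// <example>
/// A minimal sketch (the GameObject, SensorHandle, and path references are illustrative assumptions):
/// <code>
/// var spatialData = SensorSpatialData.FromGameObjects(egoObject, cameraObject);
/// sensorHandle.ReportCapture(pathToCaptureFile, spatialData);
/// </code>
/// </example>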
public static SensorSpatialData FromGameObjects(GameObject ego, GameObject sensor)
{
ego = ego == null ? sensor : ego;
var egoRotation = ego.transform.rotation;
var egoPosition = ego.transform.position;
var sensorSpatialData = new SensorSpatialData()
{
EgoPose = new Pose(egoPosition, egoRotation),
SensorPose = new Pose(sensor.transform.position - egoPosition, sensor.transform.rotation * Quaternion.Inverse(egoRotation))
};
return sensorSpatialData;
}
}
}