using System;
using System.Collections.Generic;
using UnityEngine.Perception.GroundTruth.Exporters.Solo;
namespace UnityEngine.Perception.GroundTruth.DataModel
{
/// <summary>
/// Interface for data model classes that can serialize themselves into
/// a message via an <see cref="IMessageBuilder"/>.
/// </summary>
public interface IMessageProducer
{
/// <summary>
/// Writes this object's data to the passed in message builder.
/// </summary>
/// <param name="builder">The message builder to write to.</param>
void ToMessage(IMessageBuilder builder);
}
[Serializable]
public class SensorDefinition : IMessageProducer
{
public SensorDefinition(string id, string modality, string definition)
{
this.id = id;
this.modality = modality;
this.definition = definition;
this.firstCaptureFrame = 0;
this.captureTriggerMode = string.Empty;
this.simulationDeltaTime = 0.0f;
this.framesBetweenCaptures = 0;
this.manualSensorsAffectTiming = false;
}
public virtual bool IsValid()
{
return id != string.Empty && definition != string.Empty;
}
public string id;
public string modality;
public string definition;
public float firstCaptureFrame;
public string captureTriggerMode;
public float simulationDeltaTime;
public int framesBetweenCaptures;
public bool manualSensorsAffectTiming;
public void ToMessage(IMessageBuilder builder)
{
builder.AddString("id", id);
builder.AddString("modality", modality);
builder.AddString("definition", definition);
builder.AddFloat("first_capture_frame", firstCaptureFrame);
builder.AddString("capture_trigger_mode", captureTriggerMode);
builder.AddFloat("simulation_delta_time", simulationDeltaTime);
builder.AddInt("frames_between_captures", framesBetweenCaptures);
builder.AddBoolean("manual_sensors_affect_timing", manualSensorsAffectTiming);
}
}
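// A minimal usage sketch (illustrative only, not part of the package API):
// constructing a sensor definition and validating it before handing it to a
// consumer. The id, modality, and trigger-mode values here are assumptions.
static class SensorDefinitionExample
{
    internal static SensorDefinition CreateExampleCamera()
    {
        var definition = new SensorDefinition("camera", "camera", "An example RGB camera")
        {
            captureTriggerMode = "Scheduled", // hypothetical trigger mode name
            simulationDeltaTime = 1f / 60f,   // capture at roughly 60 Hz
            framesBetweenCaptures = 0,        // capture every frame
        };
        Debug.Assert(definition.IsValid());
        return definition;
    }
}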
[Serializable]
public abstract class AnnotationDefinition : IMessageProducer
{
public string id = string.Empty;
public string description = string.Empty;
public string annotationType = string.Empty;
public AnnotationDefinition() { }
public AnnotationDefinition(string id, string description, string annotationType)
{
this.id = id;
this.description = description;
this.annotationType = annotationType;
}
public virtual bool IsValid()
{
return id != string.Empty && description != string.Empty && annotationType != string.Empty;
}
public virtual void ToMessage(IMessageBuilder builder)
{
builder.AddString("id", id);
builder.AddString("description", description);
builder.AddString("annotation_type", annotationType);
}
}
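// Illustrative sketch of a concrete AnnotationDefinition (hypothetical type,
// not part of the package): subclasses extend the common id/description/type
// data with their own fields and append them in ToMessage.
[Serializable]
class ExampleLabelAnnotationDefinition : AnnotationDefinition
{
    public string[] labels = Array.Empty<string>();
    public ExampleLabelAnnotationDefinition(string id, string description)
        : base(id, description, "example.labels") { }
    public override void ToMessage(IMessageBuilder builder)
    {
        base.ToMessage(builder);
        // Serialize each label as a nested message in a "labels" vector.
        foreach (var label in labels)
        {
            var nested = builder.AddNestedMessageToVector("labels");
            nested.AddString("label", label);
        }
    }
}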
[Serializable]
public class MetricDefinition : IMessageProducer
{
public string id = string.Empty;
public string description = string.Empty;
bool isRegistered { get; set; } = false;
public MetricDefinition() { }
public MetricDefinition(string id, string description)
{
this.id = id;
this.description = description;
}
public virtual bool IsValid()
{
return id != string.Empty && description != string.Empty;
}
public virtual void ToMessage(IMessageBuilder builder)
{
builder.AddString("id", id);
builder.AddString("description", description);
}
}
/// <summary>
/// The top level structure that holds all of the artifacts of a simulation
/// frame. This is only reported after all of the captures, annotations, and
/// metrics are ready to report for a single frame.
/// </summary>
[Serializable]
public class Frame : IMessageProducer
{
public Frame(int frame, int sequence, int step)
{
this.frame = frame;
this.sequence = sequence;
this.step = step;
sensors = new List<Sensor>();
}
/// <summary>
/// The perception frame number of this record.
/// </summary>
public int frame;
/// <summary>
/// The sequence that this record is a part of.
/// </summary>
public int sequence;
/// <summary>
/// The step in the sequence that this record is a part of.
/// </summary>
public int step;
public float timestamp;
/// <summary>
/// A list of all of the sensor captures recorded for the frame.
/// </summary>
public IEnumerable<Sensor> sensors;
public void ToMessage(IMessageBuilder builder)
{
builder.AddInt("frame", frame);
builder.AddInt("sequence", sequence);
builder.AddInt("step", step);
foreach (var s in sensors)
{
var nested = builder.AddNestedMessageToVector("sensors");
s.ToMessage(nested);
}
}
}
/// <summary>
/// Abstract sensor class that holds all of the common information for a sensor.
/// </summary>
[Serializable]
public abstract class Sensor : IMessageProducer
{
/// <summary>
/// The unique, human readable ID for the sensor.
/// </summary>
public string Id;
/// <summary>
/// The type of the sensor.
/// </summary>
public string sensorType;
/// <summary>
/// The description of the sensor.
/// </summary>
public string description;
/// <summary>
/// The position (xyz) of the sensor in the world.
/// </summary>
public Vector3 position;
/// <summary>
/// The rotation of the sensor, in euler angles.
/// </summary>
public Vector3 rotation;
/// <summary>
/// The current velocity (xyz) of the sensor.
/// </summary>
public Vector3 velocity;
/// <summary>
/// The current acceleration (xyz) of the sensor.
/// </summary>
public Vector3 acceleration;
// TODO put in camera intrinsic
// TODO put in projection
/// <summary>
/// A list of all of the annotations recorded for the frame.
/// </summary>
public IEnumerable<Annotation> annotations = new List<Annotation>();
/// <summary>
/// A list of all of the metrics recorded for the frame.
/// </summary>
public IEnumerable<Metric> metrics = new List<Metric>();
public virtual void ToMessage(IMessageBuilder builder)
{
builder.AddString("id", Id);
builder.AddString("sensor_id", sensorType);
builder.AddFloatVector("position", Utils.ToFloatVector(position));
builder.AddFloatVector("rotation", Utils.ToFloatVector(rotation));
builder.AddFloatVector("velocity", Utils.ToFloatVector(velocity));
builder.AddFloatVector("acceleration", Utils.ToFloatVector(acceleration));
foreach (var annotation in annotations)
{
var nested = builder.AddNestedMessageToVector("annotations");
annotation.ToMessage(nested);
}
foreach (var metric in metrics)
{
var nested = builder.AddNestedMessageToVector("metrics");
metric.ToMessage(nested);
}
}
}
/// <summary>
/// The concrete class for an RGB sensor.
/// </summary>
[Serializable]
public class RgbSensor : Sensor
{
/// <summary>
/// The format of the image type.
/// </summary>
public string imageFormat;
/// <summary>
/// The dimensions (width, height) of the image.
/// </summary>
public Vector2 dimension;
/// <summary>
/// The raw bytes of the image file.
/// </summary>
public byte[] buffer;
public override void ToMessage(IMessageBuilder builder)
{
base.ToMessage(builder);
builder.AddString("image_format", imageFormat);
builder.AddFloatVector("dimension", Utils.ToFloatVector(dimension));
builder.AddPngImage("camera", buffer);
}
}
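// Illustrative only: assembling a Frame that carries a single RGB capture.
// The id values and image parameters below are hypothetical.
static class FrameExample
{
    internal static Frame CreateExampleFrame(byte[] pngBytes)
    {
        var camera = new RgbSensor
        {
            Id = "camera",
            sensorType = "camera",
            imageFormat = "png",
            dimension = new Vector2(640, 480),
            buffer = pngBytes,
        };
        // The frame serializes itself recursively: ToMessage visits each
        // sensor, which in turn visits its annotations and metrics.
        return new Frame(frame: 0, sequence: 0, step: 0) { sensors = new[] { camera } };
    }
}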
/// <summary>
/// Abstract class that holds the common data found in all
/// annotations. Concrete instances of this class will add
/// data for their specific annotation type.
/// </summary>
[Serializable]
public abstract class Annotation : IMessageProducer
{
/// <summary>
/// The unique, human readable ID for the annotation.
/// </summary>
public string Id;
/// <summary>
/// The ID of the sensor that this annotation is associated with.
/// </summary>
public string sensorId;
/// <summary>
/// The description of the annotation.
/// </summary>
public string description;
/// <summary>
/// The type of the annotation. This maps directly to one of the
/// concrete subclasses of this abstract class.
/// </summary>
public string annotationType;
public virtual void ToMessage(IMessageBuilder builder)
{
builder.AddString("id", Id);
builder.AddString("sensor_id", sensorId);
builder.AddString("description", description);
builder.AddString("annotation_type", annotationType);
}
}
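// Illustrative sketch of a concrete Annotation (hypothetical type, not part of
// the package): a single 2D bounding box reported in pixel coordinates.
[Serializable]
class ExampleBoundingBoxAnnotation : Annotation
{
    public Vector2 origin;    // top-left corner of the box, in pixels
    public Vector2 dimension; // width and height of the box, in pixels
    public override void ToMessage(IMessageBuilder builder)
    {
        base.ToMessage(builder);
        builder.AddFloatVector("origin", Utils.ToFloatVector(origin));
        builder.AddFloatVector("dimension", Utils.ToFloatVector(dimension));
    }
}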
/// <summary>
/// Abstract class that holds the common data found in all
/// metrics. Concrete instances of this class will add
/// data for their specific metric type.
/// </summary>
[Serializable]
public abstract class Metric : IMessageProducer
{
/// <summary>
/// The unique, human readable ID for the metric.
/// </summary>
public string Id;
/// <summary>
/// The ID of the sensor that this metric is associated with.
/// </summary>
public string sensorId;
/// <summary>
/// The ID of the annotation that this metric is associated with. If the value
/// is empty (""), the metric is capture-wide and not associated with a
/// specific annotation.
/// </summary>
public string annotationId;
/// <summary>
/// A human readable description of what this metric is for.
/// </summary>
public string description;
/// <summary>
/// Additional key/value pair metadata that can be associated with
/// any metric.
/// </summary>
public Dictionary<string, object> metadata;
public virtual void ToMessage(IMessageBuilder builder)
{
builder.AddString("id", Id);
builder.AddString("sensor_id", sensorId);
builder.AddString("annotation_id", annotationId);
builder.AddString("description", description);
}
}
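// Illustrative sketch of a concrete Metric (hypothetical type, not part of the
// package): a metric that carries a single scalar value.
[Serializable]
class ExampleScalarMetric : Metric
{
    public float value;
    public override void ToMessage(IMessageBuilder builder)
    {
        base.ToMessage(builder);
        builder.AddFloat("value", value);
    }
}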
/// <summary>
/// Metadata describing the simulation.
/// </summary>
[Serializable]
public class SimulationMetadata
{
public SimulationMetadata()
{
// Application.unityVersion reports the version of the Unity runtime
// executing the simulation.
unityVersion = Application.unityVersion;
perceptionVersion = "0.8.0-preview.4";
#if HDRP_PRESENT
renderPipeline = "HDRP";
#elif URP_PRESENT
renderPipeline = "URP";
#else
renderPipeline = "built-in";
#endif
metadata = new Dictionary<string, object>();
}
/// <summary>
/// The version of the Unity editor executing the simulation.
/// </summary>
public string unityVersion;
/// <summary>
/// The version of the perception package used to generate the data.
/// </summary>
public string perceptionVersion;
/// <summary>
/// The render pipeline used to create the data. Currently either URP, HDRP,
/// or the built-in render pipeline.
/// </summary>
public string renderPipeline;
/// <summary>
/// Additional key/value pair metadata that can be associated with
/// the simulation.
/// </summary>
public Dictionary<string, object> metadata;
// We could probably list all of the randomizers here...
}
/// <summary>
/// Metadata describing the final metrics of the simulation.
/// </summary>
[Serializable]
public class CompletionMetadata : SimulationMetadata
{
public CompletionMetadata()
: base() { }
public struct Sequence
{
/// <summary>
/// The ID of the sequence.
/// </summary>
public int id;
/// <summary>
/// The number of steps in the sequence.
/// </summary>
public int numberOfSteps;
}
/// <summary>
/// Total frames processed in the simulation. These frames are distributed
/// over sequences and steps.
/// </summary>
public int totalFrames;
/// <summary>
/// A list of all of the sequences and the number of steps in each sequence
/// for a simulation.
/// </summary>
public List<Sequence> sequences;
}
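// Illustrative only: populating completion metadata at the end of a run with
// a single 100-step sequence. The counts below are hypothetical.
static class CompletionMetadataExample
{
    internal static CompletionMetadata CreateExample()
    {
        return new CompletionMetadata
        {
            totalFrames = 100,
            sequences = new List<CompletionMetadata.Sequence>
            {
                new CompletionMetadata.Sequence { id = 0, numberOfSteps = 100 },
            },
        };
    }
}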
static class Utils
{
internal static int[] ToIntVector(Color32 c)
{
return new[] { (int)c.r, (int)c.g, (int)c.b, (int)c.a };
}
internal static float[] ToFloatVector(Vector2 v)
{
return new[] { v.x, v.y };
}
internal static float[] ToFloatVector(Vector3 v)
{
return new[] { v.x, v.y, v.z };
}
}
}