Steve Borkman
3 years ago
Current commit
5da96a7a
40 changed files with 1578 additions and 1463 deletions
8    com.unity.perception/Editor/GroundTruth/PerceptionCameraEditor.cs
684  com.unity.perception/Runtime/GroundTruth/DatasetCapture.cs
4    com.unity.perception/Runtime/GroundTruth/Labelers/BoundingBox3DLabeler.cs
26   com.unity.perception/Runtime/GroundTruth/Labelers/BoundingBoxLabeler.cs
76   com.unity.perception/Runtime/GroundTruth/Labelers/InstanceSegmentationLabeler.cs
4    com.unity.perception/Runtime/GroundTruth/Labelers/KeypointLabeler.cs
95   com.unity.perception/Runtime/GroundTruth/Labelers/ObjectCountLabeler.cs
6    com.unity.perception/Runtime/GroundTruth/Labelers/RenderedObjectInfoLabeler.cs
4    com.unity.perception/Runtime/GroundTruth/Labelers/SemanticSegmentationLabeler.cs
52   com.unity.perception/Runtime/GroundTruth/PerceptionCamera.cs
731  com.unity.perception/Runtime/GroundTruth/SimulationState.cs
505  com.unity.perception/Runtime/GroundTruth/SimulationState_Json.cs
5    com.unity.perception/Runtime/Randomization/Scenarios/PerceptionScenario.cs
2    com.unity.perception/Runtime/Randomization/Scenarios/UnitySimulationScenario.cs
4    com.unity.perception/Tests/Runtime/GroundTruthTests/VisualizationTests.cs
2    com.unity.perception/Runtime/GroundTruth/ConsumerEndpoint.cs.meta
5    com.unity.perception/Runtime/GroundTruth/IMessageBuilder.cs
2    com.unity.perception/Runtime/GroundTruth/IMessageBuilder.cs.meta
17   com.unity.perception/Runtime/GroundTruth/Consumers/OldPerceptionConsumer.cs
17   com.unity.perception/Runtime/GroundTruth/Consumers/OldPerceptionJsonFactory.cs
7    com.unity.perception/Runtime/GroundTruth/Consumers/SoloMessageBuilder.cs
33   com.unity.perception/Runtime/GroundTruth/ConsumerEndpoint.cs
8    com.unity.perception/Runtime/GroundTruth/Consumers.meta
421  com.unity.perception/Runtime/GroundTruth/DataModel.cs
11   com.unity.perception/Runtime/GroundTruth/DataModel.cs.meta
296  com.unity.perception/Runtime/GroundTruth/Consumers/SoloConsumer.cs
8    com.unity.perception/Runtime/GroundTruth/Exporters.meta
8    com.unity.perception/Runtime/GroundTruth/SoloDesign.meta
0    /com.unity.perception/Runtime/GroundTruth/ConsumerEndpoint.cs.meta
0    /com.unity.perception/Runtime/GroundTruth/Consumers/SoloConsumer.cs.meta
0    /com.unity.perception/Runtime/GroundTruth/IMessageBuilder.cs
0    /com.unity.perception/Runtime/GroundTruth/IMessageBuilder.cs.meta
0    /com.unity.perception/Runtime/GroundTruth/Consumers/OldPerceptionConsumer.cs.meta
0    /com.unity.perception/Runtime/GroundTruth/Consumers/OldPerceptionJsonFactory.cs.meta
0    /com.unity.perception/Runtime/GroundTruth/Consumers/SoloMessageBuilder.cs.meta
0    /com.unity.perception/Runtime/GroundTruth/Consumers/OldPerceptionConsumer.cs
0    /com.unity.perception/Runtime/GroundTruth/Consumers/OldPerceptionJsonFactory.cs
0    /com.unity.perception/Runtime/GroundTruth/Consumers/SoloMessageBuilder.cs
com.unity.perception/Runtime/GroundTruth/ConsumerEndpoint.cs

using UnityEngine.Perception.GroundTruth.DataModel;

namespace UnityEngine.Perception.GroundTruth
{
    public abstract class ConsumerEndpoint : MonoBehaviour
    {
        /// <summary>
        /// Called when the simulation begins. Provides simulation wide metadata to
        /// the consumer.
        /// </summary>
        /// <param name="metadata">Metadata describing the active simulation</param>
        public abstract void OnSimulationStarted(SimulationMetadata metadata);

        public virtual void OnSensorRegistered(SensorDefinition sensor) { }

        public virtual void OnAnnotationRegistered(AnnotationDefinition annotationDefinition) { }
        public virtual void OnMetricRegistered(MetricDefinition metricDefinition) { }

        /// <summary>
        /// Called at the end of each frame. Contains all of the generated data for the
        /// frame. This method is called after the frame has entirely finished processing.
        /// </summary>
        /// <param name="frame">The frame data.</param>
        public abstract void OnFrameGenerated(Frame frame);

        /// <summary>
        /// Called at the end of the simulation. Contains metadata describing the entire
        /// simulation process.
        /// </summary>
        /// <param name="metadata">Metadata describing the entire simulation process</param>
        public abstract void OnSimulationCompleted(CompletionMetadata metadata);
    }
}
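ConsumerEndpoint is the abstract base that dataset consumers derive from: OnSimulationStarted fires once with the run metadata, OnFrameGenerated fires once per fully processed frame, and OnSimulationCompleted closes the run. A minimal sketch of a concrete endpoint, assuming only the types introduced in this commit (the LoggingEndpoint class itself is hypothetical and not part of the diff):

using UnityEngine;
using UnityEngine.Perception.GroundTruth;
using UnityEngine.Perception.GroundTruth.DataModel;

// Hypothetical sketch, not part of this commit: the smallest possible endpoint,
// overriding only the abstract callbacks and logging what it receives.
public class LoggingEndpoint : ConsumerEndpoint
{
    public override void OnSimulationStarted(SimulationMetadata metadata)
    {
        Debug.Log($"Simulation started (perception {metadata.perceptionVersion}, {metadata.renderPipeline})");
    }

    public override void OnFrameGenerated(Frame frame)
    {
        Debug.Log($"Frame {frame.frame}: sequence {frame.sequence}, step {frame.step}");
    }

    public override void OnSimulationCompleted(CompletionMetadata metadata)
    {
        Debug.Log($"Simulation completed, {metadata.totalFrames} total frames");
    }
}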

fileFormatVersion: 2
guid: e818e37774a99724e8f5cfbeeba26b22
folderAsset: yes
DefaultImporter:
  externalObjects: {}
  userData:
  assetBundleName:
  assetBundleVariant:

com.unity.perception/Runtime/GroundTruth/DataModel.cs

using System;
using System.Collections.Generic;
using UnityEngine.Perception.GroundTruth.Exporters.Solo;

namespace UnityEngine.Perception.GroundTruth.DataModel
{
    public interface IMessageProducer
    {
        void ToMessage(IMessageBuilder builder);
    }

    [Serializable]
    public class SensorDefinition : IMessageProducer
    {
        public SensorDefinition(string id, string modality, string definition)
        {
            this.id = id;
            this.modality = modality;
            this.definition = definition;
            this.firstCaptureFrame = 0;
            this.captureTriggerMode = string.Empty;
            this.simulationDeltaTime = 0.0f;
            this.framesBetweenCaptures = 0;
            this.manualSensorsAffectTiming = false;
        }

        public virtual bool IsValid()
        {
            return id != string.Empty && definition != string.Empty;
        }

        public string id;
        public string modality;
        public string definition;
        public float firstCaptureFrame;
        public string captureTriggerMode;
        public float simulationDeltaTime;
        public int framesBetweenCaptures;
        public bool manualSensorsAffectTiming;

        public void ToMessage(IMessageBuilder builder)
        {
            builder.AddString("id", id);
            builder.AddString("modality", modality);
            builder.AddString("definition", definition);
            builder.AddFloat("first_capture_frame", firstCaptureFrame);
            builder.AddString("capture_trigger_mode", captureTriggerMode);
            builder.AddFloat("simulation_delta_time", simulationDeltaTime);
            builder.AddInt("frames_between_captures", framesBetweenCaptures);
            builder.AddBoolean("manual_sensors_affect_timing", manualSensorsAffectTiming);
        }
    }

    [Serializable]
    public abstract class AnnotationDefinition : IMessageProducer
    {
        public string id = string.Empty;
        public string description = string.Empty;
        public string annotationType = string.Empty;

        public AnnotationDefinition() { }

        public AnnotationDefinition(string id, string description, string annotationType)
        {
            this.id = id;
            this.description = description;
            this.annotationType = annotationType;
        }

        public virtual bool IsValid()
        {
            return id != string.Empty && description != string.Empty && annotationType != string.Empty;
        }

        public virtual void ToMessage(IMessageBuilder builder)
        {
            builder.AddString("id", id);
            builder.AddString("description", description);
            builder.AddString("annotation_type", annotationType);
        }
    }

    [Serializable]
    public class MetricDefinition : IMessageProducer
    {
        public string id = string.Empty;
        public string description = string.Empty;
        bool isRegistered { get; set; } = false;

        public MetricDefinition() { }

        public MetricDefinition(string id, string description)
        {
            this.id = id;
            this.description = description;
        }

        public virtual bool IsValid()
        {
            return id != string.Empty && description != string.Empty;
        }

        public virtual void ToMessage(IMessageBuilder builder)
        {
            builder.AddString("id", id);
            builder.AddString("description", description);
        }
    }

    /// <summary>
    /// The top level structure that holds all of the artifacts of a simulation
    /// frame. This is only reported after all of the captures, annotations, and
    /// metrics are ready to report for a single frame.
    /// </summary>
    [Serializable]
    public class Frame : IMessageProducer
    {
        public Frame(int frame, int sequence, int step)
        {
            this.frame = frame;
            this.sequence = sequence;
            this.step = step;
            sensors = new List<Sensor>();
        }

        /// <summary>
        /// The perception frame number of this record
        /// </summary>
        public int frame;
        /// <summary>
        /// The sequence that this record is a part of
        /// </summary>
        public int sequence;
        /// <summary>
        /// The step in the sequence that this record is a part of
        /// </summary>
        public int step;

        public float timestamp;

        /// <summary>
        /// A list of all of the sensor captures recorded for the frame.
        /// </summary>
        public IEnumerable<Sensor> sensors;

        public void ToMessage(IMessageBuilder builder)
        {
            builder.AddInt("frame", frame);
            builder.AddInt("sequence", sequence);
            builder.AddInt("step", step);
            foreach (var s in sensors)
            {
                var nested = builder.AddNestedMessageToVector("sensors");
                s.ToMessage(nested);
            }
        }
    }

    /// <summary>
    /// Abstract sensor class that holds all of the common information for a sensor.
    /// </summary>
    [Serializable]
    public abstract class Sensor : IMessageProducer
    {
        /// <summary>
        /// The unique, human readable ID for the sensor.
        /// </summary>
        public string Id;
        /// <summary>
        /// The type of the sensor.
        /// </summary>
        public string sensorType;

        public string description;

        /// <summary>
        /// The position (xyz) of the sensor in the world.
        /// </summary>
        public Vector3 position;
        /// <summary>
        /// The rotation in euler angles.
        /// </summary>
        public Vector3 rotation;
        /// <summary>
        /// The current velocity (xyz) of the sensor.
        /// </summary>
        public Vector3 velocity;
        /// <summary>
        /// The current acceleration (xyz) of the sensor.
        /// </summary>
        public Vector3 acceleration;

        // TODO put in camera intrinsic
        // TODO put in projection

        /// <summary>
        /// A list of all of the annotations recorded for the frame.
        /// </summary>
        public IEnumerable<Annotation> annotations = new List<Annotation>();

        /// <summary>
        /// A list of all of the metrics recorded for the frame.
        /// </summary>
        public IEnumerable<Metric> metrics = new List<Metric>();

        public virtual void ToMessage(IMessageBuilder builder)
        {
            builder.AddString("id", Id);
            builder.AddString("sensor_id", sensorType);
            builder.AddFloatVector("position", Utils.ToFloatVector(position));
            builder.AddFloatVector("rotation", Utils.ToFloatVector(rotation));
            builder.AddFloatVector("velocity", Utils.ToFloatVector(velocity));
            builder.AddFloatVector("acceleration", Utils.ToFloatVector(acceleration));

            foreach (var annotation in annotations)
            {
                var nested = builder.AddNestedMessageToVector("annotations");
                annotation.ToMessage(nested);
            }

            foreach (var metric in metrics)
            {
                var nested = builder.AddNestedMessageToVector("metrics");
                metric.ToMessage(nested);
            }
        }
    }

    /// <summary>
    /// The concrete class for an RGB sensor.
    /// </summary>
    [Serializable]
    public class RgbSensor : Sensor
    {
        // The format of the image type
        public string imageFormat;

        // The dimensions (width, height) of the image
        public Vector2 dimension;

        // The raw bytes of the image file
        public byte[] buffer;

        public override void ToMessage(IMessageBuilder builder)
        {
            base.ToMessage(builder);
            builder.AddString("image_format", imageFormat);
            builder.AddFloatVector("dimension", Utils.ToFloatVector(dimension));
            builder.AddPngImage("camera", buffer);
        }
    }

    /// <summary>
    /// Abstract class that holds the common data found in all
    /// annotations. Concrete instances of this class will add
    /// data for their specific annotation type.
    /// </summary>
    [Serializable]
    public abstract class Annotation : IMessageProducer
    {
        /// <summary>
        /// The unique, human readable ID for the annotation.
        /// </summary>
        public string Id;
        /// <summary>
        /// The sensor that this annotation is associated with.
        /// </summary>
        public string sensorId;
        /// <summary>
        /// The description of the annotation.
        /// </summary>
        public string description;
        /// <summary>
        /// The type of the annotation, this will map directly to one of the
        /// annotation subclasses that are concrete implementations of this abstract
        /// class.
        /// </summary>
        public string annotationType;

        public virtual void ToMessage(IMessageBuilder builder)
        {
            builder.AddString("id", Id);
            builder.AddString("sensor_id", sensorId);
            builder.AddString("description", description);
            builder.AddString("annotation_type", annotationType);
        }
    }

    /// <summary>
    /// Abstract class that holds the common data found in all
    /// metrics. Concrete instances of this class will add
    /// data for their specific metric type.
    /// </summary>
    [Serializable]
    public abstract class Metric : IMessageProducer
    {
        public string Id;
        /// <summary>
        /// The sensor ID that this metric is associated with
        /// </summary>
        public string sensorId;
        /// <summary>
        /// The annotation ID that this metric is associated with. If the value is none ("")
        /// then the metric is capture wide, and not associated with a specific annotation.
        /// </summary>
        public string annotationId;
        /// <summary>
        /// A human readable description of what this metric is for.
        /// </summary>
        public string description;
        /// <summary>
        /// Additional key/value pair metadata that can be associated with
        /// any metric.
        /// </summary>
        public Dictionary<string, object> metadata;

        public virtual void ToMessage(IMessageBuilder builder)
        {
            builder.AddString("id", Id);
            builder.AddString("sensor_id", sensorId);
            builder.AddString("annotation_id", annotationId);
            builder.AddString("description", description);
        }
    }

    /// <summary>
    /// Metadata describing the simulation.
    /// </summary>
    [Serializable]
    public class SimulationMetadata
    {
        public SimulationMetadata()
        {
            unityVersion = "figure out how to do unity version";
            perceptionVersion = "0.8.0-preview.4";
#if HDRP_PRESENT
            renderPipeline = "HDRP";
#elif URP_PRESENT
            renderPipeline = "URP";
#else
            renderPipeline = "built-in";
#endif
            metadata = new Dictionary<string, object>();
        }

        /// <summary>
        /// The version of the Unity editor executing the simulation.
        /// </summary>
        public string unityVersion;
        /// <summary>
        /// The version of the perception package used to generate the data.
        /// </summary>
        public string perceptionVersion;
        /// <summary>
        /// The render pipeline used to create the data. Currently HDRP, URP, or built-in.
        /// </summary>
        public string renderPipeline;
        /// <summary>
        /// Additional key/value pair metadata that can be associated with
        /// the simulation.
        /// </summary>
        public Dictionary<string, object> metadata;

        // We could probably list all of the randomizers here...
    }

    /// <summary>
    /// Metadata describing the final metrics of the simulation.
    /// </summary>
    [Serializable]
    public class CompletionMetadata : SimulationMetadata
    {
        public CompletionMetadata()
            : base() { }

        public struct Sequence
        {
            /// <summary>
            /// The ID of the sequence
            /// </summary>
            public int id;
            /// <summary>
            /// The number of steps in the sequence.
            /// </summary>
            public int numberOfSteps;
        }

        /// <summary>
        /// Total frames processed in the simulation. These frames are distributed
        /// over sequence and steps.
        /// </summary>
        public int totalFrames;
        /// <summary>
        /// A list of all of the sequences and the number of steps in the sequence for
        /// a simulation.
        /// </summary>
        public List<Sequence> sequences;
    }

    static class Utils
    {
        internal static int[] ToIntVector(Color32 c)
        {
            return new[] { (int)c.r, (int)c.g, (int)c.b, (int)c.a };
        }

        internal static float[] ToFloatVector(Vector2 v)
        {
            return new[] { v.x, v.y };
        }

        internal static float[] ToFloatVector(Vector3 v)
        {
            return new[] { v.x, v.y, v.z };
        }
    }
}
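Everything in DataModel.cs serializes through the same pattern: the base class writes its common fields into the IMessageBuilder, and a concrete subclass calls base.ToMessage before appending its own payload. A minimal sketch of that pattern, assuming only the builder calls already used above (ScalarMetric is hypothetical and not part of the package):

using System;
using UnityEngine.Perception.GroundTruth.Exporters.Solo;

namespace UnityEngine.Perception.GroundTruth.DataModel
{
    // Hypothetical sketch, not part of this commit: a concrete Metric carrying a
    // single float value. Id, sensorId, annotationId, and description come from
    // the base class; the subclass appends only its own payload.
    [Serializable]
    public class ScalarMetric : Metric
    {
        public float value;

        public override void ToMessage(IMessageBuilder builder)
        {
            base.ToMessage(builder);          // id, sensor_id, annotation_id, description
            builder.AddFloat("value", value); // metric-specific field
        }
    }
}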

fileFormatVersion: 2
guid: 6f209872135679b48b67c5cbf3edacd5
MonoImporter:
  externalObjects: {}
  serializedVersion: 2
  defaultReferences: []
  executionOrder: 0
  icon: {instanceID: 0}
  userData:
  assetBundleName:
  assetBundleVariant:

com.unity.perception/Runtime/GroundTruth/Consumers/SoloConsumer.cs

using System;
using System.Globalization;
using System.IO;
using System.Text;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
using UnityEngine.Perception.GroundTruth.DataModel;

namespace UnityEngine.Perception.GroundTruth.Consumers
{
    public class SoloConsumer : ConsumerEndpoint
    {
        public string _baseDirectory = "D:/PerceptionOutput/SoloConsumer";
        public string soloDatasetName = "solo";
        static string currentDirectory = "";

        SimulationMetadata m_CurrentMetadata;

        void Start()
        {
            // Only here to get the check mark to show up in Unity Editor
        }

        public override void OnSimulationStarted(SimulationMetadata metadata)
        {
            Debug.Log("SC - On Simulation Started");
            m_CurrentMetadata = metadata;

            var i = 0;
            while (true)
            {
                var n = $"{soloDatasetName}_{i++}";
                n = Path.Combine(_baseDirectory, n);
                if (!Directory.Exists(n))
                {
                    Directory.CreateDirectory(n);
                    currentDirectory = n;
                    break;
                }
            }
        }

        static string GetSequenceDirectoryPath(Frame frame)
        {
            var path = $"sequence.{frame.sequence}";

            // verify that a directory already exists for a sequence,
            // if not, create it.
            path = Path.Combine(currentDirectory, path);

            if (!Directory.Exists(path))
            {
                Directory.CreateDirectory(path);
            }

            return path;
        }

        void WriteJTokenToFile(string filePath, JToken jToken)
        {
            var stringWriter = new StringWriter(new StringBuilder(256), CultureInfo.InvariantCulture);
            using (var jsonTextWriter = new JsonTextWriter(stringWriter))
            {
                jsonTextWriter.Formatting = Formatting.Indented;
                jToken.WriteTo(jsonTextWriter);
            }

            var contents = stringWriter.ToString();

            File.WriteAllText(filePath, contents);
        }

        public override void OnFrameGenerated(Frame frame)
        {
            var path = GetSequenceDirectoryPath(frame);
            path = Path.Combine(path, $"step{frame.step}.frame_data.json");

            WriteJTokenToFile(path, ToFrame(frame));

            Debug.Log("SC - On Frame Generated");
        }

        public override void OnSimulationCompleted(CompletionMetadata metadata)
        {
            Debug.Log("SC - On Simulation Completed");
        }

        static JToken ToFrame(Frame frame)
        {
            var frameJson = new JObject
            {
                ["frame"] = frame.frame,
                ["sequence"] = frame.sequence,
                ["step"] = frame.step
            };

            var captures = new JArray();

            foreach (var sensor in frame.sensors)
            {
                switch (sensor)
                {
                    case RgbSensor rgb:
                        captures.Add(ConvertSensor(frame, rgb));
                        break;
                }
            }

            frameJson["captures"] = captures;

            return frameJson;
        }

        static JArray FromVector3(Vector3 vector3)
        {
            return new JArray
            {
                vector3.x, vector3.y, vector3.z
            };
        }

        static JArray FromVector2(Vector2 vector2)
        {
            return new JArray
            {
                vector2.x, vector2.y
            };
        }

        static JArray FromColor32(Color32 color)
        {
            return new JArray
            {
                color.r, color.g, color.b, color.a
            };
        }

        static JToken ToSensorHeader(Frame frame, Sensor sensor)
        {
            var token = new JObject
            {
                ["Id"] = sensor.Id,
                ["sensorType"] = sensor.sensorType,
                ["position"] = FromVector3(sensor.position),
                ["rotation"] = FromVector3(sensor.rotation),
                ["velocity"] = FromVector3(sensor.velocity),
                ["acceleration"] = FromVector3(sensor.acceleration)
            };
            return token;
        }

        static JToken ConvertSensor(Frame frame, RgbSensor sensor)
        {
            // write out the png data
            var path = GetSequenceDirectoryPath(frame);

            path = Path.Combine(path, $"step{frame.step}.{sensor.sensorType}.{sensor.imageFormat}");
            var file = File.Create(path, 4096);
            file.Write(sensor.buffer, 0, sensor.buffer.Length);
            file.Close();

            var outRgb = ToSensorHeader(frame, sensor);
            outRgb["fileName"] = path;
            outRgb["imageFormat"] = sensor.imageFormat;
            outRgb["dimension"] = FromVector2(sensor.dimension);

            var annotations = new JArray();
            var metrics = new JArray();

            foreach (var annotation in sensor.annotations)
            {
                switch (annotation)
                {
                    case BoundingBox2DLabeler.BoundingBoxAnnotation bbox:
                        annotations.Add(ConvertAnnotation(frame, bbox));
                        break;
                    case InstanceSegmentationLabeler.InstanceSegmentation seg:
                        annotations.Add(ConvertAnnotation(frame, seg));
                        break;
                }
            }

            foreach (var metric in sensor.metrics)
            {
                switch (metric)
                {
                    case ObjectCountLabeler.ObjectCountMetric objCount:
                        metrics.Add(ConvertMetric(frame, objCount));
                        break;
                }
            }

            outRgb["annotations"] = annotations;
            outRgb["metrics"] = metrics;

            return outRgb;
        }

        static JToken ToAnnotationHeader(Frame frame, Annotation annotation)
        {
            return new JObject
            {
                ["Id"] = annotation.Id,
                ["definition"] = annotation.description,
                ["sequence"] = frame.sequence,
                ["step"] = frame.step,
                ["sensor"] = annotation.sensorId
            };
        }

        static JToken ToMetricHeader(Frame frame, Metric metric)
        {
            return new JObject
            {
                ["sensorId"] = metric.sensorId,
                ["annotationId"] = metric.annotationId,
                ["description"] = metric.description
            };
        }

        static JToken ConvertAnnotation(Frame frame, BoundingBox2DLabeler.BoundingBoxAnnotation bbox)
        {
            var outBox = ToAnnotationHeader(frame, bbox);
            var values = new JArray();

            foreach (var box in bbox.boxes)
            {
                values.Add(new JObject
                {
                    ["frame"] = frame.frame,
                    ["label_name"] = box.labelName,
                    ["instance_id"] = box.instanceId,
                    ["origin"] = FromVector2(box.origin),
                    ["dimension"] = FromVector2(box.dimension)
                });
            }

            outBox["values"] = values;

            return outBox;
        }

        static JToken ConvertMetric(Frame frame, ObjectCountLabeler.ObjectCountMetric count)
        {
            var outCount = ToMetricHeader(frame, count);
            var values = new JArray();

            foreach (var i in count.objectCounts)
            {
                values.Add(new JObject
                {
                    ["label_name"] = i.labelName,
                    ["count"] = i.count
                });
            }

            outCount["object_counts"] = values;
            return outCount;
        }

        static JToken ConvertAnnotation(Frame frame, InstanceSegmentationLabeler.InstanceSegmentation segmentation)
        {
            // write out the png data
            var path = GetSequenceDirectoryPath(frame);

            path = Path.Combine(path, $"step{frame.step}.segmentation.{segmentation.imageFormat}");
            var file = File.Create(path, 4096);
            file.Write(segmentation.buffer, 0, segmentation.buffer.Length);
            file.Close();

            var outSeg = ToAnnotationHeader(frame, segmentation);
            var values = new JArray();

            foreach (var i in segmentation.instances)
            {
                values.Add(new JObject
                {
                    ["instance_id"] = i.instanceId,
                    ["rgba"] = FromColor32(i.rgba)
                });
            }

            outSeg["imageFormat"] = segmentation.imageFormat;
            outSeg["dimension"] = FromVector2(segmentation.dimension);
            outSeg["imagePath"] = path;
            outSeg["instances"] = values;

            return outSeg;
        }
    }
}
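SoloConsumer lays its output out as one numbered dataset folder per run ({_baseDirectory}/{soloDatasetName}_{i}), a sequence.{sequence} folder per sequence, and a step{step}.frame_data.json file per step, with captured image files written alongside. A minimal setup sketch, assuming the endpoint is registered with the capture system elsewhere (that wiring is outside this diff; the SoloConsumerSetup component is hypothetical):

using UnityEngine;
using UnityEngine.Perception.GroundTruth.Consumers;

// Hypothetical setup sketch, not part of this commit. SoloConsumer derives from
// ConsumerEndpoint (a MonoBehaviour), so it can be added to a scene object and
// its output location set before the simulation starts; how it is registered
// with DatasetCapture is not shown in this diff.
public class SoloConsumerSetup : MonoBehaviour
{
    void Awake()
    {
        var consumer = gameObject.AddComponent<SoloConsumer>();
        // Override the hardcoded D:/ default with a per-machine writable path.
        consumer._baseDirectory = Application.persistentDataPath;
        consumer.soloDatasetName = "solo";
    }
}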

fileFormatVersion: 2
guid: 5353392e887128948bc94b5e0dd9ff73
folderAsset: yes
DefaultImporter:
  externalObjects: {}
  userData:
  assetBundleName:
  assetBundleVariant:

fileFormatVersion: 2
guid: 3dbded9ae1438d344bc799a283ac4ea2
folderAsset: yes
DefaultImporter:
  externalObjects: {}
  userData:
  assetBundleName:
  assetBundleVariant: