
Updates for solo proof of concept

/solo_support
Steve Borkman, 3 years ago
Current commit
f7ceca90
9 files changed, with 585 insertions and 77 deletions
  1. com.unity.perception/Runtime/GroundTruth/Exporters/PerceptionNew/PerceptionNewExporter.cs (4 changes)
  2. com.unity.perception/Runtime/GroundTruth/Exporters/SOLO/SoloExporter.cs (4 changes)
  3. com.unity.perception/Runtime/GroundTruth/Labelers/InstanceSegmentationLabeler.cs (56 changes)
  4. com.unity.perception/Runtime/GroundTruth/PerceptionCamera.cs (2 changes)
  5. com.unity.perception/Runtime/GroundTruth/SimulationState.cs (87 changes)
  6. com.unity.perception/Runtime/GroundTruth/SimulationState_Json.cs (209 changes)
  7. com.unity.perception/Runtime/GroundTruth/SoloDesign/Frame.cs (41 changes)
  8. com.unity.perception/Runtime/GroundTruth/SoloDesign/SoloConsumer.cs (248 changes)
  9. com.unity.perception/Runtime/GroundTruth/SoloDesign/SoloConsumer.cs.meta (11 changes)

com.unity.perception/Runtime/GroundTruth/Exporters/PerceptionNew/PerceptionNewExporter.cs (4 changes)


filename = $"step.{step}.annotation.{id}.camera.json";
return true;
}
+#if false
if (rawData is InstanceSegmentationLabeler.InstanceColorValue)
{
id = "instance_segmentation";

filename = $"step.{step}.annotation.{id}.camera.json";
return true;
}
+#endif
if (rawData is SemanticSegmentationLabeler.SegmentationValue)
{
id = "semantic_segmentation";
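
Both exporter hunks in this commit disable the old InstanceColorValue branch, since that type is renamed to ColorValue and wrapped in InstanceData later in this commit (see InstanceSegmentationLabeler.cs below). For context, here is a minimal sketch of the type-test dispatch these exporters use to pick an annotation id and output filename; the method name and out-parameters are illustrative assumptions, not the exporter's real signature:

// Sketch only: dispatch on the runtime type of the raw annotation data.
// SegmentationValue exists in the package; the method shape is assumed.
bool TryGetAnnotationFilename(object rawData, int step, out string id, out string filename)
{
    id = null;
    filename = null;

    if (rawData is SemanticSegmentationLabeler.SegmentationValue)
        id = "semantic_segmentation";

    if (id == null)
        return false;

    // Every annotation lands in a step-scoped JSON file for the camera sensor.
    filename = $"step.{step}.annotation.{id}.camera.json";
    return true;
}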

com.unity.perception/Runtime/GroundTruth/Exporters/SOLO/SoloExporter.cs (4 changes)


filename = $"step.{step}.annotation.{id}.camera.json";
return true;
}
+#if false
if (rawData is InstanceSegmentationLabeler.InstanceColorValue)
{
id = "instance_segmentation";

filename = $"step.{step}.annotation.{id}.camera.json";
return true;
}
+#endif
if (rawData is SemanticSegmentationLabeler.SegmentationValue)
{
id = "semantic_segmentation";

com.unity.perception/Runtime/GroundTruth/Labelers/InstanceSegmentationLabeler.cs (56 changes)


static ProfilerMarker s_OnObjectInfoReceivedCallback = new ProfilerMarker("OnInstanceSegmentationObjectInformationReceived");
static ProfilerMarker s_OnImageReceivedCallback = new ProfilerMarker("OnInstanceSegmentationImagesReceived");
-Dictionary<int, AsyncAnnotation> m_AsyncAnnotations;
+Dictionary<int, (AsyncAnnotation, byte[])> m_AsyncAnnotations;
Texture m_CurrentTexture;
/// <inheritdoc cref="IOverlayPanelProvider"/>

[SuppressMessage("ReSharper", "InconsistentNaming")]
[SuppressMessage("ReSharper", "NotAccessedField.Local")]
-public struct InstanceColorValue
+public struct ColorValue
-string m_InstancePath;
-List<InstanceColorValue> m_InstanceColorValues;
+public struct InstanceData
+{
+public byte[] buffer;
+public List<ColorValue> colors;
+}
+string m_InstancePath;
+List<InstanceData> m_InstanceData;
+#if false
struct AsyncWrite
{
public NativeArray<Color32> data;

}
+#endif
/// <summary>
/// Creates a new InstanceSegmentationLabeler. Be sure to assign <see cref="idLabelConfig"/> before adding to a <see cref="PerceptionCamera"/>.
/// </summary>

using (s_OnObjectInfoReceivedCallback.Auto())
{
-m_InstanceColorValues.Clear();
+m_InstanceData.Clear();
+var colorValues = new List<ColorValue>();
foreach (var objectInfo in renderedObjectInfos)
{

-m_InstanceColorValues.Add(new InstanceColorValue
+colorValues.Add(new ColorValue
{
instance_id = objectInfo.instanceId,
color = objectInfo.instanceColor

-annotation.ReportFileAndValues(m_InstancePath, m_InstanceColorValues);
+var instanceData = new InstanceData
+{
+buffer = annotation.Item2,
+colors = colorValues
+};
+m_InstanceData.Add(instanceData);
+annotation.Item1.ReportFileAndValues(m_InstancePath, m_InstanceData);
-if (!m_AsyncAnnotations.ContainsKey(frameCount))
+if (!m_AsyncAnnotations.TryGetValue(frameCount, out var annotation))
return;
using (s_OnImageReceivedCallback.Auto())

var localPath = $"{Manager.Instance.GetDirectoryFor(k_Directory)}/{k_FilePrefix}{frameCount}.png";
var colors = new NativeArray<Color32>(data, Allocator.Persistent);
+#if false
var asyncRequest = Manager.Instance.CreateRequest<AsyncRequest<AsyncWrite>>();
asyncRequest.data = new AsyncWrite

asyncRequest.Enqueue(r =>
{
Profiler.BeginSample("InstanceSegmentationEncode");
-var pngBytes = ImageConversion.EncodeArrayToPNG(r.data.data.ToArray(), GraphicsFormat.R8G8B8A8_UNorm, (uint)r.data.width, (uint)r.data.height);
+var pngEncoded = ImageConversion.EncodeArrayToPNG(r.data.data.ToArray(), GraphicsFormat.R8G8B8A8_UNorm, (uint)r.data.width, (uint)r.data.height);
-File.WriteAllBytes(r.data.path, pngBytes);
+File.WriteAllBytes(r.data.path, pngEncoded);
Manager.Instance.ConsumerFileProduced(r.data.path);
Profiler.EndSample();
r.data.data.Dispose();

+#endif
annotation.Item2 = ImageConversion.EncodeArrayToPNG(colors.ToArray(), GraphicsFormat.R8G8B8A8_UNorm, (uint)renderTexture.width, (uint)renderTexture.height);
Profiler.EndSample();
Profiler.BeginSample("InstanceSegmentationWritePng");
File.WriteAllBytes(localPath, annotation.Item2);
Manager.Instance.ConsumerFileProduced(localPath);
Profiler.EndSample();
colors.Dispose();
m_AsyncAnnotations[frameCount] = annotation;
}
}

-m_AsyncAnnotations[Time.frameCount] = perceptionCamera.SensorHandle.ReportAnnotationAsync(m_AnnotationDefinition);
+m_AsyncAnnotations[Time.frameCount] = (perceptionCamera.SensorHandle.ReportAnnotationAsync(m_AnnotationDefinition), null);
}
/// <inheritdoc/>

throw new InvalidOperationException("InstanceSegmentationLabeler's idLabelConfig field must be assigned");
-m_InstanceColorValues = new List<InstanceColorValue>();
+m_InstanceData = new List<InstanceData>();
-m_AsyncAnnotations = new Dictionary<int, AsyncAnnotation>();
+m_AsyncAnnotations = new Dictionary<int, (AsyncAnnotation, byte[])>();
m_AnnotationDefinition = DatasetCapture.RegisterAnnotationDefinition(
"instance segmentation",

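The net effect of this file's changes: m_AsyncAnnotations now carries the encoded PNG alongside each pending AsyncAnnotation, so the bytes can be handed to the SOLO consumer rather than only written to disk. One subtlety the new code depends on: because the dictionary value is a ValueTuple (a value type), it cannot be mutated in place; it must be copied out, modified, and stored back, which is exactly what the TryGetValue / annotation.Item2 = ... / m_AsyncAnnotations[frameCount] = annotation sequence above does. A minimal sketch of that pattern, with simplified stand-in types:

using System.Collections.Generic;

class PendingAnnotationSketch
{
    // Stand-in for Dictionary<int, (AsyncAnnotation, byte[])>; "string" replaces
    // AsyncAnnotation purely to keep the sketch self-contained.
    readonly Dictionary<int, (string annotation, byte[] pngBytes)> m_Pending =
        new Dictionary<int, (string, byte[])>();

    public void BeginFrame(int frame) => m_Pending[frame] = ("handle", null);

    public void AttachPng(int frame, byte[] png)
    {
        // TryGetValue copies the tuple out, as the labeler now does...
        if (!m_Pending.TryGetValue(frame, out var entry))
            return;
        entry.pngBytes = png;      // ...mutate the local copy...
        m_Pending[frame] = entry;  // ...then store it back, as the diff does last.
    }
}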
com.unity.perception/Runtime/GroundTruth/PerceptionCamera.cs (2 changes)


dataColorBuffer, GraphicsFormat.R8G8B8A8_UNorm, (uint)width, (uint)height);
}
+SetPersistentSensorData("buffer", encodedData);
return !FileProducer.Write(captureFilename, encodedData)
? AsyncRequest.Result.Error
: AsyncRequest.Result.Completed;
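
The added SetPersistentSensorData call stashes the encoded PNG bytes under a "buffer" key so they travel with the capture's additional sensor values; SimulationState_Json's ConvertToSensors (below) pulls them back out when building the RgbSensor. A hedged sketch of that handoff, using plain collections rather than the real Perception types:

using System.Collections.Generic;

// Producer side (PerceptionCamera): record the encoded image with the capture.
var encodedData = new byte[] { 0x89, 0x50, 0x4E, 0x47 }; // illustrative PNG bytes
var additionalSensorValues = new List<(string, object)>
{
    ("camera_width", 640),
    ("camera_height", 480),
    ("buffer", encodedData),
};

// Consumer side (as in ConvertToSensors): match on the key to recover the bytes.
var buffer = new byte[0];
foreach (var (key, value) in additionalSensorValues)
{
    if (key == "buffer")
        buffer = (byte[])value; // the frame writer can now embed or re-write the image
}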

com.unity.perception/Runtime/GroundTruth/SimulationState.cs (87 changes)


using UnityEngine.Perception.GroundTruth.Exporters.PerceptionFormat;
using UnityEngine.Perception.GroundTruth.Exporters.PerceptionNew;
using UnityEngine.Perception.GroundTruth.Exporters.Solo;
using UnityEngine.Perception.GroundTruth.SoloDesign;
using UnityEngine.Profiling;
namespace UnityEngine.Perception.GroundTruth

HashSet<Guid> m_Ids = new HashSet<Guid>();
Guid m_SequenceId = Guid.NewGuid();
-IDatasetExporter _ActiveReporter = null;
+//IDatasetExporter _ActiveReporter = null;
+IPerceptionConsumer _ActiveConsumer = null;
// Always use the property SequenceTimeMs instead
int m_FrameCountLastUpdatedSequenceTime;

public SimulationState(string outputDirectory)
{
_ActiveReporter = null;
var go = GameObject.Find("SoloConsumer");
if (go == null)
{
go = new GameObject("SoloConsumer");
_ActiveConsumer = go.AddComponent<SoloConsumer>();
}
else
{
_ActiveConsumer = go.GetComponent<SoloConsumer>();
}
PlayerPrefs.SetString(defaultOutputBaseDirectory, Configuration.Instance.GetStorageBasePath());
m_OutputDirectoryName = outputDirectory;

}
-IDatasetExporter GetActiveReporter()
+//IDatasetExporter GetActiveReporter()
+IPerceptionConsumer GetActiveConsumer()
if (_ActiveReporter != null) return _ActiveReporter;
var mode = PlayerPrefs.GetString(outputFormatMode, nameof(CocoExporter));
#if false
// TODO figure out how to do this with just the class name and not have to have the switch
var exporter = Activator.CreateInstance(Type.GetType(mode) ?? typeof(PerceptionExporter));
if (exporter is IDatasetExporter casted)
{
m_ActiveReporter = casted;
}
#else
Debug.Log($"SS - Sim State setting active reporter: {mode}");
switch (mode)
{
case nameof(PerceptionExporter):
_ActiveReporter = new PerceptionExporter();
break;
case nameof(CocoExporter):
_ActiveReporter = new CocoExporter();
break;
case nameof(PerceptionNewExporter):
_ActiveReporter = new PerceptionNewExporter();
break;
case nameof(SoloExporter):
_ActiveReporter = new SoloExporter();
break;
default:
_ActiveReporter = new PerceptionExporter();
break;
}
#endif
Debug.Log("Calling SS::OnSimulationBegin");
_ActiveReporter?.OnSimulationBegin(Manager.Instance.GetDirectoryFor(m_OutputDirectoryName));
-return _ActiveReporter;
+return _ActiveConsumer;
var directory = GetActiveReporter()?.GetRgbCaptureFilename(additionalSensorValues);
return directory == string.Empty ? defaultFilename : directory;
return string.Empty;
}
/// <summary>

var height = -1;
var fullPath = filename;
var frameCount = 0;
var buffer = new byte[0];
foreach (var i in additionalSensorValues)
{

case "frame":
frameCount = (int)i.Item2;
break;
case "":
buffer = (byte[])i.Item2;
break;
}
}

var accel = pendingCapture.SensorSpatialData.EgoAcceleration ?? Vector3.zero;
#if false
#endif
}
static string GetFormatFromFilename(string filename)

if (!m_HasStarted)
{
GetActiveConsumer()?.OnSimulationStarted(new SimulationMetadata());
//simulation starts now
m_FrameCountLastUpdatedSequenceTime = Time.frameCount;
m_LastTimeScale = Time.timeScale;

}
}
-Debug.Log($"adt: {infoTypeData}");
+// Debug.Log($"adt: {infoTypeData}");
}
}

IsRunning = false;
-Debug.Log($"Calling SS::OnSimulationEnd");
-GetActiveReporter()?.OnSimulationEnd();
+// Debug.Log($"Calling SS::OnSimulationEnd");
+// GetActiveReporter()?.OnSimulationEnd();
+var metadata = new CompletionMetadata();
+GetActiveConsumer()?.OnSimulationCompleted(metadata);
}

RegisterAdditionalInfoType(name, specValues, description, format, id, AdditionalInfoKind.Annotation);
-GetActiveReporter()?.OnAnnotationRegistered(id, specValues); // <- Not sure about this one either
+// GetActiveReporter()?.OnAnnotationRegistered(id, specValues); // <- Not sure about this one either
return new AnnotationDefinition(id);
}

RegisterAdditionalInfoType(name, specValues, description, null, id, AdditionalInfoKind.Metric);
-GetActiveReporter()?.OnMetricRegistered(id, name, description); // <- Not sure about this one either
+// GetActiveReporter()?.OnMetricRegistered(id, name, description); // <- Not sure about this one either
return new MetricDefinition(id);
}

}
}
-var values2 = new List<object>();
-foreach (var v in values)
-{
-values2.Add(v);
-}
-ReportAsyncAnnotationResult(asyncAnnotation, filename, jArray, values:values2);
+ReportAsyncAnnotationResult(asyncAnnotation, filename, jArray, values?.Cast<object>().ToList() ?? null);
}
void ReportAsyncAnnotationResult(AsyncAnnotation asyncAnnotation, string filename, JArray jArray, IEnumerable<object> values = null)
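
The TODO inside the #if false block above asks how to build the exporter from just the class name instead of the switch. One plausible approach (a sketch under the assumption that all exporters implement IDatasetExporter and live in already-loaded assemblies; this is not the package's actual code) is reflection over the loaded types, with the same PerceptionExporter fallback the switch uses:

using System;
using System.Linq;

IDatasetExporter CreateReporter(string mode)
{
    // Type.GetType(mode) fails for a bare class name unless the string is
    // namespace/assembly qualified, which is likely why the switch exists.
    // Scanning the loaded assemblies sidesteps that requirement.
    var type = AppDomain.CurrentDomain.GetAssemblies()
        .SelectMany(a => a.GetTypes())
        .FirstOrDefault(t => t.Name == mode && typeof(IDatasetExporter).IsAssignableFrom(t));

    return type != null
        ? (IDatasetExporter)Activator.CreateInstance(type)
        : new PerceptionExporter(); // same default the switch statement uses
}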

com.unity.perception/Runtime/GroundTruth/SimulationState_Json.cs (209 changes)


using System.Text;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
using Unity.Mathematics;
using UnityEngine.Perception.GroundTruth.SoloDesign;
// ReSharper disable NotAccessedField.Local
// ReSharper disable CoVariantArrayConversion

WriteJObjectToFile(metricDefinitionsJObject, "metric_definitions.json");
}
}
-Debug.Log($"Dataset written to {Path.GetDirectoryName(OutputDirectory)}");
+// Debug.Log($"Dataset written to {Path.GetDirectoryName(OutputDirectory)}");
}
void WriteJObjectToFile(JObject jObject, string filename)

m_WriteToDiskSampler.Begin();
var path = Path.Combine(OutputDirectory, filename);
-Debug.Log($"ss - sensors.json - {path}");
+// Debug.Log($"ss - sensors.json - {path}");
int m_currentReportedSequence = 0;
Dictionary<Guid, int> m_SequenceMap = new Dictionary<Guid, int>();
Sensor ToSensor(PendingCapture pendingCapture, SimulationState simulationState, int captureFileIndex)
{
var sensor = new RgbSensor
{
Id = "camera",
sensorType = "camera",
position = Vector3.zero,
rotation = Vector3.zero,
velocity = Vector3.zero,
acceleration = Vector3.zero,
metadata = new Dictionary<string, object>(),
imageFormat = "png",
dimension = Vector2.zero,
buffer = null
};
return sensor;
}
Frame ToFrame(PendingCapture pendingCapture, SimulationState simulationState, int captureFileIndex)
{
if (!m_SequenceMap.TryGetValue(pendingCapture.SequenceId, out var seqId))
{
seqId = m_currentReportedSequence++;
m_SequenceMap[pendingCapture.SequenceId] = seqId;
}
return new Frame(pendingCapture.FrameCount, seqId, pendingCapture.Step);
}
-if (!flush && m_PendingCaptures.Count < k_MinPendingCapturesBeforeWrite)
-return;
+// if (!flush && m_PendingCaptures.Count < k_MinPendingCapturesBeforeWrite)
+// return;
m_SerializeCapturesSampler.Begin();

return;
}
BoundingBoxAnnotation ToBoundingBox(Annotation annotation, AnnotationData data)
{
var bbox = new BoundingBoxAnnotation
{
Id = "bounding box",
sensorId = "camera",
description = "Labeled bounding boxes",
annotationType = "bounding box labeler",
metadata = new Dictionary<string, object>(),
boxes = new List<BoundingBoxAnnotation.Entry>()
};
foreach (var d in data.RawValues)
{
if (d is BoundingBox2DLabeler.BoundingBoxValue e)
{
var entry = new BoundingBoxAnnotation.Entry
{
instanceId = (int)e.instance_id,
label = e.label_name,
origin = new Vector2{x = e.x, y = e.y},
dimension = new Vector2{x = e.width, y = e.height}
};
bbox.boxes.Add(entry);
}
}
return bbox;
}
InstanceSegmentation ToInstanceSegmentation(Annotation annotation, AnnotationData data, params(string,object)[] sensorValues)
{
var seg = new InstanceSegmentation
{
Id = "instance segmentation",
sensorId = "camera",
description = "instance segmentation blah blah blah",
annotationType = "instance segmentation labeler",
metadata = new Dictionary<string, object>(),
instances = new List<InstanceSegmentation.Entry>(),
dimension = Vector2.zero,
imageFormat = "png"
};
foreach (var sv in sensorValues)
{
switch (sv.Item1)
{
case "camera_width":
seg.dimension.x = (int)sv.Item2;
break;
case "camera_height":
seg.dimension.y = (int)sv.Item2;
break;
}
}
foreach (var d in data.RawValues)
{
if (d is InstanceSegmentationLabeler.InstanceData i)
{
seg.buffer = i.buffer;
foreach (var color in i.colors)
{
var entry = new InstanceSegmentation.Entry
{
instanceId = (int)color.instance_id,
rgba = color.color
};
seg.instances.Add(entry);
}
}
}
return seg;
}
List<Sensor> ConvertToSensors(PendingCapture capture, SimulationState simulationState)
{
var dim = new Vector2();
var buffer = new byte[0];
foreach (var sv in capture.AdditionalSensorValues)
{
switch (sv.Item1)
{
case "camera_width":
dim.x = (int)sv.Item2;
break;
case "camera_height":
dim.y = (int)sv.Item2;
break;
case "buffer":
buffer = (byte[])sv.Item2;
break;
}
}
return new List<Sensor>
{
new RgbSensor
{
Id = "camera",
sensorType = capture.SensorData.modality,
imageFormat = ".png",
dimension = dim,
position = capture.SensorSpatialData.EgoPose.position,
rotation = capture.SensorSpatialData.EgoPose.rotation.eulerAngles,
velocity = capture.SensorSpatialData.EgoVelocity ?? Vector3.zero,
acceleration = capture.SensorSpatialData.EgoAcceleration ?? Vector3.zero,
buffer = buffer,
metadata = new Dictionary<string, object>()
}
};
}
Frame ConvertToFrameData(PendingCapture capture, SimulationState simState, int captureFileIndex)
{
if (!m_SequenceMap.TryGetValue(capture.SequenceId, out var seq))
{
seq = m_currentReportedSequence++;
m_SequenceMap[capture.SequenceId] = seq;
}
var frame = new Frame(capture.FrameCount, seq, capture.Step);
frame.sensors = ConvertToSensors(capture, simState);
foreach (var (annotation, data) in capture.Annotations)
{
SoloDesign.Annotation soloAnnotation = null;
var supported = false;
switch (data.AnnotationDefinition.Id.ToString())
{
case "f9f22e05-443f-4602-a422-ebe4ea9b55cb":
soloAnnotation = ToBoundingBox(annotation, data);
supported = true;
break;
case "1ccebeb4-5886-41ff-8fe0-f911fa8cbcdf":
soloAnnotation = ToInstanceSegmentation(annotation, data, capture.AdditionalSensorValues);
supported = true;
break;
}
if (supported) frame.annotations.Add(soloAnnotation);
}
return frame;
}
-GetActiveReporter()?.ProcessPendingCaptures(pendingCaptures, simulationState);
+foreach (var pendingCapture in pendingCaptures)
+{
+var frame = ConvertToFrameData(pendingCapture, simulationState, captureFileIndex);
+GetActiveConsumer()?.OnFrameGenerated(frame);
+}
+//GetActiveReporter()?.ProcessPendingCaptures(pendingCaptures, simulationState);
#if false
simulationState.m_SerializeCapturesAsyncSampler.Begin();

void Write(List<PendingMetric> pendingMetrics, SimulationState simState, int metricsFileIndex)
{
#if false
#endif
#if false
m_SerializeMetricsAsyncSampler.Begin();
var jArray = new JArray();
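
ConvertToFrameData above recognizes annotations by two hard-coded annotation-definition GUIDs. A registry keyed by definition id is one way to keep this open for new labelers as they gain SOLO support; the sketch below is a design suggestion, not the package's API (the delegates stand in for ToBoundingBox / ToInstanceSegmentation):

using System;
using System.Collections.Generic;

static class AnnotationConverterRegistry
{
    // definition GUID -> converter from raw annotation data to a SOLO annotation
    static readonly Dictionary<string, Func<object, object>> s_Converters =
        new Dictionary<string, Func<object, object>>
    {
        ["f9f22e05-443f-4602-a422-ebe4ea9b55cb"] = data => /* ToBoundingBox */ data,
        ["1ccebeb4-5886-41ff-8fe0-f911fa8cbcdf"] = data => /* ToInstanceSegmentation */ data,
    };

    public static bool TryConvert(string definitionId, object data, out object soloAnnotation)
    {
        soloAnnotation = null;
        if (!s_Converters.TryGetValue(definitionId, out var convert))
            return false; // unsupported annotation type: skip it, as the switch does
        soloAnnotation = convert(data);
        return true;
    }
}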

com.unity.perception/Runtime/GroundTruth/SoloDesign/Frame.cs (41 changes)


-using System.Collections.Generic;
+using System;
+using System.Collections.Generic;
+using Unity.Mathematics;
namespace UnityEngine.Perception.GroundTruth.SoloDesign
{

/// <summary>
/// Metadata describing the simulation.
/// </summary>
[Serializable]
public SimulationMetadata()
{
unityVersion = "figure out how to do unity version";
perceptionVersion = "0.8.0-preview.4";
#if HDRP_PRESENT
renderPipeline = "HDRP";
#elif URP_PRESENT
renderPipeline = "URP";
#else
renderPipeline = "built-in";
#endif
metadata = new Dictionary<string, object>();
}
/// <summary>
/// The version of the Unity editor executing the simulation.
/// </summary>

/// <summary>
/// Metadata describing the final metrics of the simulation.
/// </summary>
[Serializable]
public CompletionMetadata() : base()
{}
public struct Sequence
{
/// <summary>

/// frame. This is only reported after all of the captures, annotations, and
/// metrics are ready to report for a single frame.
/// </summary>
[Serializable]
public Frame(int frame, int sequence, int step)
{
this.frame = frame;
this.sequence = sequence;
this.step = step;
sensors = new List<Sensor>();
annotations = new List<Annotation>();
metrics = new List<Metric>();
}
/// <summary>
/// The perception frame number of this record
/// </summary>

/// <summary>
/// Abstract sensor class that holds all of the common information for a sensor.
/// </summary>
[Serializable]
public abstract class Sensor
{
/// <summary>

/// <summary>
/// The concrete class for an RGB sensor.
/// </summary>
[Serializable]
public class RgbSensor : Sensor
{
// The format of the image type

/// annotations. Concrete instances of this class will add
/// data for their specific annotation type.
/// </summary>
[Serializable]
public abstract class Annotation
{
/// <summary>

/// <summary>
/// Bounding boxes for all of the labeled objects in a capture
/// </summary>
[Serializable]
public class BoundingBoxAnnotation : Annotation
{
public struct Entry

/// The instance segmentation image recorded for a capture. This
/// includes the data that associates a pixel color to an object.
/// </summary>
[Serializable]
public class InstanceSegmentation : Annotation
{
public struct Entry

/// metrics. Concrete instances of this class will add
/// data for their specific metric type.
/// </summary>
[Serializable]
public abstract class Metric
{
/// <summary>

/// The object count metric records how many of a particular object are
/// present in a capture.
/// </summary>
[Serializable]
public class ObjectCountMetric : Metric
{
public struct Entry
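
On the unityVersion placeholder above ("figure out how to do unity version"): UnityEngine.Application.unityVersion returns the version string of the running editor or player, so the constructor could read as follows (a sketch of that one substitution only):

public SimulationMetadata()
{
    // Application.unityVersion reports the running editor/player version,
    // e.g. "2021.2.0f1", which is what this field appears intended to hold.
    unityVersion = Application.unityVersion;
    perceptionVersion = "0.8.0-preview.4";
    // render-pipeline detection (#if HDRP_PRESENT / URP_PRESENT) unchanged, omitted here
    metadata = new Dictionary<string, object>();
}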

com.unity.perception/Runtime/GroundTruth/SoloDesign/SoloConsumer.cs (248 changes)


using System;
using System.Globalization;
using System.IO;
using System.Text;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
namespace UnityEngine.Perception.GroundTruth.SoloDesign
{
public class SoloConsumer : MonoBehaviour, IPerceptionConsumer
{
public string _baseDirectory = "D:/PerceptionOutput/SoloConsumer";
public string soloDatasetName = "solo";
static string currentDirectory = "";
SimulationMetadata m_CurrentMetadata;
public void OnSimulationStarted(SimulationMetadata metadata)
{
Debug.Log("SC - On Simulation Started");
m_CurrentMetadata = metadata;
var i = 0;
while (true)
{
var n = $"{soloDatasetName}_{i++}";
n = Path.Combine(_baseDirectory, n);
if (!Directory.Exists(n))
{
Directory.CreateDirectory(n);
currentDirectory = n;
break;
}
}
}
static string GetSequenceDirectoryPath(Frame frame)
{
var path = $"sequence.{frame.sequence}";
// verify that a directory already exists for a sequence,
// if not, create it.
path = Path.Combine(currentDirectory, path);
if (!Directory.Exists(path))
{
Directory.CreateDirectory(path);
}
return path;
}
void WriteJTokenToFile(string filePath, JToken jToken)
{
var stringWriter = new StringWriter(new StringBuilder(256), CultureInfo.InvariantCulture);
using (var jsonTextWriter = new JsonTextWriter(stringWriter))
{
jsonTextWriter.Formatting = Formatting.Indented;
jToken.WriteTo(jsonTextWriter);
}
var contents = stringWriter.ToString();
File.WriteAllText(filePath, contents);
}
public void OnFrameGenerated(Frame frame)
{
var path = GetSequenceDirectoryPath(frame);
path = Path.Combine(path, $"step{frame.step}.frame_data.json");
WriteJTokenToFile(path, ToFrame(frame));
Debug.Log("SC - On Frame Generated");
}
public void OnSimulationCompleted(CompletionMetadata metadata)
{
Debug.Log("SC - On Simulation Completed");
}
static JToken ToFrame(Frame frame)
{
var frameJson = new JObject
{
["frame"] = frame.frame,
["sequence"] = frame.sequence,
["step"] = frame.step
};
var captures = new JArray();
var annotations = new JArray();
var metrics = new JArray();
foreach (var sensor in frame.sensors)
{
switch (sensor)
{
case RgbSensor rgb:
captures.Add(ConvertSensor(frame, rgb));
break;
}
}
foreach (var annotation in frame.annotations)
{
switch (annotation)
{
case BoundingBoxAnnotation bbox:
annotations.Add(ConvertAnnotation(frame, bbox));
break;
case InstanceSegmentation seg:
annotations.Add(ConvertAnnotation(frame, seg));
break;
}
}
frameJson["captures"] = captures;
frameJson["annotations"] = annotations;
frameJson["metrics"] = metrics;
return frameJson;
}
static JArray FromVector3(Vector3 vector3)
{
return new JArray
{
vector3.x, vector3.y, vector3.z
};
}
static JArray FromVector2(Vector2 vector2)
{
return new JArray
{
vector2.x, vector2.y
};
}
static JArray FromColor32(Color32 color)
{
return new JArray
{
color.r, color.g, color.b, color.a
};
}
static JToken ToSensorHeader(Frame frame, Sensor sensor)
{
var token = new JObject
{
["Id"] = sensor.Id,
["sensorType"] = sensor.sensorType,
["position"] = FromVector3(sensor.position),
["rotation"] = FromVector3(sensor.rotation),
["velocity"] = FromVector3(sensor.velocity),
["acceleration"] = FromVector3(sensor.acceleration)
};
return token;
}
static JToken ConvertSensor(Frame frame, RgbSensor sensor)
{
// write out the png data
var path = GetSequenceDirectoryPath(frame);
path = Path.Combine(path, $"step{frame.step}.{sensor.sensorType}.{sensor.imageFormat}");
var file = File.Create(path, 4096);
file.Write(sensor.buffer, 0, sensor.buffer.Length);
file.Close();
var outRgb = ToSensorHeader(frame, sensor);
outRgb["fileName"] = path;
outRgb["imageFormat"] = sensor.imageFormat;
outRgb["dimension"] = FromVector2(sensor.dimension);
return outRgb;
}
static JToken ToAnnotationHeader(Frame frame, Annotation annotation)
{
var token = new JObject
{
["Id"] = annotation.Id,
["definition"] = annotation.description,
["sequence"] = frame.sequence,
["step"] = frame.step,
["sensor"] = annotation.sensorId
};
return token;
}
static JToken ConvertAnnotation(Frame frame, BoundingBoxAnnotation bbox)
{
var outBox = ToAnnotationHeader(frame, bbox);
var values = new JArray();
foreach (var box in bbox.boxes)
{
values.Add(new JObject
{
["frame"] = frame.frame,
["label_name"] = box.label,
["instance_id"] = box.instanceId,
["origin"] = FromVector2(box.origin),
["dimension"] = FromVector2(box.dimension)
});
}
outBox["values"] = values;
return outBox;
}
static JToken ConvertAnnotation(Frame frame, InstanceSegmentation segmentation)
{
// write out the png data
var path = GetSequenceDirectoryPath(frame);
path = Path.Combine(path,$"step{frame.step}.segmentation.{segmentation.imageFormat}");
var file = File.Create(path, 4096);
file.Write(segmentation.buffer, 0, segmentation.buffer.Length);
file.Close();
var outSeg = ToAnnotationHeader(frame, segmentation);
var values = new JArray();
foreach (var i in segmentation.instances)
{
values.Add(new JObject
{
["instance_id"] = i.instanceId,
["rgba"] = FromColor32(i.rgba)
});
}
outSeg["imageFormat"] = segmentation.imageFormat;
outSeg["dimension"] = FromVector2(segmentation.dimension);
outSeg["imagePath"] = path;
outSeg["instances"] = values;
return outSeg;
}
}
}
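
Tying the pieces together: SimulationState's constructor (above) calls GameObject.Find("SoloConsumer") and only creates its own consumer if none exists, so the consumer can be configured by placing one in the scene under that exact name. A usage sketch; the output path is an arbitrary example, not a package default:

using UnityEngine;
using UnityEngine.Perception.GroundTruth.SoloDesign;

public class SoloSetupExample : MonoBehaviour
{
    void Awake()
    {
        // SimulationState looks the consumer up by name, so the name matters.
        var go = new GameObject("SoloConsumer");
        var consumer = go.AddComponent<SoloConsumer>();
        consumer._baseDirectory = "C:/Temp/PerceptionOutput"; // default is D:/PerceptionOutput/SoloConsumer
        consumer.soloDatasetName = "solo";
        // On OnSimulationStarted the consumer creates <_baseDirectory>/solo_0
        // (then solo_1, ...) and writes sequence.<S>/step<N>.frame_data.json
        // plus the per-step PNGs into it.
    }
}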

com.unity.perception/Runtime/GroundTruth/SoloDesign/SoloConsumer.cs.meta (11 changes)


fileFormatVersion: 2
guid: c9bdd61aa45dc2044a0e4e677b77e042
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant: