浏览代码

Annotations now work through rebuilt simulation state

/solo_support
Steve Borkman 3 年前
当前提交
6b409406
共有 23 个文件被更改,包括 565 次插入506 次删除
  1. 16
      TestProjects/PerceptionURP/Assets/ExampleScripts/CustomAnnotationAndMetricReporter.cs
  2. 13
      com.unity.perception/Editor/GroundTruth/PerceptionCameraEditor.cs
  3. 21
      com.unity.perception/Editor/Randomization/Editors/RunInUnitySimulationWindow.cs
  4. 268
      com.unity.perception/Runtime/GroundTruth/DatasetCapture.cs
  5. 2
      com.unity.perception/Runtime/GroundTruth/Ego.cs
  6. 16
      com.unity.perception/Runtime/GroundTruth/Labelers/BoundingBox3DLabeler.cs
  7. 175
      com.unity.perception/Runtime/GroundTruth/Labelers/BoundingBoxLabeler.cs
  8. 70
      com.unity.perception/Runtime/GroundTruth/Labelers/InstanceSegmentationLabeler.cs
  9. 14
      com.unity.perception/Runtime/GroundTruth/Labelers/KeypointLabeler.cs
  10. 2
      com.unity.perception/Runtime/GroundTruth/Labelers/ObjectCountLabeler.cs
  11. 4
      com.unity.perception/Runtime/GroundTruth/Labelers/RenderedObjectInfoLabeler.cs
  12. 13
      com.unity.perception/Runtime/GroundTruth/Labelers/SemanticSegmentationLabeler.cs
  13. 25
      com.unity.perception/Runtime/GroundTruth/PerceptionCamera.cs
  14. 308
      com.unity.perception/Runtime/GroundTruth/SimulationState.cs
  15. 62
      com.unity.perception/Runtime/GroundTruth/SimulationState_Json.cs
  16. 20
      com.unity.perception/Runtime/GroundTruth/SoloDesign/Frame.cs
  17. 5
      com.unity.perception/Runtime/GroundTruth/SoloDesign/OldPerceptionConsumer.cs
  18. 7
      com.unity.perception/Runtime/GroundTruth/SoloDesign/OldPerceptionJsonFactory.cs
  19. 9
      com.unity.perception/Runtime/GroundTruth/SoloDesign/SoloConsumer.cs
  20. 5
      com.unity.perception/Runtime/GroundTruth/SoloDesign/SoloMessageBuilder.cs
  21. 12
      com.unity.perception/Runtime/Randomization/Scenarios/PerceptionScenario.cs
  22. 4
      com.unity.perception/Tests/Runtime/GroundTruthTests/RenderedObjectInfoTests.cs

16
TestProjects/PerceptionURP/Assets/ExampleScripts/CustomAnnotationAndMetricReporter.cs


public GameObject targetLight;
public GameObject target;
MetricDefinition lightMetricDefinition;
AnnotationDefinition boundingBoxAnnotationDefinition;
// MetricDefinition lightMetricDefinition;
// BoundingBox2DLabeler.BoundingBoxAnnotationDefinition boundingBoxAnnotationDefinition;
#if false
boundingBoxAnnotationDefinition = DatasetCapture.RegisterAnnotationDefinition(
"Target bounding box",
"The position of the target in the camera's local space",
id: Guid.Parse("C0B4A22C-0420-4D9F-BAFC-954B8F7B35A7"));
boundingBoxAnnotationDefinition = new BoundingBox2DLabeler.BoundingBoxAnnotationDefinition("Target Bounding Box", "The position of the target in the camera's local space");
DatasetCapture.RegisterAnnotationDefinition(boundingBoxAnnotationDefinition);
#endif
#if false
//Report the light's position by manually creating the json array string.
var lightPos = targetLight.transform.position;
DatasetCapture.ReportMetric(lightMetricDefinition,

boundingBoxAnnotationDefinition,
new[] { targetPos });
}
#endif
}
}

13
com.unity.perception/Editor/GroundTruth/PerceptionCameraEditor.cs


using UnityEngine;
using UnityEngine.Perception.GroundTruth;
using UnityEngine.Perception.GroundTruth.Exporters;
using UnityEngine.Perception.GroundTruth.Exporters.PerceptionFormat;
namespace UnityEditor.Perception.GroundTruth
{

Dictionary<SerializedProperty, CameraLabelerDrawer> m_CameraLabelerDrawers = new Dictionary<SerializedProperty, CameraLabelerDrawer>();
ReorderableList m_LabelersList;
string[] m_ExporterList;
string m_OutputMode = "Perception";
int m_OutputModeIndex = -1;
public void OnEnable()
{
m_LabelersList = new ReorderableList(this.serializedObject, labelersProperty, true, true, true, true);

m_LabelersList.drawElementCallback = DrawElement;
m_LabelersList.onAddCallback += OnAdd;
m_LabelersList.onRemoveCallback += OnRemove;
#if false
m_OutputMode = PlayerPrefs.GetString(SimulationState.outputFormatMode);
m_ExporterList = TypeCache.GetTypesDerivedFrom<IDatasetExporter>().Select(exporter => exporter.Name).ToArray();
if (m_ExporterList.Any())

#endif
}
float GetElementHeight(int index)

{
using(new EditorGUI.DisabledScope(EditorApplication.isPlaying))
{
EditorGUILayout.PropertyField(serializedObject.FindProperty(nameof(perceptionCamera.ID)), new GUIContent("ID", "Provide a unique sensor ID for the camera."));
#if false
if (m_ExporterList.Any())
{
if (m_OutputModeIndex < 0)

PlayerPrefs.SetString(SimulationState.outputFormatMode, m_OutputMode);
}
}
#endif
EditorGUILayout.PropertyField(serializedObject.FindProperty(nameof(perceptionCamera.captureTriggerMode)),new GUIContent("Capture Trigger Mode", $"The method of triggering captures for this camera. In {nameof(CaptureTriggerMode.Scheduled)} mode, captures happen automatically based on a start frame and frame delta time. In {nameof(CaptureTriggerMode.Manual)} mode, captures should be triggered manually through calling the {nameof(perceptionCamera.RequestCapture)} method of {nameof(PerceptionCamera)}."));
GUILayout.Space(5);

21
com.unity.perception/Editor/Randomization/Editors/RunInUnitySimulationWindow.cs


using UnityEngine;
using UnityEngine.Perception.GroundTruth;
using UnityEngine.Perception.GroundTruth.Exporters;
using UnityEngine.Perception.GroundTruth.Exporters.PerceptionFormat;
using UnityEngine.Perception.Randomization.Samplers;
using UnityEngine.Perception.Randomization.Scenarios;
using UnityEngine.SceneManagement;

TextField m_BuildPathField;
TextField m_SelectedBuildPathTextField;
TextField m_BuildIdField;
#if false
#endif
[MenuItem("Window/Run in Unity Simulation")]
static void ShowWindow()
{

new Random().NextBytes(bytes);
m_RandomSeedField.value = BitConverter.ToUInt32(bytes, 0);
};
#if false
m_OutputFormats = TypeCache.GetTypesDerivedFrom<IDatasetExporter>().Select(exporter => exporter.Name).ToArray();
m_OutputFormatMenu = root.Q<ToolbarMenu>("output-format");
var i = 0;

m_OutputFormatMenu.text = format;
});
}
#endif
for (i = 0; i < m_SysParamDefinitions.Length; i++)
for (var i = 0; i < m_SysParamDefinitions.Length; i++)
{
var index = i;
var param = m_SysParamDefinitions[i];

m_PrevExecutionIdLabel.text = $"Execution ID: {PlayerPrefs.GetString("SimWindow/prevExecutionId")}";
m_PrevRandomSeedLabel.text = $"Random Seed: {PlayerPrefs.GetString("SimWindow/prevRandomSeed")}";
m_OutputFormatMenu.text = PlayerPrefs.GetString(SimulationState.outputFormatMode, nameof(PerceptionExporter));
// m_OutputFormatMenu.text = PlayerPrefs.GetString(SimulationState.outputFormatMode, nameof(PerceptionExporter));
}
static string IncrementRunName(string runName)

scenarioConfig = (TextAsset)m_ScenarioConfigField.value,
currentOpenScenePath = SceneManager.GetSceneAt(0).path,
currentScenario = FindObjectOfType<ScenarioBase>(),
outputFormat = m_OutputFormats[m_OutputFormatIndex]
// outputFormat = m_OutputFormats[m_OutputFormatIndex]
};
var runGuid = Guid.NewGuid();
PerceptionEditorAnalytics.ReportRunInUnitySimulationStarted(

constants["totalIterations"] = m_RunParameters.totalIterations;
constants["instanceCount"] = m_RunParameters.instanceCount;
constants["randomSeed"] = m_RunParameters.randomSeed;
constants["outputFormat"] = m_RunParameters.outputFormat;
// constants["outputFormat"] = m_RunParameters.outputFormat;
var appParamName = $"{m_RunParameters.runName}";
var appParamsString = JsonConvert.SerializeObject(configuration, Formatting.Indented);

PlayerPrefs.SetInt("SimWindow/sysParamIndex", m_RunParameters.sysParamIndex);
PlayerPrefs.SetString("SimWindow/scenarioConfig",
m_RunParameters.scenarioConfig != null ? m_RunParameters.scenarioConfigAssetPath : string.Empty);
PlayerPrefs.SetString(SimulationState.outputFormatMode, m_RunParameters.outputFormat);
// PlayerPrefs.SetString(SimulationState.outputFormatMode, m_RunParameters.outputFormat);
SetFieldsFromPlayerPreferences();
}

public TextAsset scenarioConfig;
public string currentOpenScenePath;
public ScenarioBase currentScenario;
public string outputFormat;
// public string outputFormat;
public string scenarioConfigAssetPath => AssetDatabase.GetAssetPath(scenarioConfig);
}

268
com.unity.perception/Runtime/GroundTruth/DatasetCapture.cs


public PerceptionConsumer activeConsumer;
readonly Guid k_DatasetGuid = Guid.NewGuid();
// readonly Guid k_DatasetGuid = Guid.NewGuid();
SimulationState m_SimulationState;

get { return m_SimulationState ?? (m_SimulationState = CreateSimulationData()); }
private set => m_SimulationState = value;
}
internal string OutputDirectory => simulationState.GetOutputDirectoryNoCreate();
/// <summary>
/// The json metadata schema version the DatasetCapture's output conforms to.

/// <returns>A <see cref="SensorHandle"/>, which should be used to check <see cref="SensorHandle.ShouldCaptureThisFrame"/> each frame to determine whether to capture (or render) that frame.
/// It is also used to report captures, annotations, and metrics on the sensor.</returns>
/// <exception cref="ArgumentException">Thrown if ego is invalid.</exception>
#if false
#if false
if (!simulationState.Contains(egoHandle.Id))
throw new ArgumentException("Supplied ego is not part of the simulation.", nameof(egoHandle));
#endif
#endif
public SensorHandle RegisterSensor(SensorDefinition sensor)
{
return simulationState.AddSensor(sensor, sensor.simulationDeltaTime);
}
#if false
/// <summary>
/// Creates a metric type, which can be used to produce metrics during the simulation.
/// See <see cref="ReportMetric{T}(MetricDefinition,T[])"/>, <see cref="SensorHandle.ReportMetricAsync(MetricDefinition)"/>, <see cref="SensorHandle.ReportMetric{T}(MetricDefinition,T[])"/>,

{
return simulationState.RegisterMetricDefinition(name, specValues, description, id);
}
#endif
public void RegisterMetricDefinition(SoloDesign.MetricDefinition metricDefinition)
{
simulationState.RegisterMetricDefinition(metricDefinition);
}
public void RegisterAnnotationDefinition(SoloDesign.AnnotationDefinition definition)
{
simulationState.RegisterAnnotationDefinition(definition);
}
#if false
/// <summary>
/// Creates an annotation type, which can be used to produce annotations during the simulation.
/// See <see cref="SensorHandle.ReportAnnotationFile"/>, <see cref="SensorHandle.ReportAnnotationValues{T}"/> and <see cref="SensorHandle.ReportAnnotationAsync"/>.

{
return simulationState.RegisterAnnotationDefinition(name, specValues, description, format, id);
}
#endif
#if false
/// <summary>
/// Report a metric not associated with any sensor or annotation.
/// </summary>

/// <param name="metricDefinition">The metric definition of the metric being reported</param>
/// <returns>An <see cref="AsyncMetric"/> which should be used to report the metric values, potentially in a later frame</returns>
public AsyncMetric ReportMetricAsync(MetricDefinition metricDefinition) => simulationState.CreateAsyncMetric(metricDefinition);
#endif
internal bool IsValid(Guid id) => simulationState.Contains(id);
internal bool IsValid(string id) => simulationState.Contains(id);
//TODO: Remove the Guid path when we have proper dataset merging in Unity Simulation and Thea
return new SimulationState($"Dataset{k_DatasetGuid}");
return new SimulationState();
}
[RuntimeInitializeOnLoadMethod]

{
internal DatasetCapture datasetCapture { get; }
/// <summary>
/// The unique ID of the sensor. This ID is used to refer to this sensor in the json metadata.
/// </summary>
public Guid Id { get; }
public string Id { get; internal set; }
internal SensorHandle(Guid id, DatasetCapture datasetCapture)
internal SensorHandle(string id, DatasetCapture datasetCapture)
Id = id;
Id = id ?? string.Empty;
this.datasetCapture = datasetCapture;
}

datasetCapture.simulationState.SetEnabled(this, value);
}
}
#if false
/// <summary>
/// Report a file-based annotation related to this sensor in this frame.
/// </summary>

/// <exception cref="InvalidOperationException">Thrown if this method is called during a frame where <see cref="ShouldCaptureThisFrame"/> is false.</exception>
/// <exception cref="ArgumentException">Thrown if the given AnnotationDefinition is invalid.</exception>
public Annotation ReportAnnotationFile(AnnotationDefinition annotationDefinition, string filename)
public AnnotationHandle ReportAnnotationFile(AnnotationDefinition annotationDefinition, string filename)
{
if (!ShouldCaptureThisFrame)
throw new InvalidOperationException("Annotation reported on SensorHandle in frame when its ShouldCaptureThisFrame is false.");

return datasetCapture.simulationState.ReportAnnotationFile(annotationDefinition, this, filename);
}
#endif
public AnnotationHandle ReportAnnotation(SoloDesign.AnnotationDefinition definition, SoloDesign.Annotation annotation)
{
if (!ShouldCaptureThisFrame)
throw new InvalidOperationException("Annotation reported on SensorHandle in frame when its ShouldCaptureThisFrame is false.");
if (!definition.IsValid())
throw new ArgumentException("The given annotationDefinition is invalid", nameof(definition));
return datasetCapture.simulationState.ReportAnnotation(this, definition, annotation);
}
#if false
/// <summary>
/// Report a value-based annotation related to this sensor in this frame.
/// </summary>

/// <returns>Returns a handle to the reported annotation for reporting annotation-based metrics.</returns>
/// <exception cref="InvalidOperationException">Thrown if this method is called during a frame where <see cref="ShouldCaptureThisFrame"/> is false.</exception>
/// <exception cref="ArgumentException">Thrown if the given AnnotationDefinition is invalid.</exception>
public Annotation ReportAnnotationValues<T>(AnnotationDefinition annotationDefinition, T[] values)
public AnnotationHandle ReportAnnotationValues<T>(AnnotationDefinition annotationDefinition, T[] values)
{
if (!ShouldCaptureThisFrame)
throw new InvalidOperationException("Annotation reported on SensorHandle in frame when its ShouldCaptureThisFrame is false.");

return datasetCapture.simulationState.ReportAnnotationValues(annotationDefinition, this, values);
}
#endif
/// <summary>
/// Creates an async annotation for reporting the values for an annotation during a future frame.
/// </summary>

/// <exception cref="ArgumentException">Thrown if the given AnnotationDefinition is invalid.</exception>
public AsyncAnnotation ReportAnnotationAsync(AnnotationDefinition annotationDefinition)
public AsyncAnnotation ReportAnnotationAsync(SoloDesign.AnnotationDefinition annotationDefinition)
if (!annotationDefinition.IsValid)
if (!annotationDefinition.IsValid())
}
public string GetRgbCaptureFilename(string defaultFilename, params(string, object)[] additionalSensorValues)
{
return datasetCapture.simulationState.GetRgbCaptureFilename(defaultFilename, additionalSensorValues);
}
/// <summary>

{
datasetCapture.simulationState.SetNextCaptureTimeToNowForSensor(this);
}
#if false
/// <summary>
/// Report a metric regarding this sensor in the current frame.
/// </summary>

return datasetCapture.simulationState.CreateAsyncMetric(metricDefinition, this);
}
#endif
/// <summary>
/// Dispose this SensorHandle.
/// </summary>

/// Returns whether this SensorHandle is valid in the current simulation. Nil SensorHandles are never valid.
/// </summary>
public bool IsValid => datasetCapture.IsValid(this.Id);
/// <summary>
/// Returns true if this SensorHandle was default-instantiated.
/// </summary>

/// <inheritdoc/>
public bool Equals(SensorHandle other)
{
return Id.Equals(other.Id);
switch (Id)
{
case null when other.Id == null:
return true;
case null:
return false;
default:
return Id.Equals(other.Id);
}
}
/// <inheritdoc/>

/// <summary>
/// True if ReportValues has not been called yet.
/// </summary>
public bool IsPending => !IsNil && m_SimulationState.IsPending(ref this);
// public bool IsPending => !IsNil && m_SimulationState.IsPending(ref this);
/// <summary>
/// Returns true if the AsyncMetric is its default value.

if (values == null)
throw new ArgumentNullException(nameof(values));
m_SimulationState.ReportAsyncMetricResult(this, values: values);
// m_SimulationState.ReportAsyncMetricResult(this, values: values);
}
/// <summary>

if (valuesJsonArray == null)
throw new ArgumentNullException(nameof(valuesJsonArray));
m_SimulationState.ReportAsyncMetricResult(this, valuesJsonArray);
// m_SimulationState.ReportAsyncMetricResult(this, valuesJsonArray);
}
}

/// </summary>
public struct AsyncAnnotation
{
internal AsyncAnnotation(Annotation annotation, SimulationState simulationState)
internal AsyncAnnotation(AnnotationHandle annotationHandle, SimulationState simulationState)
{
this.annotationHandle = annotationHandle;
m_SimulationState = simulationState;
}
#if false
internal AsyncAnnotation(AnnotationHandle annotationHandle, int step, SensorHandle sensorHandle, SimulationState simulationState)
Annotation = annotation;
this.annotationHandle = annotationHandle;
#endif
public readonly Annotation Annotation;
public readonly AnnotationHandle annotationHandle;
internal bool IsNil => m_SimulationState == null && Annotation.IsNil;
internal bool IsNil => m_SimulationState == null && annotationHandle.IsNil;
/// <summary>
/// True if neither <see cref="ReportValues{T}"/> nor <see cref="ReportFile"/> have been called.
/// </summary>
public bool IsPending => !IsNil && m_SimulationState.IsPending(Annotation);
/// Report a file-based data for this annotation.
/// </summary>
/// <param name="path">The path to the file containing the annotation data.</param>
/// <exception cref="ArgumentNullException">Thrown if path is null</exception>
public void ReportFile(string path)
{
if (path == null)
throw new ArgumentNullException(nameof(path));
m_SimulationState.ReportAsyncAnnotationResult<object>(this, path);
}
/// <summary>
/// Report file-based and value-based data for this annotation.
/// </summary>
/// <param name="path">The path to the file containing the annotation data.</param>
/// <param name="values">The annotation data.</param>
/// <typeparam name="T">The type of the data.</typeparam>
/// <exception cref="ArgumentNullException">Thrown if path or values is null</exception>
public void ReportFileAndValues<T>(string path, IEnumerable<T> values)
{
if (path == null)
throw new ArgumentNullException(nameof(path));
if (values == null)
throw new ArgumentNullException(nameof(values));
m_SimulationState.ReportAsyncAnnotationResult(this, path, values);
}
/// <summary>
/// Report a value-based data for this annotation.
/// True if neither <see cref="ReportValues{T}"/> nor <see cref="ReportFile"/> have been called.
/// <param name="values">The annotation data.</param>
/// <typeparam name="T">The type of the data.</typeparam>
/// <exception cref="ArgumentNullException">Thrown if values is null</exception>
public void ReportValues<T>(IEnumerable<T> values)
{
if (values == null)
throw new ArgumentNullException(nameof(values));
public bool IsPending => !IsNil && m_SimulationState.IsPending(annotationHandle);
m_SimulationState.ReportAsyncAnnotationResult(this, values: values);
}
/// <summary>
/// Report a value-based data for this annotation.
/// </summary>
/// <param name="values">The annotation data.</param>
/// <typeparam name="T">The type of the data.</typeparam>
/// <exception cref="ArgumentNullException">Thrown if values is null</exception>
public void ReportValues<T>(NativeSlice<T> values) where T : struct
public void Report(SoloDesign.Annotation annotation)
if (values == null)
throw new ArgumentNullException(nameof(values));
if (annotation == null)
throw new ArgumentNullException();
m_SimulationState.ReportAsyncAnnotationResult(this, values: values);
m_SimulationState.ReportAsyncAnnotationResult(this, annotation);
}
}

public struct Annotation : IEquatable<Annotation>
public struct AnnotationHandle : IEquatable<AnnotationHandle>
public readonly Guid Id;
public readonly string Id;
/// <summary>
/// The step on which the annotation was reported.
/// </summary>

public readonly SensorHandle SensorHandle;
SimulationState m_SimulationState;
internal Annotation(SensorHandle sensorHandle, SimulationState simState, int step)
internal AnnotationHandle(SensorHandle sensorHandle, SimulationState simState, AnnotationDefinition definition, int step)
Id = Guid.NewGuid();
Id = definition.id;
SensorHandle = sensorHandle;
Step = step;
}

/// </summary>
public bool IsNil => Id == Guid.Empty;
public bool IsNil => Id == string.Empty;
#if false
/// <summary>
/// Reports a metric on this annotation. May only be called in the same frame as the annotation was reported.
/// </summary>

/// <exception cref="ArgumentNullException">Thrown if values is null</exception>
/// <exception cref="InvalidOperationException">Thrown if <see cref="Annotation.SensorHandle"/> reports false for <see cref="UnityEngine.Perception.GroundTruth.SensorHandle.ShouldCaptureThisFrame"/>.</exception>
/// <exception cref="InvalidOperationException">Thrown if <see cref="AnnotationHandle.SensorHandle"/> reports false for <see cref="UnityEngine.Perception.GroundTruth.SensorHandle.ShouldCaptureThisFrame"/>.</exception>
public void ReportMetric<T>(MetricDefinition metricDefinition, [NotNull] T[] values)
{
if (values == null)

/// <param name="metricDefinition"></param>
/// <param name="valuesJsonArray">A string-based JSON array to be placed in the "values" field of the metric</param>
/// <exception cref="ArgumentNullException">Thrown if values is null</exception>
/// <exception cref="InvalidOperationException">Thrown if <see cref="Annotation.SensorHandle"/> reports false for
/// <exception cref="InvalidOperationException">Thrown if <see cref="AnnotationHandle.SensorHandle"/> reports false for
/// <see cref="UnityEngine.Perception.GroundTruth.SensorHandle.ShouldCaptureThisFrame"/>.</exception>
public void ReportMetric(MetricDefinition metricDefinition, [NotNull] string valuesJsonArray)
{

/// <param name="metricDefinition">The type of the metric.</param>
/// <returns>A handle to an AsyncMetric, which can be used to report values for this metric in future frames.</returns>
public AsyncMetric ReportMetricAsync(MetricDefinition metricDefinition) => m_SimulationState.CreateAsyncMetric(metricDefinition, SensorHandle, this);
/// <inheritdoc/>
public bool Equals(Annotation other)
{
return Id.Equals(other.Id);
}
/// <inheritdoc/>
public override bool Equals(object obj)
{
return obj is Annotation other && Equals(other);
}
#endif
public override int GetHashCode()
{
return Id.GetHashCode();
}
}
/// <summary>
/// An ego, which is used to group multiple sensors under a single frame of reference.
/// </summary>
public struct EgoHandle : IEquatable<EgoHandle>
{
/// <summary>
/// The ID for this ego. This ID will be used to refer to this ego in the json metadata.
/// </summary>
public readonly Guid Id;
/// <summary>
/// A human-readable description of this ego.
/// </summary>
public readonly string Description;
internal EgoHandle(Guid id, string description)
{
this.Id = id;
this.Description = description;
}
/// <inheritdoc/>
public bool Equals(EgoHandle other)
public bool Equals(AnnotationHandle other)
{
return Id.Equals(other.Id);
}

{
return obj is EgoHandle other && Equals(other);
return obj is AnnotationHandle other && Equals(other);
}
/// <inheritdoc/>

}
/// <summary>
/// Compares two <see cref="EgoHandle"/> instances for equality.
/// </summary>
/// <param name="left">The first EgoHandle.</param>
/// <param name="right">The second EgoHandle.</param>
/// <returns>Returns true if the two EgoHandles refer to the same ego.</returns>
public static bool operator==(EgoHandle left, EgoHandle right)
{
return left.Equals(right);
}
/// <summary>
/// Compares two <see cref="EgoHandle"/> instances for inequality.
/// </summary>
/// <param name="left">The first EgoHandle.</param>
/// <param name="right">The second EgoHandle.</param>
/// <returns>Returns true if the two EgoHandles refer to the same ego.</returns>
public static bool operator!=(EgoHandle left, EgoHandle right)
{
return !left.Equals(right);
}
#if false
/// <summary>
/// A metric type, used to define a kind of metric. <see cref="DatasetCapture.RegisterMetricDefinition"/>.
/// </summary>

return Id.GetHashCode();
}
}
#endif
#if false
/// <summary>
/// A metric type, used to define a kind of annotation. <see cref="DatasetCapture.RegisterAnnotationDefinition"/>.
/// </summary>

m_SimulationState = simState;
}
}
#endif
/// <summary>
/// Container holding the poses of the ego and sensor. Also optionally contains the ego velocity and acceleration.
/// </summary>

2
com.unity.perception/Runtime/GroundTruth/Ego.cs


/// </summary>
public class Ego : MonoBehaviour
{
#if false
/// <summary>
/// A human-readable description for this Ego to be included in the dataset.
/// </summary>

m_EgoHandle = DatasetCapture.RegisterEgo(Description);
#endif
}
#endif
}
}

16
com.unity.perception/Runtime/GroundTruth/Labelers/BoundingBox3DLabeler.cs


}
static ProfilerMarker s_BoundingBoxCallback = new ProfilerMarker("OnBoundingBoxes3DReceived");
AnnotationDefinition m_AnnotationDefinition;
// AnnotationDefinition m_AnnotationDefinition;
int m_CurrentFrame;
// int m_CurrentFrame;
/// <summary>

{
if (idLabelConfig == null)
throw new InvalidOperationException("BoundingBox3DLabeler's idLabelConfig field must be assigned");
#if false
#else
// m_AnnotationDefinition = new AnnotationDefinition();
#endif
perceptionCamera.RenderedObjectInfosCalculated += OnRenderObjectInfosCalculated;
m_AsyncAnnotations = new Dictionary<int, AsyncAnnotation>();

/// <inheritdoc/>
protected override void OnBeginRendering(ScriptableRenderContext scriptableRenderContext)
{
#if false
m_CurrentFrame = Time.frameCount;
m_BoundingBoxValues[m_CurrentFrame] = new Dictionary<uint, BoxData>();

foreach (var label in LabelManager.singleton.registeredLabels)
ProcessLabel(label);
#endif
}
void OnRenderObjectInfosCalculated(int frameCount, NativeArray<RenderedObjectInfo> renderedObjectInfos)

}
BoundingBoxComputed?.Invoke(frameCount, m_ToReport);
asyncAnnotation.ReportValues(m_ToReport);
// asyncAnnotation.ReportValues(m_ToReport);
}
}

var converted = ConvertToBoxData(labelEntry, labeledEntity.instanceId, combinedBounds.center, combinedBounds.extents, cameraRotation);
m_BoundingBoxValues[m_CurrentFrame][labeledEntity.instanceId] = converted;
// m_BoundingBoxValues[m_CurrentFrame][labeledEntity.instanceId] = converted;
}
}
}

175
com.unity.perception/Runtime/GroundTruth/Labelers/BoundingBoxLabeler.cs


using Unity.Profiling;
using UnityEngine.Serialization;
using Unity.Simulation;
using UnityEngine.Perception.GroundTruth.Exporters.Solo;
using UnityEngine.Perception.GroundTruth.SoloDesign;
using UnityEngine.Rendering;
using UnityEngine.UI;

[Serializable]
public sealed class BoundingBox2DLabeler : CameraLabeler
{
public class AnnotationDefinition : SoloDesign.AnnotationDefinition
{
static readonly string k_Id = "bounding box";
static readonly string k_Description = "Bounding box for each labeled object visible to the sensor";
static readonly string k_AnnotationType = "bounding box";
public AnnotationDefinition() : base(k_Id, k_Description, k_AnnotationType) { }
public AnnotationDefinition(IEnumerable<Entry> spec)
: base(k_Id, k_Description, k_AnnotationType)
{
this.spec = spec;
}
[Serializable]
public struct Entry : IMessageProducer
{
public Entry(int id, string name)
{
labelId = id;
labelName = name;
}
public int labelId;
public string labelName;
public void ToMessage(IMessageBuilder builder)
{
builder.AddInt("label_id", labelId);
builder.AddString("label_name", labelName);
}
}
public IEnumerable<Entry> spec;
public override void ToMessage(IMessageBuilder builder)
{
base.ToMessage(builder);
foreach (var e in spec)
{
var nested = builder.AddNestedMessageToVector("spec");
e.ToMessage(nested);
}
}
}
AnnotationDefinition m_AnnotationDefinition = new AnnotationDefinition();
/// <summary>
/// Bounding boxes for all of the labeled objects in a capture
/// </summary>
[Serializable]
public class BoundingBoxAnnotation : SoloDesign.Annotation
{
public struct Entry
{
// The instance ID of the object
public int instanceId;
public int labelId;
// The type of the object
public string labelName;
/// <summary>
/// (xy) pixel location of the object's bounding box
/// </summary>
public Vector2 origin;
/// <summary>
/// (width/height) dimensions of the bounding box
/// </summary>
public Vector2 dimension;
public void ToMessage(IMessageBuilder builder)
{
builder.AddInt("instance_id", instanceId);
builder.AddInt("label_id", labelId);
builder.AddString("label_name", labelName);
builder.AddFloatVector("origin", new[] { origin.x, origin.y });
builder.AddFloatVector("dimension", new[] { dimension.x, dimension.y });
}
}
/// <summary>
/// The bounding boxes recorded by the annotator
/// </summary>
public List<Entry> boxes;
public override void ToMessage(IMessageBuilder builder)
{
base.ToMessage(builder);
foreach (var e in boxes)
{
var nested = builder.AddNestedMessageToVector("values");
e.ToMessage(nested);
}
}
}
///<inheritdoc/>
public override string description
{

[SuppressMessage("ReSharper", "InconsistentNaming")]
[SuppressMessage("ReSharper", "NotAccessedField.Local")]
public struct BoundingBoxValue
{
public int label_id;
public int frame;
public string label_name;
public uint instance_id;
public float x;
public float y;
public float width;
public float height;
}
public static string annotationId = "f9f22e05-443f-4602-a422-ebe4ea9b55cb";
public static string annotationId = "bounding box";
/// <summary>
/// The <see cref="IdLabelConfig"/> which associates objects with labels.
/// </summary>

Dictionary<int, (AsyncAnnotation annotation, LabelEntryMatchCache labelEntryMatchCache)> m_AsyncData;
AnnotationDefinition m_BoundingBoxAnnotationDefinition;
List<BoundingBoxValue> m_BoundingBoxValues;
List<BoundingBoxAnnotation.Entry> m_BoundingBoxValues;
Vector2 m_OriginalScreenSize = Vector2.zero;

/// <inheritdoc/>
protected override bool supportsVisualization => true;
/// <summary>
/// Event information for <see cref="BoundingBox2DLabeler.BoundingBoxesCalculated"/>
/// </summary>

/// <summary>
/// Bounding boxes.
/// </summary>
public IEnumerable<BoundingBoxValue> data;
public IEnumerable<BoundingBoxAnnotation.Entry> data;
}
/// <summary>

throw new InvalidOperationException("BoundingBox2DLabeler's idLabelConfig field must be assigned");
m_AsyncData = new Dictionary<int, (AsyncAnnotation annotation, LabelEntryMatchCache labelEntryMatchCache)>();
m_BoundingBoxValues = new List<BoundingBoxValue>();
m_BoundingBoxValues = new List<BoundingBoxAnnotation.Entry>();
DatasetCapture.RegisterAnnotationDefinition(new AnnotationDefinition());
#if false
#endif
perceptionCamera.RenderedObjectInfosCalculated += OnRenderedObjectInfosCalculated;
visualizationEnabled = supportsVisualization;

protected override void OnBeginRendering(ScriptableRenderContext scriptableRenderContext)
{
m_AsyncData[Time.frameCount] =
(perceptionCamera.SensorHandle.ReportAnnotationAsync(m_BoundingBoxAnnotationDefinition),
(perceptionCamera.SensorHandle.ReportAnnotationAsync(m_AnnotationDefinition),
idLabelConfig.CreateLabelEntryMatchCache(Allocator.TempJob));
}

if (!asyncData.labelEntryMatchCache.TryGetLabelEntryFromInstanceId(objectInfo.instanceId, out var labelEntry, out _))
continue;
m_BoundingBoxValues.Add(new BoundingBoxValue
{
label_id = labelEntry.id,
frame = frameCount,
label_name = labelEntry.label,
instance_id = objectInfo.instanceId,
x = objectInfo.boundingBox.x,
y = objectInfo.boundingBox.y,
width = objectInfo.boundingBox.width,
height = objectInfo.boundingBox.height,
});
m_BoundingBoxValues.Add(new BoundingBoxAnnotation.Entry
{
labelId = labelEntry.id,
labelName = labelEntry.label,
instanceId = (int)objectInfo.instanceId,
origin = new Vector2(objectInfo.boundingBox.x, objectInfo.boundingBox.y),
dimension = new Vector2(objectInfo.boundingBox.width, objectInfo.boundingBox.height)
}
);
}
if (!CaptureOptions.useAsyncReadbackIfSupported && frameCount != Time.frameCount)

data = m_BoundingBoxValues,
frameCount = frameCount
});
asyncData.annotation.ReportValues(m_BoundingBoxValues);
#if true
var toReport = new BoundingBoxAnnotation
{
sensorId = perceptionCamera.ID,
Id = m_AnnotationDefinition.id,
annotationType = m_AnnotationDefinition.annotationType,
description = m_AnnotationDefinition.description,
boxes = m_BoundingBoxValues
};
asyncData.annotation.Report(toReport);
#endif
}
}

foreach (var box in m_BoundingBoxValues)
{
var x = box.x * screenRatioWidth;
var y = box.y * screenRatioHeight;
var x = box.origin.x * screenRatioWidth;
var y = box.origin.y * screenRatioHeight;
var boxRect = new Rect(x, y, box.width * screenRatioWidth, box.height * screenRatioHeight);
var labelWidth = Math.Min(120, box.width * screenRatioWidth);
var boxRect = new Rect(x, y, box.dimension.x * screenRatioWidth, box.dimension.y * screenRatioHeight);
var labelWidth = Math.Min(120, box.dimension.x * screenRatioWidth);
GUI.Label(labelRect, box.label_name + "_" + box.instance_id, m_Style);
GUI.Label(labelRect, box.labelName + "_" + box.instanceId, m_Style);
}
}
}

70
com.unity.perception/Runtime/GroundTruth/Labelers/InstanceSegmentationLabeler.cs


using Unity.Collections;
using Unity.Profiling;
using Unity.Simulation;
using UnityEditor;
using UnityEngine.Perception.GroundTruth.SoloDesign;
using UnityEngine.Profiling;
using UnityEngine.Rendering;

[Serializable]
public sealed class InstanceSegmentationLabeler : CameraLabeler, IOverlayPanelProvider
{
InstanceSegmentationDefinition m_Definition = new InstanceSegmentationDefinition();
///<inheritdoc/>
public override string description
{

/// <inheritdoc/>
protected override bool supportsVisualization => true;
static readonly string k_Directory = "InstanceSegmentation" + Guid.NewGuid().ToString();
const string k_FilePrefix = "Instance_";
public string annotationId = "1ccebeb4-5886-41ff-8fe0-f911fa8cbcdf";
public string annotationId = "instance segmentation";
/// <summary>
/// The <see cref="idLabelConfig"/> which associates objects with labels.

AnnotationDefinition m_AnnotationDefinition;
Dictionary<int, (AsyncAnnotation, byte[])> m_AsyncAnnotations;
Dictionary<int, (AsyncAnnotation annotation, byte[] buffer)> m_AsyncData;
Texture m_CurrentTexture;
/// <inheritdoc cref="IOverlayPanelProvider"/>

void OnRenderedObjectInfosCalculated(int frame, NativeArray<RenderedObjectInfo> renderedObjectInfos)
{
if (!m_AsyncAnnotations.TryGetValue(frame, out var annotation))
if (!m_AsyncData.TryGetValue(frame, out var asyncData))
m_AsyncAnnotations.Remove(frame);
m_AsyncData.Remove(frame);
var colorValues = new List<ColorValue>();
var instances = new List<InstanceSegmentation.Entry>();
foreach (var objectInfo in renderedObjectInfos)
{

colorValues.Add(new ColorValue
instances.Add(new InstanceSegmentation.Entry
instance_id = objectInfo.instanceId,
color = objectInfo.instanceColor
instanceId = (int)objectInfo.instanceId,
rgba = objectInfo.instanceColor
var instanceData = new InstanceData
var toReport = new InstanceSegmentation
buffer = annotation.Item2,
colors = colorValues
sensorId = perceptionCamera.ID,
Id = m_Definition.id,
annotationType = m_Definition.annotationType,
description = m_Definition.description,
imageFormat = "png",
instances = instances,
dimension = new Vector2(Screen.width, Screen.height), // TODO figure out how to get this from the camera
buffer = asyncData.buffer
m_InstanceData.Add(instanceData);
annotation.Item1.ReportFileAndValues(m_InstancePath, m_InstanceData);
asyncData.annotation.Report(toReport);
if (!m_AsyncAnnotations.TryGetValue(frameCount, out var annotation))
if (!m_AsyncData.TryGetValue(frameCount, out var annotation))
return;
using (s_OnImageReceivedCallback.Auto())

m_InstancePath = $"{k_Directory}/{k_FilePrefix}{frameCount}.png";
var localPath = $"{Manager.Instance.GetDirectoryFor(k_Directory)}/{k_FilePrefix}{frameCount}.png";
// m_InstancePath = $"{k_Directory}/{k_FilePrefix}{frameCount}.png";
// var localPath = $"{Manager.Instance.GetDirectoryFor(k_Directory)}/{k_FilePrefix}{frameCount}.png";
var colors = new NativeArray<Color32>(data, Allocator.Persistent);
#if false

asyncRequest.Execute();
#endif
annotation.Item2 = ImageConversion.EncodeArrayToPNG(colors.ToArray(), GraphicsFormat.R8G8B8A8_UNorm, (uint)renderTexture.width, (uint)renderTexture.height);
Profiler.EndSample();
Profiler.BeginSample("InstanceSegmentationWritePng");
File.WriteAllBytes(localPath, annotation.Item2);
Manager.Instance.ConsumerFileProduced(localPath);
Profiler.EndSample();
// Profiler.EndSample();
// Profiler.BeginSample("InstanceSegmentationWritePng");
// File.WriteAllBytes(localPath, annotation.Item2);
// Manager.Instance.ConsumerFileProduced(localPath);
// Profiler.EndSample();
m_AsyncAnnotations[frameCount] = annotation;
m_AsyncData[frameCount] = annotation;
}
}

m_AsyncAnnotations[Time.frameCount] = (perceptionCamera.SensorHandle.ReportAnnotationAsync(m_AnnotationDefinition), null);
m_AsyncData[Time.frameCount] = (perceptionCamera.SensorHandle.ReportAnnotationAsync(m_Definition), null);
}
/// <inheritdoc/>

perceptionCamera.InstanceSegmentationImageReadback += OnImageCaptured;
perceptionCamera.RenderedObjectInfosCalculated += OnRenderedObjectInfosCalculated;
m_AsyncAnnotations = new Dictionary<int, (AsyncAnnotation, byte[])>();
m_AnnotationDefinition = DatasetCapture.RegisterAnnotationDefinition(
"instance segmentation",
idLabelConfig.GetAnnotationSpecification(),
"pixel-wise instance segmentation label",
"PNG",
Guid.Parse(annotationId));
m_AsyncData = new Dictionary<int, (AsyncAnnotation, byte[])>();
visualizationEnabled = supportsVisualization;
}

14
com.unity.perception/Runtime/GroundTruth/Labelers/KeypointLabeler.cs


public KeypointObjectFilter objectFilter;
// ReSharper restore MemberCanBePrivate.Global
AnnotationDefinition m_AnnotationDefinition;
// AnnotationDefinition m_AnnotationDefinition;
Texture2D m_MissingTexture;
Dictionary<int, (AsyncAnnotation annotation, Dictionary<uint, KeypointEntry> keypoints)> m_AsyncAnnotations;

{
if (idLabelConfig == null)
throw new InvalidOperationException($"{nameof(KeypointLabeler)}'s idLabelConfig field must be assigned");
#if false
#else
// m_AnnotationDefinition = new AnnotationDefinition();
#endif
// Texture to use in case the template does not contain a texture for the joints or the skeletal connections
m_MissingTexture = new Texture2D(1, 1);

//This code assumes that OnRenderedObjectInfoReadback will be called immediately after OnInstanceSegmentationImageReadback
KeypointsComputed?.Invoke(frameCount, m_KeypointEntriesToReport);
asyncAnnotation.annotation.ReportValues(m_KeypointEntriesToReport);
// asyncAnnotation.annotation.ReportValues(m_KeypointEntriesToReport);
}
/// <param name="scriptableRenderContext"></param>

#if false
m_CurrentFrame = Time.frameCount;
var annotation = perceptionCamera.SensorHandle.ReportAnnotationAsync(m_AnnotationDefinition);

foreach (var label in LabelManager.singleton.registeredLabels)
ProcessLabel(m_CurrentFrame, label);
#endif
}
// ReSharper disable InconsistentNaming

return jsons;
}
}
}
}

2
com.unity.perception/Runtime/GroundTruth/Labelers/ObjectCountLabeler.cs


/// <inheritdoc/>
protected override void OnBeginRendering(ScriptableRenderContext scriptableRenderContext)
{
#if true
if (m_ObjectCountMetricDefinition.Equals(default))
{
m_ObjectCountMetricDefinition = DatasetCapture.RegisterMetricDefinition("object count",

m_ObjectCountAsyncMetrics[Time.frameCount] = perceptionCamera.SensorHandle.ReportMetricAsync(m_ObjectCountMetricDefinition);
#endif
}
NativeArray<uint> ComputeObjectCounts(NativeArray<RenderedObjectInfo> objectInfo)

4
com.unity.perception/Runtime/GroundTruth/Labelers/RenderedObjectInfoLabeler.cs


RenderedObjectInfoValue[] m_VisiblePixelsValues;
Dictionary<int, AsyncMetric> m_ObjectInfoAsyncMetrics;
MetricDefinition m_RenderedObjectInfoMetricDefinition;
// MetricDefinition m_RenderedObjectInfoMetricDefinition;
/// <summary>
/// Creates a new RenderedObjectInfoLabeler. Be sure to assign <see cref="idLabelConfig"/> before adding to a <see cref="PerceptionCamera"/>.

/// <inheritdoc/>
protected override void OnBeginRendering(ScriptableRenderContext scriptableRenderContext)
{
#if false
if (m_RenderedObjectInfoMetricDefinition.Equals(default))
{
m_RenderedObjectInfoMetricDefinition = DatasetCapture.RegisterMetricDefinition(

}
m_ObjectInfoAsyncMetrics[Time.frameCount] = perceptionCamera.SensorHandle.ReportMetricAsync(m_RenderedObjectInfoMetricDefinition);
#endif
}
void ProduceRenderedObjectInfoMetric(NativeArray<RenderedObjectInfo> renderedObjectInfos, int frameCount)

13
com.unity.perception/Runtime/GroundTruth/Labelers/SemanticSegmentationLabeler.cs


[SerializeField]
RenderTexture m_TargetTextureOverride;
AnnotationDefinition m_SemanticSegmentationAnnotationDefinition;
// AnnotationDefinition m_SemanticSegmentationAnnotationDefinition;
RenderTextureReader<Color32> m_SemanticSegmentationTextureReader;
#if HDRP_PRESENT

pixel_value = labelConfig.skyColor
});
}
#if false
m_SemanticSegmentationAnnotationDefinition = DatasetCapture.RegisterAnnotationDefinition(
"semantic segmentation",
specs.ToArray(),

#else
// m_SemanticSegmentationAnnotationDefinition = new AnnotationDefinition();
#endif
m_SemanticSegmentationTextureReader = new RenderTextureReader<Color32>(targetTexture);
visualizationEnabled = supportsVisualization;
}

var datasetRelativePath = $"{semanticSegmentationDirectory}/{k_SegmentationFilePrefix}{frameCount}.png";
var localPath = $"{Manager.Instance.GetDirectoryFor(semanticSegmentationDirectory)}/{k_SegmentationFilePrefix}{frameCount}.png";
annotation.ReportFileAndValues(datasetRelativePath, info);
// annotation.ReportFileAndValues(datasetRelativePath, info);
var asyncRequest = Manager.Instance.CreateRequest<AsyncRequest<AsyncSemanticSegmentationWrite>>();

/// <inheritdoc/>
protected override void OnEndRendering(ScriptableRenderContext scriptableRenderContext)
{
#if false
#endif
}
/// <inheritdoc/>

25
com.unity.perception/Runtime/GroundTruth/PerceptionCamera.cs


using Unity.Simulation;
using UnityEngine;
using UnityEngine.Experimental.Rendering;
using UnityEngine.Perception.GroundTruth.SoloDesign;
using UnityEngine.Profiling;
using UnityEngine.Rendering;
#if HDRP_PRESENT

int m_LastFrameCaptured = -1;
int m_LastFrameEndRendering = -1;
Ego m_EgoMarker;
SensorHandle m_SensorHandle;
SensorHandle _SensorHandle;
Vector2 m_ScrollPosition;
#if URP_PRESENT

#endif
public string ID;
/// <summary>
/// A human-readable description of the camera.

get
{
EnsureSensorRegistered();
return m_SensorHandle;
return _SensorHandle;
private set => m_SensorHandle = value;
private set => _SensorHandle = value;
}
/// <summary>

void EnsureSensorRegistered()
{
if (m_SensorHandle.IsNil)
if (_SensorHandle.IsNil)
m_EgoMarker = GetComponentInParent<Ego>();
// var ego = m_EgoMarker == null ? DatasetCapture.RegisterEgo("") : m_EgoMarker.EgoHandle;
var ego = new EgoHandle();
SensorHandle = DatasetCapture.Instance.RegisterSensor(
ego, "camera", description, firstCaptureFrame, captureTriggerMode,
simulationDeltaTime, framesBetweenCaptures, manualSensorAffectSimulationTiming);
var sensorDef = new SensorDefinition(ID, "camera", description)
{
firstCaptureFrame = firstCaptureFrame,
captureTriggerMode = captureTriggerMode.ToString(),
simulationDeltaTime = simulationDeltaTime,
framesBetweenCaptures = framesBetweenCaptures,
manualSensorsAffectTiming = manualSensorAffectSimulationTiming
};
SensorHandle = DatasetCapture.Instance.RegisterSensor(sensorDef);
}
}

308
com.unity.perception/Runtime/GroundTruth/SimulationState.cs


using Unity.Collections;
using Unity.Simulation;
using UnityEngine;
using UnityEngine.Perception.GroundTruth.Exporters;
using UnityEngine.Perception.GroundTruth.Exporters.Coco;
using UnityEngine.Perception.GroundTruth.Exporters.PerceptionFormat;
using UnityEngine.Perception.GroundTruth.Exporters.PerceptionNew;
using UnityEngine.Perception.GroundTruth.Exporters.Solo;
using UnityEngine.Perception.GroundTruth.SoloDesign;
using UnityEngine.Profiling;

{
HashSet<SensorHandle> m_ActiveSensors = new HashSet<SensorHandle>();
Dictionary<SensorHandle, SensorData> m_Sensors = new Dictionary<SensorHandle, SensorData>();
HashSet<EgoHandle> m_Egos = new HashSet<EgoHandle>();
HashSet<Guid> m_Ids = new HashSet<Guid>();
Guid m_SequenceId = Guid.NewGuid();
//IDatasetExporter _ActiveReporter = null;
int m_SequenceId = -1;
// PerceptionConsumer activeConsumer = null;
HashSet<string> _Ids = new HashSet<string>();
// Always use the property SequenceTimeMs instead
int m_FrameCountLastUpdatedSequenceTime;

bool m_HasStarted;
int m_CaptureFileIndex;
List<AdditionalInfoTypeData> m_AdditionalInfoTypeData = new List<AdditionalInfoTypeData>();
List<PendingMetric> m_PendingMetrics = new List<PendingMetric>(k_MinPendingMetricsBeforeWrite + 10);
// List<PendingMetric> m_PendingMetrics = new List<PendingMetric>(k_MinPendingMetricsBeforeWrite + 10);
int m_NextMetricId = 1;
// int m_NextMetricId = 1;
CustomSampler m_SerializeCapturesSampler = CustomSampler.Create("SerializeCaptures");
CustomSampler m_SerializeCapturesAsyncSampler = CustomSampler.Create("SerializeCapturesAsync");

CustomSampler m_SerializeMetricsAsyncSampler = CustomSampler.Create("SerializeMetricsAsync");
CustomSampler m_GetOrCreatePendingCaptureForThisFrameSampler = CustomSampler.Create("GetOrCreatePendingCaptureForThisFrame");
float m_LastTimeScale;
readonly string m_OutputDirectoryName;
string m_OutputDirectoryPath;
public const string userBaseDirectoryKey = "userBaseDirectory";

public bool IsRunning { get; private set; }
public string OutputDirectory
{
get
{
if (m_OutputDirectoryPath == null)
m_OutputDirectoryPath = Manager.Instance.GetDirectoryFor(m_OutputDirectoryName);
return m_OutputDirectoryPath;
}
}
public SimulationState(string outputDirectory)
public SimulationState()
#if false
var go = GameObject.Find("SoloConsumer");
if (go == null)
{
go = new GameObject("SoloConsumer");
activeConsumer = go.AddComponent<SoloConsumer>();
}
else
{
activeConsumer = go.GetComponent<SoloConsumer>();
}
#endif
PlayerPrefs.SetString(defaultOutputBaseDirectory, Configuration.Instance.GetStorageBasePath());
m_OutputDirectoryName = outputDirectory;
var basePath = PlayerPrefs.GetString(userBaseDirectoryKey, string.Empty);
if (basePath != string.Empty)
{
if (Directory.Exists(basePath))
{
Configuration.localPersistentDataPath = basePath;
}
else
{
Debug.LogWarning($"Passed in directory to store simulation artifacts: {basePath}, does not exist. Using default directory {Configuration.localPersistentDataPath} instead.");
basePath = Configuration.localPersistentDataPath;
}
}
/*
//var activeReporterString = PlayerPrefs.GetString(activeReporterKey, defaultReporter);
var activeReporterString = "coco";
if (activeReporterString == "perceptionOutput")
{
m_ActiveReporter = new PerceptionExporter();
}
else
{
m_ActiveReporter = new CocoExporter();
}
*/
PlayerPrefs.SetString(latestOutputDirectoryKey, Manager.Instance.GetDirectoryFor("", basePath));
//IDatasetExporter GetActiveReporter()
PerceptionConsumer GetActiveConsumer()
static PerceptionConsumer GetActiveConsumer()
public string GetRgbCaptureFilename(string defaultFilename, params(string, object)[] additionalSensorValues)
{
return string.Empty;
}
/// <summary>
/// A self-sufficient container for all information about a reported capture. Capture writing should not depend on any
/// state outside of this container, as other state may have changed since the capture was reported.

public Guid Id;
public string Path;
public Guid SequenceId;
public int SequenceId;
public List<(Annotation, AnnotationData)> Annotations = new List<(Annotation, AnnotationData)>();
public List<(AnnotationHandle, Annotation)> Annotations = new List<(AnnotationHandle, Annotation)>();
public PendingCapture(Guid id, SensorHandle sensorHandle, SensorData sensorData, Guid sequenceId, int frameCount, int step, float timestamp)
public (int, int) Id => (SequenceId, Step);
public PendingCapture(SensorHandle sensorHandle, SensorData sensorData, int sequenceId, int frameCount, int step, float timestamp)
{
SensorHandle = sensorHandle;
FrameCount = frameCount;

Id = id;
#if false
public PendingMetric(MetricDefinition metricDefinition, int metricId, SensorHandle sensorHandle, Guid captureId, Annotation annotation, Guid sequenceId, int step, JToken values = null)
public PendingMetric(MetricDefinition metricDefinition, int metricId, SensorHandle sensorHandle, AnnotationHandle annotationHandle, int sequenceId, int step, JToken values = null)
Annotation = annotation;
AnnotationHandle = annotationHandle;
CaptureId = captureId;
Values = values;
}

public readonly int MetricId;
public readonly Guid CaptureId;
public readonly Annotation Annotation;
public readonly Guid SequenceId;
public (int, int) CaptureId => (SequenceId, Step);
public readonly AnnotationHandle AnnotationHandle;
public readonly int SequenceId;
#endif
public struct SensorData
{
public string modality;

public float sequenceTimeOfNextCapture;
public float sequenceTimeOfNextRender;
public int lastCaptureFrameCount;
public EgoHandle egoHandle;
}
public struct AnnotationData
{
public readonly AnnotationDefinition AnnotationDefinition;
public string Path;
public JArray ValuesJson;
public IEnumerable<object> RawValues;
public bool IsAssigned => Path != null || ValuesJson != null;
public AnnotationData(AnnotationDefinition annotationDefinition, string path, JArray valuesJson)
: this()
{
AnnotationDefinition = annotationDefinition;
Path = path;
ValuesJson = valuesJson;
}
}
enum AdditionalInfoKind

throw new InvalidOperationException($"Capture for frame {Time.frameCount} already reported for sensor {this}");
pendingCapture.CaptureReported = true;
pendingCapture.Path = filename;
pendingCapture.AdditionalSensorValues = additionalSensorValues;
pendingCapture.SensorSpatialData = sensorSpatialData;

}
}
public string GetOutputDirectoryNoCreate() => Path.Combine(Configuration.Instance.GetStoragePath(), m_OutputDirectoryName);
void EnsureSequenceTimingsUpdated()
{
if (!m_HasStarted)

m_Sensors[kvp.Key] = sensorData;
}
m_SequenceId = Guid.NewGuid();
m_SequenceId++;
}
void ResetTimings()

m_LastTimeScale = Time.timeScale;
}
string RegisterId(string requestedId)
{
var id = requestedId;
var i = 0;
while (_Ids.Contains(id))
{
id = $"{requestedId}_{i++}";
}
_Ids.Add(id);
return id;
}
public SensorHandle AddSensor(SensorDefinition sensor, float renderingDeltaTime)
{
var sensorData = new SensorData()
{
modality = sensor.modality,
description = sensor.definition,
firstCaptureTime = UnscaledSequenceTime + sensor.firstCaptureFrame * renderingDeltaTime,
captureTriggerMode = CaptureTriggerMode.Scheduled, // TODO fix this
renderingDeltaTime = renderingDeltaTime,
framesBetweenCaptures = sensor.framesBetweenCaptures,
manualSensorAffectSimulationTiming = sensor.manualSensorsAffectTiming,
lastCaptureFrameCount = -1
};
sensorData.sequenceTimeOfNextCapture = GetSequenceTimeOfNextCapture(sensorData);
sensorData.sequenceTimeOfNextRender = UnscaledSequenceTime;
sensor.id = RegisterId(sensor.id);
var sensorHandle = new SensorHandle(sensor.id, DatasetCapture.Instance);
m_ActiveSensors.Add(sensorHandle);
m_Sensors.Add(sensorHandle, sensorData);
GetActiveConsumer()?.OnSensorRegistered(sensor);
return sensorHandle;
}
#if false
public void AddSensor(EgoHandle egoHandle, string modality, string description, float firstCaptureFrame, CaptureTriggerMode captureTriggerMode, float renderingDeltaTime, int framesBetweenCaptures, bool manualSensorAffectSimulationTiming, SensorHandle sensor)
{
var sensorData = new SensorData()

GetActiveConsumer()?.OnSensorRegistered(new SensorDefinition("camera", modality, description));
}
#endif
float GetSequenceTimeOfNextCapture(SensorData sensorData)
{
// If the first capture hasn't happened yet, sequenceTimeNextCapture field won't be valid

return sensorData.sequenceTimeOfNextCapture;
}
public bool Contains(Guid id) => m_Ids.Contains(id);
public void AddEgo(EgoHandle egoHandle)
{
CheckDatasetAllowed();
m_Egos.Add(egoHandle);
m_Ids.Add(egoHandle.Id);
}
public bool Contains(string id) => _Ids.Contains(id);
public bool IsEnabled(SensorHandle sensorHandle) => m_ActiveSensors.Contains(sensorHandle);

}
WritePendingCaptures();
WritePendingMetrics();
// WritePendingMetrics();
Time.captureDeltaTime = nextFrameDt;
}

public void End()
{
if (m_Ids.Count == 0)
if (_Ids.Count == 0)
#if false
#endif
if (m_AdditionalInfoTypeData.Any())
{
List<IdLabelConfig.LabelEntrySpec> labels = new List<IdLabelConfig.LabelEntrySpec>();

GetActiveConsumer()?.OnSimulationCompleted(metadata);
}
public void RegisterAnnotationDefinition(SoloDesign.AnnotationDefinition definition)
{
GetActiveConsumer()?.OnAnnotationRegistered(definition);
}
public void RegisterMetricDefinition(SoloDesign.MetricDefinition definition)
{
GetActiveConsumer()?.OnMetricRegistered(definition);
}
#if false
public AnnotationDefinition RegisterAnnotationDefinition<TSpec>(string name, TSpec[] specValues, string description, string format, Guid id)
{
if (id == Guid.Empty)

return new AnnotationDefinition(id, this);
}
#endif
BoundingBoxAnnotationDefinition ToBoundingBoxDef<TSpec>(TSpec[] specValues)
{

return new BoundingBoxAnnotationDefinition(entries);
}
#if false
public MetricDefinition RegisterMetricDefinition<TSpec>(string name, TSpec[] specValues, string description, Guid id)
{
if (id == Guid.Empty)

return new MetricDefinition(id);
}
#endif
void RegisterAdditionalInfoType<TSpec>(string name, TSpec[] specValues, string description, string format, Guid id, AdditionalInfoKind additionalInfoKind)
{
CheckDatasetAllowed();

id = id,
specValues = specValues
};
if (!m_Ids.Add(id))
#if false
if (!m_Ids.Add(id.ToString()))
{
foreach (var existingAnnotationDefinition in m_AdditionalInfoTypeData)
{

throw new ArgumentException($"Id {id} is already in use. Ids must be unique.");
}
#endif
public Annotation ReportAnnotationFile(AnnotationDefinition annotationDefinition, SensorHandle sensorHandle, string filename)
public AnnotationHandle ReportAnnotation(SensorHandle sensor, AnnotationDefinition definition, Annotation annotation)
{
var handle = new AnnotationHandle(sensor, this, definition, AcquireStep());
var pendingCapture = GetOrCreatePendingCaptureForThisFrame(sensor);
pendingCapture.Annotations.Add((handle, null));
return handle;
}
#if false
public AnnotationHandle ReportAnnotationFile(AnnotationHandle annotationHandle, SensorHandle sensorHandle)
var annotation = new Annotation(sensorHandle, this, AcquireStep());
var annotation = new AnnotationHandle(sensorHandle, this, AcquireStep());
pendingCapture.Annotations.Add((annotation, new AnnotationData(annotationDefinition, filename, null)));
pendingCapture.Annotations.Add((annotation, new AnnotationData(annotationHandle, null)));
public Annotation ReportAnnotationValues<T>(AnnotationDefinition annotationDefinition, SensorHandle sensorHandle, T[] values)
public AnnotationHandle ReportAnnotationValues<T>(AnnotationDefinition annotationDefinition, SensorHandle sensorHandle, T[] values)
var annotation = new Annotation(sensorHandle, this, AcquireStep());
var annotation = new AnnotationHandle(sensorHandle, this, AcquireStep());
var pendingCapture = GetOrCreatePendingCaptureForThisFrame(sensorHandle);
var valuesJson = new JArray();
foreach (var value in values)

pendingCapture.Annotations.Add((annotation, new AnnotationData(annotationDefinition, null, valuesJson)));
return annotation;
}
#endif
PendingCapture GetOrCreatePendingCaptureForThisFrame(SensorHandle sensorHandle)
{
return GetOrCreatePendingCaptureForThisFrame(sensorHandle, out var _);

if (pendingCapture == null)
{
created = true;
pendingCapture = new PendingCapture(Guid.NewGuid(), sensorHandle, m_Sensors[sensorHandle], m_SequenceId, Time.frameCount, AcquireStep(), SequenceTime);
pendingCapture = new PendingCapture(sensorHandle, m_Sensors[sensorHandle], m_SequenceId, Time.frameCount, AcquireStep(), SequenceTime);
m_PendingCaptures.Add(pendingCapture);
}

public AsyncAnnotation ReportAnnotationAsync(AnnotationDefinition annotationDefinition, SensorHandle sensorHandle)
{
return new AsyncAnnotation(ReportAnnotation(sensorHandle, annotationDefinition, null), this);
}
#if false
public AsyncAnnotation ReportAnnotationAsync(AnnotationDefinition annotationDefinition, SensorHandle sensorHandle)
{
#endif
public void ReportAsyncAnnotationResult(AsyncAnnotation asyncAnnotation, SoloDesign.Annotation annotation)
{
if (!asyncAnnotation.IsPending)
throw new InvalidOperationException("AsyncAnnotation has already been reported and cannot be reported again.");
PendingCapture pendingCapture = null;
var annotationIndex = -1;
foreach (var c in m_PendingCaptures)
{
if (c.Step == asyncAnnotation.annotationHandle.Step && c.SensorHandle == asyncAnnotation.annotationHandle.SensorHandle)
{
pendingCapture = c;
annotationIndex = pendingCapture.Annotations.FindIndex(a => a.Item1.Equals(asyncAnnotation.annotationHandle));
if (annotationIndex != -1)
break;
}
}
Debug.Assert(pendingCapture != null && annotationIndex != -1);
var annotationTuple = pendingCapture.Annotations[annotationIndex];
annotationTuple.Item2 = annotation;
pendingCapture.Annotations[annotationIndex] = annotationTuple;
}
#if false
public void ReportAsyncAnnotationResult<T>(AsyncAnnotation asyncAnnotation, string filename = null, NativeSlice<T> values = default) where T : struct
{
var jArray = new JArray();

annotationTuple.Item2 = annotationData;
pendingCapture.Annotations[annotationIndex] = annotationTuple;
}
#endif
public bool IsPending(Annotation annotation)
public bool IsPending(AnnotationHandle annotationHandle)
foreach (var a in c.Annotations)
foreach (var (handle, annotation) in c.Annotations)
if (a.Item1.Equals(annotation))
return !a.Item2.IsAssigned;
if (handle.Equals(annotationHandle))
return annotation == null;
}
}

#if false
public bool IsPending(ref AsyncMetric asyncMetric)
{
foreach (var m in m_PendingMetrics)

pendingMetric.Values = values;
m_PendingMetrics[metricIndex] = pendingMetric;
}
static JArray JArrayFromArray<T>(T[] values)
{
var jArray = new JArray();
foreach (var value in values)
jArray.Add(DatasetJsonUtility.ToJToken(value));
return jArray;
}
public AsyncMetric CreateAsyncMetric(MetricDefinition metricDefinition, SensorHandle sensorHandle = default, Annotation annotation = default)
#endif
#if false
public AsyncMetric CreateAsyncMetric(MetricDefinition metricDefinition, SensorHandle sensorHandle = default, AnnotationHandle annotationHandle = default)
var captureId = Guid.Empty;
captureId = capture.Id;
m_PendingMetrics.Add(new PendingMetric(metricDefinition, id, sensorHandle, captureId, annotation, m_SequenceId, AcquireStep()));
m_PendingMetrics.Add(new PendingMetric(metricDefinition, id, sensorHandle, annotationHandle, m_SequenceId, AcquireStep()));
public void ReportMetric<T>(MetricDefinition metricDefinition, T[] values, SensorHandle sensorHandle, Annotation annotation)
public void ReportMetric<T>(MetricDefinition metricDefinition, T[] values, SensorHandle sensorHandle, AnnotationHandle annotationHandle)
ReportMetric(metricDefinition, jArray, sensorHandle, annotation);
ReportMetric(metricDefinition, jArray, sensorHandle, annotationHandle);
public void ReportMetric(MetricDefinition metricDefinition, JToken values, SensorHandle sensorHandle, Annotation annotation)
public void ReportMetric(MetricDefinition metricDefinition, JToken values, SensorHandle sensorHandle, AnnotationHandle annotationHandle)
var captureId = sensorHandle.IsNil ? Guid.Empty : GetOrCreatePendingCaptureForThisFrame(sensorHandle).Id;
m_PendingMetrics.Add(new PendingMetric(metricDefinition, m_NextMetricId++, sensorHandle, captureId, annotation, m_SequenceId, AcquireStep(), values));
var captureId = sensorHandle.IsNil ? (-1, -1) : GetOrCreatePendingCaptureForThisFrame(sensorHandle).Id;
m_PendingMetrics.Add(new PendingMetric(metricDefinition, m_NextMetricId++, sensorHandle, annotationHandle, m_SequenceId, AcquireStep(), values));
#endif
}
}

62
com.unity.perception/Runtime/GroundTruth/SimulationState_Json.cs


{
var egoReference = new JObject();
egoReference["version"] = DatasetCapture.SchemaVersion;
egoReference["egos"] = new JArray(m_Egos.Select(e =>
{
var egoObj = new JObject();
egoObj["id"] = e.Id.ToString();
if (e.Description != null)
egoObj["description"] = e.Description;
return egoObj;
}).ToArray());
WriteJObjectToFile(egoReference, "egos.json");

{
var sensorReference = new JObject();
sensorReference["id"] = kvp.Key.Id.ToString();
sensorReference["ego_id"] = kvp.Value.egoHandle.Id.ToString();
sensorReference["modality"] = kvp.Value.modality;
if (kvp.Value.description != null)
sensorReference["description"] = kvp.Value.description;

void WriteJObjectToFile(JObject jObject, string filename)
{
#if false
m_JsonToStringSampler.Begin();
var stringWriter = new StringWriter(new StringBuilder(256), CultureInfo.InvariantCulture);
using (var jsonTextWriter = new JsonTextWriter(stringWriter))

File.WriteAllText(path, contents);
Manager.Instance.ConsumerFileProduced(path);
m_WriteToDiskSampler.End();
#endif
Dictionary<Guid, int> m_SequenceMap = new Dictionary<Guid, int>();
Dictionary<int, int> m_SequenceMap = new Dictionary<int, int>();
Sensor ToSensor(PendingCapture pendingCapture, SimulationState simulationState, int captureFileIndex)
{

{
var pendingCapture = m_PendingCaptures[i];
if ((writeCapturesFromThisFrame || pendingCapture.FrameCount < frameCountNow) &&
pendingCapture.Annotations.All(a => a.Item2.IsAssigned))
pendingCapture.Annotations.All(a => a.Item2 != null))
{
pendingCapturesToWrite.Add(pendingCapture);
m_PendingCaptures.RemoveAt(i);

m_SerializeCapturesSampler.End();
return;
}
#if false
BoundingBoxAnnotation ToBoundingBox(Annotation annotation, AnnotationData data)
{
var bbox = new BoundingBoxAnnotation

return bbox;
}
InstanceSegmentation ToInstanceSegmentation(Annotation annotation, AnnotationData data, params(string,object)[] sensorValues)
#endif
#if false
InstanceSegmentation ToInstanceSegmentation(AnnotationHandle annotation, AnnotationData data, params(string,object)[] sensorValues)
{
var seg = new InstanceSegmentation
{

return seg;
}
#endif
#if true
List<Sensor> ConvertToSensors(PendingCapture capture, SimulationState simulationState)
{
var dim = new Vector2();

}
};
}
#endif
#if true
Frame ConvertToFrameData(PendingCapture capture, SimulationState simState, int captureFileIndex)
{
if (!m_SequenceMap.TryGetValue(capture.SequenceId, out var seq))

frame.sensors = ConvertToSensors(capture, simState);
foreach (var (handle, annotation) in capture.Annotations)
{
frame.annotations.Add(annotation);
}
#if false
foreach (var (annotation, data) in capture.Annotations)
{
SoloDesign.Annotation soloAnnotation = null;

{
#if false
#endif
case "1ccebeb4-5886-41ff-8fe0-f911fa8cbcdf":
soloAnnotation = ToInstanceSegmentation(annotation, data, capture.AdditionalSensorValues);
supported = true;

if (supported) frame.annotations.Add(soloAnnotation);
}
}
#endif
#endif
void Write(List<PendingCapture> pendingCaptures, SimulationState simulationState, int captureFileIndex)
{
foreach (var pendingCapture in pendingCaptures)

m_SerializeCapturesSampler.End();
m_CaptureFileIndex++;
}
#if false
struct WritePendingMetricRequestData
{
public List<PendingMetric> PendingMetrics;

static JObject JObjectFromPendingMetric(PendingMetric metric)
{
var jObject = new JObject();
#if false
jObject["annotation_id"] = metric.Annotation.IsNil ? new JRaw("null") : new JValue(metric.Annotation.Id.ToString());
jObject["annotation_id"] = metric.annotationHandle.IsNil ? new JRaw("null") : new JValue(metric.annotationHandle.Id.ToString());
#endif
#endif
#if false
/// <summary>
/// Creates the json representation of the given PendingCapture. Static because this should not depend on any SimulationState members,
/// which may have changed since the capture was reported.

var sensorJObject = new JObject();//new SensorCaptureJson
sensorJObject["sensor_id"] = pendingCapture.SensorHandle.Id.ToString();
sensorJObject["ego_id"] = pendingCapture.SensorData.egoHandle.Id.ToString();
sensorJObject["modality"] = pendingCapture.SensorData.modality;
sensorJObject["translation"] = DatasetJsonUtility.ToJToken(pendingCapture.SensorSpatialData.SensorPose.position);
sensorJObject["rotation"] = DatasetJsonUtility.ToJToken(pendingCapture.SensorSpatialData.SensorPose.rotation);

}
var egoCaptureJson = new JObject();
egoCaptureJson["ego_id"] = pendingCapture.SensorData.egoHandle.Id.ToString();
egoCaptureJson["translation"] = DatasetJsonUtility.ToJToken(pendingCapture.SensorSpatialData.EgoPose.position);
egoCaptureJson["rotation"] = DatasetJsonUtility.ToJToken(pendingCapture.SensorSpatialData.EgoPose.rotation);
egoCaptureJson["velocity"] = pendingCapture.SensorSpatialData.EgoVelocity.HasValue ? DatasetJsonUtility.ToJToken(pendingCapture.SensorSpatialData.EgoVelocity.Value) : null;

capture["timestamp"] = pendingCapture.Timestamp;
capture["sensor"] = sensorJObject;
capture["ego"] = egoCaptureJson;
capture["filename"] = pendingCapture.Path;
capture["format"] = GetFormatFromFilename(pendingCapture.Path);
if (pendingCapture.Annotations.Any())
capture["annotations"] = new JArray(pendingCapture.Annotations.Select(JObjectFromAnnotation).ToArray());

static JObject JObjectFromAnnotation((Annotation, AnnotationData) annotationInfo)
#endif
#if false
static JObject JObjectFromAnnotation((AnnotationHandle, AnnotationData) annotationInfo)
{
var annotationJObject = new JObject();
annotationJObject["id"] = annotationInfo.Item1.Id.ToString();

return annotationJObject;
}
#endif
struct WritePendingCaptureRequestData
{
public List<PendingCapture> PendingCaptures;

20
com.unity.perception/Runtime/GroundTruth/SoloDesign/Frame.cs


{
}
public virtual void OnMetricRegistered() { }
public virtual void OnMetricRegistered(MetricDefinition metricDefinition) { }
/// <summary>
/// Called at the end of each frame. Contains all of the generated data for the

this.id = id;
this.description = description;
this.annotationType = annotationType;
}
/// <summary>
/// Checks that this definition carries usable identifying metadata.
/// </summary>
/// <returns>
/// True when the id, description, and annotation type are all non-empty strings.
/// </returns>
public virtual bool IsValid()
{
    // The definition is unusable if any identifying field is the empty string.
    // (Deliberately compares against string.Empty, matching the original:
    // a null field is NOT treated as invalid here.)
    var anyFieldEmpty = id == string.Empty
        || description == string.Empty
        || annotationType == string.Empty;
    return !anyFieldEmpty;
}
public virtual void ToMessage(IMessageBuilder builder)

builder.AddString("annotation_type", annotationType);
}
}
#if false
/// <summary>
/// Bounding boxes for all of the labeled objects in a capture
/// </summary>

e.ToMessage(nested);
}
}
}
#endif
/// <summary>
/// The annotation definition for instance segmentation results. Holds the fixed
/// identifying metadata (id, human-readable description, and annotation type)
/// that is reported alongside each instance segmentation annotation.
/// </summary>
[Serializable]
public class InstanceSegmentationDefinition : AnnotationDefinition
{
    static readonly string k_Id = "instance segmentation";
    // Fix: the previous description was a placeholder ("You know the deal"),
    // which leaked into the generated dataset metadata. Use a meaningful
    // description so dataset consumers can understand the annotation.
    static readonly string k_Description = "Generates an instance segmentation image for each captured frame, where every labeled object is rendered with a unique per-instance color";
    static readonly string k_AnnotationType = "instance segmentation";

    /// <summary>
    /// Creates a new instance segmentation annotation definition with its
    /// fixed id, description, and annotation type.
    /// </summary>
    public InstanceSegmentationDefinition() : base(k_Id, k_Description, k_AnnotationType) { }
}
/// <summary>

5
com.unity.perception/Runtime/GroundTruth/SoloDesign/OldPerceptionConsumer.cs


Dictionary<int, Guid> m_SequenceToGuidMap = new Dictionary<int, Guid>();
List<PerceptionCapture> m_CurrentCaptures = new List<PerceptionCapture>();
// Unity lifecycle hook, intentionally empty: a MonoBehaviour must define
// Start (or a similar message) for its enable checkbox to appear in the
// Unity Editor inspector.
void Start()
{
    // Only here to get the check mark to show up in Unity Editor
}
internal string VerifyDirectoryWithGuidExists(string directoryPrefix, bool appendGuid = true)
{
var dirs = Directory.GetDirectories(m_CurrentPath);

7
com.unity.perception/Runtime/GroundTruth/SoloDesign/OldPerceptionJsonFactory.cs


using System.Linq;
using Newtonsoft.Json.Linq;
using UnityEngine;
using UnityEngine.Perception.GroundTruth;
using UnityEngine.Perception.GroundTruth.SoloDesign;
namespace GroundTruth.SoloDesign

{
return JToken.FromObject(PerceptionInstanceSegmentationValue.Convert(consumer, frame.frame, i), consumer.Serializer);
}
case BoundingBoxAnnotation b:
case BoundingBox2DLabeler.BoundingBoxAnnotation b:
{
return JToken.FromObject(PerceptionBoundingBoxAnnotationValue.Convert(consumer, labelerId, defId, b), consumer.Serializer);
}

public float width;
public float height;
internal static Entry Convert(BoundingBoxAnnotation.Entry entry)
internal static Entry Convert(BoundingBox2DLabeler.BoundingBoxAnnotation.Entry entry)
{
return new Entry
{

public Guid annotation_definition;
public List<Entry> values;
public static PerceptionBoundingBoxAnnotationValue Convert(OldPerceptionConsumer consumer, Guid labelerId, Guid defId, BoundingBoxAnnotation annotation)
public static PerceptionBoundingBoxAnnotationValue Convert(OldPerceptionConsumer consumer, Guid labelerId, Guid defId, BoundingBox2DLabeler.BoundingBoxAnnotation annotation)
{
return new PerceptionBoundingBoxAnnotationValue
{

9
com.unity.perception/Runtime/GroundTruth/SoloDesign/SoloConsumer.cs


SimulationMetadata m_CurrentMetadata;
// Unity lifecycle hook, intentionally empty: a MonoBehaviour must define
// Start (or a similar message) for its enable checkbox to appear in the
// Unity Editor inspector.
void Start()
{
    // Only here to get the check mark to show up in Unity Editor
}
public override void OnSimulationStarted(SimulationMetadata metadata)
{
Debug.Log("SC - On Simulation Started");

{
switch (annotation)
{
case BoundingBoxAnnotation bbox:
case BoundingBox2DLabeler.BoundingBoxAnnotation bbox:
annotations.Add(ConvertAnnotation(frame, bbox));
break;
case InstanceSegmentation seg:

return token;
}
static JToken ConvertAnnotation(Frame frame, BoundingBoxAnnotation bbox)
static JToken ConvertAnnotation(Frame frame, BoundingBox2DLabeler.BoundingBoxAnnotation bbox)
{
var outBox = ToAnnotationHeader(frame, bbox);
var values = new JArray();

5
com.unity.perception/Runtime/GroundTruth/SoloDesign/SoloMessageBuilder.cs


SimulationMetadata m_CurrentMetadata;
// Unity lifecycle hook, intentionally empty: a MonoBehaviour must define
// Start (or a similar message) for its enable checkbox to appear in the
// Unity Editor inspector.
void Start()
{
    // Only here to get the check mark to show up in Unity Editor
}
public override void OnSimulationStarted(SimulationMetadata metadata)
{
Debug.Log("SC - On Simulation Started");

12
com.unity.perception/Runtime/Randomization/Scenarios/PerceptionScenario.cs


/// <summary>
/// The metric definition used to report the current scenario iteration
/// </summary>
MetricDefinition m_IterationMetricDefinition;
// MetricDefinition m_IterationMetricDefinition;
/// <summary>
/// The scriptable render pipeline hook used to capture perception data skips the first frame of the simulation

/// <inheritdoc/>
protected override void OnStart()
{
var md = new GroundTruth.SoloDesign.MetricDefinition();
DatasetCapture.Instance.RegisterMetricDefinition(md);
#if false
m_IterationMetricDefinition = DatasetCapture.Instance.RegisterMetricDefinition(
"scenario_iteration", "Iteration information for dataset sequences",
Guid.Parse(k_ScenarioIterationMetricDefinitionId));

"The random seed used to initialize the random state of the simulation. Only triggered once per simulation.",
Guid.Parse("14adb394-46c0-47e8-a3f0-99e754483b76"));
DatasetCapture.Instance.ReportMetric(randomSeedMetricDefinition, new[] { genericConstants.randomSeed });
#endif
}
/// <inheritdoc/>

#if false
#endif
}
/// <inheritdoc/>

Manager.Instance.Shutdown();
Quit();
}
#if false
/// <summary>
/// Used to report a scenario iteration as a perception metric
/// </summary>

public int iteration;
}
#endif
}
}

4
com.unity.perception/Tests/Runtime/GroundTruthTests/RenderedObjectInfoTests.cs


boundingBoxes.Dispose();
cache.Dispose();
}
#if false
[UnityTest]
public IEnumerator LabelsCorrectWhenIdsReset()

Assert.AreEqual(3, timesInfoReceived);
}
#endif
private GameObject SetupCameraBoundingBox2D(Action<BoundingBox2DLabeler.BoundingBoxesCalculatedEventArgs> onBoundingBoxesCalculated, IdLabelConfig idLabelConfig)
{
var cameraObject = SetupCamera(camera =>

正在加载...
取消
保存