
new branch

- Branched out from the initial branch to keep the branch given to Romain untouched.
- Changed the capture flow so that the only available mode is "render all frames"; removed the "onlyRenderCapturedFrames" flag.
- Added simulation timing control for manually triggered cameras.
- More work on multi-camera coordination.
- Updated tests to reflect these changes.
/manual_capture
Mohsen Kamalzadeh, 4 years ago
Current commit: 1d0b35a2
8 changed files with 232 additions and 192 deletions
  1. com.unity.perception/Editor/GroundTruth/PerceptionCameraEditor.cs (54 changed lines)
  2. com.unity.perception/Runtime/GroundTruth/DatasetCapture.cs (46 changed lines)
  3. com.unity.perception/Runtime/GroundTruth/PerceptionCamera.cs (24 changed lines)
  4. com.unity.perception/Runtime/GroundTruth/SimulationState.cs (107 changed lines)
  5. com.unity.perception/Tests/Editor/DatasetCaptureEditorTests.cs (2 changed lines)
  6. com.unity.perception/Tests/Runtime/GroundTruthTests/DatasetCaptureSensorSchedulingTests.cs (149 changed lines)
  7. com.unity.perception/Tests/Runtime/GroundTruthTests/DatasetCaptureTests.cs (40 changed lines)
  8. com.unity.perception/Tests/Runtime/Randomization/ScenarioTests.cs (2 changed lines)

com.unity.perception/Editor/GroundTruth/PerceptionCameraEditor.cs (54 changed lines)


using System;
using System.Collections.Generic;
using System.Text.RegularExpressions;
using UnityEditorInternal;
using UnityEngine;
using UnityEngine.Perception.GroundTruth;

serializedObject.ApplyModifiedProperties();
}
const string k_FrametimeTitle = "Simulation Delta Time";
string onlyRenderCaptTitle = "Only Render Captured Frames";
string periodTitle = "Capture and Render Delta Time";
string frametimeTitle = "Rendering Delta Time";
int startFrame;
public override void OnInspectorGUI()
{
using(new EditorGUI.DisabledScope(EditorApplication.isPlaying))

EditorGUILayout.PropertyField(serializedObject.FindProperty(nameof(perceptionCamera.captureRgbImages)),new GUIContent("Save Camera Output to Disk", "For each captured frame, save an RGB image of the perception camera's output to disk."));
EditorGUILayout.PropertyField(serializedObject.FindProperty(nameof(perceptionCamera.captureTriggerMode)),new GUIContent("Capture Trigger Mode", $"The method of triggering captures for this camera. In {nameof(PerceptionCamera.CaptureTriggerMode.Scheduled)} mode, captures happen automatically based on a start time/frame and time/frame interval. In {nameof(PerceptionCamera.CaptureTriggerMode.Manual)} mode, captures should be triggered manually through calling the {nameof(perceptionCamera.CaptureOnNextUpdate)} method of {nameof(PerceptionCamera)}."));
EditorGUILayout.PropertyField(serializedObject.FindProperty(nameof(perceptionCamera.captureRgbImages)),new GUIContent("Save Camera RGB Output to Disk", "For each captured frame, save an RGB image of the perception camera's output to disk."));
EditorGUILayout.PropertyField(serializedObject.FindProperty(nameof(perceptionCamera.captureTriggerMode)),new GUIContent("Capture Trigger Mode", $"The method of triggering captures for this camera. In {nameof(PerceptionCamera.CaptureTriggerMode.Scheduled)} mode, captures happen automatically based on a start frame and frame delta time. In {nameof(PerceptionCamera.CaptureTriggerMode.Manual)} mode, captures should be triggered manually through calling the {nameof(perceptionCamera.CaptureOnNextUpdate)} method of {nameof(PerceptionCamera)}."));
GUILayout.Space(5);
EditorGUILayout.PropertyField(serializedObject.FindProperty(nameof(perceptionCamera.onlyRenderCapturedFrames)),new GUIContent(onlyRenderCaptTitle, $"If this checkbox is enabled, the attached camera will only render those frames that it needs to capture. In addition, the global frame delta time will be altered to match this camera's capture period, thus, the scene will not be visually updated in-between captures (physics simulation is unaffected). Therefore, if you have more than one {nameof(PerceptionCamera)} active, this flag should be either disabled or enabled for all of them, otherwise the cameras will not capture and synchronize properly."));
EditorGUILayout.PropertyField(serializedObject.FindProperty(nameof(perceptionCamera.simulationDeltaTime)),new GUIContent(k_FrametimeTitle, $"Sets Unity's Time.{nameof(Time.captureDeltaTime)} to the specified number, causing a fixed number of frames to be simulated for each second of elapsed simulation time regardless of the capabilities of the underlying hardware. Thus, simulation time and real time will not be synchronized."));
EditorGUILayout.PropertyField(serializedObject.FindProperty(nameof(perceptionCamera.firstCaptureFrame)), new GUIContent("Start at Frame",$"Frame number at which this camera starts capturing."));
EditorGUILayout.PropertyField(serializedObject.FindProperty(nameof(perceptionCamera.framesBetweenCaptures)),new GUIContent("Frames Between Captures", "The number of frames to simulate and render between the camera's scheduled captures. Setting this to 0 makes the camera capture every frame."));
if (perceptionCamera.onlyRenderCapturedFrames)
{
EditorGUILayout.PropertyField(serializedObject.FindProperty(nameof(perceptionCamera.startTime)), new GUIContent("Start Time","Time at which this perception camera starts rendering and capturing (seconds)."));
EditorGUILayout.PropertyField(serializedObject.FindProperty(nameof(perceptionCamera.period)), new GUIContent(periodTitle, "The interval at which the perception camera should render and capture (seconds)."));
var interval = (perceptionCamera.framesBetweenCaptures + 1) * perceptionCamera.simulationDeltaTime;
var startTime = perceptionCamera.simulationDeltaTime * perceptionCamera.firstCaptureFrame;
EditorGUILayout.HelpBox($"First capture at {startTime} seconds and consecutive captures every {interval} seconds of simulation time.", MessageType.None);
EditorGUILayout.HelpBox($"First capture at {perceptionCamera.startTime} seconds and consecutive captures every {perceptionCamera.period} seconds of simulation time.", MessageType.None);
}
else
{
EditorGUILayout.PropertyField(serializedObject.FindProperty(nameof(perceptionCamera.renderingDeltaTime)),new GUIContent(frametimeTitle, "The rendering delta time (seconds of simulation time). E.g. 0.0166 translates to roughly 60 frames per second. Note that if the hardware is not capable of rendering, capturing, and saving the required number of frames per second, the simulation will slow down in real time in order to produce the exact number of required frames per each second of simulation time. Thus, the results will always be correct with regard to simulation time but may look slow in real time."));
EditorGUILayout.PropertyField(serializedObject.FindProperty(nameof(perceptionCamera.startFrame)), new GUIContent("Start at Frame",$"Frame number at which this camera starts capturing."));
EditorGUILayout.PropertyField(serializedObject.FindProperty(nameof(perceptionCamera.framesBetweenCaptures)),new GUIContent("Frames Between Captures", "The number of frames to render between the camera's scheduled captures. Setting this to 0 makes the camera capture every rendered frame."));
GUILayout.EndVertical();
}
else
{
GUILayout.BeginVertical("TextArea");
EditorGUILayout.LabelField("Manual Capture Properties", EditorStyles.boldLabel);
//Because start time only needs to be calculated once, we can do it here. But for scheduling consecutive captures,
//we calculate the time of the next capture every time based on the values given for captureEveryXFrames and renderingDeltaTime, in order to preserve accuracy.
perceptionCamera.startTime = perceptionCamera.startFrame * perceptionCamera.renderingDeltaTime;
EditorGUILayout.PropertyField(serializedObject.FindProperty(nameof(perceptionCamera.manualSensorAffectSimulationTiming)),new GUIContent("Affect Simulation Timing", $"Have this camera affect simulation timings (similar to a scheduled camera) by requesting a specific frame delta time."));
var interval = (perceptionCamera.framesBetweenCaptures + 1) * perceptionCamera.renderingDeltaTime;
EditorGUILayout.HelpBox($"First capture at {perceptionCamera.startTime} seconds and consecutive captures every {interval} seconds of simulation time.", MessageType.None);
if (perceptionCamera.manualSensorAffectSimulationTiming)
{
EditorGUILayout.PropertyField(serializedObject.FindProperty(nameof(perceptionCamera.simulationDeltaTime)),new GUIContent(k_FrametimeTitle, $"Sets Unity's Time.{nameof(Time.captureDeltaTime)} to the specified number, causing a fixed number of frames to be generated for each second of elapsed simulation time regardless of the capabilities of the underlying hardware. Thus, simulation time and real time will not be synchronized."));
EditorGUILayout.HelpBox($"Captures should be triggered manually through calling the {nameof(perceptionCamera.CaptureOnNextUpdate)} method of {nameof(PerceptionCamera)}.", MessageType.None);
else
{
perceptionCamera.onlyRenderCapturedFrames = false;
EditorGUILayout.HelpBox($"Captures should be triggered manually through calling the {nameof(perceptionCamera.CaptureOnNextUpdate)} method of {nameof(PerceptionCamera)}. Framerate or simulation timings will not be modified by this camera.", MessageType.None);
}
GUILayout.Space(15);
m_LabelersList.DoLayoutList();
}
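
For scheduled cameras, the help box above summarizes the capture schedule implied by the three fields. A minimal sketch of that calculation, using the PerceptionCamera field names shown in this diff (the helper class itself is hypothetical, not part of the package):

using UnityEngine.Perception.GroundTruth;

// Hypothetical helper mirroring the inspector help box math above (not part of the package API).
static class CaptureScheduleSummary
{
    public static (float startTime, float interval) For(PerceptionCamera cam)
    {
        // First capture occurs after firstCaptureFrame simulated frames of simulationDeltaTime each.
        var startTime = cam.simulationDeltaTime * cam.firstCaptureFrame;
        // Consecutive captures are separated by the captured frame plus framesBetweenCaptures skipped frames.
        var interval = (cam.framesBetweenCaptures + 1) * cam.simulationDeltaTime;
        return (startTime, interval);
    }
}

For example, firstCaptureFrame = 2, simulationDeltaTime = 0.4 and framesBetweenCaptures = 2 give a first capture at 0.8 seconds and consecutive captures every 1.2 seconds of simulation time.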

com.unity.perception/Runtime/GroundTruth/DatasetCapture.cs (46 changed lines)


/// <param name="egoHandle">The ego container for the sensor. Sensor orientation will be reported in the context of the given ego.</param>
/// <param name="modality">The kind of the sensor (ex. "camera", "lidar")</param>
/// <param name="description">A human-readable description of the sensor (ex. "front-left rgb camera")</param>
/// <param name="period">The period, in seconds, on which the sensor should capture. Frames will be scheduled in the simulation such that each sensor is triggered every _period_ seconds.</param>
/// <param name="firstCaptureTime">The time, in seconds, from the start of the sequence on which this sensor should first be scheduled.</param>
/// <param name="firstCaptureFrame">The time, in seconds, from the start of the sequence on which this sensor should first be scheduled.</param>
/// <param name="onlyRenderCapturedFrames"></param>
/// <param name="renderingDeltaTime"></param>
/// <param name="simulationDeltaTime"></param>
/// <param name="framesBetweenCaptures"></param>
/// <param name="manualSensorAffectSimulationTiming"></param>
public static SensorHandle RegisterSensor(EgoHandle egoHandle, string modality, string description, float period, float firstCaptureTime, PerceptionCamera.CaptureTriggerMode captureTriggerMode, bool onlyRenderCapturedFrames, float renderingDeltaTime = -1, int framesBetweenCaptures = 0)
public static SensorHandle RegisterSensor(EgoHandle egoHandle, string modality, string description, float firstCaptureFrame, PerceptionCamera.CaptureTriggerMode captureTriggerMode, float simulationDeltaTime, int framesBetweenCaptures, bool manualSensorAffectSimulationTiming = false)
SimulationState.AddSensor(egoHandle, modality, description, period, firstCaptureTime, captureTriggerMode, onlyRenderCapturedFrames, renderingDeltaTime, framesBetweenCaptures, sensor);
SimulationState.AddSensor(egoHandle, modality, description, firstCaptureFrame, captureTriggerMode, simulationDeltaTime, framesBetweenCaptures, manualSensorAffectSimulationTiming, sensor);
return sensor;
}
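
For reference, a minimal sketch of a call to the new RegisterSensor overload (the argument values here are placeholders; the parameter order follows the signature shown above):

var ego = DatasetCapture.RegisterEgo("ego");
var sensor = DatasetCapture.RegisterSensor(
    ego, "camera", "front rgb camera",
    0,                                             // firstCaptureFrame: start capturing on the first frame
    PerceptionCamera.CaptureTriggerMode.Scheduled, // captureTriggerMode
    0.0166f,                                       // simulationDeltaTime (~60 simulated frames per second)
    0);                                            // framesBetweenCaptures: capture every frame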

/// <exception cref="ArgumentException">Thrown if the given AnnotationDefinition is invalid.</exception>
public Annotation ReportAnnotationFile(AnnotationDefinition annotationDefinition, string filename)
{
// if (!ShouldCaptureThisFrame)
// throw new InvalidOperationException("Annotation reported on SensorHandle in frame when its ShouldCaptureThisFrame is false.");
if (!ShouldCaptureThisFrame)
throw new InvalidOperationException("Annotation reported on SensorHandle in frame when its ShouldCaptureThisFrame is false.");
if (!annotationDefinition.IsValid)
throw new ArgumentException("The given annotationDefinition is invalid", nameof(annotationDefinition));

/// <exception cref="ArgumentException">Thrown if the given AnnotationDefinition is invalid.</exception>
public Annotation ReportAnnotationValues<T>(AnnotationDefinition annotationDefinition, T[] values)
{
// if (!ShouldCaptureThisFrame)
// throw new InvalidOperationException("Annotation reported on SensorHandle in frame when its ShouldCaptureThisFrame is false.");
if (!ShouldCaptureThisFrame)
throw new InvalidOperationException("Annotation reported on SensorHandle in frame when its ShouldCaptureThisFrame is false.");
if (!annotationDefinition.IsValid)
throw new ArgumentException("The given annotationDefinition is invalid", nameof(annotationDefinition));

/// <exception cref="ArgumentException">Thrown if the given AnnotationDefinition is invalid.</exception>
public AsyncAnnotation ReportAnnotationAsync(AnnotationDefinition annotationDefinition)
{
// if (!ShouldCaptureThisFrame)
// throw new InvalidOperationException("Annotation reported on SensorHandle in frame when its ShouldCaptureThisFrame is false.");
if (!ShouldCaptureThisFrame)
throw new InvalidOperationException("Annotation reported on SensorHandle in frame when its ShouldCaptureThisFrame is false.");
if (!annotationDefinition.IsValid)
throw new ArgumentException("The given annotationDefinition is invalid", nameof(annotationDefinition));

/// <exception cref="InvalidOperationException">Thrown if ReportCapture is being called when ShouldCaptureThisFrame is false or it has already been called this frame.</exception>
public void ReportCapture(string filename, SensorSpatialData sensorSpatialData, params(string, object)[] additionalSensorValues)
{
// if (!ShouldCaptureThisFrame)
// {
// throw new InvalidOperationException("Capture reported in frame when ShouldCaptureThisFrame is false.");
// }
if (!ShouldCaptureThisFrame)
{
throw new InvalidOperationException("Capture reported in frame when ShouldCaptureThisFrame is false.");
}
DatasetCapture.SimulationState.ReportCapture(this, filename, sensorSpatialData, additionalSensorValues);
}

public void CaptureOnNextUpdate()
{
DatasetCapture.SimulationState.ExternalCaptureRequestForSensor(this);
DatasetCapture.SimulationState.SetNextCaptureTimeToNowForSensor(this);
}
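
A minimal sketch of requesting a capture on a manually triggered sensor, assuming a SensorHandle obtained from RegisterSensor with CaptureTriggerMode.Manual:

// Sketch: request a capture for the sensor's next update (manual trigger mode).
if (sensorHandle.IsValid)
    sensorHandle.CaptureOnNextUpdate(); // sets the sensor's next capture time to now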
/// <summary>

if (values == null)
throw new ArgumentNullException(nameof(values));
// if (!ShouldCaptureThisFrame)
// throw new InvalidOperationException($"Sensor-based metrics may only be reported when SensorHandle.ShouldCaptureThisFrame is true");
if (!ShouldCaptureThisFrame)
throw new InvalidOperationException($"Sensor-based metrics may only be reported when SensorHandle.ShouldCaptureThisFrame is true");
DatasetCapture.SimulationState.ReportMetric(metricDefinition, values, this, default);
}

/// <exception cref="InvalidOperationException">Thrown if <see cref="ShouldCaptureThisFrame"/> is false.</exception>
public void ReportMetric(MetricDefinition metricDefinition, [NotNull] string valuesJsonArray)
{
// if (!ShouldCaptureThisFrame)
// throw new InvalidOperationException($"Sensor-based metrics may only be reported when SensorHandle.ShouldCaptureThisFrame is true");
if (!ShouldCaptureThisFrame)
throw new InvalidOperationException($"Sensor-based metrics may only be reported when SensorHandle.ShouldCaptureThisFrame is true");
DatasetCapture.SimulationState.ReportMetric(metricDefinition, new JRaw(valuesJsonArray), this, default);
}

/// <returns>An <see cref="AsyncMetric"/> which should be used to report the metric values, potentially in a later frame</returns>
public AsyncMetric ReportMetricAsync(MetricDefinition metricDefinition)
{
// if (!ShouldCaptureThisFrame)
// throw new InvalidOperationException($"Sensor-based metrics may only be reported when SensorHandle.ShouldCaptureThisFrame is true");
if (!ShouldCaptureThisFrame)
throw new InvalidOperationException($"Sensor-based metrics may only be reported when SensorHandle.ShouldCaptureThisFrame is true");
return DatasetCapture.SimulationState.CreateAsyncMetric(metricDefinition, this);
}

com.unity.perception/Runtime/GroundTruth/PerceptionCamera.cs (24 changed lines)


/// A human-readable description of the camera.
/// </summary>
public string description;
/// <summary>
/// The interval in seconds at which the camera should render and capture.
/// </summary>
public float period = .0166f;
/// <summary>
/// The start time in seconds of the first frame in the simulation.
/// </summary>
public float startTime;
/// <summary>
/// Whether camera output should be captured to disk
/// </summary>

/// <summary>
/// Frame number at which this camera starts capturing.
/// </summary>
public int startFrame;
public int firstCaptureFrame = 0;
/// <summary>
/// The method of triggering captures for this camera. In <see cref="PerceptionCamera.CaptureTriggerMode.Scheduled"/> mode, captures happen automatically based on a start time/frame and time/frame interval. In <see cref="PerceptionCamera.CaptureTriggerMode.Manual"/> mode, captures should be triggered manually through calling the <see cref="PerceptionCamera.CaptureOnNextUpdate"/> method of <see cref="PerceptionCamera"/>.

}
public CaptureTriggerMode captureTriggerMode = CaptureTriggerMode.Scheduled;
/// When enabled, the camera will only render those frames that it needs to capture. In addition, the global frame delta time (<see cref="Time.captureDeltaTime"/>) will be altered to match this camera's capture period, thus, the scene will not be visually updated in-between captures (physics simulation is unaffected). If there is more than one <see cref="PerceptionCamera"/> active, this flag should be either disabled or enabled for all of them, otherwise the cameras will not capture and synchronize properly.
/// Have this unscheduled (manual capture) camera affect simulation timings (similar to a scheduled camera) by requesting a specific frame delta time
public bool onlyRenderCapturedFrames = true;
public bool manualSensorAffectSimulationTiming = false;
/// The rendering frame time (seconds). E.g. 0.0166 translates to 60 frames per second.
/// The simulation frame time (seconds) for this camera. E.g. 0.0166 translates to 60 frames per second. This will be used as Unity's <see cref="Time.captureDeltaTime"/>, causing a fixed number of frames to be generated for each second of elapsed simulation time regardless of the capabilities of the underlying hardware.
public float renderingDeltaTime = 0.0166f;
public float simulationDeltaTime = 0.0166f;
/// "The number of frames to render between the camera's scheduled captures when the rendering delta time is not controlled by this camera (i.e. <see cref="onlyRenderCapturedFrames"/> is false). Setting this to 0 makes the camera capture every rendered frame.
/// "The number of frames to simulate and render between the camera's scheduled captures. Setting this to 0 makes the camera capture frame.
/// </summary>
public int framesBetweenCaptures = 0;
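
Putting the fields above together, a minimal sketch of configuring a PerceptionCamera for manual captures that also drive simulation timing (per the inspector tooltips earlier in this diff; the component reference is obtained as usual in Unity):

var cam = GetComponent<PerceptionCamera>();
cam.captureTriggerMode = PerceptionCamera.CaptureTriggerMode.Manual;
cam.manualSensorAffectSimulationTiming = true; // request a fixed frame delta time like a scheduled camera
cam.simulationDeltaTime = 0.0166f;             // used as Time.captureDeltaTime (~60 simulated fps)
// Captures are then requested explicitly:
cam.CaptureOnNextUpdate();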

{
m_EgoMarker = GetComponentInParent<Ego>();
var ego = m_EgoMarker == null ? DatasetCapture.RegisterEgo("") : m_EgoMarker.EgoHandle;
SensorHandle = DatasetCapture.RegisterSensor(ego, "camera", description, period, startTime, captureTriggerMode, onlyRenderCapturedFrames, renderingDeltaTime, framesBetweenCaptures);
SensorHandle = DatasetCapture.RegisterSensor(ego, "camera", description, firstCaptureFrame, captureTriggerMode, simulationDeltaTime, framesBetweenCaptures, manualSensorAffectSimulationTiming);
}
}

com.unity.perception/Runtime/GroundTruth/SimulationState.cs (107 changed lines)


}
//A sensor will be triggered if sequenceTime is within includeThreshold seconds of the next trigger
const float k_IncludeInFrameThreshold = .01f;
const float k_SimulationTimingAccuracy = 0.01f;
const int k_MaxDeltaTime = 10;
const float k_maxDeltaTime = 100f;
public SimulationState(string outputDirectory)
{

{
public string modality;
public string description;
public float period;
public bool onlyRenderCapturedFrames;
public int captureEveryXFrames;
public int framesBetweenCaptures;
public bool manualSensorAffectSimulationTiming;
public bool externalCaptureRequestPending;
public float sequenceTimeOfNextRender;
public int lastCaptureFrameCount;
public EgoHandle egoHandle;
}

{
var sensorData = kvp.Value;
sensorData.sequenceTimeOfNextCapture = GetSequenceTimeOfNextCapture(sensorData);
sensorData.sequenceTimeOfNextRender = 0;
m_Sensors[kvp.Key] = sensorData;
}

m_LastTimeScale = Time.timeScale;
}
public void AddSensor(EgoHandle egoHandle, string modality, string description, float period, float firstCaptureTime, PerceptionCamera.CaptureTriggerMode captureTriggerMode, bool controlSimulationTiming, float renderingDeltaTime, int captureEveryXFrames, SensorHandle sensor)
public void AddSensor(EgoHandle egoHandle, string modality, string description, float firstCaptureFrame, PerceptionCamera.CaptureTriggerMode captureTriggerMode, float renderingDeltaTime, int framesBetweenCaptures, bool manualSensorAffectSimulationTiming, SensorHandle sensor)
period = period,
firstCaptureTime = firstCaptureTime,
firstCaptureTime = firstCaptureFrame * renderingDeltaTime,
onlyRenderCapturedFrames = controlSimulationTiming,
captureEveryXFrames = captureEveryXFrames,
framesBetweenCaptures = framesBetweenCaptures,
manualSensorAffectSimulationTiming = manualSensorAffectSimulationTiming,
sensorData.externalCaptureRequestPending = false;
sensorData.sequenceTimeOfNextRender = 0;
m_ActiveSensors.Add(sensor);
m_Sensors.Add(sensor, sensorData);
m_Ids.Add(sensor.Id);

{
// If the first capture hasn't happened yet, sequenceTimeNextCapture field won't be valid
if (sensorData.firstCaptureTime >= UnscaledSequenceTime)
return sensorData.firstCaptureTime;
{
return sensorData.captureTriggerMode == PerceptionCamera.CaptureTriggerMode.Scheduled? sensorData.firstCaptureTime : float.MaxValue;
}
return sensorData.sequenceTimeOfNextCapture;
}

foreach (var activeSensor in m_ActiveSensors)
{
var sensorData = m_Sensors[activeSensor];
if (sensorData.captureTriggerMode.Equals(PerceptionCamera.CaptureTriggerMode.Scheduled))
if (Mathf.Abs(sensorData.sequenceTimeOfNextRender - UnscaledSequenceTime) < k_SimulationTimingAccuracy)
if (!activeSensor.ShouldCaptureThisFrame)
continue;
//means this frame fulfills this sensor's simulation time requirements, we can move target to next frame.
sensorData.sequenceTimeOfNextRender += sensorData.renderingDeltaTime;
}
if (sensorData.onlyRenderCapturedFrames)
if (activeSensor.ShouldCaptureThisFrame)
{
if (sensorData.captureTriggerMode.Equals(PerceptionCamera.CaptureTriggerMode.Scheduled))
//In this case, the simulation timing is controlled by this and other sensors that have this flag enabled, so we can precisely jump to the next frame using a specific delta (capture period) provided by the user
sensorData.sequenceTimeOfNextCapture += sensorData.period;
//In this case, the sensor is scheduled but does not affect simulation timings, so it has to capture on specific rendered frames.
sensorData.sequenceTimeOfNextCapture += sensorData.renderingDeltaTime * (sensorData.framesBetweenCaptures + 1);
Debug.Assert(sensorData.sequenceTimeOfNextCapture > UnscaledSequenceTime,
$"Next scheduled capture should be after {UnscaledSequenceTime} but is {sensorData.sequenceTimeOfNextCapture}");
else
else if (sensorData.captureTriggerMode.Equals(PerceptionCamera.CaptureTriggerMode.Manual))
//In this case, the sensor is scheduled but does not affect simulation timings, so it has to capture on specific rendered frames.
sensorData.sequenceTimeOfNextCapture += sensorData.renderingDeltaTime * sensorData.captureEveryXFrames;
sensorData.sequenceTimeOfNextCapture = float.MaxValue;
//Debug.Log($"unscaled : {UnscaledSequenceTime}, seqNext {sensorData.sequenceTimeOfNextCapture}");
Debug.Assert(!sensorData.onlyRenderCapturedFrames || sensorData.sequenceTimeOfNextCapture > UnscaledSequenceTime,
$"Next scheduled capture should be after {UnscaledSequenceTime} but is {sensorData.sequenceTimeOfNextCapture}");
// sensorData.sequenceTimeNextCapture = SequenceTimeOfNextCapture(sensorData);
m_Sensors[activeSensor] = sensorData;
m_Sensors[activeSensor] = sensorData;
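
Reading the interleaved old and new lines above, the new per-sensor capture-scheduling update appears to reduce to the following sketch (a paraphrase of the diff, not verbatim package code; field names are those of SensorData above):

if (activeSensor.ShouldCaptureThisFrame)
{
    if (sensorData.captureTriggerMode == PerceptionCamera.CaptureTriggerMode.Scheduled)
        // Scheduled sensors jump ahead by the captured frame plus the skipped frames.
        sensorData.sequenceTimeOfNextCapture += sensorData.renderingDeltaTime * (sensorData.framesBetweenCaptures + 1);
    else
        // Manual sensors wait for the next explicit capture request.
        sensorData.sequenceTimeOfNextCapture = float.MaxValue;
    m_Sensors[activeSensor] = sensorData;
}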
float nextFrameDt = k_MaxDeltaTime;
float nextFrameDt = k_maxDeltaTime;
float thisSensorNextFrameDt = -1;
//if the sensor is not scheduled, it does not need to force the simulation to produce frames at a specific rate, so we will not need to touch Time.captureDeltaTime.
if (sensorData.onlyRenderCapturedFrames)
{
var thisSensorNextFrameDt = sensorData.sequenceTimeOfNextCapture - UnscaledSequenceTime;
thisSensorNextFrameDt = sensorData.sequenceTimeOfNextRender - UnscaledSequenceTime;
Debug.Assert(thisSensorNextFrameDt > 0f, "Sensor was scheduled to run in the past but got skipped over.");
if (thisSensorNextFrameDt > 0f && thisSensorNextFrameDt < nextFrameDt)
nextFrameDt = thisSensorNextFrameDt;
}
else
{
var thisSensorNextFrameDt = sensorData.renderingDeltaTime;
Debug.Assert(thisSensorNextFrameDt > 0f, "Sensor was scheduled to capture in the past but got skipped over.");
}
else if (sensorData.captureTriggerMode.Equals(PerceptionCamera.CaptureTriggerMode.Manual) && sensorData.manualSensorAffectSimulationTiming)
{
thisSensorNextFrameDt = sensorData.sequenceTimeOfNextRender - UnscaledSequenceTime;
}
if (thisSensorNextFrameDt > 0f && thisSensorNextFrameDt < nextFrameDt)
nextFrameDt = thisSensorNextFrameDt;
}
}
if (thisSensorNextFrameDt > 0f && thisSensorNextFrameDt < nextFrameDt)
nextFrameDt = thisSensorNextFrameDt;
if (Math.Abs(nextFrameDt - k_MaxDeltaTime) < 0.00001)
if (Math.Abs(nextFrameDt - k_maxDeltaTime) < 0.0001)
//means no sensor is controlling simulation timing, so we set Time.captureDeltaTime to 0 (default) to render normally
//means no sensor is controlling simulation timing, so we set Time.captureDeltaTime to 0 (default) which means the setting does not do anything
nextFrameDt = 0;
}
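
Likewise, the frame delta time selection above appears to take the smallest positive delta requested by any timing-controlling sensor, and fall back to 0 (Unity's default for Time.captureDeltaTime) when no sensor controls timing. A sketch under that reading (a paraphrase, not verbatim package code):

float nextFrameDt = k_maxDeltaTime;
foreach (var activeSensor in m_ActiveSensors)
{
    var sensorData = m_Sensors[activeSensor];
    float thisSensorNextFrameDt = -1;
    if (sensorData.captureTriggerMode == PerceptionCamera.CaptureTriggerMode.Scheduled)
        thisSensorNextFrameDt = sensorData.sequenceTimeOfNextRender - UnscaledSequenceTime;
    else if (sensorData.captureTriggerMode == PerceptionCamera.CaptureTriggerMode.Manual
             && sensorData.manualSensorAffectSimulationTiming)
        thisSensorNextFrameDt = sensorData.sequenceTimeOfNextRender - UnscaledSequenceTime;
    if (thisSensorNextFrameDt > 0f && thisSensorNextFrameDt < nextFrameDt)
        nextFrameDt = thisSensorNextFrameDt;
}
if (Math.Abs(nextFrameDt - k_maxDeltaTime) < 0.0001)
    nextFrameDt = 0; // no sensor controls simulation timing; leave Time.captureDeltaTime at its default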

Debug.Log("next fdt: " + nextFrameDt);
public void ExternalCaptureRequestForSensor(SensorHandle sensorHandle)
public void SetNextCaptureTimeToNowForSensor(SensorHandle sensorHandle)
data.externalCaptureRequestPending = true;
data.sequenceTimeOfNextCapture = UnscaledSequenceTime;
m_Sensors[sensorHandle] = data;
}

if (data.lastCaptureFrameCount == Time.frameCount)
return true;
bool captureRequested = data.externalCaptureRequestPending;
if (captureRequested)
{
data.externalCaptureRequestPending = false;
m_Sensors[sensorHandle] = data;
}
return (data.captureTriggerMode.Equals(PerceptionCamera.CaptureTriggerMode.Scheduled) && (data.sequenceTimeOfNextCapture - UnscaledSequenceTime) < k_IncludeInFrameThreshold)
|| captureRequested;
return data.sequenceTimeOfNextCapture - UnscaledSequenceTime < k_SimulationTimingAccuracy;
}
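
The new capture check above appears to reduce to: a sensor should capture if it has already captured this frame, or if its next scheduled capture time falls within the timing accuracy threshold. A short paraphrase (not verbatim package code):

if (data.lastCaptureFrameCount == Time.frameCount)
    return true;
return data.sequenceTimeOfNextCapture - UnscaledSequenceTime < k_SimulationTimingAccuracy;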
public void End()

com.unity.perception/Tests/Editor/DatasetCaptureEditorTests.cs (2 changed lines)


yield return new EnterPlayMode();
DatasetCapture.ResetSimulation();
var ego = DatasetCapture.RegisterEgo("ego");
var sensor = DatasetCapture.RegisterSensor(ego, "camera", "", 0.1f, 0, PerceptionCamera.CaptureTriggerMode.Scheduled, true);
var sensor = DatasetCapture.RegisterSensor(ego, "camera", "", 0, PerceptionCamera.CaptureTriggerMode.Scheduled, 0.1f, 0);
sensor.ReportCapture("file.txt", new SensorSpatialData());
expectedDatasetPath = DatasetCapture.OutputDirectory;
yield return new ExitPlayMode();

com.unity.perception/Tests/Runtime/GroundTruthTests/DatasetCaptureSensorSchedulingTests.cs (149 changed lines)


internal SimulationStateTestHelper()
{
var bindingFlags = BindingFlags.NonPublic | BindingFlags.Instance;
m_SequenceTimeOfNextCaptureMethod = m_State.GetType().GetMethod("SequenceTimeOfNextCapture", bindingFlags);
m_SequenceTimeOfNextCaptureMethod = m_State.GetType().GetMethod("GetSequenceTimeOfNextCapture", bindingFlags);
Debug.Assert(m_SequenceTimeOfNextCaptureMethod != null, "Couldn't find sequence time method.");
var sensorsField = m_State.GetType().GetField("m_Sensors", bindingFlags);
Debug.Assert(sensorsField != null, "Couldn't find internal sensors field");

public IEnumerator SequenceTimeOfNextCapture_ReportsCorrectTime()
{
var ego = DatasetCapture.RegisterEgo("ego");
var firstCaptureTime = 1.5f;
var period = .4f;
var sensorHandle = DatasetCapture.RegisterSensor(ego, "cam", "", period, firstCaptureTime, PerceptionCamera.CaptureTriggerMode.Scheduled, true);
var firstCaptureFrame = 2f;
var simulationDeltaTime = .4f;
var sensorHandle = DatasetCapture.RegisterSensor(ego, "cam", "", firstCaptureFrame, PerceptionCamera.CaptureTriggerMode.Scheduled, simulationDeltaTime, 0);
var startTime = firstCaptureFrame * simulationDeltaTime;
firstCaptureTime,
period + firstCaptureTime,
period * 2 + firstCaptureTime,
period * 3 + firstCaptureTime
startTime,
simulationDeltaTime + startTime,
simulationDeltaTime * 2 + startTime,
simulationDeltaTime * 3 + startTime
for (var i = 0; i < sequenceTimesExpected.Length; i++)
for (var i = 0; i < firstCaptureFrame; i++)
//render the non-captured frames before firstCaptureFrame
}
for (var i = 0; i < sequenceTimesExpected.Length; i++)
{
yield return null;
public IEnumerator SequenceTimeOfNextCapture_WithInBetweenFrames_ReportsCorrectTime()
{
var ego = DatasetCapture.RegisterEgo("ego");
var firstCaptureFrame = 2;
var simulationDeltaTime = .4f;
var framesBetweenCaptures = 2;
var sensorHandle = DatasetCapture.RegisterSensor(ego, "cam", "", firstCaptureFrame, PerceptionCamera.CaptureTriggerMode.Scheduled, simulationDeltaTime, framesBetweenCaptures);
var startingFrame = Time.frameCount;
var startTime = firstCaptureFrame * simulationDeltaTime;
var interval = (framesBetweenCaptures + 1) * simulationDeltaTime;
float[] sequenceTimesExpected =
{
startTime,
interval + startTime,
interval * 2 + startTime,
interval * 3 + startTime
};
int[] simulationFramesToCheck =
{
firstCaptureFrame,
firstCaptureFrame + (framesBetweenCaptures + 1),
firstCaptureFrame + (framesBetweenCaptures + 1) * 2,
firstCaptureFrame + (framesBetweenCaptures + 1) * 3,
};
int checkedFrame = 0;
var currentSimFrame = Time.frameCount - startingFrame;
while (currentSimFrame <= simulationFramesToCheck[simulationFramesToCheck.Length-1] && checkedFrame < simulationFramesToCheck.Length)
{
currentSimFrame = Time.frameCount - startingFrame;
if (currentSimFrame == simulationFramesToCheck[checkedFrame])
{
var sensorData = m_TestHelper.GetSensorData(sensorHandle);
var sequenceTimeActual = m_TestHelper.CallSequenceTimeOfNextCapture(sensorData);
Assert.AreEqual(sequenceTimesExpected[checkedFrame], sequenceTimeActual, 0.0001f);
checkedFrame++;
}
else
{
yield return null;
}
}
}
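
For the values in this test, startTime = 2 * 0.4 = 0.8 s and interval = (2 + 1) * 0.4 = 1.2 s, so the expected sequence times are 0.8, 2.0, 3.2 and 4.4 seconds, checked on simulation frames 2, 5, 8 and 11.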
[UnityTest]
var firstCaptureTime = 1.5f;
var period = .4f;
DatasetCapture.RegisterSensor(ego, "cam", "", period, firstCaptureTime, PerceptionCamera.CaptureTriggerMode.Scheduled, true);
var firstCaptureFrame = 2f;
var simulationDeltaTime = .4f;
DatasetCapture.RegisterSensor(ego, "cam", "", firstCaptureFrame, PerceptionCamera.CaptureTriggerMode.Scheduled, simulationDeltaTime, 0);
firstCaptureTime,
period,
period,
period
simulationDeltaTime,
simulationDeltaTime,
simulationDeltaTime,
simulationDeltaTime
};
float[] deltaTimeSamples = new float[deltaTimeSamplesExpected.Length];
for (int i = 0; i < deltaTimeSamples.Length; i++)

public IEnumerator FramesScheduled_WithTimeScale_ResultsInProperDeltaTime()
{
var ego = DatasetCapture.RegisterEgo("ego");
var firstCaptureTime = 2f;
var period = 1f;
var firstCaptureFrame = 2f;
var simulationDeltaTime = 1f;
DatasetCapture.RegisterSensor(ego, "cam", "", period, firstCaptureTime, PerceptionCamera.CaptureTriggerMode.Scheduled, true);
DatasetCapture.RegisterSensor(ego, "cam", "", firstCaptureFrame, PerceptionCamera.CaptureTriggerMode.Scheduled, simulationDeltaTime, 0);
timeScale * firstCaptureTime,
timeScale * period,
timeScale * period,
timeScale * period
timeScale * simulationDeltaTime,
timeScale * simulationDeltaTime,
timeScale * simulationDeltaTime,
timeScale * simulationDeltaTime
};
float[] deltaTimeSamples = new float[deltaTimeSamplesExpected.Length];
for (int i = 0; i < deltaTimeSamples.Length; i++)

public IEnumerator ChangingTimeScale_CausesDebugError()
{
var ego = DatasetCapture.RegisterEgo("ego");
DatasetCapture.RegisterSensor(ego, "cam", "", 1f, 2f, PerceptionCamera.CaptureTriggerMode.Scheduled, true);
DatasetCapture.RegisterSensor(ego, "cam", "", 2f, PerceptionCamera.CaptureTriggerMode.Scheduled, 1, 0);
yield return null;
Time.timeScale = 5;

public IEnumerator ChangingTimeScale_DuringStartNewSequence_Succeeds()
{
var ego = DatasetCapture.RegisterEgo("ego");
DatasetCapture.RegisterSensor(ego, "cam", "", 1f, 2f, PerceptionCamera.CaptureTriggerMode.Scheduled, true);
DatasetCapture.RegisterSensor(ego, "cam", "", 2f, PerceptionCamera.CaptureTriggerMode.Scheduled, 1, 0);
yield return null;
Time.timeScale = 1;

public IEnumerator FramesScheduled_WithChangingTimeScale_ResultsInProperDeltaTime()
{
var ego = DatasetCapture.RegisterEgo("ego");
var firstCaptureTime = 2f;
var period = 1f;
var firstCaptureFrame = 2f;
var simulationDeltaTime = 1f;
float[] newTimeScalesPerFrame =
{
2f,

};
DatasetCapture.RegisterSensor(ego, "cam", "", period, firstCaptureTime, PerceptionCamera.CaptureTriggerMode.Scheduled, true);
DatasetCapture.RegisterSensor(ego, "cam", "", firstCaptureFrame, PerceptionCamera.CaptureTriggerMode.Scheduled, 1, 0);
newTimeScalesPerFrame[0] * firstCaptureTime,
newTimeScalesPerFrame[1] * period,
newTimeScalesPerFrame[2] * period,
newTimeScalesPerFrame[3] * period
newTimeScalesPerFrame[0] * simulationDeltaTime,
newTimeScalesPerFrame[1] * simulationDeltaTime,
newTimeScalesPerFrame[2] * simulationDeltaTime,
newTimeScalesPerFrame[3] * simulationDeltaTime
};
float[] deltaTimeSamples = new float[deltaTimeSamplesExpected.Length];
for (int i = 0; i < deltaTimeSamples.Length; i++)

public IEnumerator ResetSimulation_ResetsCaptureDeltaTime()
{
var ego = DatasetCapture.RegisterEgo("ego");
DatasetCapture.RegisterSensor(ego, "cam", "", 4, 10, PerceptionCamera.CaptureTriggerMode.Scheduled, true);
DatasetCapture.RegisterSensor(ego, "cam", "", 0, PerceptionCamera.CaptureTriggerMode.Scheduled, 5, 0);
Assert.AreEqual(10, Time.captureDeltaTime);
Assert.AreEqual(5, Time.captureDeltaTime);
DatasetCapture.ResetSimulation();
Assert.AreEqual(0, Time.captureDeltaTime);
}

{
var ego = DatasetCapture.RegisterEgo("ego");
var firstCaptureTime1 = 10;
var frequencyInMs1 = 4;
var sensor1 = DatasetCapture.RegisterSensor(ego, "cam", "1", frequencyInMs1, firstCaptureTime1, PerceptionCamera.CaptureTriggerMode.Scheduled, true);
var firstCaptureFrame1 = 2;
var simDeltaTime1 = 4;
var framesBetweenCaptures1 = 2;
var sensor1 = DatasetCapture.RegisterSensor(ego, "cam", "1", firstCaptureFrame1, PerceptionCamera.CaptureTriggerMode.Scheduled, simDeltaTime1, framesBetweenCaptures1);
var firstCaptureTime2 = 10;
var frequencyInMs2 = 6;
var sensor2 = DatasetCapture.RegisterSensor(ego, "cam", "2", frequencyInMs2, firstCaptureTime2, PerceptionCamera.CaptureTriggerMode.Scheduled, true);
var firstCaptureFrame2 = 1;
var simDeltaTime2 = 6;
var framesBetweenCaptures2 = 1;
var sensor2 = DatasetCapture.RegisterSensor(ego, "cam", "2", firstCaptureFrame2, PerceptionCamera.CaptureTriggerMode.Scheduled, simDeltaTime2, framesBetweenCaptures2);
var sensor3 = DatasetCapture.RegisterSensor(ego, "cam", "3", 1, 1, PerceptionCamera.CaptureTriggerMode.Scheduled, true);
sensor3.Enabled = false;
((float)firstCaptureTime1, true, true),
(4, true, false),
(4, false, false),
(4, true, true)
(4, false, false),
(4, false, false),
(2, false, true),
(2, true, false)
};
var samplesActual = new(float deltaTime, bool sensor1ShouldCapture, bool sensor2ShouldCapture)[samplesExpected.Length];
for (int i = 0; i < samplesActual.Length; i++)

[Test]
public void Enabled_StartsTrue()
{
var sensor1 = DatasetCapture.RegisterSensor(DatasetCapture.RegisterEgo(""), "cam", "1", 1, 1, PerceptionCamera.CaptureTriggerMode.Scheduled, true);
var sensor1 = DatasetCapture.RegisterSensor(DatasetCapture.RegisterEgo(""), "cam", "1", 1, PerceptionCamera.CaptureTriggerMode.Scheduled, 1, 0);
Assert.IsTrue(sensor1.Enabled);
}
}

com.unity.perception/Tests/Runtime/GroundTruthTests/DatasetCaptureTests.cs (40 changed lines)


}}";
var ego = DatasetCapture.RegisterEgo(egoDescription);
var sensorHandle = DatasetCapture.RegisterSensor(ego, modality, sensorDescription, 1, 1, PerceptionCamera.CaptureTriggerMode.Scheduled, true);
var sensorHandle = DatasetCapture.RegisterSensor(ego, modality, sensorDescription, 1, PerceptionCamera.CaptureTriggerMode.Scheduled, 1, 0);
Assert.IsTrue(sensorHandle.IsValid);
DatasetCapture.ResetSimulation();
Assert.IsFalse(sensorHandle.IsValid);

}}";
var ego = DatasetCapture.RegisterEgo("");
var sensorHandle = DatasetCapture.RegisterSensor(ego, "camera", "", 1, 0, PerceptionCamera.CaptureTriggerMode.Scheduled, true);
var sensorHandle = DatasetCapture.RegisterSensor(ego, "camera", "", 0, PerceptionCamera.CaptureTriggerMode.Scheduled, 1, 0);
var sensorSpatialData = new SensorSpatialData(new Pose(egoPosition, egoRotation), new Pose(position, rotation), egoVelocity, null);
sensorHandle.ReportCapture(filename, sensorSpatialData, ("camera_intrinsic", intrinsics));

};
var ego = DatasetCapture.RegisterEgo("");
var sensorHandle = DatasetCapture.RegisterSensor(ego, "", "", 2, 0, PerceptionCamera.CaptureTriggerMode.Scheduled, true);
var sensorHandle = DatasetCapture.RegisterSensor(ego, "", "", 0, PerceptionCamera.CaptureTriggerMode.Scheduled, 2, 0);
var sensorSpatialData = new SensorSpatialData(default, default, null, null);
Assert.IsTrue(sensorHandle.ShouldCaptureThisFrame);
sensorHandle.ReportCapture("f", sensorSpatialData);

]";
var ego = DatasetCapture.RegisterEgo("");
var sensorHandle = DatasetCapture.RegisterSensor(ego, "", "", 1, 0, PerceptionCamera.CaptureTriggerMode.Scheduled, true);
var sensorHandle = DatasetCapture.RegisterSensor(ego, "", "", 0, PerceptionCamera.CaptureTriggerMode.Scheduled, 1, 0);
sensorHandle.ReportCapture(filename, default);
var annotationDefinition = DatasetCapture.RegisterAnnotationDefinition("semantic segmentation", "pixel-wise semantic segmentation label", "PNG", annotationDefinitionGuid);
sensorHandle.ReportAnnotationFile(annotationDefinition, "annotations/semantic_segmentation_000.png");

var ego = DatasetCapture.RegisterEgo("");
var annotationDefinition = DatasetCapture.RegisterAnnotationDefinition("");
var sensorHandle = DatasetCapture.RegisterSensor(ego, "", "", 1, 0, PerceptionCamera.CaptureTriggerMode.Scheduled, true);
var sensorHandle = DatasetCapture.RegisterSensor(ego, "", "", 0, PerceptionCamera.CaptureTriggerMode.Scheduled, 1, 0);
sensorHandle.ReportAnnotationValues(annotationDefinition, values);
DatasetCapture.ResetSimulation();

{
var ego = DatasetCapture.RegisterEgo("");
var annotationDefinition = DatasetCapture.RegisterAnnotationDefinition("");
var sensorHandle = DatasetCapture.RegisterSensor(ego, "", "", 1, 100, PerceptionCamera.CaptureTriggerMode.Scheduled, true);
var sensorHandle = DatasetCapture.RegisterSensor(ego, "", "", 100, PerceptionCamera.CaptureTriggerMode.Scheduled, 1, 0);
Assert.Throws<InvalidOperationException>(() => sensorHandle.ReportAnnotationFile(annotationDefinition, ""));
}

var ego = DatasetCapture.RegisterEgo("");
var annotationDefinition = DatasetCapture.RegisterAnnotationDefinition("");
var sensorHandle = DatasetCapture.RegisterSensor(ego, "", "", 1, 100, PerceptionCamera.CaptureTriggerMode.Scheduled, true);
var sensorHandle = DatasetCapture.RegisterSensor(ego, "", "", 100, PerceptionCamera.CaptureTriggerMode.Scheduled, 1, 0);
Assert.Throws<InvalidOperationException>(() => sensorHandle.ReportAnnotationValues(annotationDefinition, new int[0]));
}

var ego = DatasetCapture.RegisterEgo("");
var annotationDefinition = DatasetCapture.RegisterAnnotationDefinition("");
var sensorHandle = DatasetCapture.RegisterSensor(ego, "", "", 1, 100, PerceptionCamera.CaptureTriggerMode.Scheduled, true);
var sensorHandle = DatasetCapture.RegisterSensor(ego, "", "", 100, PerceptionCamera.CaptureTriggerMode.Scheduled, 1, 0);
Assert.Throws<InvalidOperationException>(() => sensorHandle.ReportAnnotationAsync(annotationDefinition));
}

var ego = DatasetCapture.RegisterEgo("");
var annotationDefinition = DatasetCapture.RegisterAnnotationDefinition("");
var sensorHandle = DatasetCapture.RegisterSensor(ego, "", "", 1, 0, PerceptionCamera.CaptureTriggerMode.Scheduled, true);
var sensorHandle = DatasetCapture.RegisterSensor(ego, "", "", 0, PerceptionCamera.CaptureTriggerMode.Scheduled, 1, 0);
sensorHandle.ReportAnnotationAsync(annotationDefinition);
DatasetCapture.ResetSimulation();
LogAssert.Expect(LogType.Error, new Regex("Simulation ended with pending .*"));

var ego = DatasetCapture.RegisterEgo("");
var annotationDefinition = DatasetCapture.RegisterAnnotationDefinition("");
var sensorHandle = DatasetCapture.RegisterSensor(ego, "", "", 1, 0, PerceptionCamera.CaptureTriggerMode.Scheduled, true);
var sensorHandle = DatasetCapture.RegisterSensor(ego, "", "", 0, PerceptionCamera.CaptureTriggerMode.Scheduled, 1, 0);
var asyncAnnotation = sensorHandle.ReportAnnotationAsync(annotationDefinition);
Assert.IsTrue(asyncAnnotation.IsValid);

var ego = DatasetCapture.RegisterEgo("");
var annotationDefinition = DatasetCapture.RegisterAnnotationDefinition("");
var sensorHandle = DatasetCapture.RegisterSensor(ego, "", "", 1, 0, PerceptionCamera.CaptureTriggerMode.Scheduled, true);
var sensorHandle = DatasetCapture.RegisterSensor(ego, "", "", 0, PerceptionCamera.CaptureTriggerMode.Scheduled, 1, 0);
var asyncAnnotation = sensorHandle.ReportAnnotationAsync(annotationDefinition);
Assert.IsTrue(asyncAnnotation.IsPending);

var ego = DatasetCapture.RegisterEgo("");
var annotationDefinition = DatasetCapture.RegisterAnnotationDefinition("");
var sensorHandle = DatasetCapture.RegisterSensor(ego, "", "", 1, 0, PerceptionCamera.CaptureTriggerMode.Scheduled, true);
var sensorHandle = DatasetCapture.RegisterSensor(ego, "", "", 0, PerceptionCamera.CaptureTriggerMode.Scheduled, 1, 0);
var asyncAnnotation = sensorHandle.ReportAnnotationAsync(annotationDefinition);
Assert.IsTrue(asyncAnnotation.IsPending);

var ego = DatasetCapture.RegisterEgo("");
var annotationDefinition = DatasetCapture.RegisterAnnotationDefinition("");
var sensorHandle = DatasetCapture.RegisterSensor(ego, "", "", 1, 0, PerceptionCamera.CaptureTriggerMode.Scheduled, true);
var sensorHandle = DatasetCapture.RegisterSensor(ego, "", "", 0, PerceptionCamera.CaptureTriggerMode.Scheduled, 1, 0);
// Record one capture for this frame
sensorHandle.ReportCapture(fileName, default);

{
var ego = DatasetCapture.RegisterEgo("");
var metricDefinition = DatasetCapture.RegisterMetricDefinition("");
var sensorHandle = DatasetCapture.RegisterSensor(ego, "", "", 1, 100, PerceptionCamera.CaptureTriggerMode.Scheduled, true);
var sensorHandle = DatasetCapture.RegisterSensor(ego, "", "", 100, PerceptionCamera.CaptureTriggerMode.Scheduled, 1, 0);
Assert.Throws<InvalidOperationException>(() => sensorHandle.ReportMetric(metricDefinition, new int[0]));
}

var ego = DatasetCapture.RegisterEgo("");
var metricDefinition = DatasetCapture.RegisterMetricDefinition("");
var sensorHandle = DatasetCapture.RegisterSensor(ego, "", "", 1, 100, PerceptionCamera.CaptureTriggerMode.Scheduled, true);
var sensorHandle = DatasetCapture.RegisterSensor(ego, "", "", 100, PerceptionCamera.CaptureTriggerMode.Scheduled, 1, 0);
Assert.Throws<InvalidOperationException>(() => sensorHandle.ReportMetricAsync(metricDefinition));
}

var ego = DatasetCapture.RegisterEgo("");
var metricDefinition = DatasetCapture.RegisterMetricDefinition("");
var sensorHandle = DatasetCapture.RegisterSensor(ego, "", "", 1, 0, PerceptionCamera.CaptureTriggerMode.Scheduled, true);
var sensorHandle = DatasetCapture.RegisterSensor(ego, "", "", 0, PerceptionCamera.CaptureTriggerMode.Scheduled, 1, 0);
sensorHandle.ReportMetricAsync(metricDefinition);
DatasetCapture.ResetSimulation();
LogAssert.Expect(LogType.Error, new Regex("Simulation ended with pending .*"));

var ego = DatasetCapture.RegisterEgo("");
var metricDefinition = DatasetCapture.RegisterMetricDefinition("");
var sensorHandle = DatasetCapture.RegisterSensor(ego, "", "", 1, 0, PerceptionCamera.CaptureTriggerMode.Scheduled, true);
var sensorHandle = DatasetCapture.RegisterSensor(ego, "", "", 0, PerceptionCamera.CaptureTriggerMode.Scheduled, 1, 0);
var asyncMetric = sensorHandle.ReportMetricAsync(metricDefinition);
Assert.IsTrue(asyncMetric.IsValid);

var expectedLine = @"""step"": 0";
var metricDefinition = DatasetCapture.RegisterMetricDefinition("");
DatasetCapture.RegisterSensor(DatasetCapture.RegisterEgo(""), "", "", 1, 0, PerceptionCamera.CaptureTriggerMode.Scheduled, true);
DatasetCapture.RegisterSensor(DatasetCapture.RegisterEgo(""), "", "", 0, PerceptionCamera.CaptureTriggerMode.Scheduled, 1, 0);
yield return null;
yield return null;

var expectedLine = @"""step"": 0";
var metricDefinition = DatasetCapture.RegisterMetricDefinition("");
var sensor = DatasetCapture.RegisterSensor(DatasetCapture.RegisterEgo(""), "", "", 1, 0, PerceptionCamera.CaptureTriggerMode.Scheduled, true);
var sensor = DatasetCapture.RegisterSensor(DatasetCapture.RegisterEgo(""), "", "", 0, PerceptionCamera.CaptureTriggerMode.Scheduled, 1, 0);
yield return null;
sensor.ReportMetric(metricDefinition, values);

}}";
var metricDefinition = DatasetCapture.RegisterMetricDefinition("");
var sensor = DatasetCapture.RegisterSensor(DatasetCapture.RegisterEgo(""), "", "", 1, 0, PerceptionCamera.CaptureTriggerMode.Scheduled, true);
var sensor = DatasetCapture.RegisterSensor(DatasetCapture.RegisterEgo(""), "", "", 0, PerceptionCamera.CaptureTriggerMode.Scheduled, 1, 0);
var annotation = sensor.ReportAnnotationFile(DatasetCapture.RegisterAnnotationDefinition(""), "");
var valuesJsonArray = JArray.FromObject(values).ToString(Formatting.Indented);
if (async)

com.unity.perception/Tests/Runtime/Randomization/ScenarioTests.cs (2 changed lines)


// Second frame, first iteration
yield return null;
Assert.AreEqual(DatasetCapture.SimulationState.SequenceTime, perceptionCamera.period);
Assert.AreEqual(DatasetCapture.SimulationState.SequenceTime, perceptionCamera.simulationDeltaTime);
// Third frame, second iteration, SequenceTime has been reset
yield return null;
