浏览代码

Updating example

/update-setup-steps
Jon Hogins 5 年前
当前提交
104410b4
共有 4 个文件被更改,包括 147 次插入64 次删除
  1. 82
      com.unity.perception/Documentation~/SimulationManager.md
  2. 68
      TestProjects/PerceptionURP/Assets/ExampleScripts/CustomAnnotationAndMetricReporter.cs
  3. 61
      TestProjects/PerceptionURP/Assets/ExampleScripts/CustomAnnotationAndMetricExample.cs
  4. 0
      /TestProjects/PerceptionURP/Assets/ExampleScripts/CustomAnnotationAndMetricReporter.cs.meta

82
com.unity.perception/Documentation~/SimulationManager.md


`SimulationManager` tracks egos, sensors, annotations, and metrics, combining them into a unified [JSON-based dataset](Schema/Synthetic_Dataset_Schema.md) on disk. It also controls the simulation time elapsed per frame to accommodate the active sensors.
## Custom sensors
Custom sensors can be registered using `SimulationManager.RegisterSensor()`. The `period` passed in at registration time determines how often, in simulation time, frames should be scheduled for the sensor to run. The sensor implementation should then check `ShouldCaptureThisFrame` on the returned `SensorHandle` each frame to determine whether it is time for the sensor to perform a capture. `SensorHandle.ReportCapture` should then be called in each of these frames to report the state of the sensor and populate the dataset.
In addition to the common annotations and metrics produced by [PerceptionCamera](PerceptionCamera.md), scripts can produce their own via `SimulationManager`. Annotation and metric definitions must first be registered using `SimulationManager.RegisterAnnotationDefinition()` or `SimulationManager.RegisterMetricDefinition()`. These return `AnnotationDefinition` and `MetricDefinition` instances which can then be used to report values during runtime.
Annotations and metrics are always associated with the frame they are reported in. They may also be associated with a specific sensor by using the `Report*` methods on `SensorHandle`.
### Example
<!-- If you change this, change it in PerceptionURP/Assets/Examples/CustomAnnotationAndMetricReporter.cs as well -->
```csharp
//example MonoBehaviour
using System;
using UnityEngine;
using UnityEngine.Perception.GroundTruth;
[RequireComponent(typeof(PerceptionCamera))]
public class CustomAnnotationAndMetricReporter : MonoBehaviour
{
    // Light whose world-space position is reported as a metric every frame.
    public GameObject light;
    // Target whose camera-local position is reported as an annotation.
    public GameObject target;

    MetricDefinition lightMetricDefinition;
    AnnotationDefinition boundingBoxAnnotationDefinition;

    public void Start()
    {
        // Metrics and annotations are registered up-front, before any values are reported.
        lightMetricDefinition = SimulationManager.RegisterMetricDefinition(
            "Light position",
            "The world-space position of the light",
            Guid.Parse("1F6BFF46-F884-4CC5-A878-DB987278FE35"));
        boundingBoxAnnotationDefinition = SimulationManager.RegisterAnnotationDefinition(
            "Target bounding box",
            "The position of the target in the camera's local space",
            id: Guid.Parse("C0B4A22C-0420-4D9F-BAFC-954B8F7B35A7"));
    }

    public void Update()
    {
        // Report the light's position by manually creating the json array string.
        var lightPos = light.transform.position;
        SimulationManager.ReportMetric(lightMetricDefinition,
            $@"[{{ ""x"": {lightPos.x}, ""y"": {lightPos.y}, ""z"": {lightPos.z} }}]");

        // Compute the location of the object in the camera's local space.
        // NOTE: the previous `worldToLocalMatrix * position` form implicitly widened the
        // Vector3 to a Vector4 with w == 0, which discards the matrix's translation
        // component — it transformed a direction, not a point. InverseTransformPoint
        // correctly maps a world-space point into this transform's local space.
        Vector3 targetPos = transform.InverseTransformPoint(target.transform.position);

        // Report using the PerceptionCamera's SensorHandle if scheduled this frame.
        var sensorHandle = GetComponent<PerceptionCamera>().SensorHandle;
        if (sensorHandle.ShouldCaptureThisFrame)
        {
            sensorHandle.ReportAnnotationValues(
                boundingBoxAnnotationDefinition,
                new[] { targetPos });
        }
    }
}
// Example metric that is added each frame in the dataset:
// {
// "capture_id": null,
// "annotation_id": null,
// "sequence_id": "9768671e-acea-4c9e-a670-0f2dba5afe12",
// "step": 1,
// "metric_definition": "1f6bff46-f884-4cc5-a878-db987278fe35",
// "values": [{ "x": 96.1856, "y": 192.676, "z": -193.8386 }]
// },
// Example annotation that is added to each capture in the dataset:
// {
// "id": "33f5a3aa-3e5e-48f1-8339-6cbd64ed4562",
// "annotation_definition": "c0b4a22c-0420-4d9f-bafc-954b8f7b35a7",
// "values": [
// [
// -1.03097284,
// 0.07265166,
// -6.318692
// ]
// ]
// }
```

68
TestProjects/PerceptionURP/Assets/ExampleScripts/CustomAnnotationAndMetricReporter.cs


using System;
using UnityEngine;
using UnityEngine.Perception.GroundTruth;
[RequireComponent(typeof(PerceptionCamera))]
public class CustomAnnotationAndMetricReporter : MonoBehaviour
{
    // Light whose world-space position is reported as a metric every frame.
    public GameObject light;
    // Target whose camera-local position is reported as an annotation.
    public GameObject target;

    MetricDefinition lightMetricDefinition;
    AnnotationDefinition boundingBoxAnnotationDefinition;

    public void Start()
    {
        // Metrics and annotations are registered up-front, before any values are reported.
        lightMetricDefinition = SimulationManager.RegisterMetricDefinition(
            "Light position",
            "The world-space position of the light",
            Guid.Parse("1F6BFF46-F884-4CC5-A878-DB987278FE35"));
        boundingBoxAnnotationDefinition = SimulationManager.RegisterAnnotationDefinition(
            "Target bounding box",
            "The position of the target in the camera's local space",
            id: Guid.Parse("C0B4A22C-0420-4D9F-BAFC-954B8F7B35A7"));
    }

    public void Update()
    {
        // Report the light's position by manually creating the json array string.
        var lightPos = light.transform.position;
        SimulationManager.ReportMetric(lightMetricDefinition,
            $@"[{{ ""x"": {lightPos.x}, ""y"": {lightPos.y}, ""z"": {lightPos.z} }}]");

        // Compute the location of the object in the camera's local space.
        // NOTE: the previous `worldToLocalMatrix * position` form implicitly widened the
        // Vector3 to a Vector4 with w == 0, which discards the matrix's translation
        // component — it transformed a direction, not a point. InverseTransformPoint
        // correctly maps a world-space point into this transform's local space.
        Vector3 targetPos = transform.InverseTransformPoint(target.transform.position);

        // Report using the PerceptionCamera's SensorHandle if scheduled this frame.
        var sensorHandle = GetComponent<PerceptionCamera>().SensorHandle;
        if (sensorHandle.ShouldCaptureThisFrame)
        {
            sensorHandle.ReportAnnotationValues(
                boundingBoxAnnotationDefinition,
                new[] { targetPos });
        }
    }
}
// Example metric that is added each frame in the dataset:
// {
// "capture_id": null,
// "annotation_id": null,
// "sequence_id": "9768671e-acea-4c9e-a670-0f2dba5afe12",
// "step": 1,
// "metric_definition": "1f6bff46-f884-4cc5-a878-db987278fe35",
// "values": [{ "x": 96.1856, "y": 192.676, "z": -193.8386 }]
// },
// Example annotation that is added to each capture in the dataset:
// {
// "id": "33f5a3aa-3e5e-48f1-8339-6cbd64ed4562",
// "annotation_definition": "c0b4a22c-0420-4d9f-bafc-954b8f7b35a7",
// "values": [
// [
// -1.03097284,
// 0.07265166,
// -6.318692
// ]
// ]
// }

61
TestProjects/PerceptionURP/Assets/ExampleScripts/CustomAnnotationAndMetricExample.cs


using System;
using Unity.Mathematics;
using UnityEngine;
using UnityEngine.Perception.GroundTruth;
[RequireComponent(typeof(PerceptionCamera))]
public class CustomAnnotationAndMetricExample : MonoBehaviour
{
    // Light whose world-space position is reported as a metric every frame.
    public GameObject light;
    // Target whose camera-local position is reported as an annotation.
    public GameObject target;

    MetricDefinition lightPositionMetricDefinition;
    AnnotationDefinition targetBoundingBoxAnnotationDefinition;

    public void Start()
    {
        // Metrics and annotations are registered up-front and are referenced later when values are reported.
        lightPositionMetricDefinition = SimulationManager.RegisterMetricDefinition(
            "Light position",
            "The world-space position of the light",
            Guid.Parse("1F6BFF46-F884-4CC5-A878-DB987278FE35"));
        targetBoundingBoxAnnotationDefinition = SimulationManager.RegisterAnnotationDefinition(
            "Target bounding box",
            "The axis-aligned bounding box of the target in the camera's local space",
            id: Guid.Parse("C0B4A22C-0420-4D9F-BAFC-954B8F7B35A7"));
    }

    public void Update()
    {
        // Report the light's position by manually creating the json array string.
        var lightPosition = light.transform.position;
        SimulationManager.ReportMetric(lightPositionMetricDefinition,
            $@"[{{ ""x"": {lightPosition.x}, ""y"": {lightPosition.y}, ""z"": {lightPosition.z} }}]");

        // Compute the location of the object in the camera's local space.
        // NOTE: the previous `worldToLocalMatrix * position` form implicitly widened the
        // Vector3 to a Vector4 with w == 0, which discards the matrix's translation
        // component — it transformed a direction, not a point. InverseTransformPoint
        // correctly maps a world-space point into this transform's local space.
        // NOTE(review): the annotation definition above is described as an axis-aligned
        // bounding box, but a single local-space point is reported here — confirm which
        // is intended against the dataset consumer.
        var targetCameraLocalPosition = transform.InverseTransformPoint(target.transform.position);

        // Report the annotation on the camera SensorHandle exposed by the PerceptionCamera.
        GetComponent<PerceptionCamera>().SensorHandle.ReportAnnotationValues(
            targetBoundingBoxAnnotationDefinition,
            new[] { targetCameraLocalPosition });
    }
}
// Example annotation that is added to each capture in the dataset:
// {
// "id": "71265896-2a46-405a-a3d9-e587cdfac631",
// "annotation_definition": "c0b4a22c-0420-4d9f-bafc-954b8f7b35a7",
// "values": [
// {
// "Center": {
// "x": -85.386672973632813,
// "y": 84.000732421875,
// "z": 112.38008880615234
// },
// "Extents": {
// "x": 0.64206844568252563,
// "y": 0.71592754125595093,
// "z": 0.66213905811309814
// }
// }
// ]
// },

/TestProjects/PerceptionURP/Assets/ExampleScripts/CustomAnnotationAndMetricExample.cs.meta → /TestProjects/PerceptionURP/Assets/ExampleScripts/CustomAnnotationAndMetricReporter.cs.meta

正在加载...
取消
保存