
Merge branch 'master' into keypoints

Steven Borkman · 3 years ago
Current commit: dd6870b8
105 files changed, with 2381 insertions and 1301 deletions
  1. .gitignore (1)
  2. .yamato/environments.yml (10)
  3. .yamato/upm-ci-performance.yml (6)
  4. LICENSE.md (2)
  5. README.md (52)
  6. TestProjects/PerceptionHDRP/Assets/Terrain Data.asset (4)
  7. TestProjects/PerceptionHDRP/Packages/manifest.json (10)
  8. TestProjects/PerceptionHDRP/Packages/packages-lock.json (31)
  9. TestProjects/PerceptionHDRP/ProjectSettings/PackageManagerSettings.asset (4)
  10. TestProjects/PerceptionHDRP/ProjectSettings/ProjectVersion.txt (4)
  11. TestProjects/PerceptionURP/Assets/Scenes/SampleScene.unity (63)
  12. TestProjects/PerceptionURP/Packages/manifest.json (8)
  13. TestProjects/PerceptionURP/Packages/packages-lock.json (31)
  14. TestProjects/PerceptionURP/ProjectSettings/PackageManagerSettings.asset (4)
  15. TestProjects/PerceptionURP/ProjectSettings/ProjectSettings.asset (11)
  16. TestProjects/PerceptionURP/ProjectSettings/ProjectVersion.txt (4)
  17. TestProjects/PerceptionURP/ProjectSettings/QualitySettings.asset (2)
  18. TestProjects/PerceptionURP/ProjectSettings/UnityConnectSettings.asset (4)
  19. com.unity.perception/CHANGELOG.md (33)
  20. com.unity.perception/Documentation~/DatasetCapture.md (8)
  21. com.unity.perception/Documentation~/PerceptionCamera.md (39)
  22. com.unity.perception/Documentation~/Randomization/Index.md (50)
  23. com.unity.perception/Documentation~/Randomization/Parameters.md (71)
  24. com.unity.perception/Documentation~/Randomization/RandomizerTags.md (6)
  25. com.unity.perception/Documentation~/Randomization/Randomizers.md (20)
  26. com.unity.perception/Documentation~/Randomization/Samplers.md (8)
  27. com.unity.perception/Documentation~/Randomization/Scenarios.md (34)
  28. com.unity.perception/Documentation~/TableOfContents.md (21)
  29. com.unity.perception/Documentation~/Tutorial/Phase1.md (146)
  30. com.unity.perception/Documentation~/Tutorial/Phase2.md (82)
  31. com.unity.perception/Documentation~/Tutorial/Phase3.md (93)
  32. com.unity.perception/Documentation~/Tutorial/TUTORIAL.md (6)
  33. com.unity.perception/Documentation~/images/PerceptionCameraFinished.png (882)
  34. com.unity.perception/Editor/GroundTruth/PerceptionCameraEditor.cs (68)
  35. com.unity.perception/Editor/Randomization/Editors/RunInUnitySimulationWindow.cs (24)
  36. com.unity.perception/Editor/Randomization/Editors/ScenarioBaseEditor.cs (43)
  37. com.unity.perception/Editor/Randomization/Uss/Styles.uss (12)
  38. com.unity.perception/Editor/Randomization/Uxml/Randomizer/RandomizerList.uxml (18)
  39. com.unity.perception/Editor/Randomization/Uxml/ScenarioBaseElement.uxml (17)
  40. com.unity.perception/Editor/Randomization/VisualElements/Randomizer/AddRandomizerMenu.cs (7)
  41. com.unity.perception/Editor/Randomization/VisualElements/Randomizer/RandomizerList.cs (7)
  42. com.unity.perception/Editor/Randomization/VisualElements/Sampler/SamplerElement.cs (30)
  43. com.unity.perception/Runtime/GroundTruth/DatasetCapture.cs (35)
  44. com.unity.perception/Runtime/GroundTruth/Labelers/SemanticSegmentationLabeler.cs (6)
  45. com.unity.perception/Runtime/GroundTruth/Labelers/Visualization/Materials/OutlineMaterial.mat (2)
  46. com.unity.perception/Runtime/GroundTruth/PerceptionCamera.cs (53)
  47. com.unity.perception/Runtime/GroundTruth/SemanticSegmentationCrossPipelinePass.cs (4)
  48. com.unity.perception/Runtime/GroundTruth/SimulationState.cs (105)
  49. com.unity.perception/Runtime/Randomization/Parameters/NumericParameter.cs (11)
  50. com.unity.perception/Runtime/Randomization/Parameters/ParameterTypes/NumericParameters/BooleanParameter.cs (33)
  51. com.unity.perception/Runtime/Randomization/Parameters/ParameterTypes/NumericParameters/ColorParameters/ColorHsvaParameter.cs (111)
  52. com.unity.perception/Runtime/Randomization/Parameters/ParameterTypes/NumericParameters/ColorParameters/ColorRgbParameter.cs (52)
  53. com.unity.perception/Runtime/Randomization/Parameters/ParameterTypes/NumericParameters/FloatParameter.cs (11)
  54. com.unity.perception/Runtime/Randomization/Parameters/ParameterTypes/NumericParameters/IntegerParameter.cs (31)
  55. com.unity.perception/Runtime/Randomization/Parameters/ParameterTypes/NumericParameters/Vector2Parameter.cs (35)
  56. com.unity.perception/Runtime/Randomization/Parameters/ParameterTypes/NumericParameters/Vector3Parameter.cs (38)
  57. com.unity.perception/Runtime/Randomization/Parameters/ParameterTypes/NumericParameters/Vector4Parameter.cs (51)
  58. com.unity.perception/Runtime/Randomization/Randomizers/RandomizerExamples/Randomizers/BackgroundObjectPlacementRandomizer.cs (2)
  59. com.unity.perception/Runtime/Randomization/Randomizers/RandomizerExamples/Randomizers/ColorRandomizer.cs (6)
  60. com.unity.perception/Runtime/Randomization/Randomizers/RandomizerExamples/Randomizers/ForegroundObjectPlacementRandomizer.cs (4)
  61. com.unity.perception/Runtime/Randomization/Randomizers/RandomizerExamples/Randomizers/HueOffsetRandomizer.cs (6)
  62. com.unity.perception/Runtime/Randomization/Randomizers/RandomizerExamples/Randomizers/RotationRandomizer.cs (6)
  63. com.unity.perception/Runtime/Randomization/Randomizers/RandomizerExamples/Randomizers/SunAngleRandomizer.cs (6)
  64. com.unity.perception/Runtime/Randomization/Randomizers/RandomizerExamples/Randomizers/TextureRandomizer.cs (10)
  65. com.unity.perception/Runtime/Randomization/Randomizers/RandomizerTag.cs (4)
  66. com.unity.perception/Runtime/Randomization/Randomizers/RandomizerTagManager.cs (39)
  67. com.unity.perception/Runtime/Randomization/Samplers/ISampler.cs (12)
  68. com.unity.perception/Runtime/Randomization/Samplers/SamplerTypes/ConstantSampler.cs (39)
  69. com.unity.perception/Runtime/Randomization/Samplers/SamplerTypes/NormalSampler.cs (48)
  70. com.unity.perception/Runtime/Randomization/Samplers/SamplerTypes/UniformSampler.cs (41)
  71. com.unity.perception/Runtime/Randomization/Samplers/SamplerUtility.cs (66)
  72. com.unity.perception/Runtime/Randomization/Scenarios/FixedLengthScenario.cs (3)
  73. com.unity.perception/Runtime/Randomization/Scenarios/Scenario.cs (14)
  74. com.unity.perception/Runtime/Randomization/Scenarios/ScenarioBase.cs (38)
  75. com.unity.perception/Runtime/Randomization/Scenarios/ScenarioConstants.cs (3)
  76. com.unity.perception/Runtime/Randomization/Scenarios/UnitySimulationScenarioConstants.cs (9)
  77. com.unity.perception/Tests/Editor/DatasetCaptureEditorTests.cs (2)
  78. com.unity.perception/Tests/Runtime/GroundTruthTests/DatasetCaptureSensorSchedulingTests.cs (259)
  79. com.unity.perception/Tests/Runtime/GroundTruthTests/DatasetCaptureTests.cs (40)
  80. com.unity.perception/Tests/Runtime/GroundTruthTests/PerceptionCameraIntegrationTests.cs (12)
  81. com.unity.perception/Tests/Runtime/Randomization/ParameterTests/StructParameterTests.cs (19)
  82. com.unity.perception/Tests/Runtime/Randomization/RandomizerTests/RandomizerTagTests.cs (14)
  83. com.unity.perception/Tests/Runtime/Randomization/SamplerTests/NormalSamplerTests.cs (21)
  84. com.unity.perception/Tests/Runtime/Randomization/SamplerTests/SamplerTestsBase.cs (60)
  85. com.unity.perception/Tests/Runtime/Randomization/SamplerTests/UniformSamplerTests.cs (21)
  86. com.unity.perception/Tests/Runtime/Randomization/ScenarioTests.cs (7)
  87. com.unity.perception/package.json (6)
  88. com.unity.perception/Editor/Randomization/Editors/PerceptionEditorAnalytics.cs (99)
  89. com.unity.perception/Editor/Randomization/Editors/PerceptionEditorAnalytics.cs.meta (3)
  90. com.unity.perception/Editor/Utilities.meta (3)
  91. com.unity.perception/Runtime/Randomization/Samplers/SamplerState.cs (32)
  92. com.unity.perception/Runtime/Randomization/Samplers/SamplerState.cs.meta (3)
  93. com.unity.perception/Runtime/Randomization/Samplers/SamplerTypes/AnimationCurveSampler.cs (99)
  94. com.unity.perception/Runtime/Randomization/Samplers/SamplerTypes/AnimationCurveSampler.cs.meta (11)
  95. com.unity.perception/Runtime/Randomization/Samplers/SamplerValidationException.cs (10)
  96. com.unity.perception/Runtime/Randomization/Samplers/SamplerValidationException.cs.meta (3)
  97. com.unity.perception/Tests/Runtime/Randomization/SamplerTests/AnimationCurveSamplerTests.cs (34)
  98. com.unity.perception/Tests/Runtime/Randomization/SamplerTests/AnimationCurveSamplerTests.cs.meta (11)
  99. com.unity.perception/Editor/Utilities/ParameterUIElementsEditor.cs.meta (3)

.gitignore (1)


/.download
**/Build/**
**/Builds/**
/utr

.yamato/environments.yml (10)


# sticking to 2019.4.6f1 for testing for now because Linux Editor fails to open PerceptionHDRP on 2019.4.8f1
# see https://fogbugz.unity3d.com/default.asp?1273518_d68j5lb6eucglb84
coverage_editors:
- version: 2019.4.6f1
- version: 2019.4.18f1
- version: 2019.4.6f1
- version: 2019.4.18f1
- version: 2019.4.6f1
- version: 2020.1.3f1
- version: 2019.4.18f1
# - version: 2020.1.3f1
- version: 2019.4.6f1
- version: 2019.4.18f1
# - version: 2020.1.15f1
# - version: 2020.2.0a21

.yamato/upm-ci-performance.yml (6)


---
{% for editor in complete_editors %}
{% for editor in performance_editors %}
{% for suite in performance_suites %}
{% for project in projects %}
{{project.name}}_linux_{{suite.name}}_{{editor.version}}:

{% endfor %}
{% endfor %}
{% for editor in complete_editors %}
{% for editor in performance_editors %}
{% for suite in performance_suites %}
{% for project in projects %}
{{project.name}}_windows_{{suite.name}}_{{editor.version}}:

model: rtx2080
image: package-ci/win10:stable
image: graphics-foundation/win10-dxr:stable
flavor: b1.large
commands:
- git submodule update --init --recursive

LICENSE.md (2)


com.unity.perception copyright © 2020 Unity Technologies ApS
com.unity.perception copyright © 2021 Unity Technologies ApS
Apache License
Version 2.0, January 2004

README.md (52)


<img src="com.unity.perception/Documentation~/images/banner2.PNG" align="middle"/>
# Perception
The Perception package provides a toolkit for generating large-scale datasets for perception-based machine learning training and validation. It is focused on a handful of camera-based use cases for now and will ultimately expand to other forms of sensors and machine learning tasks.
![ReleaseBadge](https://badge-proxy.cds.internal.unity3d.com/5ab9a162-9dd0-4ba1-ba41-cf25378a927a)
[![license badge](https://img.shields.io/badge/license-Apache--2.0-green.svg)](LICENSE.md)

## Documentation
# Perception Package (Unity Computer Vision)
[Quick Installation Instructions](com.unity.perception/Documentation~/SetupSteps.md)
The Perception package provides a toolkit for generating large-scale datasets for perception-based machine learning training and validation. It is focused on a handful of camera-based use cases for now and will ultimately expand to other forms of sensors and machine learning tasks.
[Perception Tutorial](com.unity.perception/Documentation~/Tutorial/TUTORIAL.md): Detailed instructions covering all the important steps from installing Unity Editor, to creating your first Perception project, building a randomized Scene, and generating large-scale synthetic datasets by leveraging the power of Unity Simulation.
## Getting Started
[Perception Manual](com.unity.perception/Documentation~/index.md): Sample projects and documentation of the SDK.
**[Quick Installation Instructions](com.unity.perception/Documentation~/SetupSteps.md)**
Get your local Perception workspace up and running quickly. Recommended for users with prior Unity experience.
**[Perception Tutorial](com.unity.perception/Documentation~/Tutorial/TUTORIAL.md)**
Detailed instructions covering all the important steps from installing Unity Editor, to creating your first Perception project, building a randomized Scene, and generating large-scale synthetic datasets by leveraging the power of Unity Simulation. No prior Unity experience required.
## Documentation
In-depth documentation on individual components of the package.
|Feature|Description|
|---|---|
|[Labeling](com.unity.perception/Documentation~/GroundTruthLabeling.md)|A component that marks a GameObject and its descendants with a set of labels|
|[Label Config](com.unity.perception/Documentation~/GroundTruthLabeling.md#label-config)|An asset that defines a taxonomy of labels for ground truth generation|
|[Perception Camera](com.unity.perception/Documentation~/PerceptionCamera.md)|Captures RGB images and ground truth from a [Camera](https://docs.unity3d.com/Manual/class-Camera.html).|
|[Dataset Capture](com.unity.perception/Documentation~/DatasetCapture.md)|Ensures sensors are triggered at proper rates and accepts data for the JSON dataset.|
|[Randomization (Experimental)](com.unity.perception/Documentation~/Randomization/Index.md)|The Randomization tool set lets you integrate domain randomization principles into your simulation.|
## Example Projects
### SynthDet
<img src="com.unity.perception/Documentation~/images/synthdet.png"/>
[SynthDet](https://github.com/Unity-Technologies/SynthDet) is an end-to-end solution for training a 2D object detection model using synthetic data.
### Unity Simulation Smart Camera example
<img src="com.unity.perception/Documentation~/images/smartcamera.png"/>
The [Unity Simulation Smart Camera Example](https://github.com/Unity-Technologies/Unity-Simulation-Smart-Camera-Outdoor) illustrates how the Perception toolset could be used in a smart city or autonomous vehicle simulation. You can generate datasets locally or at scale in [Unity Simulation](https://unity.com/products/unity-simulation).
## Local development
The repository includes two projects for local development in `TestProjects` folder, one set up for HDRP and the other for URP.

* To allow navigating to code in all packages included in your project, in your Unity Editor, navigate to `Edit -> Preferences... -> External Tools` and check `Generate all .csproj files.`
## Known issues
* The Linux Editor 2019.4.7f1 and 2019.4.8f1 might hang when importing HDRP-based Perception projects. For Linux Editor support, use 2019.4.6f1 or 2020.1
## Support
For general questions or concerns please contact the Computer Vision team at computer-vision@unity3d.com.
For feedback, bugs, or other issues please file a GitHub issue and the Computer Vision team will investigate the issue as soon as possible.
@misc{com.unity.perception2020,
@misc{com.unity.perception2021,
title={Unity {P}erception Package},
author={{Unity Technologies}},
howpublished={\url{https://github.com/Unity-Technologies/com.unity.perception}},

TestProjects/PerceptionHDRP/Assets/Terrain Data.asset (4)
File diff suppressed because it is too large to display.

TestProjects/PerceptionHDRP/Packages/manifest.json (10)


{
"dependencies": {
"com.unity.collab-proxy": "1.2.16",
"com.unity.ext.nunit": "1.0.0",
"com.unity.ext.nunit": "1.0.5",
"com.unity.ide.vscode": "1.2.1",
"com.unity.ide.vscode": "1.2.3",
"com.unity.simulation.capture": "0.0.10-preview.13",
"com.unity.simulation.core": "0.0.10-preview.19",
"com.unity.test-framework": "1.1.16",
"com.unity.simulation.capture": "0.0.10-preview.16",
"com.unity.simulation.core": "0.0.10-preview.21",
"com.unity.test-framework": "1.1.19",
"com.unity.testtools.codecoverage": "0.2.2-preview",
"com.unity.textmeshpro": "2.0.1",
"com.unity.ugui": "1.0.0",

TestProjects/PerceptionHDRP/Packages/packages-lock.json (31)


{
"dependencies": {
"com.unity.burst": {
"version": "1.3.0-preview.5",
"depth": 2,
"version": "1.3.9",
"depth": 1,
"com.unity.mathematics": "1.1.0"
"com.unity.mathematics": "1.2.1"
},
"url": "https://packages.unity.com"
},

"url": "https://packages.unity.com"
},
"com.unity.ext.nunit": {
"version": "1.0.0",
"version": "1.0.5",
"depth": 0,
"source": "registry",
"dependencies": {},

"url": "https://packages.unity.com"
},
"com.unity.ide.vscode": {
"version": "1.2.1",
"version": "1.2.3",
"depth": 0,
"source": "registry",
"dependencies": {},

"url": "https://packages.unity.com"
},
"com.unity.mathematics": {
"version": "1.1.0",
"version": "1.2.1",
"depth": 2,
"source": "registry",
"dependencies": {},

"dependencies": {
"com.unity.nuget.newtonsoft-json": "1.1.2",
"com.unity.render-pipelines.core": "7.1.6",
"com.unity.burst": "1.3.9",
"com.unity.simulation.client": "0.0.10-preview.9",
"com.unity.simulation.capture": "0.0.10-preview.13",
"com.unity.simulation.core": "0.0.10-preview.19"
"com.unity.simulation.client": "0.0.10-preview.10",
"com.unity.simulation.capture": "0.0.10-preview.16",
"com.unity.simulation.core": "0.0.10-preview.21"
}
},
"com.unity.platforms": {

"url": "https://packages.unity.com"
},
"com.unity.simulation.capture": {
"version": "0.0.10-preview.13",
"version": "0.0.10-preview.16",
"com.unity.simulation.core": "0.0.10-preview.19"
"com.unity.simulation.core": "0.0.10-preview.21"
"version": "0.0.10-preview.9",
"version": "0.0.10-preview.10",
"depth": 1,
"source": "registry",
"dependencies": {},

"version": "0.0.10-preview.19",
"version": "0.0.10-preview.21",
"depth": 0,
"source": "registry",
"dependencies": {},

"version": "1.1.16",
"version": "1.1.19",
"com.unity.ext.nunit": "1.0.0",
"com.unity.ext.nunit": "1.0.5",
"com.unity.modules.imgui": "1.0.0",
"com.unity.modules.jsonserialize": "1.0.0"
},

TestProjects/PerceptionHDRP/ProjectSettings/PackageManagerSettings.asset (4)


m_GameObject: {fileID: 0}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 0}
m_Script: {fileID: 13960, guid: 0000000000000000e000000000000000, type: 0}
m_EditorClassIdentifier: UnityEditor:UnityEditor.PackageManager.UI:PackageManagerProjectSettings
m_EditorClassIdentifier:
m_ScopedRegistriesSettingsExpanded: 1
oneTimeWarningShown: 0
m_Registries:

TestProjects/PerceptionHDRP/ProjectSettings/ProjectVersion.txt (4)


m_EditorVersion: 2019.4.12f1
m_EditorVersionWithRevision: 2019.4.12f1 (225e826a680e)
m_EditorVersion: 2019.4.16f1
m_EditorVersionWithRevision: 2019.4.16f1 (e05b6e02d63e)

TestProjects/PerceptionURP/Assets/Scenes/SampleScene.unity (63)


m_TrainingDataDestination: TrainingData
m_LightProbeSampleCountMultiplier: 4
m_LightingDataAsset: {fileID: 0}
m_UseShadowmask: 0
m_UseShadowmask: 1
--- !u!196 &4
NavMeshSettings:
serializedVersion: 2

debug:
m_Flags: 0
m_NavMeshData: {fileID: 0}
--- !u!1 &85886256
GameObject:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
serializedVersion: 6
m_Component:
- component: {fileID: 85886258}
- component: {fileID: 85886257}
m_Layer: 0
m_Name: Scenario
m_TagString: Untagged
m_Icon: {fileID: 0}
m_NavMeshLayer: 0
m_StaticEditorFlags: 0
m_IsActive: 1
--- !u!114 &85886257
MonoBehaviour:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 85886256}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: d50076aff0af4515b4422166496fdd5e, type: 3}
m_Name:
m_EditorClassIdentifier:
m_Randomizers: []
quitOnComplete: 1
serializedConstantsFileName: constants
constants:
totalIterations: 100
instanceCount: 1
instanceIndex: 0
framesPerIteration: 1
references:
version: 1
--- !u!4 &85886258
Transform:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 85886256}
m_LocalRotation: {x: 0, y: 0, z: 0, w: 1}
m_LocalPosition: {x: 107.21, y: 72.77, z: -144.12}
m_LocalScale: {x: 1, y: 1, z: 1}
m_Children: []
m_Father: {fileID: 0}
m_RootOrder: 6
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
--- !u!1 &411238276
GameObject:
m_ObjectHideFlags: 0

m_EditorClassIdentifier:
labels:
- Crate
useAutoLabeling: 0
autoLabelingSchemeType:
--- !u!65 &411238278
BoxCollider:
m_ObjectHideFlags: 0

m_EditorClassIdentifier:
labels:
- Cube
useAutoLabeling: 0
autoLabelingSchemeType:
--- !u!65 &934158984
BoxCollider:
m_ObjectHideFlags: 0

m_EditorClassIdentifier:
labels:
- Box
useAutoLabeling: 0
autoLabelingSchemeType:
--- !u!65 &1640252280
BoxCollider:
m_ObjectHideFlags: 0

m_EditorClassIdentifier:
labels:
- Terrain
useAutoLabeling: 0
autoLabelingSchemeType:

TestProjects/PerceptionURP/Packages/manifest.json (8)


"dependencies": {
"com.unity.collab-proxy": "1.2.16",
"com.unity.ide.rider": "1.1.4",
"com.unity.ide.vscode": "1.2.1",
"com.unity.ide.vscode": "1.2.3",
"com.unity.simulation.capture": "0.0.10-preview.13",
"com.unity.simulation.core": "0.0.10-preview.19",
"com.unity.test-framework": "1.1.16",
"com.unity.simulation.capture": "0.0.10-preview.16",
"com.unity.simulation.core": "0.0.10-preview.21",
"com.unity.test-framework": "1.1.19",
"com.unity.textmeshpro": "2.0.1",
"com.unity.timeline": "1.2.12",
"com.unity.ugui": "1.0.0",

TestProjects/PerceptionURP/Packages/packages-lock.json (31)


{
"dependencies": {
"com.unity.burst": {
"version": "1.3.0-preview.5",
"depth": 2,
"version": "1.3.9",
"depth": 1,
"com.unity.mathematics": "1.1.0"
"com.unity.mathematics": "1.2.1"
},
"url": "https://packages.unity.com"
},

"url": "https://packages.unity.com"
},
"com.unity.ext.nunit": {
"version": "1.0.0",
"version": "1.0.5",
"depth": 1,
"source": "registry",
"dependencies": {},

"url": "https://packages.unity.com"
},
"com.unity.ide.vscode": {
"version": "1.2.1",
"version": "1.2.3",
"depth": 0,
"source": "registry",
"dependencies": {},

"url": "https://packages.unity.com"
},
"com.unity.mathematics": {
"version": "1.1.0",
"version": "1.2.1",
"depth": 2,
"source": "registry",
"dependencies": {},

"dependencies": {
"com.unity.nuget.newtonsoft-json": "1.1.2",
"com.unity.render-pipelines.core": "7.1.6",
"com.unity.burst": "1.3.9",
"com.unity.simulation.client": "0.0.10-preview.9",
"com.unity.simulation.capture": "0.0.10-preview.13",
"com.unity.simulation.core": "0.0.10-preview.19"
"com.unity.simulation.client": "0.0.10-preview.10",
"com.unity.simulation.capture": "0.0.10-preview.16",
"com.unity.simulation.core": "0.0.10-preview.21"
}
},
"com.unity.platforms": {

"url": "https://packages.unity.com"
},
"com.unity.simulation.capture": {
"version": "0.0.10-preview.13",
"version": "0.0.10-preview.16",
"com.unity.simulation.core": "0.0.10-preview.19"
"com.unity.simulation.core": "0.0.10-preview.21"
"version": "0.0.10-preview.9",
"version": "0.0.10-preview.10",
"depth": 1,
"source": "registry",
"dependencies": {},

"version": "0.0.10-preview.19",
"version": "0.0.10-preview.21",
"depth": 0,
"source": "registry",
"dependencies": {},

"version": "1.1.16",
"version": "1.1.19",
"com.unity.ext.nunit": "1.0.0",
"com.unity.ext.nunit": "1.0.5",
"com.unity.modules.imgui": "1.0.0",
"com.unity.modules.jsonserialize": "1.0.0"
},

TestProjects/PerceptionURP/ProjectSettings/PackageManagerSettings.asset (4)


m_GameObject: {fileID: 0}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 0}
m_Script: {fileID: 13960, guid: 0000000000000000e000000000000000, type: 0}
m_EditorClassIdentifier: UnityEditor:UnityEditor.PackageManager.UI:PackageManagerProjectSettings
m_EditorClassIdentifier:
m_ScopedRegistriesSettingsExpanded: 1
oneTimeWarningShown: 0
m_Registries:

TestProjects/PerceptionURP/ProjectSettings/ProjectSettings.asset (11)


metalEditorSupport: 1
metalAPIValidation: 1
iOSRenderExtraFrameOnPause: 0
iosCopyPluginsCodeInsteadOfSymlink: 0
appleDeveloperTeamID:
iOSManualSigningProvisioningProfileID:
tvOSManualSigningProvisioningProfileID:

webGLWasmStreaming: 0
scriptingDefineSymbols: {}
platformArchitecture: {}
scriptingBackend: {}
scriptingBackend:
Standalone: 0
il2cppCompilerConfiguration: {}
managedStrippingLevel: {}
incrementalIl2cppBuild: {}

XboxOneCapability: []
XboxOneGameRating: {}
XboxOneIsContentPackage: 0
XboxOneEnhancedXboxCompatibilityMode: 0
XboxOneEnableGPUVariability: 1
XboxOneSockets: {}
XboxOneSplashScreen: {fileID: 0}

m_VersionCode: 1
m_VersionName:
apiCompatibilityLevel: 6
cloudProjectId:
cloudProjectId: 7f560aa0-0da3-47a1-88e7-3884d7922bdd
projectName:
organizationId:
projectName: New Unity Project
organizationId: jonhunity
cloudEnabled: 0
enableNativePlatformBackendsForNewInputSystem: 0
disableOldInputManagerSupport: 0

TestProjects/PerceptionURP/ProjectSettings/ProjectVersion.txt (4)


m_EditorVersion: 2019.4.12f1
m_EditorVersionWithRevision: 2019.4.12f1 (225e826a680e)
m_EditorVersion: 2019.4.16f1
m_EditorVersionWithRevision: 2019.4.16f1 (e05b6e02d63e)

TestProjects/PerceptionURP/ProjectSettings/QualitySettings.asset (2)


skinWeights: 2
textureQuality: 0
anisotropicTextures: 1
antiAliasing: 2
antiAliasing: 0
softParticles: 0
softVegetation: 1
realtimeReflectionProbes: 1

TestProjects/PerceptionURP/ProjectSettings/UnityConnectSettings.asset (4)


UnityConnectSettings:
m_ObjectHideFlags: 0
serializedVersion: 1
m_Enabled: 0
m_Enabled: 1
m_TestMode: 0
m_EventOldUrl: https://api.uca.cloud.unity3d.com/v1/events
m_EventUrl: https://cdp.cloud.unity3d.com/v1/events

m_EventUrl: https://perf-events.cloud.unity3d.com
m_Enabled: 0
m_Enabled: 1
m_LogBufferSize: 10
m_CaptureEditorExceptions: 1
UnityPurchasingSettings:

com.unity.perception/CHANGELOG.md (33)


## Unreleased
### Upgrade Notes
Before upgrading a project to this version of the Perception package, make sure to keep a record of **all sampler ranges** in your added Randomizers. Due to a change in how sampler ranges are serialized, **after upgrading to this version, ranges for all stock Perception samplers (Uniform and Normal Samplers) will be reset**, and will need to be manually reverted by the user.
### Added
Added keypoint ground truth labeling

Randomizer tags now support inheritance
Added AnimationCurveSampler, which returns random values according to a range and probability distribution denoted by a user-provided AnimationCurve.
Added ParameterUIElementsEditor class to allow custom ScriptableObjects and MonoBehaviours to render Parameter and Sampler typed public fields correctly in their inspector windows.
Added new capture options to Perception Camera:
* Can now render intermediate frames between captures.
* Capture can now be triggered manually using a function call, instead of automatic capturing on a schedule.
### Changed
Randomizers now access their parent scenario through the static activeScenario property

Replaced ScenarioBase.GenerateRandomSeed() with ScenarioBase.NextRandomState()
Samplers now derive their random state from the static SamplerState class instead of individual scenarios to allow parameters and samplers to be used outside of the context of a scenario
Replaced ScenarioBase.GenerateRandomSeed() with SamplerState.NextRandomState() and SamplerState.CreateGenerator()
RandomizerTagManager.Query<T>() now returns RandomizerTags directly instead of the GameObjects attached to said tags
Semantic Segmentation Labeler now places data in folders with randomized filenames
Removed native sampling (through jobs) capability from all samplers and parameters as it introduced additional complexity to the code and was not a common usage pattern
Removed `range` as a required ISampler interface property.
### Fixed

Randomizer.OnCreate() is no longer called in edit-mode when adding a randomizer to a scenario
Fixed a bug where removing all randomizers from a scenario caused the randomizer container UI element to overflow over the end of Scenario component UI
Semantic Segmentation Labeler now produces output in the proper form for distributed data generation on Unity Simulation by placing output in randomized directory names
Texture Randomizer is now compatible with HDRP
Added analytics for Unity Simulation runs
Added instance segmentation labeler.

com.unity.perception/Documentation~/DatasetCapture.md (8)


## Sensor scheduling
While sensors are registered, `DatasetCapture` ensures that frame timing is deterministic and run at the appropriate simulation times to let each sensor run at its own rate.
While sensors are registered, `DatasetCapture` ensures that frame timing is deterministic and run at the appropriate simulation times to let each sensor render and capture at its own rate.
Using [Time.CaptureDeltaTime](https://docs.unity3d.com/ScriptReference/Time-captureDeltaTime.html), it also decouples wall clock time from simulation time, allowing the simulation to run as fast as possible.
Using [Time.captureDeltaTime](https://docs.unity3d.com/ScriptReference/Time-captureDeltaTime.html), it also decouples wall clock time from simulation time, allowing the simulation to run as fast as possible.
You can register custom sensors using `DatasetCapture.RegisterSensor()`. The `period` you pass in at registration time determines how often (in simulation time) frames should be scheduled for the sensor to run. The sensor implementation then checks `ShouldCaptureThisFrame` on the returned `SensorHandle` each frame to determine whether it is time for the sensor to perform a capture. `SensorHandle.ReportCapture` should then be called in each of these frames to report the state of the sensor to populate the dataset.
You can register custom sensors using `DatasetCapture.RegisterSensor()`. The `simulationDeltaTime` you pass in at registration time is used as `Time.captureDeltaTime` and determines how often (in simulation time) frames should be simulated for the sensor to run. This and the `framesBetweenCaptures` value determine at which exact times the sensor should capture the simulated frames. The decoupling of simulation delta time and capture frequency based on frames simulated allows you to render frames in-between captures. If no in-between frames are desired, you can set `framesBetweenCaptures` to 0. When it is time to capture, the `ShouldCaptureThisFrame` check of the `SensorHandle` returns true. `SensorHandle.ReportCapture` should then be called in each of these frames to report the state of the sensor to populate the dataset.
`Time.captureDeltaTime` is set at every frame in order to precisely fall on the next sensor that requires simulation, and this includes multi-sensor simulations. For instance, if one sensor has a `simulationDeltaTime` of 2 and another 3, the first six values for `Time.captureDeltaTime` will be 2, 1, 1, 2, 2, and 1, meaning simulation will happen on the timestamps 0, 2, 3, 4, 6, 8, and 9.
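As a sketch, a custom sensor's per-frame logic can look like the following. This is a minimal sketch: the `DatasetCapture.RegisterSensor()` call is omitted because its exact parameter list varies between package versions; `SensorHandle`, `ShouldCaptureThisFrame`, and `ReportCapture` are the members described above, and the namespace is assumed from this package's file layout.
```
using UnityEngine;
using UnityEngine.Perception.GroundTruth;

public class CustomSensor : MonoBehaviour
{
    // Handle obtained earlier from DatasetCapture.RegisterSensor().
    SensorHandle m_SensorHandle;

    void Update()
    {
        // True only on the frames scheduled for this sensor, based on
        // its simulationDeltaTime and framesBetweenCaptures settings.
        if (!m_SensorHandle.ShouldCaptureThisFrame)
            return;

        // Call m_SensorHandle.ReportCapture(...) here with this frame's
        // sensor data; the arguments depend on the sensor type.
    }
}
```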
## Custom annotations and metrics
In addition to the common annotations and metrics produced by [PerceptionCamera](PerceptionCamera.md), scripts can produce their own via `DatasetCapture`. You must first register annotation and metric definitions using `DatasetCapture.RegisterAnnotationDefinition()` or `DatasetCapture.RegisterMetricDefinition()`. These return `AnnotationDefinition` and `MetricDefinition` instances which you can then use to report values during runtime.
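For instance, here is a hedged sketch of registering and reporting a custom metric every frame. The call shapes mirror the description above but should be verified against your package version; the GUID and the reported payload are purely illustrative:
```
using System;
using UnityEngine;
using UnityEngine.Perception.GroundTruth;

public class LightPositionMetricReporter : MonoBehaviour
{
    public Light targetLight;
    MetricDefinition m_Definition;

    void Start()
    {
        // Register the metric definition once; the id ties reported
        // values to this definition in the JSON dataset.
        m_Definition = DatasetCapture.RegisterMetricDefinition(
            "light position", "World-space position of the scene light",
            Guid.Parse("c4bacf4a-7e31-4a26-9b6e-ffb4de1d4d9d"));
    }

    void Update()
    {
        // Report one value for this metric each frame.
        DatasetCapture.ReportMetric(m_Definition,
            new[] { targetLight.transform.position });
    }
}
```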

com.unity.perception/Documentation~/PerceptionCamera.md (39)


# The Perception Camera component
# The Perception Camera Component
![Perception Camera component](images/PerceptionCameraFinished.png)
<br/>_Perception Camera component_
<p align="center">
<img src="images/PerceptionCameraFinished.png" width="600"/>
<br><i>The Inspector view of the Perception Camera component</i>
</p>
| Period | The amount of simulation time in seconds between frames for this Camera. For more information on sensor scheduling, see [DatasetCapture](DatasetCapture.md). |
| Start Time | The simulation time at which to run the first frame. This time offsets the period, which allows multiple Cameras to run at the correct times relative to each other. |
| Capture Rgb Images | When you enable this property, Unity captures RGB images as PNG files in the dataset each frame. |
| Show Visualizations | Display realtime visualizations for labelers that are currently active on this camera. |
| Capture RGB Images | When you enable this property, Unity captures RGB images as PNG files in the dataset each frame. |
| Capture Trigger Mode | The method of triggering captures for this camera. In `Scheduled` mode, captures happen automatically based on a start frame and frame delta time. In `Manual` mode, captures should be triggered manually through calling the `RequestCapture` method of `PerceptionCamera`. |
### Properties for Scheduled Capture Mode
| Property: | Function: |
|--|--|
| Simulation Delta Time | The simulation frame time (seconds) for this camera. E.g. 0.0166 translates to 60 frames per second. This will be used as Unity's `Time.captureDeltaTime`, causing a fixed number of frames to be generated for each second of elapsed simulation time regardless of the capabilities of the underlying hardware. For more information on sensor scheduling, see [DatasetCapture](DatasetCapture.md). |
| First Capture Frame | Frame number at which this camera starts capturing. |
| Frames Between Captures | The number of frames to simulate and render between the camera's scheduled captures. Setting this to 0 makes the camera capture every frame. |
### Properties for Manual Capture Mode
| Property: | Function: |
|--|--|
| Affect Simulation Timing | Have this camera affect simulation timings (similar to a scheduled camera) by requesting a specific frame delta time. Enabling this option will let you set the `Simulation Delta Time` property described above.|
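As an illustrative sketch of `Manual` mode: the only call taken from the table above is `PerceptionCamera.RequestCapture()`; the keypress trigger and field wiring are assumptions.
```
using UnityEngine;
using UnityEngine.Perception.GroundTruth;

public class ManualCaptureTrigger : MonoBehaviour
{
    public PerceptionCamera perceptionCamera;

    void Update()
    {
        // Trigger a one-off capture on demand instead of on a schedule.
        if (Input.GetKeyDown(KeyCode.Space))
            perceptionCamera.RequestCapture();
    }
}
```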
### SemanticSegmentationLabeler
### Semantic Segmentation Labeler
### InstanceSegmentationLabeler
### Instance Segmentation Labeler
### BoundingBox2DLabeler
### Bounding Box 2D Labeler
![Example bounding box visualization from SynthDet generated by the `SynthDet_Statistics` Jupyter notebook](images/bounding_boxes.png)
<br/>_Example bounding box visualization from SynthDet generated by the `SynthDet_Statistics` Jupyter notebook_

The Bounding Box 3D Ground Truth Labeler prouces 3D ground truth bounding boxes for each labeled game object in the scene. Unlike the 2D bounding boxes, 3D bounding boxes are calculated from the labeled meshes in the scene and all objects (independent of their occlusion state) are recorded.
The Bounding Box 3D Ground Truth Labeler produces 3D ground truth bounding boxes for each labeled game object in the scene. Unlike the 2D bounding boxes, 3D bounding boxes are calculated from the labeled meshes in the scene and all objects (independent of their occlusion state) are recorded.
### ObjectCountLabeler
### Object Count Labeler
```
{

The ObjectCountLabeler records object counts for each label you define in the IdLabelConfig. Unity only records objects that have at least one visible pixel in the Camera frame.
### RenderedObjectInfoLabeler
### Rendered Object Info Labeler
```
{
"label_id": 24,

com.unity.perception/Documentation~/Randomization/Index.md (50)


*NOTE: The Perception package's randomization toolset is currently marked as experimental and is subject to change.*
The randomization toolset simplifies randomizing aspects of generating synthetic data. It facilitates exposing parameters for randomization, offers samplers to pick random values from parameters, and provides scenarios to coordinate a full randomization process. Each of these also allows for custom implementations to fit particular randomization needs.
The randomization toolset simplifies randomizing aspects of generating synthetic data. It facilitates exposing parameters for randomization, offers samplers to pick random values from parameters, and provides Scenarios to coordinate a full randomization process. Each of these also allows for custom implementations to fit particular randomization needs.
Our use of domain randomization draws from Tobin et al. (2017) work training robotic pick and place using purely synthetic data.
Our use of domain randomization draws from Tobin et al.'s (2017) work on training robotic pick and place using purely synthetic data.
1. Create a scenario
2. Define and add randomizers to the scenario
3. Customize parameters and samplers in the randomizers
4. Generate randomized perception data
1. Create a Scenario
2. Define and add Randomizers to the Scenario
3. Customize Parameters and Samplers in the Randomizers
4. Generate randomized computer vision training data
Beginning with step 1, add a scenario component to your simulation. This scenario will act as the central hub for all randomization activities that occur when your scene is executed.
Beginning with step 1, add a Scenario component to your simulation. This Scenario will act as the central hub for all randomization activities that occur when your scene is executed.
Next, add a few randomizers to the scenario. The randomizers, in conjunction with the scenario, will perform the actual randomization activities within the simulation.
Next, add a few Randomizers to the Scenario. The Randomizers, in conjunction with the Scenario, will perform the actual randomization activities within the simulation.
After adding the necessary randomizers, configure the random parameters assigned to each randomizer to further customize how the simulation is randomized. The random parameters and samplers exposed in each randomizer's inspector can be manipulated to specify different probabilty distributions to use when generating random values.
After adding the necessary Randomizers, configure the random Parameters assigned to each Randomizer to further customize how the simulation is randomized. The random Parameters and Samplers exposed in each Randomizer's inspector can be manipulated to specify different probability distributions to use when generating random values.
Once the project has been randomized and your scene has been configured with the data capture tools available in the perception package, enter play mode in the editor or execute your scenario through the Unity Simulation Cloud service to generate domain randomized perception data.
Once the project has been randomized and your scene has been configured with the data capture tools available in the Perception package, enter play mode in the editor or execute your Scenario through the Unity Simulation cloud service to generate domain randomized perception data.
Continue reading for more details concerning the primary components driving randomizations in the perception package, including:
Continue reading for more details concerning the primary components driving randomizations in the Perception package, including:
1. Scenarios
2. Randomizers
3. Randomizer Tags

## Scenarios
Within a randomized simulation, the scenario component has three responsibilities:
Within a randomized simulation, the Scenario component has three responsibilities:
2. Defining a list of randomizers
2. Defining a list of Randomizers
The fundamental principle of domain randomization is to simulate environments under a variety of randomized conditions. Each **iteration** of a scenario is intended to encapsulate one complete run of a simulated environment under uniquely randomized conditions. Scenarios futher define what conditions determine the end of an iteration and how many iterations to perform.
The fundamental principle of domain randomization is to simulate environments under a variety of randomized conditions. Each Iteration of a Scenario is intended to encapsulate one complete run of a simulated environment under uniquely randomized conditions. Scenarios further define what conditions determine the end of an Iteration and how many Iterations to perform.
To actually randomize a simulation, randomizers can be added to a scenario to vary different simulation properties. At runtime, the scenario will execute each randomizer according to its place within the randomizers list.
To actually randomize a simulation, Randomizers can be added to a Scenario to vary different simulation properties. At runtime, the Scenario will execute each Randomizer according to its place within the Randomizer list.
To read more about scenarios and how to customize them, navigate over to the [scenarios doc](Scenarios.md).
To read more about Scenarios and how to customize them, navigate over to the **[Scenarios documentation](Scenarios.md)**.
Randomizers encapsulate specific randomization activities to perform during the lifecycle of a randomized simulation. For example, randomizers exist for spawning objects, repositioning lights, varying the color of objects, etc. Randomizers expose random parameters to their inspector interface to further customize these variations.
Randomizers encapsulate specific randomization activities to perform during the lifecycle of a randomized simulation. For example, Randomizers exist for spawning objects, repositioning lights, varying the color of objects, etc. Randomizers expose random Parameters to their inspector interface to further customize these variations.
To read more about how to create custom parameter types, navigate over to the [randomizers doc](Randomizers.md).
To read more about how to create custom Parameter types, navigate over to the **[Randomizers documentation](Randomizers.md)**.
RandomizerTags are the primary mechanism by which randomizers query for a certain subset of GameObjects to randoize within a simulation. For example, a rotation randomizer could query for all GameObjects with a RotationRandomizerTag component to obtain an array of all objects the randomizer should vary for the given simulation iteration.
RandomizerTags are the primary mechanism by which Randomizers query for a certain subset of GameObjects to randomize within a simulation. For example, a rotation Randomizer could query for all GameObjects with a RotationRandomizerTag component to obtain an array of all objects the Randomizer should vary for the given simulation Iteration.
To read more about how to use RandomizerTags, navigate over to the [RandomizerTags doc](RandomizerTags.md).
To read more about how to use RandomizerTags, navigate over to the **[RandomizerTags documentation](RandomizerTags.md)**.
Parameters are classes that utilize samplers to deterministically generate random typed values. Parameters are often exposed within the inspector interface of randomizers to allow users to customize said randomizer's behavior. To accomplish this, parameters combine and transform the float values produced by one or more samplers into various C# types. For example, a Vector3 parameter can be used to map three samplers to the x, y, and z dimensions of a GameObject. Or a material parameter can utilize a sampler to randomly select one material from a list of possible options.
Parameters are classes that utilize Samplers to deterministically generate random typed values. Parameters are often exposed within the inspector interface of Randomizers to allow users to customize said Randomizer's behavior. To accomplish this, Parameters combine and transform the float values produced by one or more Samplers into various C# types. For example, a Vector3 Parameter can be used to map three Samplers to the x, y, and z dimensions of a GameObject. Or a material Parameter can utilize a Sampler to randomly select one material from a list of possible options.
To read more about how to create custom parameter types, navigate over to the [parameters doc](Parameters.md).
To read more about how to create custom Parameter types, navigate over to the **[Parameters documentation](Parameters.md)**.
Samplers generate bounded random float values by sampling from probability distributions. They are considered bounded since each random sampler generates float values within a range defined by a minumum and maximum value.
Samplers generate bounded random float values by sampling from probability distributions. They are considered bounded since each random sampler generates float values within a range defined by a minimum and maximum value.
Take a look at the [samplers doc](Samplers.md) to learn more about implementing custom probability distributions and samplers that can integrate with the perception package.
Take a look at the **[Samplers doc](Samplers.md)** to learn more about implementing custom probability distributions and samplers that can integrate with the Perception package.
Visit the [Perception Tutorial](../Tutorial/TUTORIAL.md) to get started using the perception package's randomization tools in an example project.
Visit the [Perception Tutorial](../Tutorial/TUTORIAL.md) to get started using the Perception package's randomization tools in an example project.

com.unity.perception/Documentation~/Randomization/Parameters.md (71)


## Creating and Sampling Parameters
Parameters are often defined as fields of a randomizer class, but they can also be instanced just like any other C# class:
Parameters are often defined as fields of a Randomizer class, but they can also be instanced just like any other C# class:
// Create a color parameter
// Create a color Parameter
var colorParameter = new HsvaColorParameter();
// Generate one color sample

Note that parameters, like samplers, generate new random values for each call to the Sample() method:
Note that Parameters, like Samplers, generate new random values for each call to the Sample() method:
```
var color1 = colorParameter.Sample();
var color2 = colorParameter.Sample();

## Defining Custom Parameters
All parameters derive from the `Parameter` abstract class, but all included perception package parameter types derive from two specialized Parameter base classes:
All Parameters derive from the `Parameter` abstract class. Additionally, the Parameters included in the Perception package types derive from two specialized Parameter base classes:
### Categorical Parameters
## Using Parameters outside of Randomizers (ie: in MonoBehaviours and ScriptableObjects)
Categorical parameters choose a value from a list of options that have no intrinsic ordering. For example, a material paramater randomly chooses from a list of material options, but the list of material options itself can be rearranged into any particular order without affecting the distribution of materials selected.
After adding a public Parameter field to a MonoBehaviour or ScriptableObject, you may have noticed that the Parameter's UI doesn't look the same as it does when added to a Randomizer. This is because the Inspector UI for most Perception randomization components is authored using Unity's relatively new UI Elements framework, though by default, Unity uses the old IMGUI framework to render default inspector editors.
If your custom parameter is a categorical in nature, take a look at the [StringParameter]() class included in the perception package as a reference for how to derive the `CategoricalParameter` class.
Say you have the following CustomMonoBehaviour that has a public GameObjectParameter field:
using UnityEngine.Perception.Randomization.Parameters.Attributes;
using UnityEngine;
using UnityEngine.Experimental.Perception.Randomization.Parameters;
namespace UnityEngine.Perception.Randomization.Parameters
public class CustomMonoBehaviour : MonoBehaviour
[AddComponentMenu("")]
[ParameterMetaData("String")]
public class StringParameter : CategoricalParameter<string> {}
public GameObjectParameter prefabs;
**Note:** the AddComponentMenu attribute with an empty string prevents parameters from appearing in the Add Component GameObject menu. Randomization parameters should only be created with by a `ParameterConfiguration`
### Numeric Parameters
Numeric parameters use samplers to generate randomized structs. Take a look at the [ColorHsvaParameter]() class included in the perception package for an example on how to implement a numeric parameter.
To force Unity to use UI Elements to render your CustomMonoBehaviour's inspector window, create a custom editor for your MonoBehaviour by deriving the ParameterUIElementsEditor class like so:
## Improving Sampling Performance
For numeric parameters, it is recommended to use the JobHandle overload of the Samples() method when generating a large number of samples. The JobHandle overload will utilize the Unity Burst Compiler and Job System to automatically optimize and multithread parameter sampling jobs. The code block below is an example of how to use this overload to sample two parameters in parallel:
// Get a reference to the parameter configuration attached to this GameObject
var parameterConfiguration = GetComponent<ParameterConfiguration>();
using UnityEditor;
using UnityEngine.Experimental.Perception.Editor;
// Lookup parameters
var cubeColorParameter = parameterConfiguration.GetParameter<HsvaColorParameter>("CubeColor");
var cubePositionParameter = parameterConfiguration.GetParameter<Vector3Parameter>("CubePosition");
[CustomEditor(typeof(CustomMonoBehaviour))]
public class TestClusterEditor : DefaultUIElementsEditor { }
```
// Schedule sampling jobs
var cubeColors = cubeColorParameter.Samples(constants.cubeCount, out var colorHandle);
var cubePositions = cubePositionParameter.Samples(constants.cubeCount, out var positionHandle);
### Categorical Parameters
// Combine job handles
var handles = JobHandle.CombineDependencies(colorHandle, positionHandle);
Categorical Parameters choose a value from a list of options that have no intrinsic ordering. For example, a material Parameter randomly chooses from a list of material options, but the list of material options itself can be rearranged into any particular order without affecting the distribution of materials selected.
// Wait for the jobs to complete
handles.Complete();
If your custom Parameter is categorical in nature, take a look at the [StringParameter]() class included in the perception package as a reference for how to derive the `CategoricalParameter` class.
```
using UnityEngine.Perception.Randomization.Parameters.Attributes;
// Use the created samples
for (var i = 0; i < constants.cubeCount; i++)
namespace UnityEngine.Perception.Randomization.Parameters
m_ObjectMaterials[i].SetColor(k_BaseColorProperty, cubeColors[i]);
m_Objects[i].transform.position = cubePositions[i];
[AddComponentMenu("")]
[ParameterMetaData("String")]
public class StringParameter : CategoricalParameter<string> {}
```
// Dispose of the generated samples
cubeColors.Dispose();
cubePositions.Dispose();
```
**Note:** the AddComponentMenu attribute with an empty string prevents Parameters from appearing in the Add Component GameObject menu. Randomization Parameters should only be created by a `ParameterConfiguration`.
### Numeric Parameters
Numeric Parameters use samplers to generate randomized structs. Take a look at the [ColorHsvaParameter]() class included in the Perception package for an example on how to implement a numeric Parameter.
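As a quick usage sketch, a numeric Parameter is simply a composition of Samplers and can be sampled directly. In this minimal sketch, the namespaces follow the fragments above and the sampler range is illustrative:
```
using UnityEngine.Experimental.Perception.Randomization.Parameters;
using UnityEngine.Experimental.Perception.Randomization.Samplers;

// A float Parameter whose values are drawn uniformly from [0.5, 2].
var scale = new FloatParameter { value = new UniformSampler(0.5f, 2f) };
float sampledScale = scale.Sample();
```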

com.unity.perception/Documentation~/Randomization/RandomizerTags.md (6)


# Randomizer Tags
RandomizerTags are the primary mechanism by which randomizers query for a certain subset of GameObjects to randomize within a simulation.
RandomizerTags are the primary mechanism by which Randomizers query for a certain subset of GameObjects to randomize within a simulation.
More specifically, RandomizerTags are components that can be added to GameObjects to register them with the active scenario's TagManager. This TagManager is aware of all objects with tags in the scene and can be queried to find all GameObjects that contain a specific tag. Below is a simple example of a ColorRandomizer querying for all GameObjects with a ColorRandomizerTag that it will apply a random material base color to:
More specifically, RandomizerTags are components that can be added to GameObjects to register them with the active Scenario's TagManager. This TagManager is aware of all objects with tags in the scene and can be queried to find all GameObjects that contain a specific tag. Below is a simple example of a ColorRandomizer querying for all GameObjects with a ColorRandomizerTag that it will apply a random material base color to:
```
[Serializable]

}
```
RandomizerTags can also be used to customize how randomizers apply their randomizations to a particular GameObject. Visit [Phase 2 of the Perception Tutorial](../Tutorial/TUTORIAL.md) to explore an in depth example of implementing a LightRandomizer that does exactly this.
RandomizerTags can also be used to customize how Randomizers apply their randomizations to a particular GameObject. Visit [Phase 2 of the Perception Tutorial](../Tutorial/TUTORIAL.md) to explore an in depth example of implementing a LightRandomizer that does exactly this.
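A minimal tag needs no fields of its own; adding it to a GameObject registers that object with the TagManager so Randomizers can query for it. The sketch below assumes the namespace from this changeset's file layout:
```
using UnityEngine;
using UnityEngine.Experimental.Perception.Randomization.Randomizers;

// Attach to any GameObject whose material color should be randomized;
// a Randomizer can then call tagManager.Query<MyColorRandomizerTag>().
[AddComponentMenu("Perception/RandomizerTags/My Color Randomizer Tag")]
public class MyColorRandomizerTag : RandomizerTag { }
```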

com.unity.perception/Documentation~/Randomization/Randomizers.md (20)


# Randomizers
Randomizers encapsulate specific randomization activities to perform during the execution of a randomized simulation. For example, randomizers exist for spawning objects, repositioning lights, varying the color of objects, etc. Randomizers expose random parameters to their inspector interface to further customize these variations. Users can add a set of randomizers to a scenario in order to define an ordered list randomization activities to perform during the lifecycle of a simulation.
Randomizers encapsulate specific randomization activities to perform during the execution of a randomized simulation. For example, Randomizers exist for spawning objects, repositioning lights, varying the color of objects, etc. Randomizers expose random parameters to their inspector interface to further customize these variations. Users can add a set of Randomizers to a Scenario in order to define an ordered list of randomization activities to perform during the lifecycle of a simulation.
To define an entirely new randomizer, derive the Randomizer class and implement one or more of the methods listed in the section below to randomize GameObjects during the runtime of a simulation.
To define an entirely new Randomizer, derive the Randomizer class and implement one or more of the methods listed in the section below to randomize GameObjects during the runtime of a simulation.
1. OnCreate() - called when the Randomizer is added or loaded to a scenario
2. OnIterationStart() - called at the start of a new scenario iteration
3. OnIterationEnd() - called the after a scenario iteration has completed
4. OnScenarioComplete() - called the after the entire scenario has completed
1. OnCreate() - called when the Randomizer is added or loaded to a Scenario
2. OnIterationStart() - called at the start of a new Scenario Iteration
3. OnIterationEnd() - called after a Scenario Iteration has completed
4. OnScenarioComplete() - called after the entire Scenario has completed
5. OnStartRunning() - called on the first frame a Randomizer is enabled
6. OnStopRunning() - called on the first frame a disabled Randomizer is updated
7. OnUpdate() - executed every frame for enabled Randomizers

Below is the code for the sample rotation randomizer included with the perception package:
Below is the code for the sample rotation Randomizer included with the Perception package:
```
[Serializable]

```
There are a few key things to note from this example:
1. Make sure to add the [Serializable] tag to all randomizer implementations to ensure that the randomizer can be customized and saved within the Unity Editor.
2. The [AddRandomizerMenu] attribute customizes the "Add Randomizer" sub menu path in the scenario inspector for a particular randomizer. In this example, the RotationRandomizer can be added to a scenario by opening the add randomizer menu and clicking `Perception -> Rotation Randomizer`.
3. The line `var taggedObjects = tagManager.Query<RotationRandomizerTag>();` uses RandomizerTags in combination with the current Scenario's tagManager to query for all objects with RotationRandomizerTags to obtain the subset of GameObjects within the simulation that need to have their rotations randomzied. To learn more about how RandomizerTags work, visit the [RandomizerTags doc](RandomizerTags.md).
1. Make sure to add the [Serializable] tag to all Randomizer implementations to ensure that the Randomizer can be customized and saved within the Unity Editor.
2. The [AddRandomizerMenu] attribute customizes the "Add Randomizer" sub menu path in the Scenario inspector for a particular Randomizer. In this example, the RotationRandomizer can be added to a Scenario by opening the _**Add Randomizer**_ menu and clicking `Perception -> Rotation Randomizer`.
3. The line `var taggedObjects = tagManager.Query<RotationRandomizerTag>();` uses RandomizerTags in combination with the current Scenario's tagManager to query for all objects with RotationRandomizerTags and obtain the subset of GameObjects within the simulation that need to have their rotations randomized (a full reconstruction appears in the sketch below). To learn more about how RandomizerTags work, visit the [RandomizerTags documentation page](RandomizerTags.md).
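Putting these points together, here is a hedged reconstruction of such a rotation Randomizer; the namespaces and the `RotationRandomizerTag` type are assumptions based on the fragments in this changeset:
```
using System;
using UnityEngine;
using UnityEngine.Experimental.Perception.Randomization.Parameters;
using UnityEngine.Experimental.Perception.Randomization.Randomizers;

[Serializable]
[AddRandomizerMenu("Perception/My Rotation Randomizer")]
public class MyRotationRandomizer : Randomizer
{
    // Euler angles sampled once per Iteration for each tagged object.
    public Vector3Parameter rotation = new Vector3Parameter();

    protected override void OnIterationStart()
    {
        // Query<T>() returns the tags themselves (see the changelog in
        // this commit), so go through the tag to reach its transform.
        var tags = tagManager.Query<RotationRandomizerTag>();
        foreach (var tag in tags)
            tag.transform.rotation = Quaternion.Euler(rotation.Sample());
    }
}
```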

com.unity.perception/Documentation~/Randomization/Samplers.md (8)


# Samplers
Samplers in the Perception package are classes that deterministically generate random float values from bounded probability distributions. Although Samplers are often used in conjunction with Parameters to generate arrays of typed random values, Samplers can be instantiated and used from any ordinary script:
```
var sampler = new NormalSampler();
sampler.mean = 3;
sampler.standardDeviation = 2; // standard deviation field name assumed for illustration
var sample = sampler.Sample();
```

## Random Seeding
Samplers generate random values that are seeded by the active Scenario's current random state. Changing the Scenario's random seed will result in Samplers generating different values. Changing the order of Samplers, Parameters, or Randomizers will also result in different values being sampled during a simulation.
It is recommended that users do not generate random values using the UnityEngine.Random() class or the System.Random() class within a simulation since both of these classes can potentially generate non-deterministic or improperly seeded random values. Using only Perception Samplers to generate random values will help ensure that Perception simulations generate consistent results during local execution and on Unity Simulation in the cloud.
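For example, instead of calling `UnityEngine.Random.Range()` inside a Randomizer, the same value can be drawn from a properly seeded Sampler (the 0-10 range here is arbitrary):

```
var sampler = new UniformSampler { range = new FloatRange(0f, 10f) };
var value = sampler.Sample();
```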
## Custom Samplers

Samplers have a NativeSamples() method that can schedule a ready-made multi-threaded job intended for generating a large array of samples. Below is an example of how to combine two job handles returned by NativeSamples() to generate two arrays of samples simultaneously:
```
// Create Samplers
var uniformSampler = new UniformSampler { range = new FloatRange(0, 1) };
var normalSampler = new NormalSampler();

// Schedule both sampling jobs (the NativeSamples() argument list is sketched here)
var samples1 = uniformSampler.NativeSamples(1000, out var handle1);
var samples2 = normalSampler.NativeSamples(1000, out var handle2);

// Combine the two job handles and wait for both jobs to complete
JobHandle.CombineDependencies(handle1, handle2).Complete();
```

34
com.unity.perception/Documentation~/Randomization/Scenarios.md


Scenarios have three responsibilities:
1. Controlling the execution flow of your simulation
2. Defining a list of Randomizers
3. Defining constants that can be configured externally from a built Unity player
By default, the Perception package includes one ready-made Scenario, the `FixedLengthScenario` class. This Scenario runs each Iteration for a fixed number of frames and is compatible with the Run in Unity Simulation window for cloud simulation execution.
Users can utilize the Unity Simulation service to execute a Scenario in the cloud through the Perception package's Run in Unity Simulation window. To open this window from the Unity Editor's top menu bar, navigate to `Window -> Run in Unity Simulation`.
2. **Total Iterations** - The number of Scenario Iterations to complete during the run
5. **Scenario** - The Scenario to execute
6. **Sys-Param** - The system parameters, or hardware configuration, of the Unity Simulation worker instances that will execute the Scenario. Determines per-instance specifications such as the number of CPU cores, amount of memory, and presence of a GPU for accelerated execution.
NOTE: To execute a Scenario using the Run in Unity Simulation window, the Scenario class must derive from the UnitySimulationScenario class.
For use cases where the Scenario should run for an arbitrary number of frames, implementing a custom Scenario may be necessary. Below are the two most common Scenario properties a user might want to override to implement custom Scenario Iteration conditions (see the sketch after this list):
1. **isIterationComplete** - determines the conditions that cause the end of a Scenario Iteration
2. **isScenarioComplete** - determines the conditions that cause the end of a Scenario
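As a minimal sketch, a custom Scenario might override these two properties using the Iteration and frame counters exposed by the Scenario base class. The class name, namespace, and fixed thresholds below are illustrative assumptions:

```C#
using System;
using UnityEngine.Experimental.Perception.Randomization.Scenarios;

public class MyCustomScenario : UnitySimulationScenario<MyCustomScenario.Constants>
{
    [Serializable]
    public class Constants : UnitySimulationScenarioConstants { }

    // End the current Iteration after 10 rendered frames
    protected override bool isIterationComplete => currentIterationFrame >= 10;

    // End the Scenario once the configured Iteration count is reached
    protected override bool isScenarioComplete => currentIteration >= constants.totalIterations;
}
```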
Scenarios can be serialized to JSON, modified, and reimported at runtime to configure simulation behavior even after a Unity player has been built. Constants and Randomizer Sampler settings are the two primary sections generated when serializing a Scenario. Note that currently, only numerical Samplers are serialized. Below are the contents of a JSON configuration file created when serializing the Scenario used in Phase 1 of the [Perception Tutorial](../Tutorial/TUTORIAL.md):
```
{
  "constants": {
    "totalIterations": 100,
    ...
  },
  "randomizers": {
    ...
  }
}
```

### Constants
Constants can include properties such as starting Iteration value or total Iteration count, and you can always add your own custom constants. Below is an example of the Constants class used in the `FixedLengthScenario` class:
```
[Serializable]
public class Constants : UnitySimulationScenarioConstants
{
    // Number of frames to run per Iteration (field shown for illustration)
    public int framesPerIteration = 1;
}
```
There are a few key things to note here:
1. The Constants class will need to inherit from `UnitySimulationScenarioConstants` to be compatible with the Run in Unity Simulation window. Deriving from `UnitySimulationScenarioConstants` will add a few key properties to the Constants class that are needed to coordinate a Unity Simulation run.
2. Make sure to include the `[Serializable]` attribute on a Constants class. This will ensure that the Constants can be manipulated from the Unity inspector.
3. A Scenario class's `SerializeToJson()` and `DeserializeFromJson()` methods can be overridden to implement custom serialization strategies.
Follow the instructions below to generate a Scenario configuration file to modify your Scenario Constants and Randomizers in a built player:
1. Click the _**Serialize Constants**_ button in the Scenario's inspector window. This will generate a `scenario_configuration.json` file and place it in the project's Assets/StreamingAssets folder.
2. Build your player. The new player will have a [ProjectName]_Data/StreamingAssets folder. A copy of the `scenario_configuration.json` file previously constructed in the editor will be found in this folder.
3. Change the contents of the `scenario_configuration.json` file. Any running player thereafter will utilize the newly authored values.

21
com.unity.perception/Documentation~/TableOfContents.md


* [Unity Perception Package](index.md)
* [Quick Installation Instructions](SetupSteps.md)
* [Perception Tutorial](Tutorial/TUTORIAL.md)
* Package Documentation
  * [Labeling](GroundTruthLabeling.md)
  * [Perception Camera](PerceptionCamera.md)
  * [Dataset capture](DatasetCapture.md)
  * [Randomization](Randomization/index.md)
    * [Parameters](Randomization/Parameters.md)
    * [Samplers](Randomization/Samplers.md)
    * [Scenarios](Randomization/Scenarios.md)
    * [Tutorial](Randomization/Tutorial.md)

146
com.unity.perception/Documentation~/Tutorial/Phase1.md


In this phase of the Perception Tutorial, you will start by downloading and installing the Unity Editor and the Perception package. You will then use our sample assets and provided components to easily generate a synthetic dataset for training an object-detection model.
Throughout the tutorial, lines starting with bullet points followed by **":green_circle: Action:"** denote the individual actions you will need to perform in order to progress through the tutorial. Non-bulleted lines provide additional context and explanation around the actions. If in a hurry, you can just follow the actions!
Steps included in this phase of the tutorial:
- [Step 1: Download Unity Editor and Create a New Project](#step-1)

- [Step 8: Verify Data Using Dataset Insights](#step-8)
### <a name="step-1">Step 1: Download Unity Editor and Create a New Project</a>
* **:green_circle: Action**: Navigate to [this](https://unity3d.com/get-unity/download/archive) page to download and install the latest version of **Unity Editor 2019.4.x**. (The tutorial has not yet been fully tested on newer versions.)
* **:green_circle: Action**: Make sure the _**Linux Build Support (Mono)**_ and _**Visual Studio**_ installation options are checked when selecting modules during installation.
* **:green_circle: Action**: Open Unity and create a new project using the Universal Render Pipeline. Name your new project _**Perception Tutorial**_, and specify a desired location as shown below.
<p align="center">
<img src="Images/create_new_project.png" align="center" width="800"/>

Once your new project is created and loaded, you will be presented with the Unity Editor interface. From this point on, whenever we refer to _**the editor**_, we mean Unity Editor.
* **:green_circle: Action**: From the top menu bar, open _**Window**_ -> _**Package Manager**_.
* **:green_circle: Action**: Click on the _**+**_ sign at the top-left corner of the _**Package Manager**_ window and then choose the option _**Add package from git URL...**_.
* **:green_circle: Action**: Enter the address `com.unity.perception` and click _**Add**_.
> :information_source: If you would like to install a specific version of the package, you can append the version to the end of the url. For example `com.unity.perception@0.1.0-preview.5`. For this tutorial, **we do not need to add a version**. You can also install the package from a local clone of the Perception repository. More information on installing local packages is available [here](https://docs.unity3d.com/Manual/upm-ui-local.html).
It will take some time for the manager to download and import the package. Once the operation finishes, you will see the newly downloaded Perception package automatically selected in the _**Package Manager**_, as depicted below:
<p align="center">
<img src="Images/package_manager.png" width="600"/>

Each package can come with a set of samples. As seen in the righthand panel, the Perception package includes a sample named _**Tutorial Files**_, which will be required for completing this tutorial. The sample files consist of example foreground and background objects, Randomizers, shaders, and other useful elements to work with during this tutorial. **Foreground** objects are those that the eventual machine learning model will try to detect, and **background** objects will be placed in the background as distractors for the model.
* **:green_circle: Action**: In the _**Package Manager**_ window, from the list of _**Samples**_ for the Perception package, click on the _**Import into Project**_ button for the sample named _**Tutorial Files**_.
Once the sample files are imported, they will be placed inside the `Assets/Samples/Perception` folder in your Unity project. You can view your project's folder structure and access your files from the _**Project**_ tab of the editor, as seen in the image below:

* **:green_circle: Action**: **(For URP projects only)** The _**Project**_ tab contains a search bar; use it to find the file named `ForwardRenderer.asset`, as shown below:
* **:green_circle: Action**: **(For URP projects only)** Click on the found file to select it. Then, from the _**Inspector**_ tab of the editor, click on the _**Add Renderer Feature**_ button, and select _**Ground Truth Renderer Feature**_ from the dropdown menu:
<p align="center">
<img src="Images/forward_renderer_inspector.png" width="400"/>

### <a name="step-3">Step 3: Setup a Scene for Your Perception Simulation</a>
Simply put, in Unity, Scenes contain any object that exists in the world. This world can be a game, or in this case, a perception-oriented simulation. Every new project contains a Scene named `SampleScene`, which is automatically opened when the project is created. This Scene comes with several objects and settings that we do not need, so let's create a new one.
* **:green_circle: Action**: In the _**Project**_ tab, right-click on the `Assets/Scenes` folder and click _**Create -> Scene**_. Name this new Scene `TutorialScene` and **double-click on it to open it**.
The _**Hierarchy**_ tab of the editor displays all the Scenes currently loaded, and all the objects currently present in each loaded Scene, as shown below:
<p align="center">

As seen above, the new Scene already contains a camera (`Main Camera`) and a light (`Directional Light`). We will now modify the camera's field of view and position to prepare it for the tutorial.
* **:green_circle: Action**: Click on `Main Camera` and in the _**Inspector**_ tab, modify the camera's `Position`, `Rotation`, `Projection` and `Size` to match the screenshot below. (Note that `Size` only becomes available once you set `Projection` to `Orthographic`)
<p align="center">
<img src="Images/camera_prep.png"/>

For this tutorial, we prefer our light to not cast any shadows, therefore:
* **:green_circle: Action**: Click on `Directional Light` and in the _**Inspector**_ tab, set `Shadow Type` to `No Shadows`.
We will now add the necessary components to the camera in order to equip it for the Perception workflow. To do this, we need to add a `Perception Camera` component to it, and then define which types of ground-truth we wish to generate using this camera.
* **:green_circle: Action**: Select `Main Camera` again and in the _**Inspector**_ tab, click on the _**Add Component**_ button.
* **:green_circle: Action**: Start typing `Perception Camera` in the search bar that appears, until the `Perception Camera` script is found, with a **#** icon to the left:
* **:green_circle: Action**: Click on this script to add it as a component. Your camera is now a `Perception` camera.
> :information_source: You may now see a warning regarding asynchronous shader compilation in the UI for the `Perception Camera` component. To fix this issue, from the top menu bar go to _**Edit -> Project Settings… -> Editor**_ and under _**Shader Compilation**_ settings, disable _**Asynchronous Shader Compilation**_.
Adding components is the standard way in which objects can have various kinds of logic and data attached to them in Unity. This includes objects placed within the Scene (called GameObjects), such as the camera above, or objects outside of a Scene, in your project folders (called Prefabs).

As seen in the UI for `Perception Camera`, the list of `Camera Labelers` is currently empty. For each type of ground-truth you wish to generate along-side your captured frames (e.g. 2D bounding boxes around objects), you will need to add a corresponding `Camera Labeler` to this list.
To speed up your workflow, the Perception package comes with five common labelers for object-detection tasks; however, if you are comfortable with code, you can also add your own custom labelers. The labelers that come with the Perception package cover **3D bounding boxes, 2D bounding boxes, object counts, object information (pixel counts and ids), and semantic segmentation images (each object rendered in a unique colour)**. We will use four of these in this tutorial.
* **:green_circle: Action**: Click on the _**+**_ button at the bottom right corner of the empty labeler list and select `BoundingBox2DLabeler`.
* **:green_circle: Action**: Repeat the above step to add `ObjectCountLabeler`, `RenderedObjectInfoLabeler`, `SemanticSegmentationLabeler`.
Once you add the labelers, the _**Inspector**_ view of the `Perception Camera` component will look like this:

You will notice each added labeler has a `Label Config` field. By adding a label configuration here you can instruct the labeler to look for certain labels within the scene and ignore the rest. To do that, we should first create label configurations.
* **:green_circle: Action**: In the _**Project**_ tab, right-click the `Assets` folder, then click _**Create -> Perception -> Id Label Config**_.
* **:green_circle: Action**: Rename the newly created `IdLabelConfig` asset to `TutorialIdLabelConfig`.
Click on this asset to bring up its _**Inspector**_ view. In there, you can specify the labels that this config will keep track of. You can type in labels, add any labels defined in the project (through being added to prefabs), and import/export this label config as a JSON file. A new label config like this one contains an empty list of labels.

* **:green_circle: Action**: In the _**Project**_ tab, right-click the `Assets` folder, then click _**Create -> Perception -> Semantic Segmentation Label Config**_. Name this asset `TutorialSemanticSegmentationLabelConfig`.
* **:green_circle: Action**: Select the `Main Camera` object from the Scene _**Hierarchy**_, and in the _**Inspector**_ tab, assign the newly created `TutorialIdLabelConfig` to the first three labelers. To do so, you can either drag and drop the former into the corresponding fields for each labeler, or click on the small circular button in front of the `Id Label Config` field, which brings up an asset selection window filtered to only show compatible assets. Assign `TutorialSemanticSegmentationLabelConfig` to the fourth labeler. The `Perception Camera` component will now look like the image below:
<p align="center">
<img src="Images/pclabelconfigsadded.png" width="400"/>

In Unity, Prefabs are essentially reusable GameObjects that are stored to disk, along with all their child GameObjects, components, and property values. Let's see what our sample prefabs include.
* **:green_circle: Action**: In the _**Project**_ tab, navigate to `Assets/Samples/Perception/0.6.0-preview.1/Tutorial Files/Foreground Objects/Phase 1/Prefabs`
* **:green_circle: Action**: Double click the file named `drink_whippingcream_lucerne.prefab` to open the Prefab asset.
When you open the Prefab asset, you will see the object shown in the Scene tab and its components shown on the right side of the editor, in the _**Inspector**_ tab:

Note that each object can have multiple labels assigned, and thus appear as different objects to labelers with different label configurations. For instance, you may want your semantic segmentation labeler to detect all cream cartons as `dairy_product`, while your bounding box labeler still distinguishes between different types of dairy product. To achieve this, you can add a `dairy_product` label to all your dairy products, and then in your label configuration for semantic segmentation, only add the `dairy_product` label, and not any specific products or brand names.
For this tutorial, we have already prepared the foreground Prefabs for you and added the `Labeling` component to all of them. These Prefabs were based on 3D scans of the actual grocery items. If you are making your own Prefabs, you can easily add a `Labeling` component to them using the _**Add Component**_ button visible in the bottom right corner of the screenshot above.
> :information_source: If you are interested in knowing more about the process of creating Unity compatible 3D models for use with the Perception package, you can visit [this page](https://github.com/Unity-Technologies/SynthDet/blob/master/docs/CreatingAssets.md). Once you have 3D models in `.fbx` format, the Perception package lets you quickly create Prefabs from multiple models. Just select all your models and from the top menu bar select _**Assets -> Perception -> Create Prefabs from Selected Models**_. The newly created Prefabs will be placed in the same folders as their corresponding models.
* **:green_circle: Action**: Select **all the files** inside the `Assets/Samples/Perception/0.6.0-preview.1/Tutorial Files/Foreground Objects/Phase 1/Prefabs` folder.
* **:green_circle: Action**: From the _**Inspector**_ tab, enable `Use Automatic Labeling for All Selected Items`, and then select `Use asset name` as the labeling scheme.
<p align="center">
<img src="Images/autolabel.png" width="400"/>

* **:green_circle: Action**: Click _**Add Automatic Labels of All Selected Assets to Config...**_.
* **:green_circle: Action**: Add the list of labels to `TutorialIdLabelConfig` and `TutorialSemanticSegmentationLabelConfig` by clicking the _**Add All Labels**_ button for both.
<p align="center">

<img src="Images/labelconfigs.png" width="800"/>
</p>
> :information_source: Since we used automatic labels here and added them to our configurations, we are confident that the labels in the configurations match the labels of our objects. In cases where you decide to add manual labels to objects and configurations, make sure you use the exact same labels, otherwise, the objects for which a matching label is not found in your configurations will not be detected by the labelers that are using those configurations.
* **:green_circle: Action**: In the _**Project**_ tab, navigate to `Assets/Samples/Perception/0.6.0-preview.1/Tutorial Files/Foreground Objects/Phase 1/Prefabs`.
* **:green_circle: Action**: Drag and drop any of the Prefabs inside this folder into the Scene.
* **:green_circle: Action**: Click on the **▷** (play) button located at the top middle section of the editor to run your simulation.
Since we have visualizations enabled on our `Perception Camera`, you should now see a bounding box being drawn around the object you put in the scene, and the object itself being colored according to its label's color in `TutorialSemanticSegmentationLabelConfig`, similar to the image below:

In this tutorial, you will learn how to use the provided Randomizers, as well as how to create new ones that are custom-fitted to your randomization needs.
* **:green_circle: Action**: Create a new GameObject in your Scene by right-clicking in the _**Hierarchy**_ tab and clicking `Create Empty`.
* **:green_circle: Action**: Rename your new GameObject to `Simulation Scenario`.
* **:green_circle: Action**: In the _**Inspector**_ view of this new object, add a new `Fixed Length Scenario` component.
Each `Scenario` executes a number of `Iteration`s, and each Iteration carries on for a number of frames. These are timing elements you can leverage in order to customize your Scenarios and the timing of your randomizations. You will learn how to use Iterations and frames in Phase 2 of this tutorial. For now, we will use the `Fixed Length Scenario`, which is a special kind of Scenario that runs for a fixed number of frames during each Iteration, and is sufficient for many common use-cases. Note that at any given time, you can have only one Scenario active in your Scene.

There are a number of settings and properties you can modify here. `Quit On Complete` instructs the simulation to quit once this Scenario has completed executing. We can see here that the Scenario has been set to run for 100 Iterations, and that each Iteration will run for one frame. But this is currently an empty `Scenario`, so let's add some Randomizers.
* **:green_circle: Action**: Click _**Add Randomizer**_, and from the list choose `BackgroundObjectPlacementRandomizer`.
* **:green_circle: Action**: Click _**Add Folder**_, and from the file explorer window that opens, choose the folder `Assets/Samples/Perception/0.6.0-preview.1/Tutorial Files/Background Objects/Prefabs`.
* **:green_circle: Action**: Set the rest of the properties according to the image below. That is, `Depth = 0, Layer Count = 2, Separation Distance = 0.5, Placement Area = (6,6)`.
<p align="center">
<img src="Images/background_randomizer.png" width = "400"/>

* **:green_circle: Action**: Click on the **▷** (play) button located at the top middle section of the editor to run your simulation.
<p align="center">
<img src="Images/play.png" width = "500"/>

As seen in the image above, what we have now is just a beige-colored wall of shapes. This is because so far, we are only spawning them, and the beige color of our light is what gives them their current look. To make this background more useful, let's add a couple more `Randomizers`.
> :information_source: If at this point you don't see any objects being displayed, make sure the Placement Area for `BackgroundObjectPlacementRandomizer` is (6,6) and not (0,0).
> :information_source: If your _**Game**_ tab has a different field of view than the one shown here, change the aspect ratio of your _**Game**_ tab to `4:3`, as shown below:
* **:green_circle: Action**: Repeat the previous steps to add `TextureRandomizer`, `HueOffsetRandomizer`, and `RotationRandomizer`.
* **:green_circle: Action**: In the UI snippet for `TextureRandomizer`, click _**Add Folder**_ and choose `Assets/Samples/Perception/0.6.0-preview.1/Tutorial Files/Background Textures`.
* **:green_circle: Action**: In the UI snippet for `RotationRandomizer`, verify that all the minimum values for the three ranges are `0` and that maximum values are `360`.
Your list of Randomizers should now look like the screenshot below:

To make sure each Randomizer knows which objects it should work with, we will use an object tagging and querying workflow that the bundled Randomizers already use. Each Randomizer can query the Scene for objects that carry certain types of `RandomizerTag` components. For instance, the `TextureRandomizer` queries the Scene for objects that have a `TextureRandomizerTag` component (you can change this in code!). Therefore, in order to make sure our background Prefabs are affected by the `TextureRandomizer` we need to make sure they have `TextureRandomizerTag` attached to them.
* **:green_circle: Action**: In the _**Project**_ tab, navigate to `Assets/Samples/Perception/0.6.0-preview.1/Tutorial Files/Background Objects/Prefabs`.
* **:green_circle: Action**: Select all the files inside and from the _**Inspector**_ tab add a `TextureRandomizerTag` to them. This will add the component to all the selected files.
* **:green_circle: Action**: Repeat the above step to add `HueOffsetRandomizerTag` and `RotationRandomizerTag` to all selected Prefabs.
Once the above step is done, the _**Inspector**_ tab for a background Prefab should look like this:

It is now time to spawn and randomize our foreground objects.
* **:green_circle: Action**: Add `ForegroundObjectPlacementRandomizer` to your list of Randomizers. Click _**Add Folder**_ and select `Assets/Samples/Perception/0.6.0-preview.1/Tutorial Files/Foreground Objects/Phase 1/Prefabs`.
* **:green_circle: Action**: Set these values for the above Randomizer: `Depth = -3, Separation Distance = 1.5, Placement Area = (5,5)`.
* **:green_circle: Action**: From the _**Project**_ tab select all the foreground Prefabs located in `Assets/Samples/Perception/0.6.0-preview.1/Tutorial Files/Foreground Objects/Phase 1/Prefabs`, and add a `RotationRandomizerTag` component to them.
* **:green_circle: Action**: Drag `ForegroundObjectPlacementRandomizer` using the striped handle bar (on its left side) and drop it above `RotationRandomizer`.
Your full list of Randomizers should now look like the screenshot below:

You are now ready to generate your first dataset. Our current setup will produce 100 frames of annotated captures.
* **:green_circle: Action**: Click **▷** (play) again and this time let the simulation finish. This should take only a few seconds.
While the simulation is running, your _**Game**_ view will quickly generate frames similar to the gif below (note: visualization for `SemanticSegmentationLabeler` is disabled here):

<img src="Images/dataset_written.png"/>
</p>
* **:green_circle: Action**: Navigate to the dataset path addressed in the _**Console**_.
- RGB images (raw camera output) (if the `Save Camera Output to Disk` check mark is enabled on `Perception Camera`)
* **:green_circle: Action**: To get a quick feel of how the data is stored, open the folder whose name starts with `Dataset`, then open the file named `captures_000.json`. This file contains the output from `BoundingBox2DLabeler`. The `captures` array contains the position and rotation of the sensor (camera), the position and rotation of the ego (sensor group, currently only one), and the annotations made by `BoundingBox2DLabeler` for all visible objects defined in its label configuration. For each visible object, the annotations include (see the example after this list):
* `label_id`: The numerical id assigned to this object's label in the labeler's label configuration
* `label_name`: The object's label, e.g. `candy_minipralines_lindt`
* `instance_id`: Unique instance id of the object
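For instance, a single annotation entry might look like the following. The values are made up, and the `x`, `y`, `width`, and `height` bounding-box fields shown alongside the fields listed above are assumed here to be the 2D box's top-left pixel coordinates and pixel dimensions:

```json
{
  "label_id": 25,
  "label_name": "candy_minipralines_lindt",
  "instance_id": 1,
  "x": 96.0,
  "y": 192.0,
  "width": 130.0,
  "height": 271.0
}
```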

* **:green_circle: Action**: Review the JSON meta-data and the images captured for the first annotated frame, and verify that the objects within them match.
### <a name="step-8">Step 8: Verify Data Using Dataset Insights</a>

* **:green_circle: Action**: Download and install [Docker Desktop](https://www.docker.com/products/docker-desktop)
* **:green_circle: Action**: Open a command line interface (Command Prompt on Windows, Terminal on Mac OS, etc.) and type the following command to run the Dataset Insights Docker image:
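The command will look something like the sketch below; the image name and tag are assumptions based on the publicly available Dataset Insights image, and you should substitute the full path to your own dataset folder:

```
docker run -p 8888:8888 -v <path/to/your/dataset>:/data -t unitytechnologies/datasetinsights:latest
```

This maps port 8888 of the container to your machine (which is why the notebook is reachable at `http://localhost:8888` below) and mounts your dataset at `/data` inside the container.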
* **:green_circle: Action**: The image is now running on your computer. Open a web browser and navigate to `http://localhost:8888` to open the Jupyter notebook:
* **:green_circle: Action**: To make sure your data is properly mounted, navigate to the `data` folder. If you see the dataset's folders there, we are good to go.
* **:green_circle: Action**: Navigate to the `datasetinsights/notebooks` folder and open `Perception_Statistics.ipynb`.
* **:green_circle: Action**: Once in the notebook, remove the `/<GUID>` part of the `data_root = /data/<GUID>` path. Since the dataset root is already mapped to `/data`, you can use this path directly.
<p align="center">
<img src="Images/jupyter2.png"/>

</p>
* **:green_circle: Action**: Follow the instructions laid out in the notebook and run each code block to view its outputs.
This concludes Phase 1 of the Perception Tutorial. In the next phase, you will dive a little bit into randomization code and learn how to build your own custom Randomizer.
**[Continue to Phase 2: Custom Randomizations](Phase2.md)**

82
com.unity.perception/Documentation~/Tutorial/Phase2.md


We need to create two C# classes for our light randomization, `MyLightRandomizer` and `MyLightRandomizerTag`. The first of these will sample random values and assign them to the intensity and color of the light, and the second class will be the component that will be added to `Directional Light`, making it a target of `MyLightRandomizer`.
* **:green_circle: Action**: In the _**Project**_ tab, right-click on the `Scripts` folder and select _**Create -> C# Script**_. Name your new script file `MyLightRandomizer.cs`.
* **:green_circle: Action**: Create another script and name it `MyLightRandomizerTag.cs`.
* **:green_circle: Action**: Double-click `MyLightRandomizer.cs` to open it in _**Visual Studio**_.
* **:green_circle: Action**: Remove the contents of the class and copy/paste the code below:
```C#
using System;
using UnityEngine;
using UnityEngine.Experimental.Perception.Randomization.Parameters;
using UnityEngine.Experimental.Perception.Randomization.Randomizers;

[Serializable]
[AddRandomizerMenu("Perception/My Light Randomizer")]
public class MyLightRandomizer : Randomizer
{
    public FloatParameter lightIntensityParameter;

    protected override void OnIterationStart()
    {
        var tags = tagManager.Query<MyLightRandomizerTag>();
        foreach (var tag in tags)
        {
            var light = tag.GetComponent<Light>();
            light.intensity = lightIntensityParameter.Sample();
        }
    }
}
```

The purpose of this piece of code is to obtain a random float Parameter and assign it to the light's `Intensity` field on the start of every Iteration. Let's go through the code above and understand each part. The `FloatParameter` field makes it possible for us to define a randomized float Parameter and modify its properties from the editor UI, similar to how we already modified the properties for the previous Randomizers we used.
> :information_source: If you look at the _**Console**_ tab of the editor now, you will see an error regarding `MyLightRandomizerTag` not being found. This is to be expected, since we have not yet created this class; the error will go away once we create the class later.
* **:green_circle: Action**: Add `MyLightRandomizer` to the list of Randomizers in `SimulationScenario`.
You will notice that the Randomizer's UI snippet contains one Parameter named `Light Intensity Parameter`. This is the same Parameter we added in the code block above. Here, you can set the sampling distribution (`Value`) and `Range` for this float Parameter:

* **:green_circle: Action**: In the UI snippet for `MyLightRandomizer`, set the minimum and maximum for range to 0.5 and 3.
This range of intensities is arbitrary, but will typically give us pleasant lighting without excessive darkness or blown-out highlights.

* **:green_circle: Action**: Open `MyLightRandomizerTag.cs` and replace its contents with the code below:
```C#
using UnityEngine;
using UnityEngine.Experimental.Perception.Randomization.Randomizers;

[RequireComponent(typeof(Light))]
public class MyLightRandomizerTag : RandomizerTag { }
```

Notice there is a `RequireComponent(typeof(Light))` line at the top. This line makes it so that you can only add the `MyLightRandomizerTag` component to an object that already has a `Light` component attached. This way, the Randomizers that query for this tag can be confident that the found objects have a `Light` component and can thus be randomized.
* **:green_circle: Action**: Select `Directional Light` in the Scene's _**Hierarchy**_, and in the _**Inspector**_ tab, add a `My Light Randomizer Tag` component.
* **:green_circle: Action**: Run the simulation again and inspect how `Directional Light` now switches between different intensities. You can pause the simulation and then use the step button (to the right of the pause button) to move the simulation one frame forward and clearly see the varying light intensity.
* **:green_circle: Action**: Back inside `MyLightRandomizer.cs`, define a new `ColorRgbParameter`:
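A single field declaration is sufficient; the name below is chosen to match the `lightColorParameter` used in the sampling code that follows:

```C#
public ColorRgbParameter lightColorParameter;
```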
* **:green_circle: Action**: Inside the code block that intensity was previously applied, add code for sampling color from the above Parameter and applying it:
```C#
foreach (var tag in tags)
{
    var light = tag.GetComponent<Light>();
    light.intensity = lightIntensityParameter.Sample();
    light.color = lightColorParameter.Sample();
}
```
If you now check the UI snippet for `MyLightRandomizer`, you will notice that `Color Parameter` is added. This Parameter includes four separate randomized values for `Red`, `Green`, `Blue` and `Alpha`. Note that the meaningful range for all of these values is 0-1 (and not 0-255). You can see that the sampling range for red, green, and blue is currently also set to 0-1, which means the Parameter covers a full range of colors. A color with (0,0,0) RGB components essentially emits no light. So, let's increase the minimum a bit to avoid such a scenario.
* **:green_circle: Action**: Increase the minimum value for red, green, and blue components to 0.4 (this is an arbitrary number that typically produces good-looking results).
The UI for `My Light Randomizer` should now look like this:

* **:green_circle: Action**: Run the simulation for a few frames to observe the lighting color changing on each Iteration of the Scenario.
### <a name="step-2">Step 2: Bundle Data and Logic Inside RandomizerTags</a>

Let's try this approach with our `Directional Light` object. We will create a duplicate of this light and then have the two lights use different ranges of intensity while both using the exact same float Parameter from `MyLightRandomizer.cs`.
* **:green_circle: Action**: Right-click on `Directional Light` in the Scene _**Hierarchy**_ and select _**Duplicate**_. The new light will automatically be named `Directional Light (1)`.
* **:green_circle: Action**: Change the Y rotation of `Directional Light (1)` to 60, as shown below:
* **:green_circle: Action**: Change the Y rotation of `Directional Light` to -60.
* **:green_circle: Action**: Open `MyLightRandomizerTag.cs` and modify it to match the code below:
```C#
using UnityEngine;
using UnityEngine.Experimental.Perception.Randomization.Randomizers;

[RequireComponent(typeof(Light))]
public class MyLightRandomizerTag : RandomizerTag
{
    public float minIntensity;
    public float maxIntensity;

    // Maps a raw sample in [0, 1] onto this light's own intensity range
    public void SetIntensity(float rawIntensity)
    {
        var light = GetComponent<Light>();
        light.intensity = rawIntensity * (maxIntensity - minIntensity) + minIntensity;
    }
}
```

This component is already added to both our lights. We now need to set our desired minimum and maximum intensities, and this can be done through the _**Inspector**_ view.
* **:green_circle: Action**: Select `Directional Light` and from the **Inspector** UI for the `MyLightRandomizerTag` component, set `Min Intensity` to 0.5 and `Max Intensity` to 3.
* **:green_circle: Action**: Repeat the above step for `Directional Light (1)` and set `Min Intensity` to 0 and `Max Intensity` to 0.4.
* **:green_circle: Action**: Select `SimulationScenario` and from the UI snippet for `My Light Randomizer`, change the range for `Light Intensity Parameter` from (0.5,3.5) to (0,1).
* **:green_circle: Action**: Open `MyLightRandomizer.cs` and modify it as seen below:
```C#
using System;
using UnityEngine;
using UnityEngine.Experimental.Perception.Randomization.Parameters;

protected override void OnIterationStart()
{
    var tags = tagManager.Query<MyLightRandomizerTag>();
    foreach (var tag in tags)
    {
        var light = tag.GetComponent<Light>();
        light.color = lightColorParameter.Sample();
        tag.SetIntensity(lightIntensityParameter.Sample());
    }
}
```

Notice how we now query `tagManager` for `MyLightRandomizerTag` components and use each tag's `SetIntensity` function instead of directly setting the intensity of the `Light` component.
* **:green_circle: Action**: Run your simulation, then pause it. Go to the _**Scene**_ view and inspect the color and intensity of each of the lights. Try turning each on and off to see how they affect the current frame.
**[Continue to Phase 3: Cloud](Phase3.md)**

93
com.unity.perception/Documentation~/Tutorial/Phase3.md


In order to use Unity Simulation, you need to first create a Unity account or log in with your existing one. Once logged in, you will also need to sign up for Unity Simulation.
* **:green_circle: Action**: Click on the _**Cloud**_ button at the top-right corner of Unity Editor to open the _**Services**_ tab.
<p align="center">
<img src="Images/cloud_icon.png" width="400"/>
</p>
<p align="center">
<img src="Images/signin.png" width="400"/>
</p>
* **:green_circle: Action**: Click _**Sign in...**_ and follow the steps in the window that opens to sign in or create an account.
* **:green_circle: Action**: Sign up for a free trial of Unity Simulation [here](https://unity.com/products/unity-simulation).
Unity Simulation is a cloud-based service that makes it possible for you to run hundreds of instances of Unity builds in order to generate massive amounts of data. The Unity Simulation service is billed on a per-usage basis, and the free trial offers up to $100 of free credit per month. In order to access the free trial, you will need to provide credit card information. **This information will be used to charge your account if you exceed the $100 monthly credit.** A list of hourly and daily rates for various computational resources is available on the page where you first register for Unity Simulation.

* **:green_circle: Action**: Return to Unity Editor. In the _**Services**_ tab click _**Select Organization**_ and choose the only available option (which typically has the same name as your Unity username).
If you have used Unity before, you might have set up multiple organizations for your account. In that case, choose whichever one you would like to associate with this project.

* **:green_circle: Action**: Click _**Create**_ to create a new cloud project and connect your local project to it.
### <a name="step-2">Step 2: Run Project on Unity Simulation</a>

* **:green_circle: Action**: From the _**Inspector**_ view of `Perception Camera`, disable real-time visualizations.
* **:green_circle: Action**: From the top menu bar, open _**Edit -> Project Settings**_.
* **:green_circle: Action**: In the window that opens, navigate to the _**Player**_ tab, find the _**Scripting Backend**_ setting (under _**Other Settings**_), and change it to _**Mono**_:
* **:green_circle: Action**: Change _**Fullscreen Mode**_ to _**Windowed**_ and set a width and height of 800 by 600.
* **:green_circle: Action**: Close _**Project Settings**_.
* **:green_circle: Action**: From the top menu bar, open _**Window -> Run in Unity Simulation**_.
* **:green_circle: Action**: Choose `TutorialScene` (which is the Scene we have been working in) as your _**Main Scene**_ and the `SimulationScenario` object as your _**Scenario**_.
Here, you can also specify a name for the run, the number of Iterations the Scenario will execute for, and the number of _**Instances**_ (number of nodes the work will be distributed across) for the run.
* **:green_circle: Action**: Name your run `FirstRun`, set the number of Iterations to `1000`, and Instances to `20`.
* **:green_circle: Action**: Click _**Build and Run**_.
Your project will now be built and then uploaded to Unity Simulation. Depending on the upload speed of your internet connection, this may take anywhere from a few seconds to several minutes, during which the Editor may become unresponsive; this is normal behavior.
* **:green_circle: Action**: Once the operation is complete, you can find the **Build ID**, **Run Definition ID**, and **Execution ID** of this Unity Simulation run in the _**Console**_ tab:
<p align="center">
<img src="Images/build_uploaded.png"/>
</p>
To keep track of the progress of your Unity Simulation run, you will need to use Unity Simulation's command-line interface (CLI). Detailed instructions for this CLI are provided [here](https://github.com/Unity-Technologies/Unity-Simulation-Docs/blob/master/doc/quickstart.md#download-unity-simulation-quickstart-materials). For the purposes of this tutorial, we will only go through the most essential commands, which will help us know when our Unity Simulation run is complete and where to find the produced dataset.
* **:green_circle: Action**: Download the latest version of `unity_simulation_bundle.zip` from [here](https://github.com/Unity-Technologies/Unity-Simulation-Docs/releases).
> :information_source: If you are using a MacOS computer, we recommend using the _**curl**_ command from the Terminal to download the file, in order to avoid issues caused by the MacOS Gatekeeper when using the CLI. You can use these commands:
```
curl -Lo ~/Downloads/unity_simulation_bundle.zip <URL-unity_simulation_bundle.zip>
unzip ~/Downloads/unity_simulation_bundle.zip -d ~/Downloads/unity_simulation_bundle
```

* **:green_circle: Action**: Extract the zip archive you downloaded.
* **:green_circle: Action**: Open a command-line interface (Terminal on MacOS, cmd on Windows, etc.) and navigate to the extracted folder.
If you downloaded the zip archive in the default location in your downloads folder, you can use these commands to navigate to it from the command-line:

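MacOS:
`cd ~/Downloads/unity_simulation_bundle`

This path is an assumption based on the `unzip` destination shown earlier; adjust it if you extracted the archive elsewhere.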
You will now be using the _**usim**_ executable to interact with Unity Simulation through commands.
* **:green_circle: Action**: To see a list of available commands, simply run `usim` once:
MacOS:
`USimCLI/mac/usim`

The first step is to log in.
* **:green_circle: Action**: Log in to Unity Simulation using the `usim login auth` command.
MacOS:
`USimCLI/mac/usim login auth`

`Press [ENTER] to open your browser to ...`
* **:green_circle: Action**: Press Enter to open a browser window for logging in.
Once you have logged in, you will see this page:

> :warning: On MacOS, you might get errors related to permissions. If that is the case, modify the permissions on the `~/.usim` folder and its contents to give your user full read and write permission.
> :information_source: From this point on we will only include MacOS-formatted commands in the tutorial, but all the `usim` commands we use work on all supported operating systems.
* **:green_circle: Action**: Return to your command-line interface. Get a list of cloud projects associated with your Unity account using the `usim get projects` command:
MacOS:
`USimCLI/mac/usim get projects`

```
SynthDet 9ec23417-73cd-becd-9dd6-556183946153 2020-08-12T19:46:20+00:00
```
In case you have more than one cloud project, you will need to "activate" the one corresponding with your Perception Tutorial project. If there is only one project, it is already activated, and you will not need to execute the command below (note: replace `<project-id>` with the id of your desired project).
* **:green_circle: Action**: Activate the relevant project:
MacOS:
`USimCLI/mac/usim activate project <project-id>`

Now that we have made sure the correct project is active, we can get a list of all the current and past runs for the project.
* **:green_circle: Action**: Use the `usim get runs` command to obtain a list of current and past runs:
MacOS:
`USimCLI/mac/usim get runs`

You may notice that the IDs seen above for the run named `FirstRun` match those we saw earlier in Unity Editor's _**Console**_. You can see here that the single execution for our recently uploaded build is `In_Progress` and that the execution ID is `yegz4WN`.
Unity Simulation utilizes the ability to run simulation Instances in parallel. If you enter a number larger than 1 for the number of Instances in the _**Run in Unity Simulation**_ window, your run will be parallelized, and multiple simulation Instances will simultaneously execute. You can view the status of all simulation Instances using the `usim summarize run-execution <execution-id>` command. This command will tell you how many Instances have succeeded, failed, have not run yet, or are in progress. Make sure to replace `<execution-id>` with the execution ID seen in your run list. In the above example, this ID would be `yegz4WN`.
* **:green_circle: Action**: Use the `usim summarize run-execution <execution-id>` command to observe the status of your execution nodes:
MacOS:
`USimCLI/mac/usim summarize run-execution <execution-id>`

At this point, we will need to wait until the execution is complete. Check your run with the above command periodically until `In Progress` reaches 0 and all of your Instances are listed under `Successes`.
Given the relatively small size of our Scenario (1,000 Iterations), this should take less than 5 minutes.
* **:green_circle: Action**: Use the `usim summarize run-execution <execution-id>` command periodically to check the progress of your run.
* **:green_circle: Action**: When execution is complete, use the `usim download manifest <execution-id>` command to download the execution's manifest:
MacOS:
`USimCLI/mac/usim download manifest <execution-id>`

* **:green_circle: Action**: Open the manifest file to check it. Make sure there are links to various types of output and check a few of the links to see if they work.
### <a name="step-4">Step 4: Analyze the Dataset using Dataset Insights</a>

* **:green_circle: Action**: Open the Dataset Insights Jupyter notebook again, using the command below:
`docker run -p 8888:8888 -v <download path>/data:/data -t unitytechnologies/datasetinsights:latest`
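For example, with a hypothetical download path of `/Users/me/usim-data`, the command would be:

`docker run -p 8888:8888 -v /Users/me/usim-data/data:/data -t unitytechnologies/datasetinsights:latest`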

* **:green_circle: Action**: Open a web browser and navigate to `http://localhost:8888` to open the Jupyter notebook.
* **:green_circle: Action**: Navigate to the `datasetinsights/notebooks` folder and open `Perception_Statistics.ipynb`.
* **:green_circle: Action**: In the `data_root = /data/<GUID>` line, the `<GUID>` part denotes a subfolder inside your `<download path>` where the data would be placed. You can simply remove it so that the data is downloaded directly to the path you previously specified:
<p align="center">
<img src="Images/di_usim_1.png"/>
</p>
* **:green_circle: Action**: In the block of code titled "Unity Simulation [Optional]", uncomment the lines that assign values to variables, and insert the correct values, based on information from your Unity Simulation run.
* **:green_circle: Action**: Return to your command-line interface and run the `usim inspect auth` command.
MacOS:
`USimCLI/mac/usim inspect auth`

The `access_token` you need for your Dataset Insights notebook is the access token shown by the above command, minus the `'Bearer '` part. So, in this case, we should input `0CfQbhJ6gjYIHjC6BaP5gkYn1x5xtAp7ZA9I003fTNT1sFp` in the notebook.
* **:green_circle: Action**: Copy the access token excluding the `'Bearer '` part to the corresponding field in the Dataset Insights notebook.
Once you have entered all the information, the block of code should look like the screenshot below (the actual values you input will be different):

* **:green_circle: Action**: Continue to the next code block and run it to download all the metadata files from the generated dataset. This includes JSON files and logs but does not include images (which will be downloaded later).
You will see a progress bar while the data downloads:

The next couple of code blocks (under "Load dataset metadata") analyze the downloaded metadata and display a table containing annotation-definition-ids for the various metrics defined in the dataset.
* **:green_circle: Action**: Once you reach the code block titled "Built-in Statistics", make sure the value assigned to the field `rendered_object_info_definition_id` matches the id displayed for this metric in the table output by the code block immediately before it. The screenshot below demonstrates this (note that your ids might differ from the ones here):
<p align="center">
<img src="Images/di_usim_4.png"/>
</p>
This concludes the Perception Tutorial. The next step in this workflow would be to train an object-detection model using a dataset generated on Unity Simulation. It is important to note that the dataset of 1,000 Iterations we generated here is probably not sufficiently large for training most models. We chose this number so that the run would complete in a fairly short period of time, allowing us to move on to learning how to analyze the statistics of the dataset. In order to generate data for training, we recommend a dataset of about 400,000 captures.

6
com.unity.perception/Documentation~/Tutorial/TUTORIAL.md


## [Phase 2: Custom Randomizations](Phase2.md)
In order to get the best out of computer vision models, the training data needs to contain a large degree of variation. This is achieved through randomizing various aspects of your simulation between captured frames. While you will use basic randomizations in Phase 1, Phase 2 of the tutorial will help you learn how to randomize your simulations in more complex ways by guiding you through writing your first customized Randomizer in C# code. Once you complete this phase, you will know how to:
* Create custom Randomizers by extending our provided samples.
* Coordinate the operation of several Randomizers by specifying their order of execution and the objects they affect.
* Have objects specify criteria (e.g. ranges, means, etc.) and logic (e.g. unique behaviors) for their randomizable attributes.
## [Phase 3: Cloud](Phase3.md)

882
com.unity.perception/Documentation~/images/PerceptionCameraFinished.png

Before  After
Width: 1222  |  Height: 1482  |  Size: 224 KiB

68
com.unity.perception/Editor/GroundTruth/PerceptionCameraEditor.cs


serializedObject.ApplyModifiedProperties();
}
const string k_FrametimeTitle = "Simulation Delta Time";
EditorGUILayout.PropertyField(serializedObject.FindProperty(nameof(perceptionCamera.description)), new GUIContent("Description", "Provide a description for this camera (optional)."));
EditorGUILayout.PropertyField(serializedObject.FindProperty(nameof(perceptionCamera.showVisualizations)), new GUIContent("Show Labeler Visualizations", "Display realtime visualizations for labelers that are currently active on this camera."));
EditorGUILayout.PropertyField(serializedObject.FindProperty(nameof(perceptionCamera.captureRgbImages)),new GUIContent("Save Camera RGB Output to Disk", "For each captured frame, save an RGB image of the camera's output to disk."));
EditorGUILayout.PropertyField(serializedObject.FindProperty(nameof(perceptionCamera.captureTriggerMode)),new GUIContent("Capture Trigger Mode", $"The method of triggering captures for this camera. In {nameof(CaptureTriggerMode.Scheduled)} mode, captures happen automatically based on a start frame and frame delta time. In {nameof(CaptureTriggerMode.Manual)} mode, captures should be triggered manually through calling the {nameof(perceptionCamera.RequestCapture)} method of {nameof(PerceptionCamera)}."));
GUILayout.Space(5);
if (perceptionCamera.captureTriggerMode.Equals(CaptureTriggerMode.Scheduled))
{
GUILayout.BeginVertical("TextArea");
EditorGUILayout.LabelField("Scheduled Capture Properties", EditorStyles.boldLabel);
EditorGUILayout.PropertyField(serializedObject.FindProperty(nameof(perceptionCamera.simulationDeltaTime)),new GUIContent(k_FrametimeTitle, $"Sets Unity's Time.{nameof(Time.captureDeltaTime)} to the specified number, causing a fixed number of frames to be simulated for each second of elapsed simulation time regardless of the capabilities of the underlying hardware. Thus, simulation time and real time will not be synchronized."));
EditorGUILayout.PropertyField(serializedObject.FindProperty(nameof(perceptionCamera.firstCaptureFrame)), new GUIContent("Start at Frame",$"Frame number at which this camera starts capturing."));
EditorGUILayout.PropertyField(serializedObject.FindProperty(nameof(perceptionCamera.framesBetweenCaptures)),new GUIContent("Frames Between Captures", "The number of frames to simulate and render between the camera's scheduled captures. Setting this to 0 makes the camera capture every frame."));
var interval = (perceptionCamera.framesBetweenCaptures + 1) * perceptionCamera.simulationDeltaTime;
var startTime = perceptionCamera.simulationDeltaTime * perceptionCamera.firstCaptureFrame;
EditorGUILayout.HelpBox($"First capture at {startTime} seconds and consecutive captures every {interval} seconds of simulation time.", MessageType.None);
GUILayout.EndVertical();
}
else
{
GUILayout.BeginVertical("TextArea");
EditorGUILayout.LabelField("Manual Capture Properties", EditorStyles.boldLabel);
EditorGUILayout.PropertyField(serializedObject.FindProperty(nameof(perceptionCamera.manualSensorAffectSimulationTiming)),new GUIContent("Affect Simulation Timing", $"Have this camera affect simulation timings (similar to a scheduled camera) by requesting a specific frame delta time."));
if (perceptionCamera.manualSensorAffectSimulationTiming)
{
EditorGUILayout.PropertyField(serializedObject.FindProperty(nameof(perceptionCamera.simulationDeltaTime)),new GUIContent(k_FrametimeTitle, $"Sets Unity's Time.{nameof(Time.captureDeltaTime)} to the specified number, causing a fixed number of frames to be generated for each second of elapsed simulation time regardless of the capabilities of the underlying hardware. Thus, simulation time and real time will not be synchronized."));
}
EditorGUILayout.HelpBox($"Captures should be triggered manually through calling the {nameof(perceptionCamera.RequestCapture)} method of {nameof(PerceptionCamera)}.", MessageType.None);
GUILayout.EndVertical();
}
//EditorGUILayout.PropertyField(serializedObject.FindProperty(nameof(PerceptionCamera.labelers)));
GUILayout.Space(15);
var dir = PlayerPrefs.GetString(SimulationState.latestOutputDirectoryKey, string.Empty);
if (dir != string.Empty)
{
EditorGUILayout.LabelField("Latest Output Folder");
GUILayout.BeginVertical("TextArea");
EditorGUILayout.HelpBox(dir, MessageType.None);
GUILayout.BeginHorizontal();
if (GUILayout.Button("Show Folder"))
{
EditorUtility.RevealInFinder(dir);
}
if (GUILayout.Button("Copy Path"))
{
GUIUtility.systemCopyBuffer = dir;
}
GUILayout.EndHorizontal();
GUILayout.EndVertical();
}
if (EditorSettings.asyncShaderCompilation)
{
EditorGUILayout.HelpBox("Asynchronous shader compilation may result in invalid data in beginning frames. " +

24
com.unity.perception/Editor/Randomization/Editors/RunInUnitySimulationWindow.cs


async void RunInUnitySimulation()
{
var runGuid = Guid.NewGuid();
PerceptionEditorAnalytics.ReportRunInUnitySimulationStarted(
runGuid,
m_TotalIterationsField.value,
m_InstanceCountField.value,
existingBuildId: null);
try
{
ValidateSettings();
CreateLinuxBuildAndZip();
await StartUnitySimulationRun(runGuid);
}
catch (Exception e)
{
PerceptionEditorAnalytics.ReportRunInUnitySimulationFailed(runGuid, e.Message);
throw;
}
}
void ValidateSettings()

return appParamIds;
}
async Task StartUnitySimulationRun(Guid runGuid)
{
m_RunButton.SetEnabled(false);
var cancellationTokenSource = new CancellationTokenSource();

cancellationTokenSource.Dispose();
Debug.Log($"Executing run: {run.executionId}");
m_RunButton.SetEnabled(true);
PerceptionEditorAnalytics.ReportRunInUnitySimulationSucceeded(runGuid, run.executionId);
}
}
}

43
com.unity.perception/Editor/Randomization/Editors/ScenarioBaseEditor.cs


using System.Collections;
using System.Linq;
using UnityEditor;
using UnityEditor.UIElements;
using UnityEngine.Experimental.Perception.Randomization.Scenarios;
using UnityEngine.Experimental.Perception.Randomization.VisualElements;

[CustomEditor(typeof(ScenarioBase), true)]
class ScenarioBaseEditor : UnityEditor.Editor
{
bool m_HasConstantsField;
VisualElement m_ConstantsListVisualContainer;
public override VisualElement CreateInspectorGUI()
{

{
m_InspectorPropertiesContainer = m_Root.Q<VisualElement>("inspector-properties");
m_InspectorPropertiesContainer.Clear();
m_ConstantsListVisualContainer = m_Root.Q<VisualElement>("constants-list");
m_ConstantsListVisualContainer.Clear();
m_HasConstantsField = false;
if (iterator.NextVisible(true))
{
do

case "m_Script":
break;
case "constants":
m_HasConstantsField = true;
CreateConstantsFieldsWithToolTips(iterator.Copy());
break;
default:
{

void CheckIfConstantsExist()
{
m_ConstantsListVisualContainer = m_Root.Q<VisualElement>("constants-container");
if (!m_HasConstantsField)
m_ConstantsListVisualContainer.style.display = new StyleEnum<DisplayStyle>(DisplayStyle.None);
}
void CreateConstantsFieldsWithToolTips(SerializedProperty constantsProperty)
{
constantsProperty.NextVisible(true);
do
{
var constantsPropertyField = new PropertyField(constantsProperty.Copy());
constantsPropertyField.Bind(m_SerializedObject);
var originalField = target.GetType().GetField("constants").FieldType.GetField(constantsProperty.name);
var tooltipAttribute = originalField.GetCustomAttributes(true).ToList().Find(att => att.GetType() == typeof(TooltipAttribute));
if (tooltipAttribute != null)
constantsPropertyField.tooltip = (tooltipAttribute as TooltipAttribute)?.tooltip;
m_ConstantsListVisualContainer.Add(constantsPropertyField);
} while (constantsProperty.NextVisible(true));
}
}
}

12
com.unity.perception/Editor/Randomization/Uss/Styles.uss


background-image: resource("Packages/com.unity.perception/Editor/Icons/ChevronRight.png");
}
/* Scenario classes */
.scenario__info-box {
border-width: 1px;

white-space: normal;
margin: 3px 3px 3px 3px;
}
.scenario__dark-viewport {

}
.scenario__title-label {
-unity-font-style: bold;
margin: 3px 3px 3px 3px;
color: #CACACA;
}
/* Randomizer classes */

min-width: auto;
margin-right: 4px;
}

18
com.unity.perception/Editor/Randomization/Uxml/Randomizer/RandomizerList.uxml


<UXML xmlns="UnityEngine.UIElements">
<VisualElement style="min-height: 132px;">
<VisualElement class="scenario__dark-viewport">
<TextElement text="Randomizers" class="scenario__title-label"/>
<TextElement
class="scenario__info-box"
text="Randomizers are executed in the order below. You can change the order by dragging Randomizers up or down using the handle bar to their left."/>
<VisualElement name="randomizers-container" style="margin-top: 3px; min-height: 100px;"/>
<VisualElement style="flex-direction: row; align-items: center; justify-content: center; margin-top: 2px;">
<Button name="add-randomizer-button" text="Add Randomizer"/>
<Button name="expand-all" text="Expand All"/>
<Button name="collapse-all" text="Collapse All"/>
</VisualElement>
</VisualElement>
</VisualElement>
</UXML>

17
com.unity.perception/Editor/Randomization/Uxml/ScenarioBaseElement.uxml


<UXML xmlns="UnityEngine.UIElements" xmlns:editor="UnityEditor.UIElements">
<VisualElement>
<Style src="../Uss/Styles.uss"/>
<VisualElement class="scenario__dark-viewport" >
<TextElement text="Scenario Properties" class="scenario__title-label"/>
<TextElement
class="scenario__info-box"
text="Scenarios control the execution flow of your simulation by applying randomization parameters. Make sure to always have only one scenario active within your scene."/>
<Toggle label="Quit On Complete" tooltip="Quit the application when the scenario completes" binding-path="quitOnComplete" style="margin-top:5px"/>
<Foldout style="padding-left: 16px" text="Constants" name="constants-list" tooltip="A list of parameters for this scenario that will be JSON serialized in the configuration file."/>
<editor:PropertyField name="configuration-file-name" label="Constants File Name" binding-path="serializedConstantsFileName"/>
<VisualElement style="flex-direction: row;">
<Button name="serialize" text="Serialize To Config File" style="flex-grow: 1;"

</VisualElement>
</VisualElement>
</VisualElement>
<VisualElement name="randomizer-list-placeholder" style = "margin-top: 10px"/>
</VisualElement>
</UXML>

7
com.unity.perception/Editor/Randomization/VisualElements/Randomizer/AddRandomizerMenu.cs


{
var rootList = new List<MenuItem>();
m_MenuItemsMap.Add(string.Empty, rootList);
var randomizerTypeSet = new HashSet<Type>();
foreach (var randomizer in m_RandomizerList.scenario.m_Randomizers)
randomizerTypeSet.Add(randomizer.GetType());
if (randomizerTypeSet.Contains(randomizerType))
continue;
var menuAttribute = (AddRandomizerMenuAttribute)Attribute.GetCustomAttribute(randomizerType, typeof(AddRandomizerMenuAttribute));
if (menuAttribute != null)

7
com.unity.perception/Editor/Randomization/VisualElements/Randomizer/RandomizerList.cs


{
SerializedProperty m_Property;
VisualElement m_Container;
ToolbarMenu m_AddRandomizerMenu;
public ScenarioBase scenario => (ScenarioBase)m_Property.serializedObject.targetObject;
VisualElement inspectorContainer
{

m_Container.Clear();
for (var i = 0; i < m_Property.arraySize; i++)
m_Container.Add(new RandomizerElement(m_Property.GetArrayElementAtIndex(i), this));
}
public void AddRandomizer(Type randomizerType)

30
com.unity.perception/Editor/Randomization/VisualElements/Sampler/SamplerElement.cs


using System;
using System.Linq;
using UnityEditor;
using UnityEditor.UIElements;
using UnityEngine.Experimental.Perception.Randomization.Parameters;

void CreateSampler(Type samplerType)
{
var newSampler = (ISampler)Activator.CreateInstance(samplerType);
CopyFloatRangeToNewSampler(newSampler);
m_Sampler = newSampler;
m_Property.managedReferenceValue = newSampler;
m_Property.serializedObject.ApplyModifiedProperties();
}
void CopyFloatRangeToNewSampler(ISampler newSampler)
{
if (m_RangeProperty == null)
return;
var rangeField = newSampler.GetType().GetField(m_RangeProperty.name);
if (rangeField == null)
return;
var range = new FloatRange(
m_RangeProperty.FindPropertyRelative("minimum").floatValue,
m_RangeProperty.FindPropertyRelative("maximum").floatValue);
rangeField.SetValue(newSampler, range);
}
void CreatePropertyFields()

{
var propertyField = new PropertyField(currentProperty.Copy());
propertyField.Bind(m_Property.serializedObject);
var originalField = m_Sampler.GetType().GetField(currentProperty.name);
var tooltipAttribute = originalField.GetCustomAttributes(true).ToList().Find(att => att.GetType() == typeof(TooltipAttribute));
if (tooltipAttribute != null)
{
propertyField.tooltip = (tooltipAttribute as TooltipAttribute)?.tooltip;
}
m_Properties.Add(propertyField);
}
}

35
com.unity.perception/Runtime/GroundTruth/DatasetCapture.cs


/// <param name="egoHandle">The ego container for the sensor. Sensor orientation will be reported in the context of the given ego.</param>
/// <param name="modality">The kind of the sensor (ex. "camera", "lidar")</param>
/// <param name="description">A human-readable description of the sensor (ex. "front-left rgb camera")</param>
/// <param name="firstCaptureFrame">The frame number at which this sensor should first be scheduled to capture.</param>
/// <param name="captureTriggerMode">The method of triggering captures for this sensor.</param>
/// <param name="simulationDeltaTime">The simulation frame time (seconds) requested by this sensor.</param>
/// <param name="framesBetweenCaptures">The number of frames to simulate and render between the camera's scheduled captures. Setting this to 0 makes the camera capture every frame.</param>
/// <param name="manualSensorAffectSimulationTiming">Have this unscheduled (manual capture) camera affect simulation timings (similar to a scheduled camera) by requesting a specific frame delta time</param>
public static SensorHandle RegisterSensor(EgoHandle egoHandle, string modality, string description, float firstCaptureFrame, CaptureTriggerMode captureTriggerMode, float simulationDeltaTime, int framesBetweenCaptures, bool manualSensorAffectSimulationTiming = false)
SimulationState.AddSensor(egoHandle, modality, description, firstCaptureFrame, captureTriggerMode, simulationDeltaTime, framesBetweenCaptures, manualSensorAffectSimulationTiming, sensor);
return sensor;
}
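// Hedged usage sketch (illustration, not part of this change): registering a
// sensor through the new frame-based signature. All values are placeholders;
// PerceptionCamera makes an equivalent call further down in this diff.
var ego = DatasetCapture.RegisterEgo("");
var sensor = DatasetCapture.RegisterSensor(
    ego,
    "camera",                                         // modality
    "front rgb camera",                               // human-readable description
    firstCaptureFrame: 0,                             // start capturing at frame 0
    captureTriggerMode: CaptureTriggerMode.Scheduled, // automatic, schedule-based captures
    simulationDeltaTime: 0.0166f,                     // ~60 simulated frames per second
    framesBetweenCaptures: 0);                        // 0 = capture every frame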

}
}
/// <summary>
/// Capture trigger modes for sensors.
/// </summary>
public enum CaptureTriggerMode
{
/// <summary>
/// Captures happen automatically based on a start frame and frame delta time.
/// </summary>
Scheduled,
/// <summary>
/// Captures should be triggered manually through calling the manual capture method of the sensor using this trigger mode.
/// </summary>
Manual
}
/// <summary>
/// A handle to a sensor managed by the <see cref="DatasetCapture"/>. It can be used to check whether the sensor
/// is expected to capture this frame and report captures, annotations, and metrics regarding the sensor.

/// they should capture during the frame. Captures should only be reported when this is true.
/// </summary>
public bool ShouldCaptureThisFrame => DatasetCapture.SimulationState.ShouldCaptureThisFrame(this);
/// <summary>
/// Requests a capture from this sensor on the next rendered frame. Can only be used with manual capture mode (<see cref="PerceptionCamera.CaptureTriggerMode.Manual"/>).
/// </summary>
public void RequestCapture()
{
DatasetCapture.SimulationState.SetNextCaptureTimeToNowForSensor(this);
}
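// Hedged usage sketch (illustration, not part of this change): with a sensor
// registered in CaptureTriggerMode.Manual, application code decides when to
// capture. CaptureOnEvent is a hypothetical helper, not part of the API.
void CaptureOnEvent(SensorHandle sensorHandle)
{
    sensorHandle.RequestCapture(); // queues a capture for the next rendered frame
}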
/// <summary>
/// Report a metric regarding this sensor in the current frame.

6
com.unity.perception/Runtime/GroundTruth/Labelers/SemanticSegmentationLabeler.cs


const string k_SemanticSegmentationDirectory = "SemanticSegmentation";
const string k_SegmentationFilePrefix = "segmentation_";
internal string m_SemanticSegmentationDirectory;
/// <summary>
/// The id to associate with semantic segmentation annotations in the dataset.

targetTexture.Create();
targetTexture.name = "Labeling";
m_SemanticSegmentationDirectory = k_SemanticSegmentationDirectory + Guid.NewGuid();
#if HDRP_PRESENT
var gameObject = perceptionCamera.gameObject;

if (!m_AsyncAnnotations.TryGetValue(frameCount, out var annotation))
return;
var datasetRelativePath = $"{m_SemanticSegmentationDirectory}/{k_SegmentationFilePrefix}{frameCount}.png";
var localPath = $"{Manager.Instance.GetDirectoryFor(m_SemanticSegmentationDirectory)}/{k_SegmentationFilePrefix}{frameCount}.png";
annotation.ReportFile(datasetRelativePath);

2
com.unity.perception/Runtime/GroundTruth/Labelers/Visualization/Materials/OutlineMaterial.mat


m_Script: {fileID: 11500000, guid: d0353a89b1f911e48b9e16bdc9f2e058, type: 3}
m_Name:
m_EditorClassIdentifier:
version: 4
--- !u!21 &2100000
Material:
serializedVersion: 6

53
com.unity.perception/Runtime/GroundTruth/PerceptionCamera.cs


/// A human-readable description of the camera.
/// </summary>
public string description;
/// <summary>
/// Whether camera output should be captured to disk
/// </summary>

public Camera attachedCamera => m_AttachedCamera;
/// <summary>
/// Frame number at which this camera starts capturing.
/// </summary>
public int firstCaptureFrame = 0;
/// <summary>
/// The method of triggering captures for this camera.
/// </summary>
public CaptureTriggerMode captureTriggerMode = CaptureTriggerMode.Scheduled;
/// <summary>
/// Have this unscheduled (manual capture) camera affect simulation timings (similar to a scheduled camera) by requesting a specific frame delta time
/// </summary>
public bool manualSensorAffectSimulationTiming = false;
/// <summary>
/// The simulation frame time (seconds) for this camera. E.g. 0.0166 translates to 60 frames per second. This will be used as Unity's <see cref="Time.captureDeltaTime"/>, causing a fixed number of frames to be generated for each second of elapsed simulation time regardless of the capabilities of the underlying hardware.
/// </summary>
public float simulationDeltaTime = 0.0166f;
/// <summary>
/// The number of frames to simulate and render between the camera's scheduled captures. Setting this to 0 makes the camera capture every frame.
/// </summary>
public int framesBetweenCaptures = 0;
/// <summary>
/// Requests a capture from this camera on the next rendered frame. Can only be used when using <see cref="PerceptionCamera.CaptureTriggerMode.Manual"/> capture mode.
/// </summary>
public void RequestCapture()
{
if (captureTriggerMode.Equals(CaptureTriggerMode.Manual))
{
SensorHandle.RequestCapture();
}
else
{
Debug.LogError($"{nameof(RequestCapture)} can only be used if the camera is in {nameof(CaptureTriggerMode.Manual)} capture mode.");
}
}
/// <summary>
/// Event invoked after the camera finishes rendering during a frame.
/// </summary>

{
m_EgoMarker = GetComponentInParent<Ego>();
var ego = m_EgoMarker == null ? DatasetCapture.RegisterEgo("") : m_EgoMarker.EgoHandle;
SensorHandle = DatasetCapture.RegisterSensor(ego, "camera", description, firstCaptureFrame, captureTriggerMode, simulationDeltaTime, framesBetweenCaptures, manualSensorAffectSimulationTiming);
}
}

EnsureSensorRegistered();
if (!SensorHandle.IsValid)
return;
m_AttachedCamera.enabled = SensorHandle.ShouldCaptureThisFrame;
bool anyVisualizing = false;
foreach (var labeler in m_Labelers)

4
com.unity.perception/Runtime/GroundTruth/SemanticSegmentationCrossPipelinePass.cs


protected override void ExecutePass(ScriptableRenderContext renderContext, CommandBuffer cmd, Camera camera, CullingResults cullingResult)
{
if (s_LastFrameExecuted == Time.frameCount)
{
Debug.LogError("Semantic segmentation was run twice in the same frame. Multiple semantic segmentations are not currently supported.");
}
s_LastFrameExecuted = Time.frameCount;
var renderList = CreateRendererListDesc(camera, cullingResult, "FirstPass", 0, m_OverrideMaterial, -1);

105
com.unity.perception/Runtime/GroundTruth/SimulationState.cs


float m_LastTimeScale;
readonly string m_OutputDirectoryName;
string m_OutputDirectoryPath;
public const string latestOutputDirectoryKey = "latestOutputDirectory";
JsonSerializer m_AnnotationSerializer;
public bool IsRunning { get; private set; }

}
const float k_SimulationTimingAccuracy = 0.01f;
const float k_MaxDeltaTime = 100f;
public SimulationState(string outputDirectory)
{

m_OutputDirectoryName = outputDirectory;
PlayerPrefs.SetString(latestOutputDirectoryKey, Manager.Instance.GetDirectoryFor("",""));
IsRunning = true;
}

{
public string modality;
public string description;
public CaptureTriggerMode captureTriggerMode;
public float renderingDeltaTime;
public int framesBetweenCaptures;
public bool manualSensorAffectSimulationTiming;
public float sequenceTimeOfNextCapture;
public float sequenceTimeOfNextRender;
public int lastCaptureFrameCount;
public EgoHandle egoHandle;
}

foreach (var kvp in m_Sensors.ToArray())
{
var sensorData = kvp.Value;
sensorData.sequenceTimeOfNextCapture = GetSequenceTimeOfNextCapture(sensorData);
sensorData.sequenceTimeOfNextRender = 0;
m_Sensors[kvp.Key] = sensorData;
}

m_LastTimeScale = Time.timeScale;
}
public void AddSensor(EgoHandle egoHandle, string modality, string description, float firstCaptureFrame, CaptureTriggerMode captureTriggerMode, float renderingDeltaTime, int framesBetweenCaptures, bool manualSensorAffectSimulationTiming, SensorHandle sensor)
firstCaptureTime = firstCaptureFrame * renderingDeltaTime,
captureTriggerMode = captureTriggerMode,
renderingDeltaTime = renderingDeltaTime,
framesBetweenCaptures = framesBetweenCaptures,
manualSensorAffectSimulationTiming = manualSensorAffectSimulationTiming,
sensorData.sequenceTimeOfNextCapture = GetSequenceTimeOfNextCapture(sensorData);
sensorData.sequenceTimeOfNextRender = 0;
float GetSequenceTimeOfNextCapture(SensorData sensorData)
{
return sensorData.captureTriggerMode == CaptureTriggerMode.Scheduled? sensorData.firstCaptureTime : float.MaxValue;
}
return sensorData.sequenceTimeOfNextCapture;
}
public bool Contains(Guid id) => m_Ids.Contains(id);

m_HasStarted = true;
}
EnsureSequenceTimingsUpdated();
if (Mathf.Abs(sensorData.sequenceTimeOfNextRender - UnscaledSequenceTime) < k_SimulationTimingAccuracy)
{
//means this frame fulfills this sensor's simulation time requirements, we can move target to next frame.
sensorData.sequenceTimeOfNextRender += sensorData.renderingDeltaTime;
}
if (activeSensor.ShouldCaptureThisFrame)
{
if (sensorData.captureTriggerMode.Equals(CaptureTriggerMode.Scheduled))
{
sensorData.sequenceTimeOfNextCapture += sensorData.renderingDeltaTime * (sensorData.framesBetweenCaptures + 1);
Debug.Assert(sensorData.sequenceTimeOfNextCapture > UnscaledSequenceTime,
$"Next scheduled capture should be after {UnscaledSequenceTime} but is {sensorData.sequenceTimeOfNextCapture}");
}
else if (sensorData.captureTriggerMode.Equals(CaptureTriggerMode.Manual))
{
sensorData.sequenceTimeOfNextCapture = float.MaxValue;
}
sensorData.lastCaptureFrameCount = Time.frameCount;
}
m_Sensors[activeSensor] = sensorData;
}

{
float thisSensorNextFrameDt = -1;
if (sensorData.captureTriggerMode.Equals(CaptureTriggerMode.Scheduled))
{
thisSensorNextFrameDt = sensorData.sequenceTimeOfNextRender - UnscaledSequenceTime;
Debug.Assert(thisSensorNextFrameDt > 0f, "Sensor was scheduled to capture in the past but got skipped over.");
}
else if (sensorData.captureTriggerMode.Equals(CaptureTriggerMode.Manual) && sensorData.manualSensorAffectSimulationTiming)
{
thisSensorNextFrameDt = sensorData.sequenceTimeOfNextRender - UnscaledSequenceTime;
}
if (Math.Abs(nextFrameDt - k_MaxDeltaTime) < 0.0001)
{
//means no sensor is controlling simulation timing, so we set Time.captureDeltaTime to 0 (default) which means the setting does not do anything
nextFrameDt = 0;
}
WritePendingCaptures();
WritePendingMetrics();

public void SetNextCaptureTimeToNowForSensor(SensorHandle sensorHandle)
{
if (!m_Sensors.ContainsKey(sensorHandle))
return;
var data = m_Sensors[sensorHandle];
data.sequenceTimeOfNextCapture = UnscaledSequenceTime;
m_Sensors[sensorHandle] = data;
}
public bool ShouldCaptureThisFrame(SensorHandle sensorHandle)
{
if (!m_Sensors.ContainsKey(sensorHandle))

if (data.lastCaptureFrameCount == Time.frameCount)
return true;
return data.sequenceTimeOfNextCapture - UnscaledSequenceTime < k_SimulationTimingAccuracy;
}
public void End()

11
com.unity.perception/Runtime/Randomization/Parameters/NumericParameter.cs


public abstract T Sample();
/// <summary>
/// Schedules a job to generate an array of parameter samples.
/// Call Complete() on the JobHandle returned by this function to wait on the job generating the parameter samples.
/// </summary>
/// <param name="sampleCount">Number of parameter samples to generate</param>
/// <param name="jobHandle">The JobHandle returned from scheduling the sampling job</param>
/// <returns>A NativeArray containing generated samples</returns>
public abstract NativeArray<T> Samples(int sampleCount, out JobHandle jobHandle);
/// <summary>
/// Generates a generic sample
/// </summary>
/// <returns>The generated sample</returns>

{
base.Validate();
foreach (var sampler in samplers)
sampler.Validate();
}
}
}

33
com.unity.perception/Runtime/Randomization/Parameters/ParameterTypes/NumericParameters/BooleanParameter.cs


{
return Sample(value.Sample());
}
/// <summary>
/// Schedules a job to generate an array of samples
/// </summary>
/// <param name="sampleCount">The number of samples to generate</param>
/// <param name="jobHandle">The handle of the scheduled job</param>
/// <returns>A NativeArray of samples</returns>
public override NativeArray<bool> Samples(int sampleCount, out JobHandle jobHandle)
{
var samples = new NativeArray<bool>(sampleCount, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
var rngSamples = value.Samples(sampleCount, out jobHandle);
jobHandle = new SamplesJob
{
rngSamples = rngSamples,
samples = samples,
threshold = threshold
}.Schedule(jobHandle);
return samples;
}
[BurstCompile]
struct SamplesJob : IJob
{
[DeallocateOnJobCompletion] public NativeArray<float> rngSamples;
public NativeArray<bool> samples;
public float threshold;
public void Execute()
{
for (var i = 0; i < samples.Length; i++)
samples[i] = rngSamples[i] >= threshold;
}
}
}
}
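// Hedged usage sketch (illustration, not part of this change): consuming the
// job-based sampling API above. Assumes the parameter's value sampler has
// been configured; the threshold value here is a placeholder.
var boolParam = new BooleanParameter { threshold = 0.5f };
var samples = boolParam.Samples(1000, out var jobHandle);
jobHandle.Complete();         // wait for the Burst-compiled sampling job to finish
var firstSample = samples[0]; // only read the results after Complete()
samples.Dispose();            // the caller owns the returned TempJob allocation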

111
com.unity.perception/Runtime/Randomization/Parameters/ParameterTypes/NumericParameters/ColorParameters/ColorHsvaParameter.cs


{
return new ColorHsva(hue.Sample(), saturation.Sample(), value.Sample(), alpha.Sample());
}
/// <summary>
/// Schedules a job to generate an array of RGBA color samples
/// </summary>
/// <param name="sampleCount">The number of samples to generate</param>
/// <param name="jobHandle">The handle of the scheduled job</param>
/// <returns>A NativeArray of samples</returns>
public override NativeArray<Color> Samples(int sampleCount, out JobHandle jobHandle)
{
var samples = new NativeArray<Color>(sampleCount, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
var hueRng = hue.Samples(sampleCount, out var hueHandle);
var satRng = saturation.Samples(sampleCount, out var satHandle);
var valRng = value.Samples(sampleCount, out var valHandle);
var alphaRng = alpha.Samples(sampleCount, out var alphaHandle);
var handles = new NativeArray<JobHandle>(4, Allocator.TempJob)
{
[0] = hueHandle,
[1] = satHandle,
[2] = valHandle,
[3] = alphaHandle
};
var combinedJobHandles = JobHandle.CombineDependencies(handles);
jobHandle = new SamplesJob
{
hueRng = hueRng,
satRng = satRng,
valRng = valRng,
alphaRng = alphaRng,
samples = samples
}.Schedule(combinedJobHandles);
handles.Dispose(jobHandle);
return samples;
}
[BurstCompile]
struct SamplesJob : IJob
{
[DeallocateOnJobCompletion] public NativeArray<float> hueRng;
[DeallocateOnJobCompletion] public NativeArray<float> satRng;
[DeallocateOnJobCompletion] public NativeArray<float> valRng;
[DeallocateOnJobCompletion] public NativeArray<float> alphaRng;
public NativeArray<Color> samples;
static Color CreateColorHsva(float h, float s, float v, float a)
{
var color = Color.HSVToRGB(h, s, v);
color.a = a;
return color;
}
public void Execute()
{
for (var i = 0; i < samples.Length; i++)
samples[i] = CreateColorHsva(hueRng[i], satRng[i], valRng[i], alphaRng[i]);
}
}
/// <summary>
/// Schedules a job to generate an array of HSVA color samples
/// </summary>
/// <param name="sampleCount">The number of samples to generate</param>
/// <param name="jobHandle">The handle of the scheduled job</param>
/// <returns>A NativeArray of samples</returns>
public NativeArray<ColorHsva> SamplesHsva(int sampleCount, out JobHandle jobHandle)
{
var samples = new NativeArray<ColorHsva>(sampleCount, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
var hueRng = hue.Samples(sampleCount, out var hueHandle);
var satRng = saturation.Samples(sampleCount, out var satHandle);
var valRng = value.Samples(sampleCount, out var valHandle);
var alphaRng = alpha.Samples(sampleCount, out var alphaHandle);
var handles = new NativeArray<JobHandle>(4, Allocator.TempJob)
{
[0] = hueHandle,
[1] = satHandle,
[2] = valHandle,
[3] = alphaHandle
};
var combinedJobHandles = JobHandle.CombineDependencies(handles);
jobHandle = new SamplesHsvaJob
{
hueRng = hueRng,
satRng = satRng,
valRng = valRng,
alphaRng = alphaRng,
samples = samples
}.Schedule(combinedJobHandles);
handles.Dispose(jobHandle);
return samples;
}
[BurstCompile]
struct SamplesHsvaJob : IJob
{
[DeallocateOnJobCompletion] public NativeArray<float> hueRng;
[DeallocateOnJobCompletion] public NativeArray<float> satRng;
[DeallocateOnJobCompletion] public NativeArray<float> valRng;
[DeallocateOnJobCompletion] public NativeArray<float> alphaRng;
public NativeArray<ColorHsva> samples;
public void Execute()
{
for (var i = 0; i < samples.Length; i++)
samples[i] = new ColorHsva(hueRng[i], satRng[i], valRng[i], alphaRng[i]);
}
}
}
}
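On the consumer side, the pattern for any of these batch samplers is the same: schedule, complete the returned handle, read, dispose. A hedged sketch against the ColorHsvaParameter API shown above (`colorParameter` is an assumed, pre-configured instance):

// Hedged usage sketch: batch-sample 100 RGBA colors from a ColorHsvaParameter.
var colors = colorParameter.Samples(100, out var handle);
handle.Complete();            // waits for the four channel jobs plus the combine job
var firstColor = colors[0];   // safe to read only after Complete()
colors.Dispose();             // the caller owns the Allocator.TempJob array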

52
com.unity.perception/Runtime/Randomization/Parameters/ParameterTypes/NumericParameters/ColorParameters/ColorRgbParameter.cs


{
return new Color(red.Sample(), green.Sample(), blue.Sample(), alpha.Sample());
}
/// <summary>
/// Schedules a job to generate an array of samples
/// </summary>
/// <param name="sampleCount">The number of samples to generate</param>
/// <param name="jobHandle">The handle of the scheduled job</param>
/// <returns>A NativeArray of samples</returns>
public override NativeArray<Color> Samples(int sampleCount, out JobHandle jobHandle)
{
var samples = new NativeArray<Color>(sampleCount, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
var redRng = red.Samples(sampleCount, out var redHandle);
var greenRng = green.Samples(sampleCount, out var greenHandle);
var blueRng = blue.Samples(sampleCount, out var blueHandle);
var alphaRng = alpha.Samples(sampleCount, out var alphaHandle);
var handles = new NativeArray<JobHandle>(4, Allocator.TempJob)
{
[0] = redHandle,
[1] = greenHandle,
[2] = blueHandle,
[3] = alphaHandle
};
var combinedJobHandles = JobHandle.CombineDependencies(handles);
jobHandle = new SamplesJob
{
redRng = redRng,
greenRng = greenRng,
blueRng = blueRng,
alphaRng = alphaRng,
samples = samples
}.Schedule(combinedJobHandles);
handles.Dispose(jobHandle);
return samples;
}
[BurstCompile]
struct SamplesJob : IJob
{
[DeallocateOnJobCompletion] public NativeArray<float> redRng;
[DeallocateOnJobCompletion] public NativeArray<float> greenRng;
[DeallocateOnJobCompletion] public NativeArray<float> blueRng;
[DeallocateOnJobCompletion] public NativeArray<float> alphaRng;
public NativeArray<Color> samples;
public void Execute()
{
for (var i = 0; i < samples.Length; i++)
samples[i] = new Color(redRng[i], greenRng[i], blueRng[i], alphaRng[i]);
}
}
}
}

11
com.unity.perception/Runtime/Randomization/Parameters/ParameterTypes/NumericParameters/FloatParameter.cs


{
return value.Sample();
}
/// <summary>
/// Schedules a job to generate an array of samples
/// </summary>
/// <param name="sampleCount">The number of samples to generate</param>
/// <param name="jobHandle">The handle of the scheduled job</param>
/// <returns>A NativeArray of samples</returns>
public override NativeArray<float> Samples(int sampleCount, out JobHandle jobHandle)
{
return value.Samples(sampleCount, out jobHandle);
}
}
}

31
com.unity.perception/Runtime/Randomization/Parameters/ParameterTypes/NumericParameters/IntegerParameter.cs


/// </summary>
/// <returns>The generated sample</returns>
public override int Sample() => (int)value.Sample();
/// <summary>
/// Schedules a job to generate an array of samples
/// </summary>
/// <param name="sampleCount">The number of samples to generate</param>
/// <param name="jobHandle">The handle of the scheduled job</param>
/// <returns>A NativeArray of samples</returns>
public override NativeArray<int> Samples(int sampleCount, out JobHandle jobHandle)
{
var samples = new NativeArray<int>(sampleCount, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
var rngSamples = value.Samples(sampleCount, out jobHandle);
jobHandle = new SamplesJob
{
rngSamples = rngSamples,
samples = samples
}.Schedule(jobHandle);
return samples;
}
[BurstCompile]
struct SamplesJob : IJob
{
[DeallocateOnJobCompletion] public NativeArray<float> rngSamples;
public NativeArray<int> samples;
public void Execute()
{
for (var i = 0; i < samples.Length; i++)
samples[i] = (int)rngSamples[i];
}
}
}
}
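One subtlety in SamplesJob above: the `(int)` cast truncates toward zero rather than flooring, so the top of a sampler's range is effectively unreachable. For example:

// Truncation semantics of the (int) cast used above (illustrative values).
Debug.Log((int)4.999f);   // 4: a [0, 5] float range almost never yields the int 5
Debug.Log((int)-0.999f);  // 0: truncation toward zero, not floor (-1)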

35
com.unity.perception/Runtime/Randomization/Parameters/ParameterTypes/NumericParameters/Vector2Parameter.cs


{
return new Vector2(x.Sample(), y.Sample());
}
/// <summary>
/// Schedules a job to generate an array of samples
/// </summary>
/// <param name="sampleCount">The number of samples to generate</param>
/// <param name="jobHandle">The handle of the scheduled job</param>
/// <returns>A NativeArray of samples</returns>
public override NativeArray<Vector2> Samples(int sampleCount, out JobHandle jobHandle)
{
var samples = new NativeArray<Vector2>(sampleCount, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
var xRng = x.Samples(sampleCount, out var xHandle);
var yRng = y.Samples(sampleCount, out var yHandle);
var combinedJobHandles = JobHandle.CombineDependencies(xHandle, yHandle);
jobHandle = new SamplesJob
{
xRng = xRng,
yRng = yRng,
samples = samples
}.Schedule(combinedJobHandles);
return samples;
}
[BurstCompile]
struct SamplesJob : IJob
{
[DeallocateOnJobCompletion] public NativeArray<float> xRng;
[DeallocateOnJobCompletion] public NativeArray<float> yRng;
public NativeArray<Vector2> samples;
public void Execute()
{
for (var i = 0; i < samples.Length; i++)
samples[i] = new Vector2(xRng[i], yRng[i]);
}
}
}
}

38
com.unity.perception/Runtime/Randomization/Parameters/ParameterTypes/NumericParameters/Vector3Parameter.cs


{
return new Vector3(x.Sample(), y.Sample(), z.Sample());
}
/// <summary>
/// Schedules a job to generate an array of samples
/// </summary>
/// <param name="sampleCount">The number of samples to generate</param>
/// <param name="jobHandle">The handle of the scheduled job</param>
/// <returns>A NativeArray of samples</returns>
public override NativeArray<Vector3> Samples(int sampleCount, out JobHandle jobHandle)
{
var samples = new NativeArray<Vector3>(sampleCount, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
var xRng = x.Samples(sampleCount, out var xHandle);
var yRng = y.Samples(sampleCount, out var yHandle);
var zRng = z.Samples(sampleCount, out var zHandle);
var combinedJobHandles = JobHandle.CombineDependencies(xHandle, yHandle, zHandle);
jobHandle = new SamplesJob
{
xRng = xRng,
yRng = yRng,
zRng = zRng,
samples = samples
}.Schedule(combinedJobHandles);
return samples;
}
[BurstCompile]
struct SamplesJob : IJob
{
[DeallocateOnJobCompletion] public NativeArray<float> xRng;
[DeallocateOnJobCompletion] public NativeArray<float> yRng;
[DeallocateOnJobCompletion] public NativeArray<float> zRng;
public NativeArray<Vector3> samples;
public void Execute()
{
for (var i = 0; i < samples.Length; i++)
samples[i] = new Vector3(xRng[i], yRng[i], zRng[i]);
}
}
}
}

51
com.unity.perception/Runtime/Randomization/Parameters/ParameterTypes/NumericParameters/Vector4Parameter.cs


{
return new Vector4(x.Sample(), y.Sample(), z.Sample(), w.Sample());
}
/// <summary>
/// Schedules a job to generate an array of samples
/// </summary>
/// <param name="sampleCount">The number of samples to generate</param>
/// <param name="jobHandle">The handle of the scheduled job</param>
/// <returns>A NativeArray of samples</returns>
public override NativeArray<Vector4> Samples(int sampleCount, out JobHandle jobHandle)
{
var samples = new NativeArray<Vector4>(sampleCount, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
var xRng = x.Samples(sampleCount, out var xHandle);
var yRng = y.Samples(sampleCount, out var yHandle);
var zRng = z.Samples(sampleCount, out var zHandle);
var wRng = w.Samples(sampleCount, out var wHandle);
var handles = new NativeArray<JobHandle>(4, Allocator.Temp)
{
[0] = xHandle,
[1] = yHandle,
[2] = zHandle,
[3] = wHandle
};
var combinedJobHandles = JobHandle.CombineDependencies(handles);
handles.Dispose();
jobHandle = new SamplesJob
{
xRng = xRng,
yRng = yRng,
zRng = zRng,
wRng = wRng,
samples = samples
}.Schedule(combinedJobHandles);
return samples;
}
[BurstCompile]
struct SamplesJob : IJob
{
[DeallocateOnJobCompletion] public NativeArray<float> xRng;
[DeallocateOnJobCompletion] public NativeArray<float> yRng;
[DeallocateOnJobCompletion] public NativeArray<float> zRng;
[DeallocateOnJobCompletion] public NativeArray<float> wRng;
public NativeArray<Vector4> samples;
public void Execute()
{
for (var i = 0; i < samples.Length; i++)
samples[i] = new Vector4(xRng[i], yRng[i], zRng[i], wRng[i]);
}
}
}
}
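Worth noting: Vector4Parameter handles its JobHandle array differently from the color parameters above, using Allocator.Temp with a synchronous Dispose() instead of Allocator.TempJob with Dispose(jobHandle). Both are valid because CombineDependencies consumes the array immediately; a side-by-side sketch:

using Unity.Collections;
using Unity.Jobs;

// Pattern A (Vector4Parameter above): Temp allocation, disposed right away.
var handlesA = new NativeArray<JobHandle>(4, Allocator.Temp);
var combinedA = JobHandle.CombineDependencies(handlesA);
handlesA.Dispose(); // safe: CombineDependencies has already copied the handles

// Pattern B (the color parameters above): TempJob allocation, deferred disposal.
var handlesB = new NativeArray<JobHandle>(4, Allocator.TempJob);
var combinedB = JobHandle.CombineDependencies(handlesB);
handlesB.Dispose(combinedB); // disposal runs after the combined dependency chain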

2
com.unity.perception/Runtime/Randomization/Randomizers/RandomizerExamples/Randomizers/BackgroundObjectPlacementRandomizer.cs


{
for (var i = 0; i < layerCount; i++)
{
-   var seed = scenario.NextRandomState();
+   var seed = SamplerState.NextRandomState();
var placementSamples = PoissonDiskSampling.GenerateSamples(
placementArea.x, placementArea.y, separationDistance, seed);
var offset = new Vector3(placementArea.x, placementArea.y, 0f) * -0.5f;
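The seed now comes from SamplerState rather than the scenario instance. A hedged sketch of a custom randomizer consuming it the same way (the class name and placement logic are illustrative):

public class MyPlacementRandomizer : Randomizer
{
    protected override void OnIterationStart()
    {
        // New seed source in this commit; previously scenario.NextRandomState()
        var seed = SamplerState.NextRandomState();
        var rng = new Unity.Mathematics.Random(seed);
        var jitter = rng.NextFloat2(-1f, 1f); // example use of the seeded generator
    }
}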

6
com.unity.perception/Runtime/Randomization/Randomizers/RandomizerExamples/Randomizers/ColorRandomizer.cs


/// </summary>
protected override void OnIterationStart()
{
-   var taggedObjects = tagManager.Query<ColorRandomizerTag>();
-   foreach (var taggedObject in taggedObjects)
+   var tags = tagManager.Query<ColorRandomizerTag>();
+   foreach (var tag in tags)
-   var renderer = taggedObject.GetComponent<Renderer>();
+   var renderer = tag.GetComponent<Renderer>();
    renderer.material.SetColor(k_BaseColor, colorParameter.Sample());
}
}

4
com.unity.perception/Runtime/Randomization/Randomizers/RandomizerExamples/Randomizers/ForegroundObjectPlacementRandomizer.cs


using System;
using System.Collections.Generic;
using UnityEngine.Experimental.Perception.Randomization.Samplers;
namespace UnityEngine.Experimental.Perception.Randomization.Randomizers.SampleRandomizers
{

/// </summary>
protected override void OnIterationStart()
{
-   var seed = scenario.NextRandomState();
+   var seed = SamplerState.NextRandomState();
var placementSamples = PoissonDiskSampling.GenerateSamples(
placementArea.x, placementArea.y, separationDistance, seed);
var offset = new Vector3(placementArea.x, placementArea.y, 0f) * -0.5f;

6
com.unity.perception/Runtime/Randomization/Randomizers/RandomizerExamples/Randomizers/HueOffsetRandomizer.cs


/// </summary>
protected override void OnIterationStart()
{
-   var taggedObjects = tagManager.Query<HueOffsetRandomizerTag>();
-   foreach (var taggedObject in taggedObjects)
+   var tags = tagManager.Query<HueOffsetRandomizerTag>();
+   foreach (var tag in tags)
-   var renderer = taggedObject.GetComponent<MeshRenderer>();
+   var renderer = tag.GetComponent<MeshRenderer>();
    renderer.material.SetFloat(k_HueOffsetShaderProperty, hueOffset.Sample());
}
}

6
com.unity.perception/Runtime/Randomization/Randomizers/RandomizerExamples/Randomizers/RotationRandomizer.cs


/// </summary>
protected override void OnIterationStart()
{
-   var taggedObjects = tagManager.Query<RotationRandomizerTag>();
-   foreach (var taggedObject in taggedObjects)
-       taggedObject.transform.rotation = Quaternion.Euler(rotation.Sample());
+   var tags = tagManager.Query<RotationRandomizerTag>();
+   foreach (var tag in tags)
+       tag.transform.rotation = Quaternion.Euler(rotation.Sample());
}
}
}

6
com.unity.perception/Runtime/Randomization/Randomizers/RandomizerExamples/Randomizers/SunAngleRandomizer.cs


/// </summary>
protected override void OnIterationStart()
{
-   var lightObjects = tagManager.Query<SunAngleRandomizerTag>();
-   foreach (var lightObject in lightObjects)
+   var tags = tagManager.Query<SunAngleRandomizerTag>();
+   foreach (var tag in tags)
    {
        var earthSpin = Quaternion.AngleAxis((hour.Sample() + 12f) / 24f * 360f, Vector3.down);
        var timeOfYearRads = dayOfTheYear.Sample() / 365f * Mathf.PI * 2f;

-       lightObject.transform.rotation = Quaternion.Euler(90,0,0) * Quaternion.Inverse(lightRotation);
+       tag.transform.rotation = Quaternion.Euler(90,0,0) * Quaternion.Inverse(lightRotation);
}
}
}

10
com.unity.perception/Runtime/Randomization/Randomizers/RandomizerExamples/Randomizers/TextureRandomizer.cs


[AddRandomizerMenu("Perception/Texture Randomizer")]
public class TextureRandomizer : Randomizer
{
#if HDRP_PRESENT
static readonly int k_BaseTexture = Shader.PropertyToID("_BaseColorMap");
#else
#endif
/// <summary>
/// The list of textures to sample and apply to tagged objects

/// </summary>
protected override void OnIterationStart()
{
-   var taggedObjects = tagManager.Query<TextureRandomizerTag>();
-   foreach (var taggedObject in taggedObjects)
+   var tags = tagManager.Query<TextureRandomizerTag>();
+   foreach (var tag in tags)
-   var renderer = taggedObject.GetComponent<MeshRenderer>();
+   var renderer = tag.GetComponent<MeshRenderer>();
renderer.material.SetTexture(k_BaseTexture, texture.Sample());
}
}

4
com.unity.perception/Runtime/Randomization/Randomizers/RandomizerTag.cs


void Awake()
{
-   tagManager.AddTag(GetType(), gameObject);
+   tagManager.AddTag(this);
}

void OnDestroy()
{
-   tagManager.RemoveTag(GetType(), gameObject);
+   tagManager.RemoveTag(this);
}
}
}
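Since AddTag(this)/RemoveTag(this) register the component instance itself, a tag is just a marker component, and queries hand back components with transforms attached. Hedged sketch (MyTag/MyRandomizer are illustrative names):

public class MyTag : RandomizerTag { }

public class MyRandomizer : Randomizer
{
    protected override void OnIterationStart()
    {
        // Query now yields the tag components directly, as in the randomizers above
        foreach (var tag in tagManager.Query<MyTag>())
            tag.transform.Rotate(0f, 15f, 0f);
    }
}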

39
com.unity.perception/Runtime/Randomization/Randomizers/RandomizerTagManager.cs


namespace UnityEngine.Experimental.Perception.Randomization.Randomizers
{
/// <summary>
- /// Organizes RandomizerTags attached to GameObjects in the scene
+ /// Organizes RandomizerTags present in the scene
/// </summary>
public class RandomizerTagManager
{

public static RandomizerTagManager singleton { get; } = new RandomizerTagManager();
Dictionary<Type, HashSet<Type>> m_TypeTree = new Dictionary<Type, HashSet<Type>>();
- Dictionary<Type, HashSet<GameObject>> m_TagMap = new Dictionary<Type, HashSet<GameObject>>();
+ Dictionary<Type, HashSet<RandomizerTag>> m_TagMap = new Dictionary<Type, HashSet<RandomizerTag>>();
- /// Enumerates all GameObjects in the scene that have a RandomizerTag of the given type
+ /// Enumerates over all RandomizerTags of the given type present in the scene
- /// <returns>GameObjects with the given RandomizerTag</returns>
- public IEnumerable<GameObject> Query<T>(bool returnSubclasses = false) where T : RandomizerTag
+ /// <returns>RandomizerTags of the given type</returns>
+ public IEnumerable<T> Query<T>(bool returnSubclasses = false) where T : RandomizerTag
{
var queriedTagType = typeof(T);
if (!m_TagMap.ContainsKey(queriedTagType))

var tagType = typeStack.Pop();
foreach (var derivedType in m_TypeTree[tagType])
typeStack.Push(derivedType);
-   foreach (var obj in m_TagMap[tagType])
-       yield return obj;
+   foreach (var tag in m_TagMap[tagType])
+       yield return (T)tag;
-   foreach (var obj in m_TagMap[queriedTagType])
-       yield return obj;
+   foreach (var tag in m_TagMap[queriedTagType])
+       yield return (T)tag;
- internal void AddTag(Type tagType, GameObject obj)
+ internal void AddTag<T>(T tag) where T : RandomizerTag
+ var tagType = tag.GetType();
- m_TagMap[tagType].Add(obj);
+ m_TagMap[tagType].Add(tag);
}
void AddTagTypeToTypeHierarchy(Type tagType)

if (tagType == null || !tagType.IsSubclassOf(typeof(RandomizerTag)))
throw new ArgumentException("Tag type is not a subclass of RandomizerTag");
- m_TagMap.Add(tagType, new HashSet<GameObject>());
+ m_TagMap.Add(tagType, new HashSet<RandomizerTag>());
- while (baseType!= null && baseType != typeof(RandomizerTag))
+ while (baseType != null && baseType != typeof(RandomizerTag))
- m_TagMap.Add(baseType, new HashSet<GameObject>());
+ m_TagMap.Add(baseType, new HashSet<RandomizerTag>());
m_TypeTree[baseType] = new HashSet<Type> { tagType };
}
else

}
}
- internal void RemoveTag(Type tagType, GameObject obj)
+ internal void RemoveTag<T>(T tag) where T : RandomizerTag
- if (m_TagMap.ContainsKey(tagType) && m_TagMap[tagType].Contains(obj))
-     m_TagMap[tagType].Remove(obj);
+ var tagType = typeof(T);
+ if (m_TagMap.ContainsKey(tagType) && m_TagMap[tagType].Contains(tag))
+     m_TagMap[tagType].Remove(tag);
}
}
}
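The m_TypeTree walk is what powers the `returnSubclasses` flag: querying a base tag type can optionally yield derived tags too, which RandomizerTagTests later in this diff exercises with ParentTag/ChildTag. Hedged sketch:

// ParentTag/ChildTag mirror the hierarchy used in RandomizerTagTests below.
public class ParentTag : RandomizerTag { }
public class ChildTag : ParentTag { }

// Somewhere with access to the manager:
var exactOnly = RandomizerTagManager.singleton.Query<ParentTag>();       // ParentTag instances only
var withDerived = RandomizerTagManager.singleton.Query<ParentTag>(true); // ParentTag + ChildTag instances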

12
com.unity.perception/Runtime/Randomization/Samplers/ISampler.cs


public interface ISampler
{
/// <summary>
/// A range bounding the values generated by this sampler
/// </summary>
FloatRange range { get; set; }
/// <summary>
/// Generates one sample
/// </summary>
/// <returns>The generated sample</returns>
float Sample();

/// <summary>
/// Schedules a job to generate an array of samples
/// </summary>
/// <param name="sampleCount">The number of samples to generate</param>
/// <param name="jobHandle">The handle of the scheduled job</param>
/// <returns>A NativeArray of generated samples</returns>
NativeArray<float> Samples(int sampleCount, out JobHandle jobHandle);

+ /// <summary>
+ /// Validates that the sampler is configured properly
+ /// </summary>
+ void Validate();
}
}

39
com.unity.perception/Runtime/Randomization/Samplers/SamplerTypes/ConstantSampler.cs


public float value;
/// <summary>
/// A range bounding the values generated by this sampler
/// </summary>
public FloatRange range
{
get => new FloatRange(value, value);
set { }
}
/// <summary>
/// Constructs a ConstantSampler
/// </summary>
public ConstantSampler()

}
/// <summary>
/// Schedules a job to generate an array of samples
/// </summary>
/// <param name="sampleCount">The number of samples to generate</param>
/// <param name="jobHandle">The handle of the scheduled job</param>
/// <returns>A NativeArray of generated samples</returns>
public NativeArray<float> Samples(int sampleCount, out JobHandle jobHandle)
{
var samples = new NativeArray<float>(
sampleCount, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
jobHandle = new SampleJob
{
value = value,
samples = samples
}.Schedule();
return samples;
}
[BurstCompile]
struct SampleJob : IJob
{
public float value;
public NativeArray<float> samples;
public void Execute()
{
for (var i = 0; i < samples.Length; i++)
samples[i] = value;
}
}
/// <summary>
/// Validates that the sampler is configured properly
/// </summary>
public void Validate() {}
}
}

48
com.unity.perception/Runtime/Randomization/Samplers/SamplerTypes/NormalSampler.cs


/// <summary>
/// A range bounding the values generated by this sampler
/// </summary>
- [field: SerializeField]
- public FloatRange range { get; set; }
+ public FloatRange range;
/// <summary>
/// Constructs a normal distribution sampler

/// <returns>The generated sample</returns>
public float Sample()
{
-   var rng = new Unity.Mathematics.Random(ScenarioBase.activeScenario.NextRandomState());
+   var rng = SamplerState.CreateGenerator();
/// <summary>
/// Schedules a job to generate an array of samples
/// </summary>
/// <param name="sampleCount">The number of samples to generate</param>
/// <param name="jobHandle">The handle of the scheduled job</param>
/// <returns>A NativeArray of generated samples</returns>
public NativeArray<float> Samples(int sampleCount, out JobHandle jobHandle)
{
var samples = new NativeArray<float>(
sampleCount, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
jobHandle = new SampleJob
{
min = range.minimum,
max = range.maximum,
mean = mean,
standardDeviation = standardDeviation,
seed = ScenarioBase.activeScenario.NextRandomState(),
samples = samples
}.ScheduleBatch(sampleCount, SamplerUtility.samplingBatchSize);
return samples;
}
[BurstCompile]
struct SampleJob : IJobParallelForBatch
{
    public float min;
    public float max;
    public float mean;
    public float standardDeviation;
    public uint seed;
    public NativeArray<float> samples;
    public void Execute(int startIndex, int count)
    {
        var endIndex = startIndex + count;
        var batchIndex = startIndex / SamplerUtility.samplingBatchSize;
        var rng = new Unity.Mathematics.Random(SamplerUtility.IterateSeed((uint)batchIndex, seed));
        for (var i = startIndex; i < endIndex; i++)
        {
            samples[i] = SamplerUtility.TruncatedNormalSample(
                rng.NextFloat(), min, max, mean, standardDeviation);
        }
    }
}

+ /// <summary>
+ /// Validates that the sampler is configured properly
+ /// </summary>
+ public void Validate()
+ {
+     range.Validate();
+ }
}
}
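Because TruncatedNormalSample clamps into [min, max], the range both bounds the output and is what the new Validate() checks. Hedged usage sketch (configuration may equally come from the inspector; Validate's failure behavior is assumed, not shown in this diff):

var sampler = new NormalSampler();   // mean, standardDeviation, range set elsewhere
sampler.Validate();                  // flags a misconfigured range (assumed behavior)
var samples = sampler.Samples(1000, out var handle);
handle.Complete();
// Every sample lies within sampler.range: the normal distribution is truncated.
samples.Dispose();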

41
com.unity.perception/Runtime/Randomization/Samplers/SamplerTypes/UniformSampler.cs


/// <summary>
/// A range bounding the values generated by this sampler
/// </summary>
- [field: SerializeField]
- public FloatRange range { get; set; }
+ public FloatRange range;
/// <summary>
/// Constructs a UniformSampler

/// <returns>The generated sample</returns>
public float Sample()
{
-   var rng = new Unity.Mathematics.Random(ScenarioBase.activeScenario.NextRandomState());
+   var rng = SamplerState.CreateGenerator();
/// <summary>
/// Schedules a job to generate an array of samples
/// </summary>
/// <param name="sampleCount">The number of samples to generate</param>
/// <param name="jobHandle">The handle of the scheduled job</param>
/// <returns>A NativeArray of generated samples</returns>
public NativeArray<float> Samples(int sampleCount, out JobHandle jobHandle)
{
    var samples = new NativeArray<float>(
        sampleCount, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
    jobHandle = new SampleJob
    {
        min = range.minimum,
        max = range.maximum,
        seed = ScenarioBase.activeScenario.NextRandomState(),
        samples = samples
    }.ScheduleBatch(sampleCount, SamplerUtility.samplingBatchSize);
    return samples;
}
[BurstCompile]
struct SampleJob : IJobParallelForBatch
{
    public float min;
    public float max;
    public uint seed;
    public NativeArray<float> samples;
    public void Execute(int startIndex, int count)
    {
        var endIndex = startIndex + count;
        var batchIndex = startIndex / SamplerUtility.samplingBatchSize;
        var rng = new Unity.Mathematics.Random(SamplerUtility.IterateSeed((uint)batchIndex, seed));
        for (var i = startIndex; i < endIndex; i++)
            samples[i] = rng.NextFloat(min, max);
    }
}

+ /// <summary>
+ /// Validates that the sampler is configured properly
+ /// </summary>
+ public void Validate()
+ {
+     range.Validate();
+ }
}
}

66
com.unity.perception/Runtime/Randomization/Samplers/SamplerUtility.cs


using System;
using System.Runtime.CompilerServices;
using Unity.Collections;
using Unity.Mathematics;
namespace UnityEngine.Experimental.Perception.Randomization.Samplers

var stdTruncNorm = NormalCdfInverse(c);
return stdTruncNorm * stdDev + mean;
}
/// <summary>
/// Generates a sample from a probability distribution derived from a given AnimationCurve.
/// </summary>
/// <param name="integratedCurve">Numerical integration representing the AnimationCurve</param>
/// <param name="uniformSample">A sample value between 0 and 1 generated from a uniform distribution</param>
/// <param name="interval">The interval at which the original AnimationCurve was sampled in order to produce integratedCurve</param>
/// <param name="startTime">The time attribute of the first key of the original AnimationCurve</param>
/// <param name="endTime">The time attribute of the last key of the original AnimationCurve</param>
/// <returns>The generated sample</returns>
public static float AnimationCurveSample(float[] integratedCurve, float uniformSample, float interval, float startTime, float endTime)
{
var scaledSample = uniformSample * integratedCurve[integratedCurve.Length - 1];
for (var i = 0; i < integratedCurve.Length - 1; i++)
{
if (scaledSample > integratedCurve[i] && scaledSample < integratedCurve[i + 1])
{
var valueDifference = integratedCurve[i + 1] - integratedCurve[i];
var upperWeight = (scaledSample - integratedCurve[i]) / valueDifference;
var lowerWeight = 1 - upperWeight;
var matchingIndex = i * lowerWeight + (i + 1) * upperWeight;
var matchingTimeStamp = startTime + matchingIndex * interval;
return matchingTimeStamp;
}
}
throw new ArithmeticException("Could not find matching timestamp.");
}
/// <summary>
/// Numerically integrate a given AnimationCurve using the specified number of samples.
/// Based on https://en.wikipedia.org/wiki/Numerical_integration and http://blog.s-schoener.com/2018-05-05-animation-curves/
/// Uses the trapezoidal rule for numerical integration.
/// </summary>
/// <param name="array">The array to fill with integrated values</param>
/// <param name="curve">The animation curve to integrate</param>
/// <exception cref="ArgumentException"></exception>
public static void IntegrateCurve(float[] array, AnimationCurve curve)
{
if (curve.length == 0)
{
throw new ArgumentException("The provided Animation Curve includes no keys.");
}
var startTime = curve.keys[0].time;
var endTime = curve.keys[curve.length - 1].time;
var interval = (endTime - startTime) / (array.Length - 1);
array[0] = 0;
var previousValue = curve.Evaluate(startTime);
for (var i = 1; i < array.Length; i++)
{
if (curve.length == 1)
{
array[i] = previousValue;
}
else
{
var currentTime = startTime + i * interval;
var currentValue = curve.Evaluate(currentTime);
array[i] = array[i-1] + (previousValue + currentValue) * interval / 2;
previousValue = currentValue;
}
}
}
}
}
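Taken together, these two utilities implement inverse transform sampling: IntegrateCurve builds a cumulative trapezoidal integral table, and AnimationCurveSample maps a uniform sample through its inverse so times where the curve is high are drawn more often. A hedged usage sketch:

var curve = AnimationCurve.EaseInOut(0f, 0f, 1f, 1f);
var integrated = new float[100];                  // table resolution is a free choice
SamplerUtility.IntegrateCurve(integrated, curve); // cumulative trapezoidal integral

var startTime = curve.keys[0].time;
var endTime = curve.keys[curve.length - 1].time;
var interval = (endTime - startTime) / (integrated.Length - 1);

// A uniform sample in [0, 1] becomes a curve-weighted timestamp.
var t = SamplerUtility.AnimationCurveSample(integrated, 0.5f, interval, startTime, endTime);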

3
com.unity.perception/Runtime/Randomization/Scenarios/FixedLengthScenario.cs


public class Constants : UnitySimulationScenarioConstants
{
/// <summary>
- /// The number of frames to generate per iteration
+ /// The number of frames to render per iteration.
/// </summary>
[Tooltip("The number of frames to render per iteration.")]
public int framesPerIteration = 1;
}

14
com.unity.perception/Runtime/Randomization/Scenarios/Scenario.cs


var samplerObj = new JObject();
var fields = sampler.GetType().GetFields();
foreach (var field in fields)
-   samplerObj.Add(new JProperty(field.Name, field.GetValue(sampler)));
+   {
+       samplerObj.Add(new JProperty(field.Name, JToken.FromObject(field.GetValue(sampler))));
+   }
if (sampler.GetType() != typeof(ConstantSampler))
{
var rangeProperty = sampler.GetType().GetProperty("range");

if (samplerFieldPair.Key == "range")
{
var rangeObj = (JObject)samplerFieldPair.Value;
-   sampler.range = new FloatRange(
-       rangeObj["minimum"].ToObject<float>(), rangeObj["maximum"].ToObject<float>());
+   var field = sampler.GetType().GetField(samplerFieldPair.Key);
+   var range = new FloatRange(rangeObj["minimum"].ToObject<float>(), rangeObj["maximum"].ToObject<float>());
+   field.SetValue(sampler, range);
-   field.SetValue(sampler, ((JValue)samplerFieldPair.Value).Value);
+   {
+       field.SetValue(sampler, JsonConvert.DeserializeObject(samplerFieldPair.Value.ToString(), field.FieldType));
+   }
}
}
}
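The serialization side now routes every public sampler field through JToken.FromObject, and deserialization reverses it with JsonConvert.DeserializeObject against the field's declared type, so FloatRange round-trips without the old property-based special case. A hedged sketch of what the write path amounts to (the JSON shape shown is an approximation):

using Newtonsoft.Json.Linq;

// Reflecting a sampler's public fields into JSON, as in the code above.
var sampler = new UniformSampler { range = new FloatRange(0f, 1f) };
var samplerObj = new JObject();
foreach (var field in sampler.GetType().GetFields())
    samplerObj.Add(new JProperty(field.Name, JToken.FromObject(field.GetValue(sampler))));
// samplerObj now resembles: { "range": { "minimum": 0.0, "maximum": 1.0 } }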

38
com.unity.perception/Runtime/Randomization/Scenarios/ScenarioBase.cs


{
static ScenarioBase s_ActiveScenario;
uint m_RandomState = SamplerUtility.largePrime;
const string k_ScenarioIterationMetricDefinitionId = "DB1B258E-D1D0-41B6-8751-16F601A2E230";
MetricDefinition m_IterationMetricDefinition;
IEnumerable<Randomizer> activeRandomizers
{

/// If true, this scenario will quit the Unity application when it's finished executing
/// </summary>
[HideInInspector] public bool quitOnComplete = true;
/// <summary>
/// The random state of the scenario
/// </summary>
public uint randomState => m_RandomState;
/// <summary>
/// The name of the Json file this scenario's constants are serialized to/from.

// Don't skip the first frame if executing on Unity Simulation
if (Configuration.Instance.IsSimulationRunningInCloud())
m_SkipFrame = false;
m_IterationMetricDefinition = DatasetCapture.RegisterMetricDefinition("scenario_iteration", "Iteration information for dataset sequences",
Guid.Parse(k_ScenarioIterationMetricDefinitionId));
}
void OnEnable()

Debug.Log($"No configuration file found at {defaultConfigFilePath}. " +
"Proceeding with built in scenario constants and randomizer settings.");
#endif
}
struct IterationMetricData
{
public int iteration;
}
void Update()

if (currentIterationFrame == 0)
{
DatasetCapture.StartNewSequence();
-   m_RandomState = SamplerUtility.IterateSeed((uint)currentIteration, genericConstants.randomSeed);
+   SamplerState.randomState = SamplerUtility.IterateSeed((uint)currentIteration, genericConstants.randomSeed);
DatasetCapture.ReportMetric(m_IterationMetricDefinition, new[]
{
new IterationMetricData()
{
iteration = currentIteration
}
});
foreach (var randomizer in activeRandomizers)
randomizer.IterationStart();
}

{
if (!randomizerType.IsSubclassOf(typeof(Randomizer)))
    throw new ScenarioException(
-       $"Cannot add non-randomizer type {randomizerType.Name} to randomizer list");
+       $"Cannot remove non-randomizer type {randomizerType.Name} from randomizer list");
var removed = false;
for (var i = 0; i < m_Randomizers.Count; i++)
{

var randomizer = m_Randomizers[currentIndex];
m_Randomizers.RemoveAt(currentIndex);
m_Randomizers.Insert(nextIndex, randomizer);
}
/// <summary>
/// Generates a new random state and overwrites the old random state with the newly generated value
/// </summary>
/// <returns>The newly generated random state</returns>
public uint NextRandomState()
{
m_RandomState = SamplerUtility.Hash32NonZero(m_RandomState);
return m_RandomState;
}
void ValidateParameters()

3
com.unity.perception/Runtime/Randomization/Scenarios/ScenarioConstants.cs


public class ScenarioConstants
{
/// <summary>
- /// The starting value initializing all random values sequences generated through Samplers, Parameters, and
+ /// The starting value initializing all random value sequences generated through Samplers, Parameters, and
/// Randomizers attached to a Scenario
/// </summary>
[Tooltip("The starting value initializing all random value sequences generated through Samplers, Parameters, and Randomizers attached to a Scenario")]
public uint randomSeed = SamplerUtility.largePrime;
}
}

9
com.unity.perception/Runtime/Randomization/Scenarios/UnitySimulationScenarioConstants.cs


public class UnitySimulationScenarioConstants : ScenarioConstants
{
/// <summary>
- /// The total number of iterations to run a scenario for
+ /// The total number of iterations to run a scenario for. At the start of each iteration, the timings for all Perception Cameras will be reset.
+ [Tooltip("The total number of iterations to run a scenario for. At the start of each iteration, the timings for all Perception Cameras will be reset.")]

/// <summary>
- /// The number of Unity Simulation instances assigned to executed this scenario
+ /// The number of Unity Simulation instances assigned to execute this scenario. The total number of iterations (N) will be divided by the number of instances (M), so each instance will run for N/M iterations.
+ [Tooltip("The number of Unity Simulation instances assigned to execute this scenario. The total number of iterations (N) will be divided by the number of instances (M), so each instance will run for N/M iterations.")]

/// <summary>
- /// The Unity Simulation instance index of the currently executing worker
+ /// The Unity Simulation instance index of the currently executing worker.
+ [Tooltip("The Unity Simulation instance index of the currently executing worker.")]
public int instanceIndex;
}
}
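The instance split described in the tooltip is plain integer division of the workload, e.g.:

// Illustrative numbers only.
const int totalIterations = 1000; // N, from UnitySimulationScenarioConstants
const int instanceCount = 4;      // M
var iterationsPerInstance = totalIterations / instanceCount; // 250 per worker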

2
com.unity.perception/Tests/Editor/DatasetCaptureEditorTests.cs


yield return new EnterPlayMode();
DatasetCapture.ResetSimulation();
var ego = DatasetCapture.RegisterEgo("ego");
-   var sensor = DatasetCapture.RegisterSensor(ego, "camera", "", 0.1f, 0);
+   var sensor = DatasetCapture.RegisterSensor(ego, "camera", "", 0, CaptureTriggerMode.Scheduled, 0.1f, 0);
sensor.ReportCapture("file.txt", new SensorSpatialData());
expectedDatasetPath = DatasetCapture.OutputDirectory;
yield return new ExitPlayMode();

259
com.unity.perception/Tests/Runtime/GroundTruthTests/DatasetCaptureSensorSchedulingTests.cs


using System.Collections;
using System.Collections.Generic;
using System.Linq;
using Unity.Mathematics;
using Random = UnityEngine.Random;
namespace GroundTruthTests
{

internal SimulationStateTestHelper()
{
var bindingFlags = BindingFlags.NonPublic | BindingFlags.Instance;
-   m_SequenceTimeOfNextCaptureMethod = m_State.GetType().GetMethod("SequenceTimeOfNextCapture", bindingFlags);
+   m_SequenceTimeOfNextCaptureMethod = m_State.GetType().GetMethod("GetSequenceTimeOfNextCapture", bindingFlags);
Debug.Assert(m_SequenceTimeOfNextCaptureMethod != null, "Couldn't find sequence time method.");
var sensorsField = m_State.GetType().GetField("m_Sensors", bindingFlags);
Debug.Assert(sensorsField != null, "Couldn't find internal sensors field");

public IEnumerator SequenceTimeOfNextCapture_ReportsCorrectTime()
{
var ego = DatasetCapture.RegisterEgo("ego");
-   var firstCaptureTime = 1.5f;
-   var period = .4f;
-   var sensorHandle = DatasetCapture.RegisterSensor(ego, "cam", "", period, firstCaptureTime);
+   var firstCaptureFrame = 2f;
+   var simulationDeltaTime = .4f;
+   var sensorHandle = DatasetCapture.RegisterSensor(ego, "cam", "", firstCaptureFrame, CaptureTriggerMode.Scheduled, simulationDeltaTime, 0);
+   var startTime = firstCaptureFrame * simulationDeltaTime;
    float[] sequenceTimesExpected =
    {
-       firstCaptureTime,
-       period + firstCaptureTime,
-       period * 2 + firstCaptureTime,
-       period * 3 + firstCaptureTime
+       startTime,
+       simulationDeltaTime + startTime,
+       simulationDeltaTime * 2 + startTime,
+       simulationDeltaTime * 3 + startTime
    };
-   for (var i = 0; i < sequenceTimesExpected.Length; i++)
+   for (var i = 0; i < firstCaptureFrame; i++)
+   {
+       // render the non-captured frames before firstCaptureFrame
+       yield return null;
+   }
+   for (var i = 0; i < sequenceTimesExpected.Length; i++)
+   {
+       yield return null;
+   }
}
[UnityTest]
public IEnumerator SequenceTimeOfNextCapture_WithInBetweenFrames_ReportsCorrectTime()
{
var ego = DatasetCapture.RegisterEgo("ego");
var firstCaptureFrame = 2;
var simulationDeltaTime = .4f;
var framesBetweenCaptures = 2;
var sensorHandle = DatasetCapture.RegisterSensor(ego, "cam", "", firstCaptureFrame, CaptureTriggerMode.Scheduled, simulationDeltaTime, framesBetweenCaptures);
var startingFrame = Time.frameCount;
var startTime = firstCaptureFrame * simulationDeltaTime;
var interval = (framesBetweenCaptures + 1) * simulationDeltaTime;
float[] sequenceTimesExpected =
{
startTime,
interval + startTime,
interval * 2 + startTime,
interval * 3 + startTime
};
int[] simulationFramesToCheck =
{
firstCaptureFrame,
firstCaptureFrame + (framesBetweenCaptures + 1),
firstCaptureFrame + (framesBetweenCaptures + 1) * 2,
firstCaptureFrame + (framesBetweenCaptures + 1) * 3,
};
int checkedFrame = 0;
var currentSimFrame = Time.frameCount - startingFrame;
while (currentSimFrame <= simulationFramesToCheck[simulationFramesToCheck.Length - 1] && checkedFrame < simulationFramesToCheck.Length)
{
currentSimFrame = Time.frameCount - startingFrame;
if (currentSimFrame == simulationFramesToCheck[checkedFrame])
{
var sensorData = m_TestHelper.GetSensorData(sensorHandle);
var sequenceTimeActual = m_TestHelper.CallSequenceTimeOfNextCapture(sensorData);
Assert.AreEqual(sequenceTimesExpected[checkedFrame], sequenceTimeActual, 0.0001f);
checkedFrame++;
}
else
{
yield return null;
}
}
}

var ego = DatasetCapture.RegisterEgo("ego");
-   var firstCaptureTime = 1.5f;
-   var period = .4f;
-   DatasetCapture.RegisterSensor(ego, "cam", "", period, firstCaptureTime);
+   var firstCaptureFrame = 2f;
+   var simulationDeltaTime = .4f;
+   DatasetCapture.RegisterSensor(ego, "cam", "", firstCaptureFrame, CaptureTriggerMode.Scheduled, simulationDeltaTime, 0);
    float[] deltaTimeSamplesExpected =
    {
-       firstCaptureTime,
-       period,
-       period,
-       period
+       simulationDeltaTime,
+       simulationDeltaTime,
+       simulationDeltaTime,
+       simulationDeltaTime
};
float[] deltaTimeSamples = new float[deltaTimeSamplesExpected.Length];
for (int i = 0; i < deltaTimeSamples.Length; i++)

public IEnumerator FramesScheduled_WithTimeScale_ResultsInProperDeltaTime()
{
var ego = DatasetCapture.RegisterEgo("ego");
-   var firstCaptureTime = 2f;
-   var period = 1f;
+   var firstCaptureFrame = 2f;
+   var simulationDeltaTime = 1f;
-   DatasetCapture.RegisterSensor(ego, "cam", "", period, firstCaptureTime);
+   DatasetCapture.RegisterSensor(ego, "cam", "", firstCaptureFrame, CaptureTriggerMode.Scheduled, simulationDeltaTime, 0);
    float[] deltaTimeSamplesExpected =
    {
-       timeScale * firstCaptureTime,
-       timeScale * period,
-       timeScale * period,
-       timeScale * period
+       timeScale * simulationDeltaTime,
+       timeScale * simulationDeltaTime,
+       timeScale * simulationDeltaTime,
+       timeScale * simulationDeltaTime
};
float[] deltaTimeSamples = new float[deltaTimeSamplesExpected.Length];
for (int i = 0; i < deltaTimeSamples.Length; i++)

public IEnumerator ChangingTimeScale_CausesDebugError()
{
var ego = DatasetCapture.RegisterEgo("ego");
-   DatasetCapture.RegisterSensor(ego, "cam", "", 1f, 2f);
+   DatasetCapture.RegisterSensor(ego, "cam", "", 2f, CaptureTriggerMode.Scheduled, 1, 0);
yield return null;
Time.timeScale = 5;

public IEnumerator ChangingTimeScale_DuringStartNewSequence_Succeeds()
{
var ego = DatasetCapture.RegisterEgo("ego");
-   DatasetCapture.RegisterSensor(ego, "cam", "", 1f, 2f);
+   DatasetCapture.RegisterSensor(ego, "cam", "", 2f, CaptureTriggerMode.Scheduled, 1, 0);
yield return null;
Time.timeScale = 1;

public IEnumerator FramesScheduled_WithChangingTimeScale_ResultsInProperDeltaTime()
{
var ego = DatasetCapture.RegisterEgo("ego");
-   var firstCaptureTime = 2f;
-   var period = 1f;
+   var firstCaptureFrame = 2f;
+   var simulationDeltaTime = 1f;
float[] newTimeScalesPerFrame =
{
2f,

};
-   DatasetCapture.RegisterSensor(ego, "cam", "", period, firstCaptureTime);
+   DatasetCapture.RegisterSensor(ego, "cam", "", firstCaptureFrame, CaptureTriggerMode.Scheduled, 1, 0);
    float[] deltaTimeSamplesExpected =
    {
-       newTimeScalesPerFrame[0] * firstCaptureTime,
-       newTimeScalesPerFrame[1] * period,
-       newTimeScalesPerFrame[2] * period,
-       newTimeScalesPerFrame[3] * period
+       newTimeScalesPerFrame[0] * simulationDeltaTime,
+       newTimeScalesPerFrame[1] * simulationDeltaTime,
+       newTimeScalesPerFrame[2] * simulationDeltaTime,
+       newTimeScalesPerFrame[3] * simulationDeltaTime
};
float[] deltaTimeSamples = new float[deltaTimeSamplesExpected.Length];
for (int i = 0; i < deltaTimeSamples.Length; i++)

public IEnumerator ResetSimulation_ResetsCaptureDeltaTime()
{
var ego = DatasetCapture.RegisterEgo("ego");
-   DatasetCapture.RegisterSensor(ego, "cam", "", 4, 10);
+   DatasetCapture.RegisterSensor(ego, "cam", "", 0, CaptureTriggerMode.Scheduled, 5, 0);
-   Assert.AreEqual(10, Time.captureDeltaTime);
+   Assert.AreEqual(5, Time.captureDeltaTime);
-   public IEnumerator ShouldCaptureThisFrame_ReturnsTrueOnProperFrames()
+   public IEnumerator ShouldCaptureFlagsAndRenderTimesAreCorrectWithMultipleSensors()
-   var firstCaptureTime1 = 10;
-   var frequencyInMs1 = 4;
-   var sensor1 = DatasetCapture.RegisterSensor(ego, "cam", "1", frequencyInMs1, firstCaptureTime1);
+   var firstCaptureFrame1 = 2;
+   var simDeltaTime1 = 4;
+   var framesBetweenCaptures1 = 2;
+   var sensor1 = DatasetCapture.RegisterSensor(ego, "cam", "1", firstCaptureFrame1, CaptureTriggerMode.Scheduled, simDeltaTime1, framesBetweenCaptures1);
-   var firstCaptureTime2 = 10;
-   var frequencyInMs2 = 6;
-   var sensor2 = DatasetCapture.RegisterSensor(ego, "cam", "2", frequencyInMs2, firstCaptureTime2);
+   var firstCaptureFrame2 = 1;
+   var simDeltaTime2 = 6;
+   var framesBetweenCaptures2 = 1;
+   var sensor2 = DatasetCapture.RegisterSensor(ego, "cam", "2", firstCaptureFrame2, CaptureTriggerMode.Scheduled, simDeltaTime2, framesBetweenCaptures2);
-   var sensor3 = DatasetCapture.RegisterSensor(ego, "cam", "3", 1, 1);
-   sensor3.Enabled = false;
+   // Third sensor is a manually triggered one. All it does in this test is affect delta times.
+   var simDeltaTime3 = 5;
+   var sensor3 = DatasetCapture.RegisterSensor(ego, "cam", "3", 0, CaptureTriggerMode.Manual, simDeltaTime3, 0, true);
-   (float deltaTime, bool sensor1ShouldCapture, bool sensor2ShouldCapture)[] samplesExpected =
+   (float deltaTime, bool sensor1ShouldCapture, bool sensor2ShouldCapture, bool sensor3ShouldCapture)[] samplesExpected =
    {
-       ((float)firstCaptureTime1, true, true),
-       (4, true, false),
-       (2, false, true),
-       (2, true, false),
-       (4, true, true)
+       (4, false, false, false), // Simulation time since sensors created: 4
+       (1, false, false, false), // 5
+       (1, false, true, false), // 6
+       (2, true, false, false), // 8
+       (2, false, false, false), // 10
+       (2, false, false, false), // 12
+       (3, false, false, false), // 15
+       (1, false, false, false), // 16
+       (2, false, true, false), // 18
+       (2, true, false, false), // 20
+       (4, false, false, false), // 24
+       (1, false, false, false), // 25
    };
-   var samplesActual = new(float deltaTime, bool sensor1ShouldCapture, bool sensor2ShouldCapture)[samplesExpected.Length];
+   var samplesActual = new (float deltaTime, bool sensor1ShouldCapture, bool sensor2ShouldCapture, bool sensor3ShouldCapture)[samplesExpected.Length];
-   samplesActual[i] = (Time.deltaTime, sensor1.ShouldCaptureThisFrame, sensor2.ShouldCaptureThisFrame);
+   samplesActual[i] = (Time.deltaTime, sensor1.ShouldCaptureThisFrame, sensor2.ShouldCaptureThisFrame, sensor3.ShouldCaptureThisFrame);
[Test]
public void Enabled_StartsTrue()
[UnityTest]
public IEnumerator SequenceTimeOfManualCapture_ReportsCorrectTime_ManualSensorDoesNotAffectTimings()
{
var ego = DatasetCapture.RegisterEgo("ego");
var sensorHandle = DatasetCapture.RegisterSensor(ego, "cam", "", 0, CaptureTriggerMode.Manual, 0, 0, false);
var framesToCaptureOn = new List<int>();
var startFrame = Time.frameCount;
var startTime = Time.time;
while (framesToCaptureOn.Count < 10)
{
var randomFrame = Random.Range(startFrame, startFrame + 100);
if(!framesToCaptureOn.Contains(randomFrame))
framesToCaptureOn.Add(randomFrame);
}
framesToCaptureOn.Sort();
var frameIndex = 0;
for (var i = 0; i < framesToCaptureOn.Max(); i++)
{
if (frameIndex == framesToCaptureOn.Count)
break;
if (Time.frameCount == framesToCaptureOn[frameIndex])
{
frameIndex++;
sensorHandle.RequestCapture();
var sensorData = m_TestHelper.GetSensorData(sensorHandle);
var sequenceTimeActual = m_TestHelper.CallSequenceTimeOfNextCapture(sensorData);
var elapsed = Time.time - startTime;
Assert.AreEqual(elapsed, sequenceTimeActual, 0.0001f);
}
yield return null;
}
Assert.AreEqual(frameIndex, framesToCaptureOn.Count, 0.0001f);
}
[UnityTest]
public IEnumerator SequenceTimeOfManualCapture_ReportsCorrectTime_ManualSensorAffectsTimings()
-   var sensor1 = DatasetCapture.RegisterSensor(DatasetCapture.RegisterEgo(""), "cam", "1", 1, 1);
-   Assert.IsTrue(sensor1.Enabled);
+   var ego = DatasetCapture.RegisterEgo("ego");
+   var simulationDeltaTime = 0.05f;
+   var sensorHandle = DatasetCapture.RegisterSensor(ego, "cam", "", 0, CaptureTriggerMode.Manual, simulationDeltaTime, 0, true);
var framesToCaptureOn = new List<int>();
var startFrame = Time.frameCount;
var startTime = Time.time;
while (framesToCaptureOn.Count < 10)
{
var randomFrame = Random.Range(startFrame, startFrame + 100);
if(!framesToCaptureOn.Contains(randomFrame))
framesToCaptureOn.Add(randomFrame);
}
framesToCaptureOn.Sort();
float[] sequenceTimesExpected = new float[framesToCaptureOn.Count];
for (int i = 0; i < sequenceTimesExpected.Length; i++)
{
sequenceTimesExpected[i] = (framesToCaptureOn[i] - startFrame) * simulationDeltaTime;
}
var frameIndex = 0;
for (var i = 0; i < framesToCaptureOn.Max(); i++)
{
if (frameIndex == framesToCaptureOn.Count)
break;
if (Time.frameCount == framesToCaptureOn[frameIndex])
{
sensorHandle.RequestCapture();
var sensorData = m_TestHelper.GetSensorData(sensorHandle);
var sequenceTimeActual = m_TestHelper.CallSequenceTimeOfNextCapture(sensorData);
Assert.AreEqual(sequenceTimesExpected[frameIndex], sequenceTimeActual, 0.0001f);
frameIndex++;
}
yield return null;
}
Assert.AreEqual(frameIndex, framesToCaptureOn.Count, 0.0001f);
}
}
}
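Every RegisterSensor call in these tests migrates to the frame-based signature used throughout this commit: (ego, modality, description, firstCaptureFrame, CaptureTriggerMode, simulationDeltaTime, framesBetweenCaptures[, manualSensorsAffectTiming]). A hedged sketch of the two trigger modes side by side, assembled from the calls above:

var ego = DatasetCapture.RegisterEgo("ego");

// Scheduled: first capture at frame 2, then every 3rd frame (framesBetweenCaptures = 2),
// with 0.1 s of simulation time per frame.
var scheduled = DatasetCapture.RegisterSensor(
    ego, "camera", "", 2, CaptureTriggerMode.Scheduled, 0.1f, 2);

// Manual: captures only on RequestCapture(); the trailing true makes this sensor
// affect simulation timings, as in the manual-capture tests above.
var manual = DatasetCapture.RegisterSensor(
    ego, "camera", "", 0, CaptureTriggerMode.Manual, 0.1f, 0, true);
manual.RequestCapture();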

40
com.unity.perception/Tests/Runtime/GroundTruthTests/DatasetCaptureTests.cs


}}";
var ego = DatasetCapture.RegisterEgo(egoDescription);
-   var sensorHandle = DatasetCapture.RegisterSensor(ego, modality, sensorDescription, 1, 1);
+   var sensorHandle = DatasetCapture.RegisterSensor(ego, modality, sensorDescription, 1, CaptureTriggerMode.Scheduled, 1, 0);
Assert.IsTrue(sensorHandle.IsValid);
DatasetCapture.ResetSimulation();
Assert.IsFalse(sensorHandle.IsValid);

}}";
var ego = DatasetCapture.RegisterEgo("");
-   var sensorHandle = DatasetCapture.RegisterSensor(ego, "camera", "", 1, 0);
+   var sensorHandle = DatasetCapture.RegisterSensor(ego, "camera", "", 0, CaptureTriggerMode.Scheduled, 1, 0);
var sensorSpatialData = new SensorSpatialData(new Pose(egoPosition, egoRotation), new Pose(position, rotation), egoVelocity, null);
sensorHandle.ReportCapture(filename, sensorSpatialData, ("camera_intrinsic", intrinsics));

};
var ego = DatasetCapture.RegisterEgo("");
-   var sensorHandle = DatasetCapture.RegisterSensor(ego, "", "", 2, 0);
+   var sensorHandle = DatasetCapture.RegisterSensor(ego, "", "", 0, CaptureTriggerMode.Scheduled, 2, 0);
var sensorSpatialData = new SensorSpatialData(default, default, null, null);
Assert.IsTrue(sensorHandle.ShouldCaptureThisFrame);
sensorHandle.ReportCapture("f", sensorSpatialData);

]";
var ego = DatasetCapture.RegisterEgo("");
-   var sensorHandle = DatasetCapture.RegisterSensor(ego, "", "", 1, 0);
+   var sensorHandle = DatasetCapture.RegisterSensor(ego, "", "", 0, CaptureTriggerMode.Scheduled, 1, 0);
sensorHandle.ReportCapture(filename, default);
var annotationDefinition = DatasetCapture.RegisterAnnotationDefinition("semantic segmentation", "pixel-wise semantic segmentation label", "PNG", annotationDefinitionGuid);
sensorHandle.ReportAnnotationFile(annotationDefinition, "annotations/semantic_segmentation_000.png");

var ego = DatasetCapture.RegisterEgo("");
var annotationDefinition = DatasetCapture.RegisterAnnotationDefinition("");
-   var sensorHandle = DatasetCapture.RegisterSensor(ego, "", "", 1, 0);
+   var sensorHandle = DatasetCapture.RegisterSensor(ego, "", "", 0, CaptureTriggerMode.Scheduled, 1, 0);
sensorHandle.ReportAnnotationValues(annotationDefinition, values);
DatasetCapture.ResetSimulation();

{
var ego = DatasetCapture.RegisterEgo("");
var annotationDefinition = DatasetCapture.RegisterAnnotationDefinition("");
-   var sensorHandle = DatasetCapture.RegisterSensor(ego, "", "", 1, 100);
+   var sensorHandle = DatasetCapture.RegisterSensor(ego, "", "", 100, CaptureTriggerMode.Scheduled, 1, 0);
Assert.Throws<InvalidOperationException>(() => sensorHandle.ReportAnnotationFile(annotationDefinition, ""));
}

var ego = DatasetCapture.RegisterEgo("");
var annotationDefinition = DatasetCapture.RegisterAnnotationDefinition("");
-   var sensorHandle = DatasetCapture.RegisterSensor(ego, "", "", 1, 100);
+   var sensorHandle = DatasetCapture.RegisterSensor(ego, "", "", 100, CaptureTriggerMode.Scheduled, 1, 0);
Assert.Throws<InvalidOperationException>(() => sensorHandle.ReportAnnotationValues(annotationDefinition, new int[0]));
}

var ego = DatasetCapture.RegisterEgo("");
var annotationDefinition = DatasetCapture.RegisterAnnotationDefinition("");
-   var sensorHandle = DatasetCapture.RegisterSensor(ego, "", "", 1, 100);
+   var sensorHandle = DatasetCapture.RegisterSensor(ego, "", "", 100, CaptureTriggerMode.Scheduled, 1, 0);
Assert.Throws<InvalidOperationException>(() => sensorHandle.ReportAnnotationAsync(annotationDefinition));
}

var ego = DatasetCapture.RegisterEgo("");
var annotationDefinition = DatasetCapture.RegisterAnnotationDefinition("");
-   var sensorHandle = DatasetCapture.RegisterSensor(ego, "", "", 1, 0);
+   var sensorHandle = DatasetCapture.RegisterSensor(ego, "", "", 0, CaptureTriggerMode.Scheduled, 1, 0);
sensorHandle.ReportAnnotationAsync(annotationDefinition);
DatasetCapture.ResetSimulation();
LogAssert.Expect(LogType.Error, new Regex("Simulation ended with pending .*"));

var ego = DatasetCapture.RegisterEgo("");
var annotationDefinition = DatasetCapture.RegisterAnnotationDefinition("");
-   var sensorHandle = DatasetCapture.RegisterSensor(ego, "", "", 1, 0);
+   var sensorHandle = DatasetCapture.RegisterSensor(ego, "", "", 0, CaptureTriggerMode.Scheduled, 1, 0);
var asyncAnnotation = sensorHandle.ReportAnnotationAsync(annotationDefinition);
Assert.IsTrue(asyncAnnotation.IsValid);

var ego = DatasetCapture.RegisterEgo("");
var annotationDefinition = DatasetCapture.RegisterAnnotationDefinition("");
-   var sensorHandle = DatasetCapture.RegisterSensor(ego, "", "", 1, 0);
+   var sensorHandle = DatasetCapture.RegisterSensor(ego, "", "", 0, CaptureTriggerMode.Scheduled, 1, 0);
var asyncAnnotation = sensorHandle.ReportAnnotationAsync(annotationDefinition);
Assert.IsTrue(asyncAnnotation.IsPending);

var ego = DatasetCapture.RegisterEgo("");
var annotationDefinition = DatasetCapture.RegisterAnnotationDefinition("");
-   var sensorHandle = DatasetCapture.RegisterSensor(ego, "", "", 1, 0);
+   var sensorHandle = DatasetCapture.RegisterSensor(ego, "", "", 0, CaptureTriggerMode.Scheduled, 1, 0);
var asyncAnnotation = sensorHandle.ReportAnnotationAsync(annotationDefinition);
Assert.IsTrue(asyncAnnotation.IsPending);

var ego = DatasetCapture.RegisterEgo("");
var annotationDefinition = DatasetCapture.RegisterAnnotationDefinition("");
-   var sensorHandle = DatasetCapture.RegisterSensor(ego, "", "", 1, 0);
+   var sensorHandle = DatasetCapture.RegisterSensor(ego, "", "", 0, CaptureTriggerMode.Scheduled, 1, 0);
// Record one capture for this frame
sensorHandle.ReportCapture(fileName, default);

{
var ego = DatasetCapture.RegisterEgo("");
var metricDefinition = DatasetCapture.RegisterMetricDefinition("");
-   var sensorHandle = DatasetCapture.RegisterSensor(ego, "", "", 1, 100);
+   var sensorHandle = DatasetCapture.RegisterSensor(ego, "", "", 100, CaptureTriggerMode.Scheduled, 1, 0);
Assert.Throws<InvalidOperationException>(() => sensorHandle.ReportMetric(metricDefinition, new int[0]));
}

var ego = DatasetCapture.RegisterEgo("");
var metricDefinition = DatasetCapture.RegisterMetricDefinition("");
-   var sensorHandle = DatasetCapture.RegisterSensor(ego, "", "", 1, 100);
+   var sensorHandle = DatasetCapture.RegisterSensor(ego, "", "", 100, CaptureTriggerMode.Scheduled, 1, 0);
Assert.Throws<InvalidOperationException>(() => sensorHandle.ReportMetricAsync(metricDefinition));
}

var ego = DatasetCapture.RegisterEgo("");
var metricDefinition = DatasetCapture.RegisterMetricDefinition("");
-   var sensorHandle = DatasetCapture.RegisterSensor(ego, "", "", 1, 0);
+   var sensorHandle = DatasetCapture.RegisterSensor(ego, "", "", 0, CaptureTriggerMode.Scheduled, 1, 0);
sensorHandle.ReportMetricAsync(metricDefinition);
DatasetCapture.ResetSimulation();
LogAssert.Expect(LogType.Error, new Regex("Simulation ended with pending .*"));

var ego = DatasetCapture.RegisterEgo("");
var metricDefinition = DatasetCapture.RegisterMetricDefinition("");
-   var sensorHandle = DatasetCapture.RegisterSensor(ego, "", "", 1, 0);
+   var sensorHandle = DatasetCapture.RegisterSensor(ego, "", "", 0, CaptureTriggerMode.Scheduled, 1, 0);
var asyncMetric = sensorHandle.ReportMetricAsync(metricDefinition);
Assert.IsTrue(asyncMetric.IsValid);

var expectedLine = @"""step"": 0";
var metricDefinition = DatasetCapture.RegisterMetricDefinition("");
-   DatasetCapture.RegisterSensor(DatasetCapture.RegisterEgo(""), "", "", 1, 0);
+   DatasetCapture.RegisterSensor(DatasetCapture.RegisterEgo(""), "", "", 0, CaptureTriggerMode.Scheduled, 1, 0);
yield return null;
yield return null;

var expectedLine = @"""step"": 0";
var metricDefinition = DatasetCapture.RegisterMetricDefinition("");
-   var sensor = DatasetCapture.RegisterSensor(DatasetCapture.RegisterEgo(""), "", "", 1, 0);
+   var sensor = DatasetCapture.RegisterSensor(DatasetCapture.RegisterEgo(""), "", "", 0, CaptureTriggerMode.Scheduled, 1, 0);
yield return null;
sensor.ReportMetric(metricDefinition, values);

}}";
var metricDefinition = DatasetCapture.RegisterMetricDefinition("");
-   var sensor = DatasetCapture.RegisterSensor(DatasetCapture.RegisterEgo(""), "", "", 1, 0);
+   var sensor = DatasetCapture.RegisterSensor(DatasetCapture.RegisterEgo(""), "", "", 0, CaptureTriggerMode.Scheduled, 1, 0);
var annotation = sensor.ReportAnnotationFile(DatasetCapture.RegisterAnnotationDefinition(""), "");
var valuesJsonArray = JArray.FromObject(values).ToString(Formatting.Indented);
if (async)

12
com.unity.perception/Tests/Runtime/GroundTruthTests/PerceptionCameraIntegrationTests.cs


[UnityTest]
public IEnumerator EnableSemanticSegmentation_GeneratesCorrectDataset([Values(true, false)] bool enabled)
{
+   SemanticSegmentationLabeler semanticSegmentationLabeler = null;
-   pc.AddLabeler(new SemanticSegmentationLabeler(CreateSemanticSegmentationLabelConfig()));
+   semanticSegmentationLabeler = new SemanticSegmentationLabeler(CreateSemanticSegmentationLabelConfig());
+   pc.AddLabeler(semanticSegmentationLabeler);
}, enabled);
string expectedImageFilename = $"segmentation_{Time.frameCount}.png";

{
var capturesPath = Path.Combine(DatasetCapture.OutputDirectory, "captures_000.json");
var capturesJson = File.ReadAllText(capturesPath);
-   var imagePath = $"SemanticSegmentation/{expectedImageFilename}";
+   var imagePath = $"{semanticSegmentationLabeler.m_SemanticSegmentationDirectory}/{expectedImageFilename}";
StringAssert.Contains(imagePath, capturesJson);
}
else

[UnityTest]
public IEnumerator Disabled_GeneratesCorrectDataset()
{
+   SemanticSegmentationLabeler semanticSegmentationLabeler = null;
-   pc.AddLabeler(new SemanticSegmentationLabeler(CreateSemanticSegmentationLabelConfig()));
+   semanticSegmentationLabeler = new SemanticSegmentationLabeler(CreateSemanticSegmentationLabelConfig());
+   pc.AddLabeler(semanticSegmentationLabeler);
});
string expectedImageFilename = $"segmentation_{Time.frameCount}.png";

var capturesPath = Path.Combine(DatasetCapture.OutputDirectory, "captures_000.json");
var capturesJson = File.ReadAllText(capturesPath);
-   var imagePath = $"SemanticSegmentation/{expectedImageFilename}";
+   var imagePath = $"{semanticSegmentationLabeler.m_SemanticSegmentationDirectory}/{expectedImageFilename}";
StringAssert.Contains(imagePath, capturesJson);
}

19
com.unity.perception/Tests/Runtime/Randomization/ParameterTests/StructParameterTests.cs


}
[Test]
-   public void CorrectNumberOfNativeSamplesAreGenerated()
+   public void CorrectNumberOfSamplesAreGenerated()
-   test.GeneratesNativeSamples();
+   test.GeneratesSamples();
-   public abstract void GeneratesNativeSamples();
+   public abstract void GeneratesSamples();
}
public class NumericParameterTest<T> : BaseStructParameterTest where T : struct

m_Parameter = parameter;
}
-   public override void GeneratesNativeSamples()
+   public override void GeneratesSamples()
var nativeSamples = m_Parameter.Samples(TestValues.TestSampleCount, out var handle);
handle.Complete();
Assert.AreEqual(nativeSamples.Length, TestValues.TestSampleCount);
nativeSamples.Dispose();
var samples = new T[TestValues.TestSampleCount];
for (var i = 0; i < samples.Length; i++)
{
samples[i] = m_Parameter.Sample();
}
Assert.AreEqual(samples.Length, TestValues.TestSampleCount);
}
}
}

14
com.unity.perception/Tests/Runtime/Randomization/RandomizerTests/RandomizerTagTests.cs


[TestFixture]
public class RandomizerTagTests
{
public class ParentTag : RandomizerTag { }
public class ChildTag : ParentTag { }
GameObject m_TestObject;
FixedLengthScenario m_Scenario;

{
const int copyCount = 5;
var gameObject = new GameObject();
-   gameObject.AddComponent<ExampleTag>();
+   gameObject.AddComponent<ParentTag>();
-   gameObject2.AddComponent<ExampleTag2>();
+   gameObject2.AddComponent<ChildTag>();
-   var queriedObjects = tagManager.Query<ExampleTag>().ToArray();
+   var queriedObjects = tagManager.Query<ParentTag>().ToArray();
-   queriedObjects = tagManager.Query<ExampleTag2>().ToArray();
+   queriedObjects = tagManager.Query<ChildTag>().ToArray();
-   queriedObjects = tagManager.Query<ExampleTag>(true).ToArray();
+   queriedObjects = tagManager.Query<ParentTag>(true).ToArray();
Assert.AreEqual(queriedObjects.Length, copyCount * 2);
}
}

21
com.unity.perception/Tests/Runtime/Randomization/SamplerTests/NormalSamplerTests.cs


namespace RandomizationTests.SamplerTests
{
[TestFixture]
- public class NormalSamplerTests : RangedSamplerTests<NormalSampler>
+ public class NormalSamplerTestsBase : SamplerTestsBase<NormalSampler>
- public NormalSamplerTests()
+ public NormalSamplerTestsBase()
}
[Test]
public void SamplesInRange()
{
var samples = new float[k_TestSampleCount];
for (var i = 0; i < samples.Length; i++)
{
samples[i] = m_Sampler.Sample();
}
Assert.AreEqual(samples.Length, k_TestSampleCount);
for (var i = 0; i < samples.Length; i++)
{
Assert.GreaterOrEqual(samples[i], m_Sampler.range.minimum);
Assert.LessOrEqual(samples[i], m_Sampler.range.maximum);
}
}
}
}

60
com.unity.perception/Tests/Runtime/Randomization/SamplerTests/SamplerTestsBase.cs


namespace RandomizationTests.SamplerTests
{
- public abstract class RangedSamplerTests<T> where T : ISampler
+ public abstract class SamplerTestsBase<T> where T : ISampler
- const int k_TestSampleCount = 30;
+ protected const int k_TestSampleCount = 30;
- T m_Sampler;
+ protected T m_Sampler;
GameObject m_ScenarioObj;
- static ScenarioBase activeScenario => ScenarioBase.activeScenario;

}
-   [Test]
-   public void SamplesInRange()
-   {
-       var samples = m_Sampler.Samples(k_TestSampleCount, out var handle);
-       handle.Complete();
-       Assert.AreEqual(samples.Length, k_TestSampleCount);
-       foreach (var sample in samples)
-       {
-           Assert.GreaterOrEqual(sample, m_Sampler.range.minimum);
-           Assert.LessOrEqual(sample, m_Sampler.range.maximum);
-       }
-       samples.Dispose();
-   }
+   [Test]
+   public void NativeSamplesInRange()
+   {
+       var samples = m_Sampler.Samples(k_TestSampleCount, out var handle);
+       handle.Complete();
+       Assert.AreEqual(samples.Length, k_TestSampleCount);
+       foreach (var sample in samples)
+       {
+           Assert.GreaterOrEqual(sample, m_Sampler.range.minimum);
+           Assert.LessOrEqual(sample, m_Sampler.range.maximum);
+       }
+       samples.Dispose();
+   }
[Test]
-   var state0 = activeScenario.randomState;
+   var state0 = SamplerState.randomState;
-   var state1 = activeScenario.randomState;
+   var state1 = SamplerState.randomState;
-   var state2 = activeScenario.randomState;;
+   var state2 = SamplerState.randomState;
Assert.AreNotEqual(state0, state1);
Assert.AreNotEqual(state1, state2);
}
[Test]
public void ConsecutiveSampleBatchesChangesState()
{
var state0 = activeScenario.randomState;
var samples1 = m_Sampler.Samples(k_TestSampleCount, out var handle1);
var state1 = activeScenario.randomState;
var samples2 = m_Sampler.Samples(k_TestSampleCount, out var handle2);
var state2 = activeScenario.randomState;
var state2 = SamplerState.randomState;;
JobHandle.CombineDependencies(handle1, handle2).Complete();
Assert.AreEqual(samples1.Length, samples2.Length);
Assert.AreNotEqual(samples1[0], samples2[0]);
samples1.Dispose();
samples2.Dispose();
}
}
}
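Why the batch test can assert inequality: each Samples() call draws a fresh seed from the shared sampler state (the SamplerState class added later in this changeset), so consecutive batches start from different states. A sketch under the same assumptions as these tests; the UniformSampler(min, max) constructor is assumed:

using Unity.Jobs;
using UnityEngine;
using UnityEngine.Experimental.Perception.Randomization.Samplers;

static class BatchStateSketch
{
    static void Demo()
    {
        var sampler = new UniformSampler(0f, 1f); // assumed constructor: (min, max)

        var before = SamplerState.randomState;
        var batch1 = sampler.Samples(30, out var handle1);
        var after = SamplerState.randomState;     // already advanced by the first call
        var batch2 = sampler.Samples(30, out var handle2);

        JobHandle.CombineDependencies(handle1, handle2).Complete();
        Debug.Assert(before != after);        // each batch mutates the shared state
        Debug.Assert(batch1[0] != batch2[0]); // so the two batches differ
        batch1.Dispose();
        batch2.Dispose();
    }
}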

21
com.unity.perception/Tests/Runtime/Randomization/SamplerTests/UniformSamplerTests.cs


namespace RandomizationTests.SamplerTests
{
    [TestFixture]
-   public class UniformSamplerTests : RangedSamplerTests<UniformSampler>
+   public class UniformSamplerTestsBase : SamplerTestsBase<UniformSampler>
-       public UniformSamplerTests()
+       public UniformSamplerTestsBase()
        }

+       [Test]
+       public void SamplesInRange()
+       {
+           var samples = new float[k_TestSampleCount];
+           for (var i = 0; i < samples.Length; i++)
+           {
+               samples[i] = m_Sampler.Sample();
+           }
+           Assert.AreEqual(samples.Length, k_TestSampleCount);
+           for (var i = 0; i < samples.Length; i++)
+           {
+               Assert.GreaterOrEqual(samples[i], m_Sampler.range.minimum);
+               Assert.LessOrEqual(samples[i], m_Sampler.range.maximum);
+           }
+       }
    }
}

7
com.unity.perception/Tests/Runtime/Randomization/ScenarioTests.cs


using NUnit.Framework;
using UnityEngine;
using UnityEngine.Experimental.Perception.Randomization.Randomizers.SampleRandomizers;
+using UnityEngine.Experimental.Perception.Randomization.Samplers;
using UnityEngine.Perception.GroundTruth;
using UnityEngine.Experimental.Perception.Randomization.Scenarios;
using UnityEngine.TestTools;

            // Second frame, first iteration
            yield return null;
-           Assert.AreEqual(DatasetCapture.SimulationState.SequenceTime, perceptionCamera.period);
+           Assert.AreEqual(DatasetCapture.SimulationState.SequenceTime, perceptionCamera.simulationDeltaTime);
            // Third frame, second iteration, SequenceTime has been reset
            yield return null;

            yield return CreateNewScenario(3, 1);
            var seeds = new uint[3];
            for (var i = 0; i < 3; i++)
-               seeds[i] = m_Scenario.NextRandomState();
+               seeds[i] = SamplerState.NextRandomState();
-           Assert.AreNotEqual(seeds[i], m_Scenario.NextRandomState());
+           Assert.AreNotEqual(seeds[i], SamplerState.NextRandomState());
        }

        PerceptionCamera SetupPerceptionCamera()

6
com.unity.perception/package.json


"com.unity.render-pipelines.core": "7.1.6",
"com.unity.burst": "1.3.9",
"com.unity.entities": "0.8.0-preview.8",
"com.unity.simulation.client": "0.0.10-preview.9",
"com.unity.simulation.capture": "0.0.10-preview.14",
"com.unity.simulation.core": "0.0.10-preview.20"
"com.unity.simulation.client": "0.0.10-preview.10",
"com.unity.simulation.capture": "0.0.10-preview.16",
"com.unity.simulation.core": "0.0.10-preview.21"
},
"description": "Tools for generating large-scale data sets for perception-based machine learning training and validation",
"displayName": "Perception",

99
com.unity.perception/Editor/Randomization/Editors/PerceptionEditorAnalytics.cs


using System;
using JetBrains.Annotations;
using UnityEditor;
using UnityEngine.Analytics;

namespace UnityEngine.Perception.Randomization.Editor
{
    static class PerceptionEditorAnalytics
    {
        static int k_MaxItems = 100;
        static int k_MaxEventsPerHour = 100;
        const string k_VendorKey = "unity.perception";
        const string k_RunInUnitySimulationName = "runinunitysimulation";
        static bool s_IsRegistered = false;

        static bool TryRegisterEvents()
        {
            if (s_IsRegistered)
                return true;
            bool success = true;
            success &= EditorAnalytics.RegisterEventWithLimit(k_RunInUnitySimulationName, k_MaxEventsPerHour, k_MaxItems,
                k_VendorKey) == AnalyticsResult.Ok;
            s_IsRegistered = success;
            return success;
        }

        enum RunStatus
        {
            Started,
            Failed,
            Succeeded
        }

        struct RunInUnitySimulationData
        {
            [UsedImplicitly]
            public string runId;
            [UsedImplicitly]
            public int totalIterations;
            [UsedImplicitly]
            public int instanceCount;
            [UsedImplicitly]
            public string existingBuildId;
            [UsedImplicitly]
            public string errorMessage;
            [UsedImplicitly]
            public string runExecutionId;
            [UsedImplicitly]
            public string runStatus;
        }

        public static void ReportRunInUnitySimulationStarted(Guid runId, int totalIterations, int instanceCount, string existingBuildId)
        {
            if (!TryRegisterEvents())
                return;
            var data = new RunInUnitySimulationData()
            {
                runId = runId.ToString(),
                totalIterations = totalIterations,
                instanceCount = instanceCount,
                existingBuildId = existingBuildId,
                runStatus = RunStatus.Started.ToString()
            };
            EditorAnalytics.SendEventWithLimit(k_RunInUnitySimulationName, data);
        }

        public static void ReportRunInUnitySimulationFailed(Guid runId, string errorMessage)
        {
            if (!TryRegisterEvents())
                return;
            var data = new RunInUnitySimulationData()
            {
                runId = runId.ToString(),
                errorMessage = errorMessage,
                runStatus = RunStatus.Failed.ToString()
            };
            EditorAnalytics.SendEventWithLimit(k_RunInUnitySimulationName, data);
        }

        public static void ReportRunInUnitySimulationSucceeded(Guid runId, string runExecutionId)
        {
            if (!TryRegisterEvents())
                return;
            var data = new RunInUnitySimulationData()
            {
                runId = runId.ToString(),
                runExecutionId = runExecutionId,
                runStatus = RunStatus.Succeeded.ToString()
            };
            EditorAnalytics.SendEventWithLimit(k_RunInUnitySimulationName, data);
        }
    }
}
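The three Report* methods are meant to bracket one run, correlated by the shared Guid. A sketch of the intended call sequence from code in the same editor assembly; StartSimulationRun is a hypothetical placeholder for the actual run logic, not part of this file:

var runId = Guid.NewGuid();
PerceptionEditorAnalytics.ReportRunInUnitySimulationStarted(
    runId, totalIterations: 1000, instanceCount: 4, existingBuildId: null);
try
{
    var runExecutionId = StartSimulationRun(); // hypothetical helper
    PerceptionEditorAnalytics.ReportRunInUnitySimulationSucceeded(runId, runExecutionId);
}
catch (Exception e)
{
    PerceptionEditorAnalytics.ReportRunInUnitySimulationFailed(runId, e.Message);
}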

3
com.unity.perception/Editor/Randomization/Editors/PerceptionEditorAnalytics.cs.meta


fileFormatVersion: 2
guid: 3216e9e700a34acc8ac808ffccb8297c
timeCreated: 1607709612

3
com.unity.perception/Editor/Utilities.meta


fileFormatVersion: 2
guid: 7c33b05b860544a5a026b97302d0358e
timeCreated: 1611791860

32
com.unity.perception/Runtime/Randomization/Samplers/SamplerState.cs


namespace UnityEngine.Experimental.Perception.Randomization.Samplers
{
    /// <summary>
    /// Encapsulates the random state that all samplers mutate when generating random values
    /// </summary>
    public static class SamplerState
    {
        /// <summary>
        /// The central random state that all samplers mutate when generating random numbers
        /// </summary>
        public static uint randomState = SamplerUtility.largePrime;

        /// <summary>
        /// Creates a random number generator seeded with a unique random state
        /// </summary>
        /// <returns>The seeded random number generator</returns>
        public static Unity.Mathematics.Random CreateGenerator()
        {
            return new Unity.Mathematics.Random { state = NextRandomState() };
        }

        /// <summary>
        /// Generates a new random state and overwrites the old random state with the newly generated value
        /// </summary>
        /// <returns>The newly generated random state</returns>
        public static uint NextRandomState()
        {
            randomState = SamplerUtility.Hash32NonZero(randomState);
            return randomState;
        }
    }
}
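Because the entire random state is a single static uint, reproducibility reduces to seeding it: writing the same value to randomState replays the same sequence of generators. A brief sketch (the seed value is arbitrary):

using UnityEngine;
using UnityEngine.Experimental.Perception.Randomization.Samplers;

static class SamplerStateSketch
{
    static void Demo()
    {
        SamplerState.randomState = 12345u;        // seed the shared state
        var rng = SamplerState.CreateGenerator(); // consumes one NextRandomState()
        var first = rng.NextFloat();              // Unity.Mathematics.Random API

        SamplerState.randomState = 12345u;        // reseed identically...
        var replay = SamplerState.CreateGenerator();
        Debug.Assert(replay.NextFloat() == first); // ...and the draw repeats
    }
}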

3
com.unity.perception/Runtime/Randomization/Samplers/SamplerState.cs.meta


fileFormatVersion: 2
guid: dab4e2f4d003402193a82cfe926b91d0
timeCreated: 1611261496

99
com.unity.perception/Runtime/Randomization/Samplers/SamplerTypes/AnimationCurveSampler.cs


using System;
using System.Runtime.CompilerServices;
using Unity.Burst;
using Unity.Collections;
using Unity.Collections.LowLevel.Unsafe;
using Unity.Jobs;
using UnityEngine.Experimental.Perception.Randomization.Scenarios;

namespace UnityEngine.Experimental.Perception.Randomization.Samplers
{
    /// <summary>
    /// Returns random values according to a range and probability distribution denoted by a user provided AnimationCurve.
    /// The X axis of the AnimationCurve corresponds to the values this sampler will pick from,
    /// and the Y axis corresponds to the relative probability of the values.
    /// The relative probabilities (Y axis) do not need to max out at 1, as only the shape of the curve matters.
    /// The Y values cannot however be negative.
    /// </summary>
    [Serializable]
    public class AnimationCurveSampler : ISampler
    {
        /// <summary>
        /// The Animation Curve associated with this sampler
        /// </summary>
        [Tooltip("Probability distribution curve used for this sampler. The X axis corresponds to the values this sampler will pick from, and the Y axis corresponds to the relative probability of the values. The relative probabilities (Y axis) do not need to max out at 1 as only the shape of the curve matters. The Y values cannot however be negative.")]
        public AnimationCurve distributionCurve;

        /// <summary>
        /// Number of samples used for integrating over the provided AnimationCurve.
        /// The larger the number of samples, the more accurate the resulting probability distribution will be.
        /// </summary>
        [Tooltip("Number of internal samples used for integrating over the provided AnimationCurve. The larger the number of samples, the more accurately the resulting probability distribution will follow the provided AnimationCurve. Increase this if the default value proves insufficient.")]
        public int numOfSamplesForIntegration = 500;

        float[] m_IntegratedCurve;
        bool m_Initialized;
        float m_StartTime;
        float m_EndTime;
        float m_Interval;

        /// <summary>
        /// Constructs a default AnimationCurveSampler
        /// </summary>
        public AnimationCurveSampler()
        {
            distributionCurve = new AnimationCurve(
                new Keyframe(0, 0), new Keyframe(0.5f, 1), new Keyframe(1, 0));
        }

        /// <summary>
        /// Constructs an AnimationCurveSampler with a given animation curve
        /// </summary>
        /// <param name="curve">The animation curve to sample from</param>
        /// <param name="numberOfSamples">Number of samples used for integrating over the provided AnimationCurve</param>
        public AnimationCurveSampler(AnimationCurve curve, int numberOfSamples=500)
        {
            distributionCurve = curve;
            numOfSamplesForIntegration = numberOfSamples;
        }

        /// <summary>
        /// Generates one sample
        /// </summary>
        /// <returns>The generated sample</returns>
        public float Sample()
        {
            Initialize();
            var rng = SamplerState.CreateGenerator();
            return SamplerUtility.AnimationCurveSample(
                m_IntegratedCurve, rng.NextFloat(), m_Interval, m_StartTime, m_EndTime);
        }

        /// <summary>
        /// Validates that the sampler is configured properly
        /// </summary>
        /// <exception cref="SamplerValidationException"></exception>
        public void Validate()
        {
            if (distributionCurve.length == 0)
                throw new SamplerValidationException("The distribution curve provided is empty");
            if (numOfSamplesForIntegration < 2)
                throw new SamplerValidationException("Insufficient number of integration samples");
        }

        void Initialize()
        {
            if (m_Initialized)
                return;
            Validate();
            m_IntegratedCurve = new float[numOfSamplesForIntegration];
            SamplerUtility.IntegrateCurve(m_IntegratedCurve, distributionCurve);
            m_StartTime = distributionCurve.keys[0].time;
            m_EndTime = distributionCurve.keys[distributionCurve.length - 1].time;
            m_Interval = (m_EndTime - m_StartTime) / (numOfSamplesForIntegration - 1);
            m_Initialized = true;
        }
    }
}
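Sample() above is inverse-transform sampling: Initialize() integrates the curve into a cumulative lookup table once, and each draw maps a uniform random value through that table back onto the curve's X range. Typical usage, mirroring the triangular curve from the parameterless constructor:

using UnityEngine;
using UnityEngine.Experimental.Perception.Randomization.Samplers;

static class CurveSamplerSketch
{
    static void Demo()
    {
        // Triangular distribution over [0, 1], densest near 0.5
        var curve = new AnimationCurve(
            new Keyframe(0f, 0f), new Keyframe(0.5f, 1f), new Keyframe(1f, 0f));
        var sampler = new AnimationCurveSampler(curve, numberOfSamples: 500);

        sampler.Validate();           // throws SamplerValidationException when misconfigured
        var value = sampler.Sample(); // always inside the curve's X range
    }
}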

11
com.unity.perception/Runtime/Randomization/Samplers/SamplerTypes/AnimationCurveSampler.cs.meta


fileFormatVersion: 2
guid: d4147226e4d134ac8b0c69c14db5d23b
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

10
com.unity.perception/Runtime/Randomization/Samplers/SamplerValidationException.cs


using System;

namespace UnityEngine.Experimental.Perception.Randomization.Samplers
{
    class SamplerValidationException : Exception
    {
        public SamplerValidationException(string msg) : base(msg) {}
        public SamplerValidationException(string msg, Exception innerException) : base(msg, innerException) {}
    }
}
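Because the class declares no access modifier, it is internal to the runtime assembly: only code compiled into it (or a test assembly granted InternalsVisibleTo, if that is configured) can catch it by type, while external callers see a plain Exception. A sketch from inside the assembly:

using UnityEngine;
using UnityEngine.Experimental.Perception.Randomization.Samplers;

static class ValidationSketch
{
    static void Demo()
    {
        try
        {
            // An empty curve trips the first check in AnimationCurveSampler.Validate()
            new AnimationCurveSampler(new AnimationCurve()).Validate();
        }
        catch (SamplerValidationException e)
        {
            Debug.LogError($"Sampler misconfigured: {e.Message}");
            // -> "The distribution curve provided is empty"
        }
    }
}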

3
com.unity.perception/Runtime/Randomization/Samplers/SamplerValidationException.cs.meta


fileFormatVersion: 2
guid: 232e97ede3ab4d769096833ee3e8d0e4
timeCreated: 1610570590

34
com.unity.perception/Tests/Runtime/Randomization/SamplerTests/AnimationCurveSamplerTests.cs


using NUnit.Framework;
using UnityEngine.Experimental.Perception.Randomization.Samplers;

namespace RandomizationTests.SamplerTests
{
    [TestFixture]
    public class AnimationCurveSamplerTestsBase : SamplerTestsBase<AnimationCurveSampler>
    {
        public AnimationCurveSamplerTestsBase()
        {
            m_Sampler = new AnimationCurveSampler();
        }

        [Test]
        public void SamplesInRange()
        {
            var min = m_Sampler.distributionCurve.keys[0].time;
            var max = m_Sampler.distributionCurve.keys[m_Sampler.distributionCurve.length - 1].time;
            var samples = new float[k_TestSampleCount];
            for (var i = 0; i < samples.Length; i++)
            {
                samples[i] = m_Sampler.Sample();
            }
            Assert.AreEqual(samples.Length, k_TestSampleCount);
            for (var i = 0; i < samples.Length; i++)
            {
                Assert.GreaterOrEqual(samples[i], min);
                Assert.LessOrEqual(samples[i], max);
            }
        }
    }
}

11
com.unity.perception/Tests/Runtime/Randomization/SamplerTests/AnimationCurveSamplerTests.cs.meta


fileFormatVersion: 2
guid: 15fca3a26ddcf4459be02aea956806bc
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

3
com.unity.perception/Editor/Utilities/ParameterUIElementsEditor.cs.meta


fileFormatVersion: 2
guid: ec638a81755645739ca5834e2e44fc13
timeCreated: 1611791890

Some files were not shown because too many files changed in this diff.
