
Merge branch 'master' into randomizer_tag_inheritence

/resint_updates
Jon Hogins, 4 years ago
Current commit
9ce49113
204 files changed, with 3,369 insertions and 4,487 deletions
  1. 32
      .yamato/environments.yml
  2. 2
      .yamato/promote.yml
  3. 39
      .yamato/upm-ci-full.yml
  4. 10
      .yamato/upm-ci-testprojects.yml
  5. 2
      TestProjects/PerceptionHDRP/Assets/IdLabelConfig.asset
  6. 355
      TestProjects/PerceptionHDRP/Assets/Scenes/SampleScene.unity
  7. 4
      TestProjects/PerceptionHDRP/Assets/SemanticSegmentationLabelingConfiguration.asset
  8. 11
      TestProjects/PerceptionHDRP/Packages/packages-lock.json
  9. 4
      TestProjects/PerceptionHDRP/ProjectSettings/ProjectVersion.txt
  10. 6
      TestProjects/PerceptionURP/Assets/IdLabelConfig.asset
  11. 528
      TestProjects/PerceptionURP/Assets/Scenes/SampleScene.unity
  12. 6
      TestProjects/PerceptionURP/Assets/SemanticSegmentationLabelingConfiguration.asset
  13. 11
      TestProjects/PerceptionURP/Packages/packages-lock.json
  14. 4
      TestProjects/PerceptionURP/ProjectSettings/ProjectVersion.txt
  15. 2
      TestProjects/PerceptionURP/ProjectSettings/QualitySettings.asset
  16. 48
      com.unity.perception/CHANGELOG.md
  17. 5
      com.unity.perception/Documentation~/GroundTruthLabeling.md
  18. 5
      com.unity.perception/Documentation~/PerceptionCamera.md
  19. 5
      com.unity.perception/Documentation~/Randomization/Scenarios.md
  20. 2
      com.unity.perception/Documentation~/SetupSteps.md
  21. 999
      com.unity.perception/Documentation~/Tutorial/Images/camera_prep.png
  22. 999
      com.unity.perception/Documentation~/Tutorial/Images/exampleprefab.png
  23. 999
      com.unity.perception/Documentation~/Tutorial/Images/hierarchy.png
  24. 898
      com.unity.perception/Documentation~/Tutorial/Images/jupyter2.png
  25. 201
      com.unity.perception/Documentation~/Tutorial/Phase1.md
  26. 34
      com.unity.perception/Documentation~/Tutorial/Phase2.md
  27. 78
      com.unity.perception/Documentation~/Tutorial/Phase3.md
  28. 10
      com.unity.perception/Documentation~/Tutorial/TUTORIAL.md
  29. 4
      com.unity.perception/Documentation~/index.md
  30. 305
      com.unity.perception/Editor/GroundTruth/IdLabelConfigEditor.cs
  31. 2
      com.unity.perception/Editor/GroundTruth/IdLabelConfigEditor.cs.meta
  32. 929
      com.unity.perception/Editor/GroundTruth/LabelingEditor.cs
  33. 2
      com.unity.perception/Editor/GroundTruth/LabelingEditor.cs.meta
  34. 141
      com.unity.perception/Editor/GroundTruth/SemanticSegmentationLabelConfigEditor.cs
  35. 4
      com.unity.perception/Editor/GroundTruth/SemanticSegmentationLabelConfigEditor.cs.meta
  36. 14
      com.unity.perception/Editor/Randomization/Editors.meta
  37. 17
      com.unity.perception/Editor/Randomization/Editors/ScenarioBaseEditor.cs.meta
  38. 30
      com.unity.perception/Editor/Randomization/Editors/RunInUnitySimulationWindow.cs
  39. 14
      com.unity.perception/Editor/Randomization/PropertyDrawers.meta
  40. 17
      com.unity.perception/Editor/Randomization/PropertyDrawers/ColorHsvaDrawer.cs.meta
  41. 17
      com.unity.perception/Editor/Randomization/PropertyDrawers/ParameterDrawer.cs.meta
  42. 12
      com.unity.perception/Editor/Randomization/StaticData.cs.meta
  43. 9
      com.unity.perception/Editor/Randomization/Uss.meta
  44. 20
      com.unity.perception/Editor/Randomization/Uss/Styles.uss
  45. 9
      com.unity.perception/Editor/Randomization/Uxml.meta
  46. 16
      com.unity.perception/Editor/Randomization/Uxml/Parameter/ParameterDrawer.uxml.meta
  47. 2
      com.unity.perception/Editor/Randomization/Uxml/RunInUnitySimulationWindow.uxml
  48. 14
      com.unity.perception/Editor/Randomization/VisualElements.meta
  49. 9
      com.unity.perception/Editor/Randomization/VisualElements/Parameter.meta
  50. 17
      com.unity.perception/Editor/Randomization/VisualElements/Parameter/CategoricalOptionElement.cs.meta
  51. 17
      com.unity.perception/Editor/Randomization/VisualElements/Parameter/ColorHsvaField.cs.meta
  52. 17
      com.unity.perception/Editor/Randomization/VisualElements/Parameter/DrawerParameterElement.cs.meta
  53. 17
      com.unity.perception/Editor/Randomization/VisualElements/Parameter/ParameterElement.cs.meta
  54. 9
      com.unity.perception/Editor/Randomization/VisualElements/Randomizer.meta
  55. 12
      com.unity.perception/Editor/Randomization/VisualElements/Randomizer/AddRandomizerMenu.cs.meta
  56. 12
      com.unity.perception/Editor/Randomization/VisualElements/Randomizer/RandomizerElement.cs.meta
  57. 11
      com.unity.perception/Editor/Randomization/VisualElements/Randomizer/RandomizerList.cs
  58. 12
      com.unity.perception/Editor/Randomization/VisualElements/Randomizer/RandomizerList.cs.meta
  59. 13
      com.unity.perception/Editor/Randomization/VisualElements/Randomizer/RandomizerReorderingIndicator.cs.meta
  60. 9
      com.unity.perception/Editor/Randomization/VisualElements/Sampler.meta
  61. 17
      com.unity.perception/Editor/Randomization/VisualElements/Sampler/FloatRangeElement.cs.meta
  62. 17
      com.unity.perception/Editor/Randomization/VisualElements/Sampler/RandomSeedField.cs.meta
  63. 17
      com.unity.perception/Editor/Randomization/VisualElements/Sampler/SamplerElement.cs.meta
  64. 1
      com.unity.perception/Runtime/AssemblyInfo.cs
  65. 20
      com.unity.perception/Runtime/GroundTruth/DatasetCapture.cs
  66. 12
      com.unity.perception/Runtime/GroundTruth/GroundTruthLabelSetupSystem.cs
  67. 14
      com.unity.perception/Runtime/GroundTruth/InstanceSegmentationCrossPipelinePass.cs
  68. 82
      com.unity.perception/Runtime/GroundTruth/Labelers/BoundingBox3DLabeler.cs
  69. 9
      com.unity.perception/Runtime/GroundTruth/Labelers/BoundingBoxLabeler.cs
  70. 7
      com.unity.perception/Runtime/GroundTruth/Labelers/CameraLabeler.cs
  71. 8
      com.unity.perception/Runtime/GroundTruth/Labelers/RenderedObjectInfoLabeler.cs
  72. 103
      com.unity.perception/Runtime/GroundTruth/Labelers/SemanticSegmentationLabeler.cs
  73. 1
      com.unity.perception/Runtime/GroundTruth/Labelers/Visualization/Materials/SegmentationMaterial.mat
  74. 3
      com.unity.perception/Runtime/GroundTruth/Labeling/IdLabelConfig.cs
  75. 25
      com.unity.perception/Runtime/GroundTruth/Labeling/LabelConfig.cs
  76. 7
      com.unity.perception/Runtime/GroundTruth/Labeling/LabelEntryMatchCache.cs
  77. 37
      com.unity.perception/Runtime/GroundTruth/Labeling/Labeling.cs
  78. 13
      com.unity.perception/Runtime/GroundTruth/Labeling/SemanticSegmentationLabelConfig.cs
  79. 13
      com.unity.perception/Runtime/GroundTruth/PerceptionCamera.cs
  80. 7
      com.unity.perception/Runtime/GroundTruth/PerceptionCamera_InstanceSegmentation.cs
  81. 6
      com.unity.perception/Runtime/GroundTruth/RenderedObjectInfo.cs
  82. 48
      com.unity.perception/Runtime/GroundTruth/RenderedObjectInfoGenerator.cs
  83. 11
      com.unity.perception/Runtime/GroundTruth/Resources/InstanceSegmentation.shader
  84. 21
      com.unity.perception/Runtime/GroundTruth/SimulationState.cs
  85. 2
      com.unity.perception/Runtime/Randomization/Parameters/CategoricalParameter.cs
  86. 14
      com.unity.perception/Runtime/Randomization/Parameters/ParameterTypes/NumericParameters/ColorParameters.meta
  87. 14
      com.unity.perception/Runtime/Randomization/Randomizers.meta
  88. 2
      com.unity.perception/Runtime/Randomization/Randomizers/RandomizerExamples/Randomizers/BackgroundObjectPlacementRandomizer.cs
  89. 5
      com.unity.perception/Runtime/Randomization/Samplers/SamplerTypes/NormalSampler.cs
  90. 5
      com.unity.perception/Runtime/Randomization/Samplers/SamplerTypes/UniformSampler.cs
  91. 4
      com.unity.perception/Runtime/Randomization/Scenarios/FixedLengthScenario.cs
  92. 14
      com.unity.perception/Runtime/Randomization/Scenarios/ScenarioBase.cs
  93. 12
      com.unity.perception/Tests/Runtime/GroundTruthTests/DatasetCaptureTests.cs
  94. 41
      com.unity.perception/Tests/Runtime/GroundTruthTests/LabelEntryMatchCacheTests.cs
  95. 71
      com.unity.perception/Tests/Runtime/GroundTruthTests/RenderedObjectInfoTests.cs
  96. 97
      com.unity.perception/Tests/Runtime/GroundTruthTests/SegmentationGroundTruthTests.cs
  97. 8
      com.unity.perception/Tests/Runtime/GroundTruthTests/VisualizationTests.cs
  98. 14
      com.unity.perception/Tests/Runtime/Randomization/RandomizerTests.meta
  99. 15
      com.unity.perception/Tests/Runtime/Randomization/ScenarioTests.cs
  100. 7
      com.unity.perception/package.json

32
.yamato/environments.yml


per_commit_editors:
- version: 2019.4.6f1
# - version: 2020.1.15f1
performance_editors:
- version: 2019.4.6f1
- version: 2020.1.3f1
# - version: 2020.1.3f1
# - version: 2020.1.15f1
# - version: 2020.2.0a21
publish_platforms:

standalone-platform: StandaloneOSX
- name: ubuntu
type: Unity::VM
image: package-ci/ubuntu:latest
image: package-ci/ubuntu:stable
flavor: b1.large

image: package-ci/ubuntu:latest
flavor: b1.large
performance_platforms:
- name: win
type: Unity::VM
image: package-ci/win10:stable
flavor: b1.large
extra-args: --force-d3d11
standalone-platform: StandaloneWindows64
- name: mac
type: Unity::VM::osx
image: package-ci/mac:stable
flavor: m1.mac
extra-args: --force-metal
standalone-platform: StandaloneOSX
- name: ubuntu
type: Unity::VM
image: package-ci/ubuntu:latest
flavor: b1.large
performance_suites:
- name: standalone
display_name: standalone
args: --suite=playmode --platform=
suites:
- name: standalone
display_name: standalone

2
.yamato/promote.yml


UPMCI_PROMOTION: 1
commands:
- git submodule update --init --recursive
- npm install upm-ci-utils -g --registry {{ upmci_registry }}
- npm install upm-ci-utils@stable -g --registry {{ upmci_registry }}
- upm-ci package test -u {{ editor.version }} --package-path ./com.unity.perception --type vetting-tests
artifacts:
logs:

39
.yamato/upm-ci-full.yml


flavor: b1.large
commands:
- git submodule update --init --recursive
- npm install upm-ci-utils -g --registry https://artifactory.prd.cds.internal.unity3d.com/artifactory/api/npm/upm-npm
- npm install upm-ci-utils@stable -g --registry {{ upmci_registry }}
- upm-ci package pack --package-path ./com.unity.perception/
artifacts:
packages:

flavor: {{ platform.flavor}}
commands:
- git submodule update --init --recursive
- npm install upm-ci-utils -g --registry {{ upmci_registry }}
- npm install upm-ci-utils@stable -g --registry {{ upmci_registry }}
- upm-ci package test -u {{ editor.version }} --package-path ./com.unity.perception --type vetting-tests
artifacts:
logs:

- .yamato/upm-ci-testprojects.yml#{{project.name}}_linux_editmode_{{editor.version}}
{% endfor %}
{% endfor %}
{% for editor in complete_editors %}
{% for project in projects %}
- .yamato/upm-ci-performance.yml#{{project.name}}_windows_standalone_{{editor.version}}
- .yamato/upm-ci-performance.yml#{{project.name}}_linux_standalone_{{editor.version}}
{% endfor %}
{% endfor %}
all_tests_nightly_perf:
name: Nightly Performance Tests
agent:
type: Unity::VM
image: package-ci/win10:stable
flavor: b1.small
commands:
- dir
triggers:
recurring:
- branch: performance_testing
frequency: daily
cancel_old_ci: true
artifacts:
logs:
paths:
- "upm-ci~/test-results/**/*"
packages:
paths:
- "upm-ci~/packages/**/*"
dependencies:
{% for editor in performance_editors %}
{% for project in projects %}
- .yamato/upm-ci-performance.yml#{{project.name}}_windows_standalone_{{editor.version}}
- .yamato/upm-ci-performance.yml#{{project.name}}_linux_standalone_{{editor.version}}
{% endfor %}
{% endfor %}

10
.yamato/upm-ci-testprojects.yml


- pip install unity-downloader-cli --index-url https://artifactory.prd.it.unity3d.com/artifactory/api/pypi/pypi/simple --upgrade
- unity-downloader-cli -u {{ editor.version }} -c editor -c StandaloneSupport-IL2CPP -c Linux --wait --published
{% if suite.name == "standalone" %}
- utr/utr --testproject=./TestProjects/{{project.name}} --editor-location=./.Editor --artifacts_path=test-results --stdout-filter=minimal --extra-editor-arg="--force-d3d11" {{suite.args}}StandaloneWindows64
- utr/utr --testproject=./TestProjects/{{project.name}} --editor-location=./.Editor --reruncount=2 --artifacts_path=test-results --stdout-filter=minimal --extra-editor-arg="--force-d3d11" {{suite.args}}StandaloneWindows64
- utr/utr --testproject=./TestProjects/{{project.name}} --editor-location=./.Editor --artifacts_path=test-results --stdout-filter=minimal --extra-editor-arg="--force-d3d11" {{suite.args}}
- utr/utr --testproject=./TestProjects/{{project.name}} --editor-location=./.Editor --reruncount=2 --artifacts_path=test-results --stdout-filter=minimal --extra-editor-arg="--force-d3d11" {{suite.args}}
{% endif %}
artifacts:
logs:

name : {{project.name}} {{ suite.display_name }} tests ({{ editor.version }}, Linux)
agent:
type: Unity::VM::GPU
model: rtx2080
image: cds-ops/ubuntu-18.04-base:latest
flavor: b1.large
variables:

- pip config set global.index-url https://artifactory.prd.it.unity3d.com/artifactory/api/pypi/pypi/simple
- DISPLAY=:0.0 utr/utr --testproject=./TestProjects/{{project.name}} --editor-location=.Editor --artifacts_path=test-results --stdout-filter=minimal --extra-editor-arg="--force-vulkan" {{suite.args}}StandaloneLinux64
- DISPLAY=:0.0 utr/utr --testproject=./TestProjects/{{project.name}} --editor-location=.Editor --reruncount=2 --artifacts_path=test-results --stdout-filter=minimal --extra-editor-arg="--force-vulkan" {{suite.args}}StandaloneLinux64
- DISPLAY=:0.0 utr/utr --testproject=./TestProjects/{{project.name}} --editor-location=.Editor --artifacts_path=test-results --stdout-filter=minimal --extra-editor-arg="--force-vulkan" {{suite.args}}
- DISPLAY=:0.0 utr/utr --testproject=./TestProjects/{{project.name}} --editor-location=.Editor --reruncount=2 --artifacts_path=test-results --stdout-filter=minimal --extra-editor-arg="--force-vulkan" {{suite.args}}
{% endif %}
artifacts:
logs:

2
TestProjects/PerceptionHDRP/Assets/IdLabelConfig.asset


id: 2
- label: Box
id: 3
- label: Terrain
id: 4
autoAssignIds: 1
startingLabelId: 1

355
TestProjects/PerceptionHDRP/Assets/Scenes/SampleScene.unity


m_RenderingLayerMask: 1
m_RendererPriority: 0
m_Materials:
- {fileID: 2100000, guid: 73c176f402d2c2f4d929aa5da7585d17, type: 2}
- {fileID: 2100000, guid: ddfd18df3e5ef3043b7889c5a070d8ca, type: 2}
m_StaticBatchInfo:
firstSubMesh: 0
subMeshCount: 0

m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 411238276}
m_LocalRotation: {x: -0, y: -0, z: -0, w: 1}
m_LocalPosition: {x: 155.99806, y: 83.926025, z: -149.97618}
m_LocalScale: {x: 36.249973, y: 36.249973, z: 36.249973}
m_LocalPosition: {x: 121.42, y: 72.4, z: -161.73}
m_LocalScale: {x: 15, y: 15, z: 15}
m_Children: []
m_Father: {fileID: 0}
m_RootOrder: 0

m_Name:
m_EditorClassIdentifier:
yDegreesPerSecond: 180
--- !u!1 &464025704
GameObject:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
serializedVersion: 6
m_Component:
- component: {fileID: 464025709}
- component: {fileID: 464025708}
- component: {fileID: 464025707}
- component: {fileID: 464025706}
- component: {fileID: 464025705}
- component: {fileID: 464025710}
m_Layer: 0
m_Name: Cube
m_TagString: Untagged
m_Icon: {fileID: 0}
m_NavMeshLayer: 0
m_StaticEditorFlags: 0
m_IsActive: 1
--- !u!114 &464025705
MonoBehaviour:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 464025704}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: 8b33f0bc2b78db642a758f07826d0dd0, type: 3}
m_Name:
m_EditorClassIdentifier:
labels:
- Cube
--- !u!65 &464025706
BoxCollider:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 464025704}
m_Material: {fileID: 0}
m_IsTrigger: 0
m_Enabled: 1
serializedVersion: 2
m_Size: {x: 1, y: 1, z: 1}
m_Center: {x: 0, y: 0, z: 0}
--- !u!23 &464025707
MeshRenderer:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 464025704}
m_Enabled: 1
m_CastShadows: 1
m_ReceiveShadows: 1
m_DynamicOccludee: 1
m_MotionVectors: 1
m_LightProbeUsage: 1
m_ReflectionProbeUsage: 1
m_RayTracingMode: 2
m_RenderingLayerMask: 1
m_RendererPriority: 0
m_Materials:
- {fileID: 2100000, guid: 73c176f402d2c2f4d929aa5da7585d17, type: 2}
m_StaticBatchInfo:
firstSubMesh: 0
subMeshCount: 0
m_StaticBatchRoot: {fileID: 0}
m_ProbeAnchor: {fileID: 0}
m_LightProbeVolumeOverride: {fileID: 0}
m_ScaleInLightmap: 1
m_ReceiveGI: 1
m_PreserveUVs: 0
m_IgnoreNormalsForChartDetection: 0
m_ImportantGI: 0
m_StitchLightmapSeams: 1
m_SelectedEditorRenderState: 3
m_MinimumChartSize: 4
m_AutoUVMaxDistance: 0.5
m_AutoUVMaxAngle: 89
m_LightmapParameters: {fileID: 0}
m_SortingLayerID: 0
m_SortingLayer: 0
m_SortingOrder: 0
--- !u!33 &464025708
MeshFilter:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 464025704}
m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0}
--- !u!4 &464025709
Transform:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 464025704}
m_LocalRotation: {x: -0, y: -0, z: -0, w: 1}
m_LocalPosition: {x: 92.92311, y: 83.926025, z: -136.20119}
m_LocalScale: {x: 36.249973, y: 36.249973, z: 36.249973}
m_Children: []
m_Father: {fileID: 0}
m_RootOrder: 1
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
--- !u!114 &464025710
MonoBehaviour:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 464025704}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: 673a227032a8e4940b9828c5b6f852ab, type: 3}
m_Name:
m_EditorClassIdentifier:
yDegreesPerSecond: 180
--- !u!1 &705507993
GameObject:
m_ObjectHideFlags: 0

m_BounceIntensity: 1
m_ColorTemperature: 6570
m_UseColorTemperature: 0
m_BoundingSphereOverride: {x: 6.3e-43, y: 108.39679, z: 6.3e-43, w: 0}
m_BoundingSphereOverride: {x: 0, y: 1.1418e-41, z: 0, w: 0}
m_UseBoundingSphereOverride: 0
m_ShadowRadius: 0
m_ShadowAngle: 0

showAdditionalSettings: 0
m_AreaLightEmissiveMeshShadowCastingMode: 0
m_AreaLightEmissiveMeshMotionVectorGenerationMode: 0
--- !u!1 &934158981
GameObject:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
serializedVersion: 6
m_Component:
- component: {fileID: 934158987}
- component: {fileID: 934158986}
- component: {fileID: 934158985}
- component: {fileID: 934158984}
- component: {fileID: 934158983}
- component: {fileID: 934158982}
m_Layer: 0
m_Name: Cube
m_TagString: Untagged
m_Icon: {fileID: 0}
m_NavMeshLayer: 0
m_StaticEditorFlags: 0
m_IsActive: 1
--- !u!114 &934158982
MonoBehaviour:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 934158981}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: 673a227032a8e4940b9828c5b6f852ab, type: 3}
m_Name:
m_EditorClassIdentifier:
yDegreesPerSecond: 180
--- !u!114 &934158983
MonoBehaviour:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 934158981}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: 8b33f0bc2b78db642a758f07826d0dd0, type: 3}
m_Name:
m_EditorClassIdentifier:
labels:
- Cube
--- !u!65 &934158984
BoxCollider:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 934158981}
m_Material: {fileID: 0}
m_IsTrigger: 0
m_Enabled: 1
serializedVersion: 2
m_Size: {x: 1, y: 1, z: 1}
m_Center: {x: 0, y: 0, z: 0}
--- !u!23 &934158985
MeshRenderer:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 934158981}
m_Enabled: 1
m_CastShadows: 1
m_ReceiveShadows: 1
m_DynamicOccludee: 1
m_MotionVectors: 1
m_LightProbeUsage: 1
m_ReflectionProbeUsage: 1
m_RayTracingMode: 2
m_RenderingLayerMask: 1
m_RendererPriority: 0
m_Materials:
- {fileID: 2100000, guid: 73c176f402d2c2f4d929aa5da7585d17, type: 2}
m_StaticBatchInfo:
firstSubMesh: 0
subMeshCount: 0
m_StaticBatchRoot: {fileID: 0}
m_ProbeAnchor: {fileID: 0}
m_LightProbeVolumeOverride: {fileID: 0}
m_ScaleInLightmap: 1
m_ReceiveGI: 1
m_PreserveUVs: 0
m_IgnoreNormalsForChartDetection: 0
m_ImportantGI: 0
m_StitchLightmapSeams: 1
m_SelectedEditorRenderState: 3
m_MinimumChartSize: 4
m_AutoUVMaxDistance: 0.5
m_AutoUVMaxAngle: 89
m_LightmapParameters: {fileID: 0}
m_SortingLayerID: 0
m_SortingLayer: 0
m_SortingOrder: 0
--- !u!33 &934158986
MeshFilter:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 934158981}
m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0}
--- !u!4 &934158987
Transform:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 934158981}
m_LocalRotation: {x: -0, y: -0, z: -0, w: 1}
m_LocalPosition: {x: 107.21, y: 73.4, z: -144.12}
m_LocalScale: {x: 15, y: 15, z: 15}
m_Children: []
m_Father: {fileID: 0}
m_RootOrder: 1
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
--- !u!1 &963194225
GameObject:
m_ObjectHideFlags: 0

height: 1
near clip plane: 0.3
far clip plane: 1000
field of view: 60
field of view: 59.991566
orthographic: 0
orthographic size: 5
m_Depth: -1

m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 963194225}
m_LocalRotation: {x: -0.17179534, y: 0.30667058, z: -0.056378223, w: -0.93448436}
m_LocalPosition: {x: 198.01884, y: 126.545494, z: -267.4195}
m_LocalRotation: {x: -0.22882307, y: 0.124303445, z: -0.029468497, w: -0.96504945}
m_LocalPosition: {x: 116.52855, y: 91.11087, z: -194.85445}
m_LocalScale: {x: 36.249973, y: 36.249973, z: 36.249973}
m_Children: []
m_Father: {fileID: 0}

- id: 1
- id: 2
- id: 3
showVisualizations: 0
showVisualizations: 1
references:
version: 1
00000000:

objectInfoMetricId: 5ba92024-b3b7-41a7-9d3f-c03a6a8ddd01
idLabelConfig: {fileID: 11400000, guid: 258de5b48703743468d34fc5bbdfa3aa,
type: 2}
--- !u!1 &1321518866
GameObject:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
serializedVersion: 6
m_Component:
- component: {fileID: 1321518869}
- component: {fileID: 1321518868}
- component: {fileID: 1321518867}
- component: {fileID: 1321518870}
m_Layer: 0
m_Name: Terrain
m_TagString: Untagged
m_Icon: {fileID: 0}
m_NavMeshLayer: 0
m_StaticEditorFlags: 4294967295
m_IsActive: 1
--- !u!154 &1321518867
TerrainCollider:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 1321518866}
m_Material: {fileID: 0}
m_Enabled: 1
m_TerrainData: {fileID: 15600000, guid: 15ded0116bd9f864f80b9813d4f4477f, type: 2}
m_EnableTreeColliders: 1
--- !u!218 &1321518868
Terrain:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 1321518866}
m_Enabled: 1
serializedVersion: 6
m_TerrainData: {fileID: 15600000, guid: 15ded0116bd9f864f80b9813d4f4477f, type: 2}
m_TreeDistance: 5000
m_TreeBillboardDistance: 50
m_TreeCrossFadeLength: 5
m_TreeMaximumFullLODCount: 50
m_DetailObjectDistance: 80
m_DetailObjectDensity: 1
m_HeightmapPixelError: 5
m_SplatMapDistance: 1000
m_HeightmapMaximumLOD: 0
m_ShadowCastingMode: 2
m_DrawHeightmap: 1
m_DrawInstanced: 0
m_DrawTreesAndFoliage: 1
m_ReflectionProbeUsage: 1
m_MaterialTemplate: {fileID: 2100000, guid: 22ff8771d87ef27429e670136399094b, type: 2}
m_BakeLightProbesForTrees: 1
m_PreserveTreePrototypeLayers: 0
m_DeringLightProbesForTrees: 1
m_ScaleInLightmap: 0.0256
m_LightmapParameters: {fileID: 15203, guid: 0000000000000000f000000000000000, type: 0}
m_GroupingID: 0
m_RenderingLayerMask: 1
m_AllowAutoConnect: 1
--- !u!4 &1321518869
Transform:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 1321518866}
m_LocalRotation: {x: -0, y: -0.07853227, z: -0, w: 0.9969116}
m_LocalPosition: {x: -604.5, y: 65.1, z: -902.3}
m_LocalScale: {x: 0.95858, y: 0.95858, z: 0.95858}
m_Children: []
m_Father: {fileID: 0}
m_RootOrder: 5
m_LocalEulerAnglesHint: {x: 0, y: -9.008, z: 0}
--- !u!114 &1321518870
MonoBehaviour:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 1321518866}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: 8b33f0bc2b78db642a758f07826d0dd0, type: 3}
m_Name:
m_EditorClassIdentifier:
labels:
- Terrain
--- !u!1 &1640252278
GameObject:
m_ObjectHideFlags: 0

m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 1640252278}
m_LocalRotation: {x: -0, y: -0, z: -0, w: 1}
m_LocalPosition: {x: 96.1856, y: 83.926025, z: -193.83864}
m_LocalScale: {x: 36.249973, y: 36.249973, z: 36.249973}
m_LocalPosition: {x: 95.88, y: 73.33, z: -165.54}
m_LocalScale: {x: 15, y: 15, z: 15}
m_Children: []
m_Father: {fileID: 0}
m_RootOrder: 2

4
TestProjects/PerceptionHDRP/Assets/SemanticSegmentationLabelingConfiguration.asset


m_Script: {fileID: 11500000, guid: a9d0176327854820ab53adc46cbec7c1, type: 3}
m_Name: SemanticSegmentationLabelingConfiguration
m_EditorClassIdentifier:
LabelEntries:
m_LabelEntries:
- label: Box
color: {r: 1, g: 0, b: 0, a: 1}
- label: Cube

- label: Terrain
color: {r: 0.8207547, g: 0, b: 0.6646676, a: 1}

11
TestProjects/PerceptionHDRP/Packages/packages-lock.json


"com.unity.nuget.newtonsoft-json": "1.1.2",
"com.unity.render-pipelines.core": "7.1.6",
"com.unity.entities": "0.8.0-preview.8",
"com.unity.simulation.client": "0.0.10-preview.9",
"com.unity.simulation.capture": "0.0.10-preview.13",
"com.unity.simulation.core": "0.0.10-preview.19"
}

},
"url": "https://packages.unity.com"
},
"com.unity.simulation.client": {
"version": "0.0.10-preview.9",
"depth": 1,
"source": "registry",
"dependencies": {},
"url": "https://packages.unity.com"
},
"com.unity.simulation.core": {
"version": "0.0.10-preview.19",
"depth": 0,

"depth": 0,
"source": "builtin",
"dependencies": {
"com.unity.modules.ui": "1.0.0"
"com.unity.modules.ui": "1.0.0",
"com.unity.modules.imgui": "1.0.0"
}
},
"com.unity.visualeffectgraph": {

4
TestProjects/PerceptionHDRP/ProjectSettings/ProjectVersion.txt


m_EditorVersion: 2019.4.8f1
m_EditorVersionWithRevision: 2019.4.8f1 (60781d942082)
m_EditorVersion: 2019.4.12f1
m_EditorVersionWithRevision: 2019.4.12f1 (225e826a680e)

6
TestProjects/PerceptionURP/Assets/IdLabelConfig.asset


m_Name: IdLabelConfig
m_EditorClassIdentifier:
m_LabelEntries:
- label: Box
- label: Crate
- label: Crate
- label: Box
- label: Terrain
id: 4
autoAssignIds: 1
startingLabelId: 1

528
TestProjects/PerceptionURP/Assets/Scenes/SampleScene.unity


m_TrainingDataDestination: TrainingData
m_LightProbeSampleCountMultiplier: 4
m_LightingDataAsset: {fileID: 0}
m_UseShadowmask: 1
m_UseShadowmask: 0
--- !u!196 &4
NavMeshSettings:
serializedVersion: 2

- component: {fileID: 411238278}
- component: {fileID: 411238277}
- component: {fileID: 411238282}
- component: {fileID: 411238283}
m_Layer: 0
m_Name: Crate
m_TagString: Untagged

m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 411238276}
m_LocalRotation: {x: -0, y: -0, z: -0, w: 1}
m_LocalPosition: {x: 155.99806, y: 83.926025, z: -149.97618}
m_LocalScale: {x: 36.249973, y: 36.249973, z: 36.249973}
m_LocalPosition: {x: 121.42, y: 72.4, z: -161.73}
m_LocalScale: {x: 15, y: 15, z: 15}
m_Children: []
m_Father: {fileID: 0}
m_RootOrder: 0

m_Name:
m_EditorClassIdentifier:
yDegreesPerSecond: 180
--- !u!114 &411238283
MonoBehaviour:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 411238276}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: 695e410829600ff40bcdd76fa0818f6a, type: 3}
m_Name:
m_EditorClassIdentifier:
materialPropertyTarget: 1
color: {r: 0.745283, g: 0.40428087, b: 0.40428087, a: 0}
--- !u!1 &464025704
GameObject:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
serializedVersion: 6
m_Component:
- component: {fileID: 464025709}
- component: {fileID: 464025708}
- component: {fileID: 464025707}
- component: {fileID: 464025706}
- component: {fileID: 464025705}
- component: {fileID: 464025710}
- component: {fileID: 464025711}
m_Layer: 0
m_Name: Cube
m_TagString: Untagged
m_Icon: {fileID: 0}
m_NavMeshLayer: 0
m_StaticEditorFlags: 0
m_IsActive: 1
--- !u!114 &464025705
MonoBehaviour:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 464025704}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: 8b33f0bc2b78db642a758f07826d0dd0, type: 3}
m_Name:
m_EditorClassIdentifier:
labels:
- Cube
--- !u!65 &464025706
BoxCollider:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 464025704}
m_Material: {fileID: 0}
m_IsTrigger: 0
m_Enabled: 1
serializedVersion: 2
m_Size: {x: 1, y: 1, z: 1}
m_Center: {x: 0, y: 0, z: 0}
--- !u!23 &464025707
MeshRenderer:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 464025704}
m_Enabled: 1
m_CastShadows: 1
m_ReceiveShadows: 1
m_DynamicOccludee: 1
m_MotionVectors: 1
m_LightProbeUsage: 1
m_ReflectionProbeUsage: 1
m_RayTracingMode: 2
m_RenderingLayerMask: 1
m_RendererPriority: 0
m_Materials:
- {fileID: 2100000, guid: 31321ba15b8f8eb4c954353edc038b1d, type: 2}
- {fileID: 2100000, guid: 31321ba15b8f8eb4c954353edc038b1d, type: 2}
- {fileID: 2100000, guid: 31321ba15b8f8eb4c954353edc038b1d, type: 2}
m_StaticBatchInfo:
firstSubMesh: 0
subMeshCount: 0
m_StaticBatchRoot: {fileID: 0}
m_ProbeAnchor: {fileID: 0}
m_LightProbeVolumeOverride: {fileID: 0}
m_ScaleInLightmap: 1
m_ReceiveGI: 1
m_PreserveUVs: 0
m_IgnoreNormalsForChartDetection: 0
m_ImportantGI: 0
m_StitchLightmapSeams: 1
m_SelectedEditorRenderState: 3
m_MinimumChartSize: 4
m_AutoUVMaxDistance: 0.5
m_AutoUVMaxAngle: 89
m_LightmapParameters: {fileID: 0}
m_SortingLayerID: 0
m_SortingLayer: 0
m_SortingOrder: 0
--- !u!33 &464025708
MeshFilter:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 464025704}
m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0}
--- !u!4 &464025709
Transform:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 464025704}
m_LocalRotation: {x: -0, y: -0, z: -0, w: 1}
m_LocalPosition: {x: 92.92311, y: 83.926025, z: -136.20119}
m_LocalScale: {x: 36.249973, y: 36.249973, z: 36.249973}
m_Children: []
m_Father: {fileID: 0}
m_RootOrder: 1
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
--- !u!114 &464025710
MonoBehaviour:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 464025704}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: 673a227032a8e4940b9828c5b6f852ab, type: 3}
m_Name:
m_EditorClassIdentifier:
yDegreesPerSecond: 180
--- !u!114 &464025711
MonoBehaviour:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 464025704}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: 695e410829600ff40bcdd76fa0818f6a, type: 3}
m_Name:
m_EditorClassIdentifier:
materialPropertyTarget: 1
color: {r: 0.34109113, g: 0.42664438, b: 0.6886792, a: 0}
--- !u!1 &705507993
GameObject:
m_ObjectHideFlags: 0

m_BounceIntensity: 1
m_ColorTemperature: 6570
m_UseColorTemperature: 0
m_BoundingSphereOverride: {x: 4.7e-43, y: 0, z: 0, w: 0}
m_BoundingSphereOverride: {x: 0, y: 1.1418e-41, z: 0, w: 0}
m_UseBoundingSphereOverride: 0
m_ShadowRadius: 0
m_ShadowAngle: 0

m_Father: {fileID: 0}
m_RootOrder: 3
m_LocalEulerAnglesHint: {x: 50, y: -30, z: 0}
--- !u!1 &963194225
--- !u!1 &934158981
GameObject:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}

m_Component:
- component: {fileID: 963194228}
- component: {fileID: 963194226}
- component: {fileID: 963194230}
- component: {fileID: 963194229}
- component: {fileID: 963194227}
- component: {fileID: 963194231}
- component: {fileID: 934158987}
- component: {fileID: 934158986}
- component: {fileID: 934158985}
- component: {fileID: 934158984}
- component: {fileID: 934158983}
- component: {fileID: 934158982}
m_Name: Main Camera
m_TagString: MainCamera
m_Name: Cube
m_TagString: Untagged
--- !u!81 &963194226
AudioListener:
--- !u!114 &934158982
MonoBehaviour:
m_GameObject: {fileID: 963194225}
m_GameObject: {fileID: 934158981}
--- !u!114 &963194227
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: 673a227032a8e4940b9828c5b6f852ab, type: 3}
m_Name:
m_EditorClassIdentifier:
yDegreesPerSecond: 180
--- !u!114 &934158983
m_GameObject: {fileID: 963194225}
m_GameObject: {fileID: 934158981}
m_Script: {fileID: 11500000, guid: a79441f348de89743a2939f4d699eac1, type: 3}
m_Script: {fileID: 11500000, guid: 8b33f0bc2b78db642a758f07826d0dd0, type: 3}
m_RenderShadows: 1
m_RequiresDepthTextureOption: 2
m_RequiresOpaqueTextureOption: 2
m_CameraType: 0
m_Cameras: []
m_RendererIndex: -1
m_VolumeLayerMask:
serializedVersion: 2
m_Bits: 1
m_VolumeTrigger: {fileID: 0}
m_RenderPostProcessing: 0
m_Antialiasing: 0
m_AntialiasingQuality: 2
m_StopNaN: 0
m_Dithering: 0
m_ClearDepth: 1
m_RequiresDepthTexture: 0
m_RequiresColorTexture: 0
m_Version: 2
--- !u!4 &963194228
labels:
- Cube
--- !u!65 &934158984
BoxCollider:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 934158981}
m_Material: {fileID: 0}
m_IsTrigger: 0
m_Enabled: 1
serializedVersion: 2
m_Size: {x: 1, y: 1, z: 1}
m_Center: {x: 0, y: 0, z: 0}
--- !u!23 &934158985
MeshRenderer:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 934158981}
m_Enabled: 1
m_CastShadows: 1
m_ReceiveShadows: 1
m_DynamicOccludee: 1
m_MotionVectors: 1
m_LightProbeUsage: 1
m_ReflectionProbeUsage: 1
m_RayTracingMode: 2
m_RenderingLayerMask: 1
m_RendererPriority: 0
m_Materials:
- {fileID: 2100000, guid: 31321ba15b8f8eb4c954353edc038b1d, type: 2}
m_StaticBatchInfo:
firstSubMesh: 0
subMeshCount: 0
m_StaticBatchRoot: {fileID: 0}
m_ProbeAnchor: {fileID: 0}
m_LightProbeVolumeOverride: {fileID: 0}
m_ScaleInLightmap: 1
m_ReceiveGI: 1
m_PreserveUVs: 0
m_IgnoreNormalsForChartDetection: 0
m_ImportantGI: 0
m_StitchLightmapSeams: 1
m_SelectedEditorRenderState: 3
m_MinimumChartSize: 4
m_AutoUVMaxDistance: 0.5
m_AutoUVMaxAngle: 89
m_LightmapParameters: {fileID: 0}
m_SortingLayerID: 0
m_SortingLayer: 0
m_SortingOrder: 0
--- !u!33 &934158986
MeshFilter:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 934158981}
m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0}
--- !u!4 &934158987
m_GameObject: {fileID: 963194225}
m_LocalRotation: {x: -0.17179534, y: 0.30667058, z: -0.056378223, w: -0.93448436}
m_LocalPosition: {x: 198.01884, y: 126.545494, z: -267.4195}
m_LocalScale: {x: 36.249973, y: 36.249973, z: 36.249973}
m_GameObject: {fileID: 934158981}
m_LocalRotation: {x: -0, y: -0, z: -0, w: 1}
m_LocalPosition: {x: 107.21, y: 72.77, z: -144.12}
m_LocalScale: {x: 15, y: 15, z: 15}
m_RootOrder: 4
m_RootOrder: 1
--- !u!20 &963194229
--- !u!1 &963194225
GameObject:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
serializedVersion: 6
m_Component:
- component: {fileID: 963194228}
- component: {fileID: 963194227}
- component: {fileID: 963194226}
- component: {fileID: 963194230}
- component: {fileID: 963194231}
m_Layer: 0
m_Name: Main Camera
m_TagString: MainCamera
m_Icon: {fileID: 0}
m_NavMeshLayer: 0
m_StaticEditorFlags: 0
m_IsActive: 1
--- !u!81 &963194226
AudioListener:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 963194225}
m_Enabled: 1
--- !u!20 &963194227
Camera:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}

height: 1
near clip plane: 0.3
far clip plane: 1000
field of view: 60
field of view: 59.991566
m_Depth: 0
m_Depth: -1
m_CullingMask:
serializedVersion: 2
m_Bits: 4294967295

m_TargetEye: 3
m_HDR: 1
m_AllowMSAA: 1
m_HDR: 0
m_AllowMSAA: 0
--- !u!4 &963194228
Transform:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 963194225}
m_LocalRotation: {x: -0.22882307, y: 0.124303445, z: -0.029468497, w: -0.96504945}
m_LocalPosition: {x: 116.52855, y: 91.11087, z: -194.85445}
m_LocalScale: {x: 36.249973, y: 36.249973, z: 36.249973}
m_Children: []
m_Father: {fileID: 0}
m_RootOrder: 4
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
--- !u!114 &963194230
MonoBehaviour:
m_ObjectHideFlags: 0

- id: 1
- id: 2
- id: 3
- id: 4
type: {class: ObjectCountLabeler, ns: UnityEngine.Perception.GroundTruth, asm: Unity.Perception.Runtime}
data:
enabled: 1
objectCountMetricId: 51da3c27-369d-4929-aea6-d01614635ce2
m_LabelConfig: {fileID: 11400000, guid: cedcacfb1d9beb34fbbb231166c472fe,
type: 2}
00000001:
type: {class: BoundingBox2DLabeler, ns: UnityEngine.Perception.GroundTruth,
asm: Unity.Perception.Runtime}
data:

type: 2}
00000002:
type: {class: RenderedObjectInfoLabeler, ns: UnityEngine.Perception.GroundTruth,
asm: Unity.Perception.Runtime}
data:
enabled: 1
objectInfoMetricId: 5ba92024-b3b7-41a7-9d3f-c03a6a8ddd01
idLabelConfig: {fileID: 11400000, guid: cedcacfb1d9beb34fbbb231166c472fe,
type: 2}
00000003:
00000001:
type: {class: SemanticSegmentationLabeler, ns: UnityEngine.Perception.GroundTruth,
asm: Unity.Perception.Runtime}
data:

m_TargetTextureOverride: {fileID: 0}
00000004:
type: {class: BoundingBox3DLabeler, ns: UnityEngine.Perception.GroundTruth,
00000002:
type: {class: ObjectCountLabeler, ns: UnityEngine.Perception.GroundTruth, asm: Unity.Perception.Runtime}
data:
enabled: 1
objectCountMetricId: 51da3c27-369d-4929-aea6-d01614635ce2
m_LabelConfig: {fileID: 11400000, guid: cedcacfb1d9beb34fbbb231166c472fe,
type: 2}
00000003:
type: {class: RenderedObjectInfoLabeler, ns: UnityEngine.Perception.GroundTruth,
annotationId: 0bfbe00d-00fa-4555-88d1-471b58449f5c
mode: 0
objectInfoMetricId: 5ba92024-b3b7-41a7-9d3f-c03a6a8ddd01
idLabelConfig: {fileID: 11400000, guid: cedcacfb1d9beb34fbbb231166c472fe,
type: 2}
--- !u!114 &963194231

m_GameObject: {fileID: 963194225}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: 7c51d9f2c5784bb4aee3fdf021966e14, type: 3}
m_Script: {fileID: 11500000, guid: a79441f348de89743a2939f4d699eac1, type: 3}
targetLight: {fileID: 705507993}
target: {fileID: 1640252278}
m_RenderShadows: 1
m_RequiresDepthTextureOption: 2
m_RequiresOpaqueTextureOption: 2
m_CameraType: 0
m_Cameras: []
m_RendererIndex: -1
m_VolumeLayerMask:
serializedVersion: 2
m_Bits: 1
m_VolumeTrigger: {fileID: 0}
m_RenderPostProcessing: 0
m_Antialiasing: 0
m_AntialiasingQuality: 2
m_StopNaN: 0
m_Dithering: 0
m_ClearDepth: 1
m_RequiresDepthTexture: 0
m_RequiresColorTexture: 0
m_Version: 2
--- !u!1 &1640252278
GameObject:
m_ObjectHideFlags: 0

m_Component:
- component: {fileID: 1640252283}
- component: {fileID: 1640252282}
- component: {fileID: 1640252281}
- component: {fileID: 1640252280}
- component: {fileID: 1640252280}
m_Name: Box1234
m_Name: Box
m_TagString: Untagged
m_Icon: {fileID: 0}
m_NavMeshLayer: 0

m_EditorClassIdentifier:
labels:
- Box
--- !u!137 &1640252280
SkinnedMeshRenderer:
--- !u!65 &1640252280
BoxCollider:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 1640252278}
m_Material: {fileID: 0}
m_IsTrigger: 0
m_Enabled: 1
serializedVersion: 2
m_Size: {x: 1, y: 1, z: 1}
m_Center: {x: 0, y: 0, z: 0}
--- !u!23 &1640252281
MeshRenderer:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}

m_MotionVectors: 1
m_LightProbeUsage: 1
m_ReflectionProbeUsage: 1
m_RayTracingMode: 0
m_RayTracingMode: 2
m_RenderingLayerMask: 1
m_RendererPriority: 0
m_Materials:

m_SortingLayerID: 0
m_SortingLayer: 0
m_SortingOrder: 0
serializedVersion: 2
m_Quality: 0
m_UpdateWhenOffscreen: 0
m_SkinnedMotionVectors: 1
m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0}
m_Bones: []
m_BlendShapeWeights: []
m_RootBone: {fileID: 0}
m_AABB:
m_Center: {x: 0, y: 0, z: 0}
m_Extent: {x: 0.5, y: 0.5, z: 0.5}
m_DirtyAABB: 0
--- !u!33 &1640252282
MeshFilter:
m_ObjectHideFlags: 0

m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 1640252278}
m_LocalRotation: {x: -0, y: -0, z: -0, w: 1}
m_LocalPosition: {x: 96.1856, y: 83.926025, z: -193.83864}
m_LocalScale: {x: 36.249973, y: 36.249973, z: 36.249973}
m_LocalPosition: {x: 95.88, y: 73.33, z: -165.54}
m_LocalScale: {x: 15, y: 15, z: 15}
m_Children: []
m_Father: {fileID: 0}
m_RootOrder: 2

m_Name:
m_EditorClassIdentifier:
yDegreesPerSecond: 180
--- !u!1 &1800622449
GameObject:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
serializedVersion: 6
m_Component:
- component: {fileID: 1800622452}
- component: {fileID: 1800622451}
- component: {fileID: 1800622450}
- component: {fileID: 1800622453}
m_Layer: 0
m_Name: Terrain
m_TagString: Untagged
m_Icon: {fileID: 0}
m_NavMeshLayer: 0
m_StaticEditorFlags: 4294967295
m_IsActive: 1
--- !u!154 &1800622450
TerrainCollider:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 1800622449}
m_Material: {fileID: 0}
m_Enabled: 1
m_TerrainData: {fileID: 15600000, guid: 627ddb42b637b9148bc53c50bf82faff, type: 2}
m_EnableTreeColliders: 1
--- !u!218 &1800622451
Terrain:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 1800622449}
m_Enabled: 1
serializedVersion: 6
m_TerrainData: {fileID: 15600000, guid: 627ddb42b637b9148bc53c50bf82faff, type: 2}
m_TreeDistance: 5000
m_TreeBillboardDistance: 50
m_TreeCrossFadeLength: 5
m_TreeMaximumFullLODCount: 50
m_DetailObjectDistance: 80
m_DetailObjectDensity: 1
m_HeightmapPixelError: 5
m_SplatMapDistance: 1000
m_HeightmapMaximumLOD: 0
m_ShadowCastingMode: 2
m_DrawHeightmap: 1
m_DrawInstanced: 0
m_DrawTreesAndFoliage: 1
m_ReflectionProbeUsage: 1
m_MaterialTemplate: {fileID: 2100000, guid: 594ea882c5a793440b60ff72d896021e, type: 2}
m_BakeLightProbesForTrees: 1
m_PreserveTreePrototypeLayers: 0
m_DeringLightProbesForTrees: 1
m_ScaleInLightmap: 0.0256
m_LightmapParameters: {fileID: 15203, guid: 0000000000000000f000000000000000, type: 0}
m_GroupingID: 0
m_RenderingLayerMask: 1
m_AllowAutoConnect: 1
--- !u!4 &1800622452
Transform:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 1800622449}
m_LocalRotation: {x: 0, y: 0, z: 0, w: 1}
m_LocalPosition: {x: -385, y: 64.6, z: -673}
m_LocalScale: {x: 1, y: 1, z: 1}
m_Children: []
m_Father: {fileID: 0}
m_RootOrder: 5
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
--- !u!114 &1800622453
MonoBehaviour:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 1800622449}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: 8b33f0bc2b78db642a758f07826d0dd0, type: 3}
m_Name:
m_EditorClassIdentifier:
labels:
- Terrain

6
TestProjects/PerceptionURP/Assets/SemanticSegmentationLabelingConfiguration.asset


- label: Box
color: {r: 1, g: 0, b: 0, a: 1}
- label: Cube
color: {r: 0, g: 1, b: 0.14507627, a: 1}
color: {r: 0, g: 0.024693727, b: 1, a: 1}
color: {r: 0, g: 0, b: 1, a: 1}
color: {r: 0, g: 1, b: 0.16973758, a: 1}
- label: Terrain
color: {r: 0.8207547, g: 0, b: 0.6646676, a: 1}

11
TestProjects/PerceptionURP/Packages/packages-lock.json


"com.unity.nuget.newtonsoft-json": "1.1.2",
"com.unity.render-pipelines.core": "7.1.6",
"com.unity.entities": "0.8.0-preview.8",
"com.unity.simulation.client": "0.0.10-preview.9",
"com.unity.simulation.capture": "0.0.10-preview.13",
"com.unity.simulation.core": "0.0.10-preview.19"
}

},
"url": "https://packages.unity.com"
},
"com.unity.simulation.client": {
"version": "0.0.10-preview.9",
"depth": 1,
"source": "registry",
"dependencies": {},
"url": "https://packages.unity.com"
},
"com.unity.simulation.core": {
"version": "0.0.10-preview.19",
"depth": 0,

"depth": 0,
"source": "builtin",
"dependencies": {
"com.unity.modules.ui": "1.0.0"
"com.unity.modules.ui": "1.0.0",
"com.unity.modules.imgui": "1.0.0"
}
},
"nuget.castle-core": {

4
TestProjects/PerceptionURP/ProjectSettings/ProjectVersion.txt


m_EditorVersion: 2019.4.8f1
m_EditorVersionWithRevision: 2019.4.8f1 (60781d942082)
m_EditorVersion: 2019.4.12f1
m_EditorVersionWithRevision: 2019.4.12f1 (225e826a680e)

2
TestProjects/PerceptionURP/ProjectSettings/QualitySettings.asset


skinWeights: 2
textureQuality: 0
anisotropicTextures: 1
antiAliasing: 0
antiAliasing: 2
softParticles: 0
softVegetation: 1
realtimeReflectionProbes: 1

48
com.unity.perception/CHANGELOG.md


### Fixed
Fixed a bug in instance segmentation labeler that erroneously logged that object ID 255 was not supported
## [0.6.0-preview.1] - 2020-12-03
### Added
Added support for labeling Terrain objects. Trees and details are not labeled but will occlude other objects.
Added instance segmentation labeler.
Added support for full screen visual overlays and overlay manager.
All-new editor interface for the Labeling component and Label Configuration assets. The new UI improves upon various parts of the label specification and configuration workflow, making it more efficient and less error-prone to set up a new Perception project.
Added Assets->Perception menu for current and future asset preparation and validation tools. Currently contains one function which lets the user create prefabs out of multiple selected models with one click, removing the need for going through all models individually.
### Changed
Updated dependencies to com.unity.simulation.capture:0.0.10-preview.14, com.unity.simulation.core:0.0.10-preview.20, and com.unity.burst:1.3.9.
Changed InstanceSegmentationImageReadback event to provide a NativeArray\<Color32\> instead of NativeArray\<uint\>.
Expanded all Unity Simulation references from USim to Unity Simulation.
Uniform and Normal samplers now serialize their random seeds.
The ScenarioBase's GenerateIterativeRandomSeed() method has been renamed to GenerateRandomSeedFromIndex().
### Deprecated
### Removed
### Fixed
UnitySimulationScenario now correctly deserializes app-params before offsetting the current scenario iteration when executing on Unity Simulation.
Fixed Unity Simulation nodes generating one extra empty image before generating their share of the randomization scenario iterations.
Fixed enumeration in the CategoricalParameter.categories property.
The GenerateRandomSeedFromIndex method now correctly hashes the current scenario iteration into the random seed it generates.
Corrupted .meta files have been rebuilt and replaced.
The Randomizer list inspector UI now updates appropriately when a user clicks undo.
## [0.5.0-preview.1] - 2020-10-14
### Known Issues
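
The readback change noted above means custom consumers of instance segmentation data now receive Color32 pixels rather than raw uint IDs. Below is a minimal, hypothetical sketch of a subscriber; the event name and payload type come from the changelog entry, but the exact parameter list and the event living on PerceptionCamera are assumptions for illustration only.

```csharp
using Unity.Collections;
using UnityEngine;
using UnityEngine.Perception.GroundTruth;

// Hypothetical sketch: the changelog states that InstanceSegmentationImageReadback now
// provides a NativeArray<Color32>; the (frame, pixels, renderTexture) parameter list and
// the event being exposed by PerceptionCamera are assumptions, not verified signatures.
[RequireComponent(typeof(PerceptionCamera))]
public class InstanceSegmentationReadbackLogger : MonoBehaviour
{
    void Start()
    {
        var perceptionCamera = GetComponent<PerceptionCamera>();
        perceptionCamera.InstanceSegmentationImageReadback += OnReadback;
    }

    void OnReadback(int frame, NativeArray<Color32> pixels, RenderTexture source)
    {
        // Each labeled instance is rendered with a unique Color32 value.
        Debug.Log($"Frame {frame}: {pixels.Length} instance segmentation pixels read back");
    }
}
```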

5
com.unity.perception/Documentation~/GroundTruthLabeling.md


## Labeling component
The Labeling component associates a list of string-based labels with a GameObject and its descendants. A Labeling component on a descendant overrides its parent's labels.
### Limitations
Labeling is supported on MeshRenderers, SkinnedMeshRenderers, and partially supported on Terrains.
On terrains, the labels will be applied to the entire terrain. Trees and details can not be labeled. They will always render as black or zero in instance and segmentation images and will occlude other objects in ground truth.
## Label Config
Many labelers require a Label Config asset. This asset specifies a list of all labels to be captured in the dataset along with extra information used by the various labelers.
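
To make the Labeling/Label Config relationship concrete, here is a minimal sketch of attaching a label from script. It assumes the Labeling component exposes the serialized `labels` list visible in the scene files in this commit; treat the member access as illustrative rather than canonical API documentation.

```csharp
using UnityEngine;
using UnityEngine.Perception.GroundTruth;

// Minimal sketch, assuming Labeling exposes the serialized "labels" list shown in the
// scene assets above. Labelers whose Label Config contains a matching entry will then
// report ground truth for this object and its descendants.
public class LabelCrate : MonoBehaviour
{
    void Awake()
    {
        var labeling = gameObject.AddComponent<Labeling>();
        labeling.labels.Add("Crate"); // the same label must also exist in the Label Config asset
    }
}
```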

5
com.unity.perception/Documentation~/PerceptionCamera.md


The SemanticSegmentationLabeler generates a 2D RGB image with the attached Camera. Unity draws objects in the color you associate with the label in the SemanticSegmentationLabelingConfiguration. If Unity can't find a label for an object, it draws it in black.
### InstanceSegmentationLabeler
The instance segmentation labeler generates a 2D RGB image with the attached camera. Unity draws each instance of a labeled
object with a unique color.
### BoundingBox2DLabeler
![Example bounding box visualization from SynthDet generated by the `SynthDet_Statistics` Jupyter notebook](images/bounding_boxes.png)
<br/>_Example bounding box visualization from SynthDet generated by the `SynthDet_Statistics` Jupyter notebook_
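
As an illustration of how labelers attach to the camera, the sketch below adds a SemanticSegmentationLabeler from script. The `AddLabeler` call and the labeler constructor taking a SemanticSegmentationLabelConfig are assumptions based on this documentation; in practice labelers are normally added through the Perception Camera inspector.

```csharp
using UnityEngine;
using UnityEngine.Perception.GroundTruth;

// Illustrative sketch: wiring a SemanticSegmentationLabeler to a PerceptionCamera from
// script. AddLabeler and the labeler constructor signature are assumptions for this example.
[RequireComponent(typeof(PerceptionCamera))]
public class SemanticSegmentationSetup : MonoBehaviour
{
    public SemanticSegmentationLabelConfig labelConfig; // assign the config asset in the inspector

    void Start()
    {
        var perceptionCamera = GetComponent<PerceptionCamera>();
        perceptionCamera.AddLabeler(new SemanticSegmentationLabeler(labelConfig));
    }
}
```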

5
com.unity.perception/Documentation~/Randomization/Scenarios.md


2. Make sure to include the [Serializable] attribute on a constant class. This will ensure that the constants can be manipulated from the Unity inspector.
3. By default, UnityEngine.Object class references cannot be serialized to JSON in a meaningful way. This includes Monobehaviors and SerializedObjects. For more information on what can and can't be serialized, take a look at the [Unity JsonUtility manual](https://docs.unity3d.com/ScriptReference/JsonUtility.html).
4. A scenario class's Serialize() and Deserialized() methods can be overridden to implement custom serialization strategies.
Follow the instructions below to generate a constants configuration file to modify your scenario constants in a built player:
1. Click the serialize constants button in the scenario's inspector window. This will generate a constants.json file and place it in the project's Assets/StreamingAssets folder.
2. Build your player. The new player will have a [ProjectName]_Data/StreamingAssets folder. A copy of the constants.json file previously constructed in the editor will be found in this folder.
3. Change the contents of the constants file. Any running player thereafter will utilize the newly authored constants values.
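
The JSON behavior described above can be illustrated with plain JsonUtility. This is a minimal sketch; the class and field names (MyScenarioConstants, totalIterations, randomSeed) are made up for the example, and only the [Serializable] requirement and JsonUtility's behavior come from the text.

```csharp
using System;
using UnityEngine;

// Minimal sketch of a constants class that JsonUtility can round-trip. The key points,
// per the notes above, are the [Serializable] attribute and the absence of
// UnityEngine.Object references; all names here are illustrative.
[Serializable]
public class MyScenarioConstants
{
    public int totalIterations = 100;
    public int randomSeed = 123456;
}

public static class ScenarioConstantsJson
{
    // Produces content like the constants.json file generated from the scenario inspector.
    public static string ToJson(MyScenarioConstants constants) =>
        JsonUtility.ToJson(constants, prettyPrint: true);

    // Applies edited JSON (for example, read back from StreamingAssets in a built player).
    public static void FromJson(string json, MyScenarioConstants target) =>
        JsonUtility.FromJsonOverwrite(json, target);
}
```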

2
com.unity.perception/Documentation~/SetupSteps.md


This page provides brief instructions on installing the Perception package. Head over to the [Perception Tutorial](Tutorial/TUTORIAL.md) for more detailed instructions and steps for building a sample project.
1. Install the latest version of 2020.1.x Unity Editor from [here](https://unity3d.com/get-unity/download/archive). (Perception has not been tested on Unity versions newer than 2020.1)
1. Install the latest version of 2019.4.x or 2020.1.x Unity Editor from [here](https://unity3d.com/get-unity/download/archive). (Perception has not been tested on Unity versions newer than 2020.1)
1. Create a new HDRP or URP project, or open an existing project.
1. Open `Window` -> `Package Manager`
1. In the Package Manager window find and click the ***+*** button in the upper lefthand corner of the window

999
com.unity.perception/Documentation~/Tutorial/Images/camera_prep.png
File diff is too large to display

999
com.unity.perception/Documentation~/Tutorial/Images/exampleprefab.png
File diff is too large to display

999
com.unity.perception/Documentation~/Tutorial/Images/hierarchy.png
File diff is too large to display

898
com.unity.perception/Documentation~/Tutorial/Images/jupyter2.png

Width: 2306  |  Height: 616  |  Size: 98 KiB

201
com.unity.perception/Documentation~/Tutorial/Phase1.md


- [Step 1: Download Unity Editor and Create a New Project](#step-1)
- [Step 2: Download the Perception Package and Import Samples](#step-2)
- [Step 3: Setup a Scene for Your Perception Simulation](#step-3)
- [Step 4: Specify Ground-Truth and Setup Object Labels](#step-4)
- [Step 5: Add and Set-up Randomizers](#step-5)
- [Step 6: Generate and Verify Synthetic Data](#step-6)
- [Step 4: Specify Ground-Truth and Set Up Object Labels](#step-4)
- [Step 5: Set Up Background Randomizers](#step-5)
- [Step 6: Set Up Foreground Randomizers](#step-6)
- [Step 7: Inspect Generated Synthetic Data](#step-7)
- [Step 8: Verify Data Using Dataset Insights](#step-8)
### <a name="step-1">Step 1: Download Unity Editor and Create a New Project</a>
* **Action**: Navigate to [this](https://unity3d.com/get-unity/download/archive) page to download and install the latest version of **Unity Editor 2019.4.x**. (The tutorial has not yet been fully tested on newer versions.)

During the installation of Unity, you will be asked to choose which modules you would like to include. This will depend on the types of applications you eventually intend to build with your Unity installation; however, for the purposes of this tutorial, we need to make make sure _**Linux Build Support**_ is checked. In addition, if you do not already have _**Visual Studio**_ on your computer, the wizard will give you an option to install it. Go ahead and check this option, as we will need _**Visual Studio**_ for writing some simple scripts in Phase 2 of the tutorial.
During the installation of Unity, you will be asked to choose which modules you would like to include. This will depend on the types of applications you eventually intend to build with your Unity installation; however, for the purposes of this tutorial, we need to make sure _**Linux Build Support (Mono)**_ is checked (the IL2CPP option may be selected by default, but for this tutorial, we will need the Mono option). In addition, if you do not already have _**Visual Studio**_ on your computer, the wizard will give you an option to install it. Go ahead and check this option, as we will need _**Visual Studio**_ for writing some simple scripts in Phase 2 of the tutorial.
* **Action**: Make sure the _**Linux Build Support**_ and _**Visual Studio**_ installation options are checked when selecting modules during installation.
* **Action**: Make sure the _**Linux Build Support (Mono)**_ and _**Visual Studio**_ installation options are checked when selecting modules during installation.
When you first run Unity, you will be asked to open an existing project, or create a new one.

As the name suggests, the _**Package Manager**_ is where you can download new packages, update or remove existing ones, and access a variety of information and additional actions for each package.
* **Action**: Click on the _**+**_ sign at the top-left corner of the _**Package Manager**_ window and then choose the option _**Add package frim git URL...**_.
* **Action**: Click on the _**+**_ sign at the top-left corner of the _**Package Manager**_ window and then choose the option _**Add package from git URL...**_.
* **Action**: Enter the address `com.unity.perception` and click _**Add**_.
**Note:** If you would like a specific version of the package, you can append the version to the end of the url. For example `com.unity.perception@0.1.0-preview.5`. For this tutorial, **we do not need to add a version**. You can also install the package from a local clone of the Perception repository. More information on installing local packages is available [here](https://docs.unity3d.com/Manual/upm-ui-local.html).

</p>
Each package can come with a set of samples. As seen in the righthand panel, the Perception package includes a sample named _**Tutorial Files**_, which will be required for completing this tutorial. The sample files consist of example foreground and background objects, randomizers, shaders, and other useful elements to work with during this tutorial. **Foreground** objects are those thatthe eventual machine learning model will try to detect, and **background** objects will be placed in the background as distractors for the model.
Each package can come with a set of samples. As seen in the righthand panel, the Perception package includes a sample named _**Tutorial Files**_, which will be required for completing this tutorial. The sample files consist of example foreground and background objects, randomizers, shaders, and other useful elements to work with during this tutorial. **Foreground** objects are those that the eventual machine learning model will try to detect, and **background** objects will be placed in the background as distractors for the model.
* **Action**: In the _**Package Manager**_ window, from the list of _**Samples**_ for the Perception package, click on the _**Import into Project**_ button for the sample named _**Tutorial Files**_.

<img src="Images/project_folders_samples.png" width="600"/>
</p>
* **Action**: **(For URP projects only)** The _**Project**_ tab contains a search bar; use it to find the file named `ForwardRenderer.asset`, as shown below:
* **Action**: **(For URP projects only)** Click on the found file to select it. Then, from the _**Inspector**_ tab of the editor, click on the _**Add Renderer Feature**_ button, and select _**Ground Truth Renderer Feature**_ from the dropdown menu:
<p align="center">
<img src="Images/forward_renderer_inspector.png" width="400"/>

### <a name="step-3">Step 3: Set Up a Scene for Your Perception Simulation</a>
Simply put, in Unity, Scenes contain any object that exists in the world. This world can be a game, or in this case, a perception-oriented simulation. Every new project contains a Scene named `SampleScene`, which is automatically opened when the project is created. This Scene comes with several objects and settings that we do not need, so let's create a new one.
* **Action**: In the _**Project**_ tab, right-click on the `Assets/Scenes` folder and click _**Create -> Scene**_. Name this new Scene `TutorialScene` and **double-click on it to open it**.
The _**Hierarchy**_ tab of the editor displays all the Scenes currently loaded, and all the objects currently present in each loaded Scene, as shown below:
<p align="center">

* **Action**: Click on `Directional Light` and in the _**Inspector**_ tab, set `Shadow Type` to `No Shadows`.
We will now add the necessary components to the camera in order to equip it for the perception workflow. To do this, we need to add a `Perception Camera` component to it, and then define which types of ground-truth we wish to generate using this camera.
* **Action**: Select `Main Camera` again and in the _**Inspector**_ tab, click on the _**Add Component**_ button.
* **Action**: Start typing `Perception Camera` in the search bar that appears, until the `Perception Camera` script is found, with a **#** icon to the left:

</p>
* **Action**: Click on this script to add it as a component. Your camera is now a `Perception` camera.
**Note:** You may now see a warning regarding asynchronous shader compilation in the UI for the `Perception Camera` component. To fix this issue, from the top menu bar go to _**Edit -> Project Settings… -> Editor**_ and under _**Shader Compilation**_ settings, disable _**Asynchronous Shader Compilation**_.
Adding components is the standard way in which objects can have various kinds of logic and data attached to them in Unity. This includes objects placed within the Scene (called GameObjects), such as the camera above, or objects outside of a Scene, in your project folders (called Prefabs).

If you hover your mouse pointer over each of the fields shown (e.g. `Capture Interval`), you will see a tooltip popup with an explanation of what the item controls.
As seen in the UI for `Perception Camera`, the list of `Camera Labelers` is currently empty. For each type of ground-truth you wish to generate alongside your captured frames (e.g. 2D bounding boxes around objects), you will need to add a corresponding `Camera Labeler` to this list.
* **Action**: Click on the _**+**_ button at the bottom right corner of the empty labeler list and select `BoundingBox2DLabeler`.
* **Action**: Repeat the above step to add `ObjectCountLabeler`, `RenderedObjectInfoLabeler`, and `SemanticSegmentationLabeler`.
Once you add the labelers, the _**Inspector**_ view of the `Perception Camera` component will look like this:

</p>
One of the useful features that comes with the `Perception Camera` component is the ability to display real-time visualizations of the labelers when your simulation is running. For instance, `BoundingBox2DLabeler` can display two-dimensional bounding boxes around the foreground objects that it tracks in real-time and `SemanticSegmentationLabeler` displays the semantic segmentation image overlaid on top of the camera's view. To enable this feature, make sure the `Show Labeler Visualizations` checkmark is enabled.
### <a name="step-4">Step 4: Specify Ground-Truth and Set Up Object Labels</a>
It is now time to tell each labeler added to the `Perception Camera` which objects it should label in the generated dataset. For instance, if your workflow is intended for generating frames and ground-truth for detecting chairs, your labelers would need to know that they should look for objects labeled "chair" within the scene. The chairs should in turn also be labeled "chair" in order to make them visible to the labelers. We will now learn how to set up these configurations.
You will notice each added labeler has a `Label Config` field. By adding a label configuration here you can instruct the labeler to look for certain labels within the scene and ignore the rest. To do that, we should first create label configurations.
* **Action**: In the _**Project**_ tab, right-click the `Assets` folder, then click _**Create -> Perception -> Id Label Config**_. Name this new asset `TutorialIdLabelConfig`.

Click on this asset to bring up its _**Inspector**_ view. In there, you can specify the labels that this config will keep track of. You can type in labels, add any labels defined in the project (through being added to prefabs), and import/export this label config as a JSON file. A new label config like this one contains an empty list of labels.
In this tutorial, we will generate synthetic data intended for detecting 10 everyday grocery items. These grocery items were imported into your project when you imported the tutorial files from the _**Package Manager**_, and are located in the folder `Assets/Samples/Perception/0.6.0-preview.1/Tutorial Files/Foreground Objects/Phase 1/Prefabs`.
The label configuration we have created (`TutorialIdLabelConfig`) is of type `IdLabelConfig`, and is compatible with three of the four labelers we have attached to our `Perception Camera`. This type of label configuration carries a unique numerical ID for each label. However, `SemanticSegmentationLabeler` requires a different kind of label configuration which includes unique colors for each label instead of numerical IDs. This is because the output of this labeler is a set of images in which each visible foreground object is painted in a unique color.
* **Action**: In the _**Project**_ tab, right-click the `Assets` folder, then click _**Create -> Perception -> Semantic Segmentation Label Config**_. Name this asset `TutorialSemanticSegmentationLabelConfig`.
Now that you have created your label configurations, we need to assign them to labelers that you previously added to your `Perception Camera` component.
* **Action**: Select the `Main Camera` object from the Scene _**Hierarchy**_, and in the _**Inspector**_ tab, assign the newly created `TutorialIdLabelConfig` to the first three labelers. To do so, you can either drag and drop the former into the corresponding fields for each labeler, or click on the small circular button in front of the `Id Label Config` field, which brings up an asset selection window filtered to only show compatible assets. Assign `TutorialSemanticSegmentationLabelConfig` to the fourth labeler. The `Perception Camera` component will now look like the image below:
<p align="center">
<img src="Images/pclabelconfigsadded.png" width="400"/>
</p>
It is now time to assign labels to the objects that are supposed to be detected by an eventual object-detection model, and add those labels to both of the label configurations we have created. As mentioned above, these objects are located at `Assets/Samples/Perception/0.6.0-preview.1/Tutorial Files/Foreground Objects/Phase 1/Prefabs`.
In Unity, Prefabs are essentially reusable GameObjects that are stored to disk, along with all their child GameObjects, components, and property values. Let's see what our sample Prefabs include.
* **Action**: In the _**Project**_ tab, navigate to `Assets/Samples/Perception/0.6.0-preview.1/Tutorial Files/Foreground Objects/Phase 1/Prefabs`.
* **Action**: Double-click the file named `drink_whippingcream_lucerne.prefab` to open the Prefab asset.
When you open the Prefab asset, you will see the object shown in the Scene tab and its components shown on the right side of the editor, in the _**Inspector**_ tab:
<p align="center">
<img src="Images/exampleprefab.png"/>
</p>
The Prefab contains a number of components, including a `Transform`, a `Mesh Filter`, a `Mesh Renderer`, and a `Labeling` component (highlighted in the image above). While the first three of these are common Unity components, the fourth one is specific to the Perception package, and is used for assigning labels to objects. You can see here that the Prefab has one label already added, displayed in the list of `Added Labels`. The UI here provides a multitude of ways for you to assign labels to the object. You can either choose to have the asset automatically labeled (by enabling `Use Automatic Labeling`), or add labels manually. In case of automatic labeling, you can choose from a number of labeling schemes, e.g. the asset's name or folder name. If you go the manual route, you can type in labels, add labels from any of the label configurations included in the project, or add from lists of suggested labels based on the Prefab's name and path.
Note that each object can have multiple labels assigned, and thus appear as different objects to labelers with different label configurations. For instance, you may want your semantic segmentation labeler to detect all cream cartons as `dairy_product`, while your bounding box labeler still distinguishes between different types of dairy product. To achieve this, you can add a `dairy_product` label to all your dairy products, and then in your label configuration for semantic segmentation, only add the `dairy_product` label, and not any specific products or brand names.
For this tutorial, we have already added the `Labeling` component to all the foreground Prefabs; however, if you are making your own Prefabs, you can easily add a `Labeling` component to them using the _**Add Component**_ button in the screenshot above.
**Note:** If you would like to start from `.fbx` models, the Perception package lets you quickly create Prefabs from multiple models. Just select all your models and from the top menu bar select _**Assets -> Perception -> Create Prefabs from Selected Models**_. The newly created Prefabs will be placed in the same folders as their corresponding models.
Even though the sample Prefabs already have a label manually added, to learn more about how to use the `Labeling` component, we will now use automatic labeling to label all our foreground objects. This will overwrite their manually added labels.
* **Action**: Select **all the files** inside the `Assets/Samples/Perception/0.6.0-preview.1/Tutorial Files/Foreground Objects/Phase 1/Prefabs` folder.
* **Action**: From the _**Inspector**_ tab, enable `Use Automatic Labeling for All Selected Items`, and then select `Use asset name` as the labeling scheme.
<img src="Images/autolabel.png" width="400"/>
This will assign each of the selected Prefabs its own name as a label.
* **Action**: Click _**Add Automatic Labels of All Selected Assets to Config...**_.
In the window that opens, you can add all the automatic labels you just added to your Prefabs to the label configurations you created earlier. At the top, there is a list of all the labels you are about to add, and below that, a list of all label configurations currently present in the project.
* **Action**: Add the list of labels to `TutorialIdLabelConfig` and `TutorialSemanticSegmentationLabelConfig` by clicking the _**Add All Labels**_ button for both.
<img src="Images/addtoconfigwindow.png" width="500"/>
Here, you can also open either of the configurations by clicking the _**Open**_ buttons. Open both configurations to make sure the list of labels has been added to them. They should now look similar to the screenshots below:
<img src="Images/labelconfigs.png" width="800"/>
**Note:** Since we used automatic labels here and added them to our configurations, we are confident that the labels in the configurations match the labels of our objects. In cases where you decide to add manual labels to objects and configurations, make sure you use the exact same labels; otherwise, the objects for which a matching label is not found in your configurations will not be detected by the labelers that are using those configurations.
Now that we have labeled all our foreground objects and set up our label configurations, let's briefly test things.
* **Action**: In the _**Project**_ tab, navigate to `Assets/Samples/Perception/0.6.0-preview.1/Tutorial Files/Foreground Objects/Phase 1/Prefabs`.
* **Action**: Drag and drop any of the Prefabs inside this folder into the Scene.
* **Action**: Click on the **▷** (play) button located at the top middle section of the editor to run your simulation.
Since we have visualizations enabled on our `Perception Camera`, you should now see a bounding box being drawn around the object you put in the scene, and the object itself being colored according to its label's color in `TutorialSemanticSegmentationLabelConfig`, similar to the image below:
<img src="Images/one_object_run.png" width="600"/>
### <a name="step-5">Step 5: Set Up Background Randomizers</a>
To start randomizing your simulation you will first need to add a `Scenario` to your scene. Scenarios control the execution flow of your simulation by coordinating all `Randomizer` components added to them. The Perception package comes with a useful set of Randomizers that let you quickly place your foreground objects in the Scene, generate varied backgrounds, as well as randomize various parameters of the simulation over time, including things such as position, scale, and rotation of objects, number of objects within the camera's view, and so on. Randomizers achieve this through coordinating a number of `Parameter`s, which essentially define the most granular randomization behaviors. For instance, for continuous variable types such as floats, vectors, and colors, Parameters can define the range, sampling distribution, and seed for randomization. This is while another class of Parameters let you randomly select one out of a number of categorical options.
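As a rough sketch of how a Parameter appears in code, the snippet below declares a float Parameter with a uniform sampling range. This is illustrative only: the namespaces and the `UniformSampler` constructor shown here are assumptions that may differ between Perception package versions.
```
using UnityEngine;
using UnityEngine.Experimental.Perception.Randomization.Parameters;
using UnityEngine.Experimental.Perception.Randomization.Samplers;

// Illustrative sketch only: a float Parameter that samples uniformly from [0.5, 3]
// each time Sample() is called. Its range, distribution, and seed are editable in the UI.
// Namespaces may instead be UnityEngine.Perception.Randomization.* depending on your version.
public class ParameterSketch : MonoBehaviour
{
    public FloatParameter exampleIntensity = new FloatParameter { value = new UniformSampler(0.5f, 3f) };
}
```
In Phase 2 of this tutorial, a field declared much like this one is placed inside a custom Randomizer rather than a MonoBehaviour.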
To summarize, a sample `Scenario` could look like this:

* **Action**: Rename your new GameObject to `Simulation Scenario`.
* **Action**: In the _**Inspector**_ view of this new object, add a new `Fixed Length Scenario` component.
Each `Scenario` executes a number of `Iteration`s, and each Iteration carries on for a number of frames. These are timing elements you can leverage in order to customize your Scenarios and the timing of your randomizations. You will learn how to use Iterations and frames in Phase 2 of this tutorial. For now, we will use the `Fixed Length Scenario`, which is a special kind of Scenario that runs for a fixed number of frames during each Iteration, and is sufficient for many common use-cases. Note that at any given time, you can have only one Scenario active in your Scene.
The _**Inspector**_ view of `Fixed Length Scenario` looks like below:

There are a number of settings and properties you can modify here. `Quit On Complete` instructs the simulation to quit once this Scenario has completed executing. We can see here that the Scenario has been set to run for 100 Iterations, and that each Iteration will run for one frame. But this is currently an empty `Scenario`, so let's add some Randomizers.
* **Action**: Click _**Add Folder**_, and from the file explorer window that opens, choose the folder `Assets/Samples/Perception/0.6.0-preview.1/Tutorial Files/Background Objects/Prefabs`.
The background Prefabs are primitive shapes devoid of color or texture. Later Randomizers will take care of those aspects.
* **Action**: Set the rest of the properties (except for `Seed`) according to the image below. That is, `Depth = 0, Layer Count = 2, Separation Distance = 0.5, Placement Area = (6,6)`. The `Seed` attribute is the seed used for the underlying random sampler and does not need to match the image shown.
* **Action**: Click on the **▷** (play) button located at the top middle section of the editor to run your simulation.
<p align="center">
<img src="Images/play.png" width = "500"/>

To generate data as fast as possible, the simulation utilizes asynchronous processing to churn through frames quickly, rearranging and randomizing the objects in each frame. To be able to check out individual frames and inspect the real-time visualizations, click on the pause button (next to play). You can also switch back to the Scene view to be able to inspect each object individually. For performance reasons, it is recommended to disable visualizations altogether (from the _**Inspector**_ view of `Perception Camera`) once you are ready to generate a large dataset.
As seen in the image above, what we have now is just a beige-colored wall of shapes. This is because so far, we are only spawning them, and the beige color of our light is what gives them their current look. To make this background more useful, let's add a couple more `Randomizers`.
**Note:** If at this point you don't see any objects being displayed, make sure the Placement Area for `BackgroundObjectPlacementRandomizer` is (6,6) and not (0,0).
**Note:** If your _**Game**_ tab has a different field of view than the one shown here, change the aspect ratio of your _**Game**_ tab to `4:3`, as shown below:
<p align="center">
<img src="Images/game_aspect.png" width = "400"/>
</p>
`TextureRandomizer` will have the task of attaching random textures to our colorless background objects at each Iteration of the Scenario. Similarly, `HueOffsetRandomizer` will alter the color of the objects, and `RotationRandomizer` will give the objects a new random rotation each Iteration.
* **Action**: In the UI snippet for `TextureRandomizer`, click _**Add Folder**_ and choose `Assets/Samples/Perception/0.6.0-preview.1/Tutorial Files/Background Textures`.
* **Action**: In the UI snippet for `RotationRandomizer`, verify that all the minimum values for the three ranges are `0` and that maximum values are `360`.
Your list of Randomizers should now look like the screenshot below:

To make sure each Randomizer knows which objects it should work with, we will use an object tagging and querying workflow that the bundled Randomizers already use. Each Randomizer can query the Scene for objects that carry certain types of `RandomizerTag` components. For instance, the `TextureRandomizer` queries the Scene for objects that have a `TextureRandomizerTag` component (you can change this in code!). Therefore, in order to make sure our background Prefabs are affected by the `TextureRandomizer` we need to make sure they have `TextureRandomizerTag` attached to them.
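As a rough, hypothetical illustration of this tag-and-query pattern (the class names below are made up for this sketch, and the namespaces are assumptions that may differ between Perception versions), a custom tag and a Randomizer that consumes it could be structured like this:
```
using System;
using UnityEngine;
using UnityEngine.Experimental.Perception.Randomization.Randomizers;

// An empty tag component simply marks objects as targets for a particular Randomizer.
public class MyExampleRandomizerTag : RandomizerTag { }

[Serializable]
public class MyExampleRandomizer : Randomizer
{
    protected override void OnIterationStart()
    {
        // Only objects carrying MyExampleRandomizerTag are returned by this query.
        var taggedObjects = tagManager.Query<MyExampleRandomizerTag>();
        foreach (var taggedObject in taggedObjects)
        {
            // Modify only the tagged objects here, e.g. apply a sampled texture,
            // color, or transform value, just as the bundled Randomizers do.
        }
    }
}
```
Phase 2 of this tutorial walks through writing a real Randomizer and RandomizerTag pair along these lines.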
* **Action**: In the _**Project**_ tab, navigate to `Assets/Samples/Perception/0.6.0-preview.1/Tutorial Files/Background Objects/Prefabs`.
* **Action**: Select all the files inside and from the _**Inspector**_ tab add a `TextureRandomizerTag` to them. This will add the component to all the selected files.
* **Action**: Repeat the above step to add `HueOffsetRandomizerTag` and `RotationRandomizerTag` to all selected Prefabs.

<img src="Images/background_good.png" width = "700"/>
</p>
### <a name="step-6">Step 6: Set Up Foreground Randomizers</a>
It is now time to spawn and randomize our foreground objects.
* **Action**: Add `ForegroundObjectPlacementRandomizer` to your list of Randomizers. Click _**Add Folder**_ and select `Assets/Samples/Perception/0.6.0-preview.1/Tutorial Files/Foreground Objects/Phase 1/Prefabs`.
* **Action**: Set these values for the above Randomizer: `Depth = -3, Separation Distance = 1.5, Placement Area = (5,5)`.
This Randomizer uses the same algorithm as the one we used for backgrounds; however, it is defined in a separate C# class because you can only have **one of each type of Randomizer added to your Scenario**. Therefore, this is our way of differentiating between how background and foreground objects are treated.
* **Action**: From the _**Project**_ tab select all the foreground Prefabs located in `Assets/Samples/Perception/0.6.0-preview.1/Tutorial Files/Foreground Objects/Phase 1/Prefabs`, and add a `RotationRandomizerTag` component to them.
Randomizers execute according to their order within the list of Randomizers added to your Scenario. If you look at the list now, you will notice that `ForegroundObjectPlacementRandomizer` is coming after `RotationRandomizer`, therefore, foreground objects will NOT be included in the rotation randomizations, even though they are carrying the proper RandomizerTag. To fix that:
* **Action**: Drag `ForegroundObjectPlacementRandomizer` using the striped handle bar (on its left side) and drop it above `RotationRandomizer`.
Your full list of Randomizers should now look like the screenshot below:
<p align="center">
<img src="Images/randomizers_all.png" width = "400"/>
</p>
You are now ready to generate your first dataset. Our current setup will produce 100 frames of annotated captures.
While the simulation is running, your _**Game**_ view will quickly generate frames similar to the gif below (note: visualization for `SemanticSegmentationLabeler` is disabled here):
### <a name="step-7">Step 7: Inspect Generated Synthetic Data</a>
Once the run is complete, you will see a message in the _**Console**_ tab of the editor, with information on where the generated data has been saved. An example is shown below (Mac OS):

The output dataset includes a variety of information about different aspects of the active sensors in the Scene (currently only one), as well as the ground-truth generated by all active labelers. [This page](https://github.com/Unity-Technologies/com.unity.perception/blob/master/com.unity.perception/Documentation%7E/Schema/Synthetic_Dataset_Schema.md) provides a comprehensive explanation on the schema of this dataset. We strongly recommend having a look at the page once you have completed this tutorial.
* **Action**: To get a quick feel of how the data is stored, open the folder whose name starts with `Dataset`, then open the file named `captures_000.json`. This file contains the output from `BoundingBox2DLabeler`. The `captures` array contains the position and rotation of the sensor (camera), the position and rotation of the ego (sensor group, currently only one), and the annotations made by `BoundingBox2DLabeler` for all visible objects defined in its label configuration. For each visible object, the annotations include:
* `label_id`: The numerical id assigned to this object's label in the labeler's label configuration
* `label_name`: The object's label, e.g. `candy_minipralines_lindt`
* `instance_id`: Unique instance id of the object

* **Action**: Review the JSON meta-data and the images captured for the first annotated frame, and verify that the objects within them match.
### <a name="step-8">Step 8: Verify Data Using Dataset Insights</a>
`docker run -p 8888:8888 -v "<path to synthetic data>:/data" -t unitytechnologies/datasetinsights:latest`, where the path to data is what we earlier found in Unity's console messages.
This will download a Docker image from Unity. If you get an error regarding the path to your dataset, make sure you have not included the enclosing `<` and `>` in the path and that the spaces are properly escaped.

* **Action**: To make sure your data is properly mounted, navigate to the `data` folder. If you see the dataset's folders there, we are good to go.
* **Action**: Navigate to the `datasetinsights/notebooks` folder and open `Perception_Statistics.ipynb`.
* **Action**: Once in the notebook, remove the `/<GUID>` part of the `data_root = /data/<GUID>` path. Since the dataset root is already mapped to `/data`, you can use this path directly.
<p align="center">
<img src="Images/jupyter2.png"/>

Each of the code blocks in this notebook can be executed by clicking on them to select them, and then clicking the _**Run**_ button at the top of the notebook. When you run a code block, an **asterisk (\*)** will be shown next to it on the left side, until the code finishes executing.
Below, you can see a sample plot generated by the Dataset Insights notebook, depicting the number of times each of the 10 foreground objects appeared in the dataset. As shown in the histogram, there is a high level of uniformity between the labels, which is a desirable outcome.

* **Action**: Follow the instructions laid out in the notebook and run each code block to view its outputs.
This concludes Phase 1 of the Perception tutorial. In the next phase, you will dive a little bit into randomization code and learn how to build your own custom Randomizer. [Click here to continue to Phase 2: Custom Randomizations](Phase2.md)

34
com.unity.perception/Documentation~/Tutorial/Phase2.md


Steps included in this phase of the tutorial:
- [Step 1: Build a Lighting Randomizer](#step-1)
- [Step 2: Bundle Data and Logic Inside RandomizerTags](#step-2)
### <a name="step-1">Step 1: Build a Lighting Randomizer</a>

* **Action**: Create another script and name it `MyLightRandomizerTag.cs`.
* **Action**: Double-click `MyLightRandomizer.cs` to open it in _**Visual Studio**_.
Note that while _**Visual Studio**_ is the default option, you can choose any text editor of your choice. You can change this setting in _**Preferences -> External Tools -> External Script Editor**_.
* **Action**: Remove the contents of the class and copy/paste the code below:

The purpose of this piece of code is to obtain a random float parameter and assign it to the light's `Intensity` field on the start of every Iteration. Let's go through the code above and understand each part. The `FloatParameter` field makes it possible for us to define a randomized float parameter and modify its properties from the editor UI, similar to how we already modified the properties for the previous Randomizers we used.
**Note:** If you look at the _**Console**_ tab of the editor now, you will see an error regarding `MyLightRandomizerTag` not being found. This is to be expected, since we have not yet created this class; the error will go away once we create the class later.
You will notice that the Randomizer's UI snippet contains one Parameter named `Light Intensity Parameter`. This is the same Parameter we added in the code block above. Here, you can set the sampling distribution (`Value`), `Seed`, and `Range` for this float Parameter:
<p align="center">
<img src="Images/light_rand_1.png" width="420"/>

This range of intensities is arbitrary, but will typically give us pleasant lighting without excessive darkness or burnt-out highlights.
The `MyLightRandomizer` class extends `Randomizer`, which is the base class for all Randomizers that can be added to a Scenario. This base class provides a plethora of useful functions and properties that can help catalyze the process of creating new Randomizers.
The `OnIterationStart()` function is used for telling the Randomizer what actions to perform at the start of each Iteration of the Scenario. As seen in the code block, at the start of each Iteration, this class queries the `tagManager` object for all objects that carry the `MyLightRandomizerTag` component. Then, for each object inside the queried list, it first retrieves the `Light` component, and then sets its intensity to a new random float sampled from `lightIntensityParameter`.
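Putting the pieces described above together, a minimal sketch of such a Randomizer might look like the following. This is not necessarily the exact code from the tutorial sample; the namespaces, the `AddRandomizerMenu` attribute, and the return type of `tagManager.Query` are assumptions that can vary between Perception versions.
```
using System;
using UnityEngine;
using UnityEngine.Experimental.Perception.Randomization.Parameters;
using UnityEngine.Experimental.Perception.Randomization.Randomizers;

[Serializable]
[AddRandomizerMenu("Perception/My Light Randomizer")]
public class MyLightRandomizer : Randomizer
{
    // Exposed in the Randomizer's UI so that the range, distribution, and seed can be edited.
    public FloatParameter lightIntensityParameter;

    protected override void OnIterationStart()
    {
        // Query for everything in the Scene that carries a MyLightRandomizerTag.
        var taggedObjects = tagManager.Query<MyLightRandomizerTag>();
        foreach (var taggedObject in taggedObjects)
        {
            // Retrieve the Light component and assign it a freshly sampled intensity.
            var light = taggedObject.GetComponent<Light>();
            light.intensity = lightIntensityParameter.Sample();
        }
    }
}
```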
* **Action**: Open `MyLightRandomizerTag.cs` and replace its contents with the code below:

Yes, a RandomizerTag can be this simple if you just need it for helping Randomizers query for target objects. Later, you will learn how to add code here to encapsulate more data and logic within the randomized objects.
Notice there is a `RequireComponent(typeof(Light))` line at the top. This line makes it so that you can only add the `MyLightRandomizerTag` component to an object that already has a `Light` component attached. This way, the Randomizers that query for this tag can be confident that the found objects have a `Light` component and can thus be Randomized.
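In its simplest form, then, the tag is an empty class deriving from `RandomizerTag`. Here is a sketch consistent with the description above; the namespace shown is an assumption and may differ in your package version:
```
using UnityEngine;
using UnityEngine.Experimental.Perception.Randomization.Randomizers;

// RequireComponent ensures this tag can only be added to objects that already have a Light.
[RequireComponent(typeof(Light))]
public class MyLightRandomizerTag : RandomizerTag { }
```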
* **Action**: Select `Directional Light` in the Scene's _**Hierarchy**_, and in the _**Inspector**_ tab, add a `My Light Randomizer Tag` component.
* **Action**: Run the simulation again and inspect how `Directional Light` now switches between different intensities. You can pause the simulation and then use the step button (to the right of the pause button) to move the simulation one frame forward and clearly see the varying light intensity.
Let's now add more variation to our light by randomizing its color as well.

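A sketch of how the Randomizer might look once a color Parameter has been added alongside the intensity Parameter. The `ColorRgbParameter` type name and the namespaces are assumptions based on the RGBA description below and may differ in your Perception version:
```
using System;
using UnityEngine;
using UnityEngine.Experimental.Perception.Randomization.Parameters;
using UnityEngine.Experimental.Perception.Randomization.Randomizers;

[Serializable]
[AddRandomizerMenu("Perception/My Light Randomizer")]
public class MyLightRandomizer : Randomizer
{
    public FloatParameter lightIntensityParameter;
    // A color Parameter exposes separate Red/Green/Blue/Alpha samplers in the UI.
    public ColorRgbParameter lightColorParameter;

    protected override void OnIterationStart()
    {
        var taggedObjects = tagManager.Query<MyLightRandomizerTag>();
        foreach (var taggedObject in taggedObjects)
        {
            var light = taggedObject.GetComponent<Light>();
            light.intensity = lightIntensityParameter.Sample();
            // New: also assign a randomly sampled color on each Iteration.
            light.color = lightColorParameter.Sample();
        }
    }
}
```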
If you now check the UI snippet for `MyLightRandomizer`, you will notice that `Color Parameter` is added. This Parameter includes four separate randomized values for `Red`, `Green`, `Blue` and `Alpha`. Note that the meaningful range for all of these values is 0-1 (and not 0-255). You can see that the sampling range for red, green, and blue is currently also set to 0-1, which means the parameter covers a full range of colors. A color with (0,0,0) RGB components essentially emits no light. So, let's increase the minimum a bit to avoid such a scenario.
Each value should also already have a unique `Seed` specified. This is the seed which the sampler will use to produce a random value from the specified distribution. If two random parameters have the same seed, range, and distribution, they will always have the same value. In the case of this color, this would lead to the red, green, and blue components having equal values, and thus the produced color always being a shade of grey. As such, in order to get varied colors and not just grey, we need to make sure the seed values are different for our red, green, and blue components.
* **Action**: In the UI snippet for `MyLightRandomizer`, make sure the red, green, and blue components have different `Seed` values. Set the distribution and value for Alpha to `Constant` and 1, as we do not want to randomize the alpha component of the color.

* **Action**: Run the simulation for a few frames to observe the lighting color changing on each iteration.
### <a name="step-2">Step 2: Bundle Data and Logic Inside RandomizerTags</a>
There are also cases where you may need to include certain logic within your object in order to make the Randomizer code more reusable and easier to maintain. For instance, you may want to build an office chair Prefab to use in various simulations. This chair is likely to support a range of customizations for its various parts (back angle, seat angle, seat height, etc.). Instead of directly mapping a Rotation Parameter from a Randomizer to the rotation of the back angle object within the chair, it might be more convenient to have the chair expose the range of possible angles in the form of a simple float between 0 and 1. With this approach, the Randomizer would only need to sample a float Parameter and assign it to the chair. The chair would in turn have a script attached that knows how to map this single float to a certain plausible back angle. You could even map this float to a more complex state of the chair. Your Randomizer would still only need one float Parameter.
* **Action**: Right-click on `Directional Light` in the Scene _**Hierarchy**_ and select _**Duplicate**_. The new light will automatically be named `Directional Light (1)`.
<img src="Images/light_2.png" width="420"/>
This makes the two lights illuminate the scene from opposing angles, each having a 30-degree angle with the background and foreground planes. Note that the position of Directional Lights in Unity does not affect how they illuminate the scene, so you do not need to use the same position as the screenshot above.
* **Action**: Open `MyLightRandomizerTag.cs` and modify it to match the code below:

This component is already added to both our lights. We now need to set our desired minimum and maximum intensities, and this can be done through the _**Inspector**_ view.
* **Action**: Select `Directional Light` and from the **Inspector** UI for the `MyLightRandomizerTag` component, set `Min Intensity` to 0.5 and `Max Intensity` to 3.
* **Action**: Repeat the above step for `Directional Light (1)` and set `Min Intensity` to 0 and `Max Intensity` to 0.4.
Note that with this change, we fully transfer the responsibility for the light's intensity range to `MyLightRandomizerTag.cs` and assume the intensity value coming from `My Light Randomizer` is always between 0 and 1. Therefore, we now need to change the range for the corresponding Parameter in `My Light Randomizer` to (0,1).
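A sketch of what such a modified tag could look like, following the mapping described above. The field and method names here (`minIntensity`, `maxIntensity`, `SetIntensity`) are illustrative assumptions rather than the tutorial's exact code, and the namespace may differ in your Perception version:
```
using UnityEngine;
using UnityEngine.Experimental.Perception.Randomization.Randomizers;

[RequireComponent(typeof(Light))]
public class MyLightRandomizerTag : RandomizerTag
{
    // Per-light intensity range, set from the Inspector for each tagged light.
    public float minIntensity;
    public float maxIntensity;

    // Maps a raw 0-1 value sampled by the Randomizer onto this light's own range.
    public void SetIntensity(float rawIntensity)
    {
        var light = GetComponent<Light>();
        light.intensity = rawIntensity * (maxIntensity - minIntensity) + minIntensity;
    }
}
```
With this in place, the Randomizer's loop would call something like `taggedObject.GetComponent<MyLightRandomizerTag>().SetIntensity(lightIntensityParameter.Sample())` instead of setting `light.intensity` directly, and the intensity Parameter's range would be set to (0, 1).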

By this point in the tutorial, we have learned how to set up a Perception Scene, randomize our simulation, and verify our generated datasets using Dataset Insights. That said, the size of the dataset we created was only 100 captures, which is not sufficient for model-training purposes. It is now time to generate a large-scale synthetic dataset with tens of thousands of frames using Unity Simulation.
[Click here to continue to Phase 3: Cloud](Phase3.md)

78
com.unity.perception/Documentation~/Tutorial/Phase3.md


# Perception Tutorial
## Phase 3: Cloud
In this phase of the tutorial, we will learn how to run our Scene on _**Unity Simulation**_ and analyze the generated dataset using _**Dataset Insights**_. Unity Simulation will allow us to generate a much larger dataset than what is typically plausible on a workstation computer.
- [Step 1: Setup Unity Account, Unity Simulation, and Cloud Project](#step-1)
- [Step 2: Run Project on Unity Simulation](#step-2)
- [Step 3: Keep Track of Your Runs Using the Unity Simulation Command-Line Interface](#step-3)
### <a name="step-1">Step 1: Setup Unity Account, Unity Simulation, and Cloud Project</a>
In order to use Unity Simulation, you need to first create a Unity account or log in with your existing one. Once logged in, you will also need to sign up for Unity Simulation.
* **Action**: Click on the _**Cloud**_ button at the top-right corner of Unity Editor to open the _**Services**_ tab.

* **Action**: Click _**Sign in...**_ and follow the steps in the window that opens to sign in or create an account.
* **Action**: Sign up for a free trial of Unity Simulation [here](https://unity.com/products/unity-simulation).
Unity Simulation is a cloud-based service that makes it possible for you to run hundreds of instances of Unity builds in order to generate massive amounts of data. The Unity Simulation service is billed on a per-usage basis, and the free trial offers up to $100 of free credit per month. In order to access the free trial, you will need to provide credit card information. **This information will be used to charge your account if you exceed the $100 monthly credit.** A list of hourly and daily rates for various computational resources is available in the page where you first register for Unity Simulation.
Once you have registered for a free trial, you will be taken to your Unity Simulation dashboard, where you will be able to observe your usage and billing invoices.
It is now time to connect your local Unity project to a cloud project.

* **Action**: Click _**Create**_ to create a new cloud project and connect your local project to it.
### <a name="step-2">Step 2: Run Project on Unity Simulation</a>
The process of running a project on Unity Simulation involves building it for Linux and then uploading this build, along with a set of parameters, to Unity Simulation. The Perception package simplifies this process by including a dedicated _**Run in Unity Simulation**_ window that accepts a small number of required parameters and handles everything else automatically.
For performance reasons, it is best to disable real-time visualizations before carrying on with the Unity Simulation run.
In order to make sure our builds are compatible with Unity Simulation, we need to set our project's scripting backend to _**Mono**_ rather than _**IL2CPP**_ (if not already set). We will also need to switch to _**Windowed**_ mode.
* **Action**: From the top menu bar, open _**Edit -> Project Settings**_.
* **Action**: In the window that opens, navigate to the _**Player**_ tab, find the _**Scripting Backend**_ setting (under _**Other Settings**_), and change it to _**Mono**_:

</p>
* **Action**: Close _**Project Settings**_.
* **Action**: From the top menu bar, open _**Window -> Run in Unity Simulation**_.
<p align="center">
<img src="Images/runinusim.png" width="600"/>

* **Action**: Name your run `FirstRun`, set the number of iterations to `1000`, and instances to `20`.
* **Action**: Click _**Build and Run**_.
Your project will now be built and then uploaded to Unity Simulation. Depending on the upload speed of your internet connection, this might take anywhere from a few seconds to a couple of minutes.
* **Action**: Once the operation is complete, you can find the **Build ID**, **Run Definition ID**, and **Execution ID** of this Unity Simulation run in the _**Console**_ tab:
<p align="center">
<img src="Images/build_uploaded.png"/>

### <a name="step-3">Step 3: Keep Track of Your Runs Using the Unity Simulation Command-Line Interface</a>
To keep track of the progress of your Unity Simulation run, you will need to use Unity Simulation's command-line interface (CLI). Detailed instructions for this CLI are provided [here](https://github.com/Unity-Technologies/Unity-Simulation-Docs/blob/master/doc/quickstart.md#download-unity-simulation-quickstart-materials). For the purposes of this tutorial, we will only go through the most essential commands, which will help us know when our Unity Simulation run is complete and where to find the produced dataset.
* **Action**: Download the latest version of `unity_simulation_bundle.zip` from [here](https://github.com/Unity-Technologies/Unity-Simulation-Docs/releases).

Windows:
`cd C:\Users\UserName\Downloads\unity_simulation_bundle`
You will now be using the _**usim**_ executable to interact with Unity Simulation through commands.
* **Action**: To see a list of available commands, simply run `usim` once:

The first step is to log in.
* **Action**: Log in to Unity Simulation using the `usim login auth` command.
MacOS:
`USimCLI/mac/usim login auth`

</p>
**Note**: On MacOS, you might get errors related to permissions. In these cases, try running your commands with the `sudo` qualifier. For example:
`sudo USimCLI/mac/usim login auth`. This will ask for your MacOS account's password and should help overcome the permission issues.
**Note**: From this point on, we will only include MacOS-formatted commands in the tutorial, but all the `usim` commands we use will work on all supported operating systems.
* **Action**: Return to your command-line interface. Get a list of cloud projects associated with your Unity account using the `usim get projects` command:

SynthDet 9ec23417-73cd-becd-9dd6-556183946153 2020-08-12T19:46:20+00:00
```
In case you have more than one cloud project, you will need to "activate" the one corresponding to your Perception tutorial project. If there is only one project, it is already activated, and you will not need to execute the command below (note: replace `<project-id>` with the ID of your desired project).
* **Action**: Activate the relevant project:

xBv3arj Completed 2020-10-01 02:27:11
```
As seen above, each run has a name, an ID, a creation time, and a list of executions. Note that each "run" can have more than one "execution", as you can manually execute runs again using the CLI.
You can also obtain a list of all the builds you have uploaded to Unity Simulation using the `usim get builds` command.
Unity Simulation utilizes the ability to run simulation instances in parallel. If you enter a number larger than 1 for the number of instances in the _**Run in Unity Simulation**_ window, your run will be parallelized, and multiple simulation instances will simultaneously execute. You can view the status of all simulation instances using the `usim summarize run-execution <execution-id>` command. This command will tell you how many instances have succeeded, failed, have not run yet, or are in progress. Make sure to replace `<execution-id>` with the execution ID seen in your run list. In the above example, this ID would be `yegz4WN`.
* **Action**: Use the `usim summarize run-execution <execution-id>` command to observe the status of your execution nodes:

`USimCLI\windows\usim summarize run-execution <execution-id>`
Here is an example output of this command, indicating that there is only one instance, and that it is still in progress:
```
state count

### <a name="step-4">Step 4: Analyze the Dataset using Dataset Insights</a>
In order to download the actual data from your run, we will now use Dataset Insights again. This time, though, we will use some of the lines that we left commented out when we previously worked with locally generated data.
* **Action**: Open the Dataset Insights Jupyter notebook again, using the command below:

Once the Docker image is running, the rest of the workflow is quite similar to what we did in Phase 1, with certain differences caused by the need to download the data from Unity Simulation.
* **Action**: Open a web browser and navigate to `http://localhost:8888` to open the Jupyter notebook.
* **Action**: Navigate to the `datasetinsights/notebooks` folder and open `Perception_Statistics.ipynb`.

<img src="Images/di_usim_1.png"/>
</p>
The next few lines of code pertain to setting up your notebook for downloading data from Unity Simulation.
* **Action**: In the block of code titled "Unity Simulation [Optional]", uncomment the lines that assign values to variables, and insert the correct values, based on information from your Unity Simulation run.
We have previously learned how to obtain the `run_execution_id` and `project_id`. You can remove the value already present for `annotation_definition_id` and leave it blank. What's left is the `access_token`.
* **Action**: Return to your command-line interface and run the `usim inspect auth` command.

If you receive errors regarding authentication, your token might have timed out. Repeat the login step (`usim login auth`) to log in again and fix this issue.
A sample output from `usim inspect auth` will look like the example below:
```
Protect your credentials. They may be used to impersonate your requests.

updated: 2020-10-02 14:50:11.412979
```
The `access_token` you need for your Dataset Insights notebook is the access token shown by the above command, minus the `'Bearer '` part. So, in this case, we should input `0CfQbhJ6gjYIHjC6BaP5gkYn1x5xtAp7ZA9I003fTNT1sFp` in the notebook.
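If you prefer not to trim the prefix by hand, a quick one-liner in a Python prompt (just a convenience, not part of the notebook) does the same thing, using the sample token above:

```
raw = "Bearer 0CfQbhJ6gjYIHjC6BaP5gkYn1x5xtAp7ZA9I003fTNT1sFp"  # value printed by `usim inspect auth`
access_token = raw.split(" ", 1)[1]  # drop the leading 'Bearer ' part
print(access_token)
```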
* **Action**: Copy the access token excluding the `'Bearer '` part to the corresponding field in the Dataset Insights notebook.
Once you have entered all the information, the block of code should look like the screenshot below (the actual values you input will be different):
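In addition to the screenshot, here is a rough textual sketch of what the completed cell might look like. The variable names come from the notebook itself; the values shown are simply the sample IDs and token used earlier in this tutorial, so be sure to substitute your own:

```
run_execution_id = "yegz4WN"                                      # execution ID from your run list
project_id = "9ec23417-73cd-becd-9dd6-556183946153"               # from `usim get projects`
access_token = "0CfQbhJ6gjYIHjC6BaP5gkYn1x5xtAp7ZA9I003fTNT1sFp"  # from `usim inspect auth`, minus 'Bearer '
annotation_definition_id = ""                                     # left blank, as noted above
```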

* **Action**: Continue to the next code block and run it to download all the metadata files from the generated dataset. This includes JSON files and logs but does not include images (which will be downloaded later).
You will see a progress bar while the data downloads:

The next couple of code blocks (under "Load dataset metadata") analyze the downloaded metadata and display a table containing annotation-definition-ids for the various metrics defined in the dataset.
* **Action**: Once you reach the code block titled "Built-in Statistics", make sure the value assigned to the field `rendered_object_info_definition_id` matches the id displayed for this metric in the table output by the code block immediately before it. The screenshot below demonstrates this (note that your ids might differ from the ones here):
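This amounts to a single assignment; a minimal sketch (with a placeholder standing in for a real ID) would be:

```
# Paste the ID shown for rendered object info in the table output above
rendered_object_info_definition_id = "<id-from-the-table-above>"
```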

Follow the rest of the steps inside the notebook to generate a variety of plots and stats. Keep in mind that this notebook is provided just as an example, and you can modify and extend it according to your own needs using the tools provided by the [Dataset Insights framework](https://datasetinsights.readthedocs.io/en/latest/).
This concludes the Perception tutorial. The next step in this workflow would be to train an object-detection model using a dataset generated on Unity Simulation. It is important to note that the 1000-capture dataset we generated here is probably not sufficiently large for training most models. We chose this number so that the run would complete in a fairly short period of time, allowing us to move on to learning how to analyze the dataset's statistics. In order to generate data for training, we recommend a dataset of about 400,000 captures.
The grocery objects we used in the foreground are a subset of objects from the [SynthDet](https://github.com/Unity-Technologies/SynthDet) project, which is a custom project based on the Perception package. Instructions for training a [Faster-RCNN](https://arxiv.org/abs/1506.01497) object-detection model based on data generated with the SynthDet project are provided [here](https://github.com/Unity-Technologies/datasetinsights/blob/master/docs/source/Evaluation_Tutorial.md). Although the instructions are tailored to SynthDet, the principles will be the same for training a model.
In the near future, we will expand this tutorial to Phase 4, which will include instructions on how to train a Faster R-CNN object-detection model using a dataset that can be generated by following this tutorial.

10
com.unity.perception/Documentation~/Tutorial/TUTORIAL.md


# Perception Tutorial
The Perception package offers a variety of tools for generating synthetic datasets intended for use in perception-based machine learning tasks, such as object detection, semantic segmentation, and so on. These datasets are in the form of **frames** captured using simulated sensors. These frames are **annotated** with **ground-truth** and are thus ready to be used for training and validating machine learning models. While the type of ground-truth bundled with this data will depend on your intended machine learning task, the Perception package already comes with a number of common ground-truth labelers which will make it easier for you to generate synthetic data. This tutorial will guide you all the way from setting up Unity on your computer to generating a large-scale synthetic dataset for training an object-detection model.
While this process may sound complicated, **you do not need to have any prior experience with Unity or C#** in order to follow the first phase of this tutorial and generate a dataset using our provided samples and components. The tutorial will be divided into three high-level phases based on the complexity of the tasks involved. During these phases, you will be gradually introduced to the more advanced tools and workflows that the Perception package offers.
## [Phase 1: Setup and Basic Randomizations](Phase1.md)

## [Phase 2: Custom Randomizations](Phase2.md)
In order to get the best out of computer vision models, the training data needs to contain a large degree of variation. This is achieved through randomizing various aspects of your simulation between captured frames. While you will use basic randomizations in Phase 1, Phase 2 of the tutorial will help you learn how to randomize your simulations in more complex ways by guiding you through writing your first customized randomizer in C# code. Once you complete this phase, you will know how to:
* Create custom randomizers by extending our provided samples.
* Coordinate the operation of several randomizers by specifying their order of execution and the objects they affect.
* Have objects specify criteria (e.g. ranges, means, etc.) and logic (e.g. unique behaviors) for their randomizable attributes.

You will generally require a large amount of data to train your computer vision model. Generating data at this scale takes an impractical amount of time on typical workstation computers. This is where the cloud comes in. In this phase, you will learn how to:
* Generate large-scale synthetic datasets containing hundreds of thousands of frames by leveraging the power of **Unity Simulation**.
* Keep track of your Unity Simulation runs using the Unity Simulation command-line interface.
* Use Dataset Insights to download and analyze your cloud-generated data.

4
com.unity.perception/Documentation~/index.md


|Feature|Description|
|---|---|
|[Labeling](GroundTruthLabeling.md)|A component that marks a GameObject and its descendants with a set of labels|
|[LabelConfig](GroundTruthLabeling.md#label-config)|An asset that defines a taxonomy of labels for ground truth generation|
|[Perception Camera](PerceptionCamera.md)|Captures RGB images and ground truth from a [Camera](https://docs.unity3d.com/Manual/class-Camera.html).|
|[DatasetCapture](DatasetCapture.md)|Ensures sensors are triggered at proper rates and accepts data for the JSON dataset.|
|[Randomization (Experimental)](Randomization/Index.md)|The Randomization tool set lets you integrate domain randomization principles into your simulation.|

305
com.unity.perception/Editor/GroundTruth/IdLabelConfigEditor.cs


using System;
using Unity.Mathematics;
using UnityEditorInternal;
using UnityEditor.UIElements;
using UnityEngine.UIElements;
class IdLabelConfigEditor : Editor
class IdLabelConfigEditor : LabelConfigEditor<IdLabelEntry>
ReorderableList m_LabelsList;
const float k_Margin = 5f;
protected override void InitUiExtended()
{
m_StartingIdEnumField.RegisterValueChangedCallback(evt =>
{
var id = (int) ((StartingLabelId) evt.newValue);
serializedObject.FindProperty(nameof(IdLabelConfig.startingLabelId)).enumValueIndex = id;
serializedObject.ApplyModifiedProperties();
AutoAssignIds();
});
m_AutoIdToggle.RegisterValueChangedCallback(evt =>
{
serializedObject.FindProperty(nameof(IdLabelConfig.autoAssignIds)).boolValue = evt.newValue;
m_StartingIdEnumField.SetEnabled(evt.newValue);
serializedObject.ApplyModifiedProperties();
if (!evt.newValue)
{
ChangesHappeningInForeground = true;
RefreshListDataAndPresentation();
//if evt.newValue is true, the auto assign function will perform the above refresh, so no need to do this twice
//refresh is needed because the id textfields of the labels need to be enabled or disabled accordingly
}
AutoAssignIdsIfNeeded();
});
m_StartingIdEnumField.SetEnabled(AutoAssign);
public void OnEnable()
{
m_LabelsList = new ReorderableList(this.serializedObject, this.serializedObject.FindProperty(IdLabelConfig.labelEntriesFieldName), true, false, true, true);
m_LabelsList.elementHeight = EditorGUIUtility.singleLineHeight * 2 + k_Margin;
m_LabelsList.drawElementCallback = DrawElement;
m_LabelsList.onAddCallback += OnAdd;
m_LabelsList.onRemoveCallback += OnRemove;
m_LabelsList.onReorderCallbackWithDetails += OnReorder;
AutoAssignIdsIfNeeded();
m_MoveDownButton.clicked += MoveSelectedItemDown;
m_MoveUpButton.clicked += MoveSelectedItemUp;
void OnReorder(ReorderableList list, int oldIndex, int newIndex)
public override void PostRemoveOperations()
if (!autoAssign)
return;
AutoAssignIds();
AutoAssignIdsIfNeeded();
void OnRemove(ReorderableList list)
void MoveSelectedItemUp()
if (list.index != -1)
list.serializedProperty.DeleteArrayElementAtIndex(list.index);
var selectedIndex = m_LabelListView.selectedIndex;
if (selectedIndex > 0)
{
var currentProperty =
m_SerializedLabelsArray.GetArrayElementAtIndex(selectedIndex)
.FindPropertyRelative(nameof(ILabelEntry.label));
var topProperty = m_SerializedLabelsArray.GetArrayElementAtIndex(selectedIndex - 1)
.FindPropertyRelative(nameof(ILabelEntry.label));
if (autoAssign)
AutoAssignIds();
var tmpString = topProperty.stringValue;
topProperty.stringValue = currentProperty.stringValue;
currentProperty.stringValue = tmpString;
if (!AutoAssign)
{
var currentIdProperty =
m_SerializedLabelsArray.GetArrayElementAtIndex(selectedIndex)
.FindPropertyRelative(nameof(IdLabelEntry.id));
var topIdProperty = m_SerializedLabelsArray.GetArrayElementAtIndex(selectedIndex - 1)
.FindPropertyRelative(nameof(IdLabelEntry.id));
this.serializedObject.ApplyModifiedProperties();
EditorUtility.SetDirty(target);
}
var tmpInt = topIdProperty.intValue;
topIdProperty.intValue = currentIdProperty.intValue;
currentIdProperty.intValue = tmpInt;
}
void OnAdd(ReorderableList list)
{
int maxLabel = Int32.MinValue;
if (list.serializedProperty.arraySize == 0)
maxLabel = -1;
m_LabelListView.selectedIndex = selectedIndex - 1;
for (int i = 0; i < list.serializedProperty.arraySize; i++)
{
var item = list.serializedProperty.GetArrayElementAtIndex(i);
maxLabel = math.max(maxLabel, item.FindPropertyRelative(nameof(IdLabelEntry.id)).intValue);
serializedObject.ApplyModifiedProperties();
RefreshAddedLabels();
m_LabelListView.Refresh();
RefreshListViewHeight();
var index = list.serializedProperty.arraySize;
list.serializedProperty.InsertArrayElementAtIndex(index);
var element = list.serializedProperty.GetArrayElementAtIndex(index);
var idProperty = element.FindPropertyRelative(nameof(IdLabelEntry.id));
idProperty.intValue = maxLabel + 1;
var labelProperty = element.FindPropertyRelative(nameof(IdLabelEntry.label));
labelProperty.stringValue = "";
if (autoAssign)
AutoAssignIds();
serializedObject.ApplyModifiedProperties();
EditorUtility.SetDirty(target);
void DrawElement(Rect rect, int index, bool isactive, bool isfocused)
void MoveSelectedItemDown()
var element = m_LabelsList.serializedProperty.GetArrayElementAtIndex(index);
var idProperty = element.FindPropertyRelative(nameof(IdLabelEntry.id));
var labelProperty = element.FindPropertyRelative(nameof(IdLabelEntry.label));
using (var change = new EditorGUI.ChangeCheckScope())
var selectedIndex = m_LabelListView.selectedIndex;
if (selectedIndex > -1 && selectedIndex < m_SerializedLabelsArray.arraySize - 1)
var contentRect = new Rect(rect.position, new Vector2(rect.width, EditorGUIUtility.singleLineHeight));
using (new EditorGUI.DisabledScope(autoAssign))
var currentProperty =
m_SerializedLabelsArray.GetArrayElementAtIndex(selectedIndex)
.FindPropertyRelative(nameof(ILabelEntry.label));
var bottomProperty = m_SerializedLabelsArray.GetArrayElementAtIndex(selectedIndex + 1)
.FindPropertyRelative(nameof(ILabelEntry.label));
var tmpString = bottomProperty.stringValue;
bottomProperty.stringValue = currentProperty.stringValue;
currentProperty.stringValue = tmpString;
if (!AutoAssign)
var newLabel = EditorGUI.IntField(contentRect, nameof(IdLabelEntry.id), idProperty.intValue);
if (change.changed)
{
idProperty.intValue = newLabel;
if (autoAssign)
AutoAssignIds();
}
var currentIdProperty =
m_SerializedLabelsArray.GetArrayElementAtIndex(selectedIndex)
.FindPropertyRelative(nameof(IdLabelEntry.id));
var bottomIdProperty = m_SerializedLabelsArray.GetArrayElementAtIndex(selectedIndex + 1)
.FindPropertyRelative(nameof(IdLabelEntry.id));
var tmpInt = bottomIdProperty.intValue;
bottomIdProperty.intValue = currentIdProperty.intValue;
currentIdProperty.intValue = tmpInt;
m_LabelListView.selectedIndex = selectedIndex + 1;
serializedObject.ApplyModifiedProperties();
RefreshAddedLabels();
m_LabelListView.Refresh();
RefreshListViewHeight();
using (var change = new EditorGUI.ChangeCheckScope())
}
protected override void SetupPresentLabelsListView()
{
base.SetupPresentLabelsListView();
VisualElement MakeItem() =>
new IdLabelElementInLabelConfig(this, m_SerializedLabelsArray);
void BindItem(VisualElement e, int i)
var contentRect = new Rect(rect.position + new Vector2(0, EditorGUIUtility.singleLineHeight), new Vector2(rect.width, EditorGUIUtility.singleLineHeight));
var newLabel = EditorGUI.TextField(contentRect, nameof(IdLabelEntry.label), labelProperty.stringValue);
if (change.changed)
if (e is IdLabelElementInLabelConfig addedLabel)
labelProperty.stringValue = newLabel;
addedLabel.indexInList = i;
addedLabel.labelTextField.BindProperty(m_SerializedLabelsArray.GetArrayElementAtIndex(i)
.FindPropertyRelative(nameof(IdLabelEntry.label)));
addedLabel.labelIdTextField.value = m_SerializedLabelsArray.GetArrayElementAtIndex(i)
.FindPropertyRelative(nameof(IdLabelEntry.id)).intValue.ToString();
m_LabelListView.bindItem = BindItem;
m_LabelListView.makeItem = MakeItem;
bool autoAssign => serializedObject.FindProperty(nameof(IdLabelConfig.autoAssignIds)).boolValue;
protected override IdLabelEntry CreateLabelEntryFromLabelString(SerializedProperty serializedArray,
string labelToAdd)
{
int maxLabel = Int32.MinValue;
if (serializedArray.arraySize == 0)
maxLabel = -1;
public override void OnInspectorGUI()
{
serializedObject.Update();
var autoAssignIdsProperty = serializedObject.FindProperty(nameof(IdLabelConfig.autoAssignIds));
using (var change = new EditorGUI.ChangeCheckScope())
for (int i = 0; i < serializedArray.arraySize; i++)
EditorGUILayout.PropertyField(autoAssignIdsProperty, new GUIContent("Auto Assign IDs"));
if (change.changed && autoAssignIdsProperty.boolValue)
AutoAssignIds();
var item = serializedArray.GetArrayElementAtIndex(i);
maxLabel = math.max(maxLabel, item.FindPropertyRelative(nameof(IdLabelEntry.id)).intValue);
if (autoAssignIdsProperty.boolValue)
return new IdLabelEntry
using (var change = new EditorGUI.ChangeCheckScope())
{
var startingLabelIdProperty = serializedObject.FindProperty(nameof(IdLabelConfig.startingLabelId));
EditorGUILayout.PropertyField(startingLabelIdProperty, new GUIContent("Starting Label ID"));
if (change.changed)
AutoAssignIds();
}
}
id = maxLabel + 1,
label = labelToAdd
};
}
m_LabelsList.DoLayoutList();
this.serializedObject.ApplyModifiedProperties();
protected override void AppendLabelEntryToSerializedArray(SerializedProperty serializedArray,
IdLabelEntry labelEntry)
{
var index = serializedArray.arraySize;
serializedArray.InsertArrayElementAtIndex(index);
var element = serializedArray.GetArrayElementAtIndex(index);
var idProperty = element.FindPropertyRelative(nameof(IdLabelEntry.id));
idProperty.intValue = labelEntry.id;
var labelProperty = element.FindPropertyRelative(nameof(ILabelEntry.label));
labelProperty.stringValue = labelEntry.label;
public bool AutoAssign => serializedObject.FindProperty(nameof(IdLabelConfig.autoAssignIds)).boolValue;
void AutoAssignIds()
{

return;
var startingLabelId = (StartingLabelId)serializedObject.FindProperty(nameof(IdLabelConfig.startingLabelId)).enumValueIndex;
var startingLabelId =
(StartingLabelId) serializedObject.FindProperty(nameof(IdLabelConfig.startingLabelId)).enumValueIndex;
serializedProperty.GetArrayElementAtIndex(i).FindPropertyRelative(nameof(IdLabelEntry.id)).intValue = nextId;
serializedProperty.GetArrayElementAtIndex(i).FindPropertyRelative(nameof(IdLabelEntry.id)).intValue =
nextId;
serializedObject.ApplyModifiedProperties();
ChangesHappeningInForeground = true;
RefreshListDataAndPresentation();
EditorUtility.SetDirty(target);
}
void AutoAssignIdsIfNeeded()
{
if (AutoAssign)
{
AutoAssignIds();
}
}
public int IndexOfGivenIdInSerializedLabelsArray(int id)
{
for (int i = 0; i < m_SerializedLabelsArray.arraySize; i++)
{
var element = m_SerializedLabelsArray.GetArrayElementAtIndex(i).FindPropertyRelative(nameof(IdLabelEntry.id));
if (element.intValue == id)
{
return i;
}
}
return -1;
}
}
class IdLabelElementInLabelConfig : LabelElementInLabelConfig<IdLabelEntry>
{
protected override string UxmlPath => k_UxmlDir + "IdLabelElementInLabelConfig.uxml";
public TextField labelIdTextField;
public IdLabelElementInLabelConfig(LabelConfigEditor<IdLabelEntry> editor, SerializedProperty labelsArray) :
base(editor, labelsArray)
{
}
protected override void InitExtended()
{
labelIdTextField = this.Q<TextField>("label-id-value");
labelIdTextField.isDelayed = true;
labelIdTextField.SetEnabled(!((IdLabelConfigEditor) m_LabelConfigEditor).AutoAssign);
labelIdTextField.RegisterValueChangedCallback(evt =>
{
if(int.TryParse(evt.newValue, out int parsedId))
{
m_LabelsArray.GetArrayElementAtIndex(indexInList).FindPropertyRelative(nameof(IdLabelEntry.id))
.intValue = parsedId;
if (m_LabelsArray.serializedObject.hasModifiedProperties)
{
m_LabelsArray.serializedObject.ApplyModifiedProperties();
m_LabelConfigEditor.ChangesHappeningInForeground = true;
m_LabelConfigEditor.RefreshListDataAndPresentation();
}
var index = ((IdLabelConfigEditor)m_LabelConfigEditor).IndexOfGivenIdInSerializedLabelsArray(parsedId);
if (index != -1 && index != indexInList)
{
//The listview recycles child visual elements and that causes the RegisterValueChangedCallback event to be called when scrolling.
//Therefore, we need to make sure we are not in this code block just because of scrolling, but because the user is actively changing one of the labels.
//The index check is for this purpose.
Debug.LogWarning("A label with the ID " + evt.newValue + " has already been added to this label configuration.");
}
}
else
{
Debug.LogError("Provided id is not a valid integer. Please provide integer values.");
labelIdTextField.value = evt.previousValue;
}
});
}
}
}

2
com.unity.perception/Editor/GroundTruth/IdLabelConfigEditor.cs.meta


fileFormatVersion: 2
guid: 910dd3186e1c4fad8eb6aca9b9ee0f48
guid: 43cb2a3117353435abe59ca5217974a8
timeCreated: 1585940009

929
com.unity.perception/Editor/GroundTruth/LabelingEditor.cs


using Unity.Entities;
using UnityEditorInternal;
using System;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using UnityEditor.UIElements;
using UnityEngine.UIElements;
using Button = UnityEngine.UIElements.Button;
using Toggle = UnityEngine.UIElements.Toggle;
namespace UnityEditor.Perception.GroundTruth
{

ReorderableList m_LabelsList;
VisualElement m_Root;
VisualElement m_ManualLabelingContainer;
VisualElement m_AutoLabelingContainer;
VisualElement m_FromLabelConfigsContainer;
VisualElement m_SuggestedLabelsContainer;
VisualElement m_SuggestedOnNamePanel;
VisualElement m_SuggestedOnPathPanel;
ListView m_CurrentLabelsListView;
ListView m_SuggestedLabelsListViewFromName;
ListView m_SuggestedLabelsListViewFromPath;
ScrollView m_LabelConfigsScrollView;
PopupField<string> m_LabelingSchemesPopup;
Button m_AddButton;
Button m_AddAutoLabelToConfButton;
Toggle m_AutoLabelingToggle;
Label m_CurrentAutoLabel;
Label m_CurrentAutoLabelTitle;
Label m_AddManualLabelsTitle;
Labeling m_Labeling;
string m_UxmlDir = "Packages/com.unity.perception/Editor/GroundTruth/Uxml/";
string m_UxmlPath;
List<string> m_SuggestedLabelsBasedOnName = new List<string>();
List<string> m_SuggestedLabelsBasedOnPath = new List<string>();
public List<string> CommonLabels { get; private set; } = new List<string>();
List<Type> m_LabelConfigTypes;
readonly List<ScriptableObject> m_AllLabelConfigsInProject = new List<ScriptableObject>();
readonly List<AssetLabelingScheme> m_LabelingSchemes = new List<AssetLabelingScheme>();
/// <summary>
/// List of separator characters used for parsing asset names for auto labeling or label suggestion purposes
/// </summary>
public static readonly string[] NameSeparators = {".", "-", "_"};
/// <summary>
/// List of separator characters used for parsing asset paths for auto labeling or label suggestion purposes
/// </summary>
public static readonly string[] PathSeparators = {"/"};
void OnEnable()
{
m_LabelConfigTypes = AddToConfigWindow.FindAllSubTypes(typeof(LabelConfig<>));
var mySerializedObject = new SerializedObject(serializedObject.targetObjects[0]);
m_Labeling = mySerializedObject.targetObject as Labeling;
m_UxmlPath = m_UxmlDir + "Labeling_Main.uxml";
m_Root = AssetDatabase.LoadAssetAtPath<VisualTreeAsset>(m_UxmlPath).CloneTree();
m_CurrentLabelsListView = m_Root.Q<ListView>("current-labels-listview");
m_SuggestedLabelsListViewFromName = m_Root.Q<ListView>("suggested-labels-name-listview");
m_SuggestedLabelsListViewFromPath = m_Root.Q<ListView>("suggested-labels-path-listview");
m_LabelConfigsScrollView = m_Root.Q<ScrollView>("label-configs-scrollview");
m_SuggestedOnNamePanel = m_Root.Q<VisualElement>("suggested-labels-from-name");
m_SuggestedOnPathPanel = m_Root.Q<VisualElement>("suggested-labels-from-path");
m_AddButton = m_Root.Q<Button>("add-label");
m_CurrentAutoLabel = m_Root.Q<Label>("current-auto-label");
m_CurrentAutoLabelTitle = m_Root.Q<Label>("current-auto-label-title");
m_AutoLabelingToggle = m_Root.Q<Toggle>("auto-or-manual-toggle");
m_ManualLabelingContainer = m_Root.Q<VisualElement>("manual-labeling");
m_AutoLabelingContainer = m_Root.Q<VisualElement>("automatic-labeling");
m_FromLabelConfigsContainer = m_Root.Q<VisualElement>("from-label-configs");
m_SuggestedLabelsContainer = m_Root.Q<VisualElement>("suggested-labels");
m_AddAutoLabelToConfButton = m_Root.Q<Button>("add-auto-label-to-config");
m_AddManualLabelsTitle = m_Root.Q<Label>("add-manual-labels-title");
var dropdownParent = m_Root.Q<VisualElement>("drop-down-parent");
m_ItIsPossibleToAddMultipleAutoLabelsToConfig = false;
InitializeLabelingSchemes(dropdownParent);
AssesAutoLabelingStatus();
m_FirstItemLabelsArray = serializedObject.FindProperty(nameof(Labeling.labels));
if (serializedObject.targetObjects.Length > 1)
{
var addedTitle = m_Root.Q<Label>("added-labels-title");
addedTitle.text = "Common Labels of Selected Items";
m_SuggestedOnNamePanel.style.display = DisplayStyle.None;
m_AddAutoLabelToConfButton.text = "Add Automatic Labels of All Selected Assets to Config...";
}
else
{
m_AddAutoLabelToConfButton.text = "Add to Label Config...";
}
m_AddAutoLabelToConfButton.clicked += () =>
{
AddToConfigWindow.ShowWindow(CreateUnionOfAllLabels().ToList());
};
m_AddButton.clicked += () =>
{
var labelsUnion = CreateUnionOfAllLabels();
var newLabel = FindNewLabelValue(labelsUnion);
foreach (var targetObject in targets)
{
if (targetObject is Labeling labeling)
{
var serializedLabelingObject2 = new SerializedObject(labeling);
var serializedLabelArray2 = serializedLabelingObject2.FindProperty(nameof(Labeling.labels));
serializedLabelArray2.InsertArrayElementAtIndex(serializedLabelArray2.arraySize);
serializedLabelArray2.GetArrayElementAtIndex(serializedLabelArray2.arraySize-1).stringValue = newLabel;
serializedLabelingObject2.ApplyModifiedProperties();
serializedLabelingObject2.SetIsDifferentCacheDirty();
serializedObject.SetIsDifferentCacheDirty();
}
}
ChangesHappeningInForeground = true;
RefreshManualLabelingData();
};
m_AutoLabelingToggle.RegisterValueChangedCallback(evt =>
{
AutoLabelToggleChanged();
});
ChangesHappeningInForeground = true;
m_Root.schedule.Execute(CheckForModelChanges).Every(30);
}
int m_PreviousLabelsArraySize = -1;
/// <summary>
/// This boolean is used to signify when changes in the model are triggered directly from the inspector UI by the user.
/// In these cases, the scheduled model checker does not need to update the UI again.
/// </summary>
public bool ChangesHappeningInForeground { get; set; }
SerializedProperty m_FirstItemLabelsArray;
void CheckForModelChanges()
{
if (ChangesHappeningInForeground)
{
ChangesHappeningInForeground = false;
m_PreviousLabelsArraySize = m_FirstItemLabelsArray.arraySize;
return;
}
if (m_FirstItemLabelsArray.arraySize != m_PreviousLabelsArraySize)
{
AssesAutoLabelingStatus();
RefreshManualLabelingData();
m_PreviousLabelsArraySize = m_FirstItemLabelsArray.arraySize;
}
}
bool SerializedObjectHasValidLabelingScheme(SerializedObject serObj)
{
var schemeName = serObj.FindProperty(nameof(Labeling.autoLabelingSchemeType)).stringValue;
return IsValidLabelingSchemeName(schemeName);
}
bool IsValidLabelingSchemeName(string schemeName)
{
return schemeName != string.Empty &&
m_LabelingSchemes.FindAll(scheme => scheme.GetType().Name == schemeName).Count > 0;
}
bool m_ItIsPossibleToAddMultipleAutoLabelsToConfig;
void UpdateUiAspects()
{
m_ManualLabelingContainer.SetEnabled(!m_AutoLabelingToggle.value);
m_AutoLabelingContainer.SetEnabled(m_AutoLabelingToggle.value);
m_AddManualLabelsTitle.style.display = m_AutoLabelingToggle.value ? DisplayStyle.None : DisplayStyle.Flex;
m_FromLabelConfigsContainer.style.display = m_AutoLabelingToggle.value ? DisplayStyle.None : DisplayStyle.Flex;
m_SuggestedLabelsContainer.style.display = m_AutoLabelingToggle.value ? DisplayStyle.None : DisplayStyle.Flex;
m_CurrentLabelsListView.style.minHeight = m_AutoLabelingToggle.value ? 70 : 120;
if (!m_AutoLabelingToggle.value || serializedObject.targetObjects.Length > 1 ||
!SerializedObjectHasValidLabelingScheme(new SerializedObject(serializedObject.targetObjects[0])))
{
m_CurrentAutoLabel.style.display = DisplayStyle.None;
m_AddAutoLabelToConfButton.SetEnabled(false);
}
else
{
m_CurrentAutoLabel.style.display = DisplayStyle.Flex;
m_AddAutoLabelToConfButton.SetEnabled(true);
}
if(m_AutoLabelingToggle.value && serializedObject.targetObjects.Length > 1 && m_ItIsPossibleToAddMultipleAutoLabelsToConfig)
{
m_AddAutoLabelToConfButton.SetEnabled(true);
}
if (serializedObject.targetObjects.Length == 1)
{
m_AutoLabelingToggle.text = "Use Automatic Labeling";
}
else
{
m_CurrentAutoLabelTitle.text = "Select assets individually to inspect their automatic labels.";
m_AutoLabelingToggle.text = "Use Automatic Labeling for All Selected Items";
}
}
void UpdateCurrentAutoLabelValue(SerializedObject serObj)
{
var array = serObj.FindProperty(nameof(Labeling.labels));
if (array.arraySize > 0)
{
m_CurrentAutoLabel.text = array.GetArrayElementAtIndex(0).stringValue;
}
}
bool AreSelectedAssetsCompatibleWithAutoLabelScheme(AssetLabelingScheme scheme)
{
foreach (var asset in serializedObject.targetObjects)
{
string label = scheme.GenerateLabel(asset);
if (label == null)
{
return false;
}
}
return true;
}
public void OnEnable()
void InitializeLabelingSchemes(VisualElement parent)
m_LabelsList = new ReorderableList(serializedObject, serializedObject.FindProperty(nameof(Labeling.labels)), true, false, true, true);
m_LabelsList.drawElementCallback = DrawElement;
m_LabelsList.onAddCallback += OnAdd;
m_LabelsList.onRemoveCallback += OnRemove;
m_LabelsList.onReorderCallbackWithDetails += OnReordered;
//this function should be called only once during the lifecycle of the editor element
AssetLabelingScheme labelingScheme = new AssetNameLabelingScheme();
if (AreSelectedAssetsCompatibleWithAutoLabelScheme(labelingScheme)) m_LabelingSchemes.Add(labelingScheme);
labelingScheme = new AssetFileNameLabelingScheme();
if (AreSelectedAssetsCompatibleWithAutoLabelScheme(labelingScheme)) m_LabelingSchemes.Add(labelingScheme);
labelingScheme = new CurrentOrParentsFolderNameLabelingScheme();
if (AreSelectedAssetsCompatibleWithAutoLabelScheme(labelingScheme)) m_LabelingSchemes.Add(labelingScheme);
var descriptions = m_LabelingSchemes.Select(scheme => scheme.Description).ToList();
descriptions.Insert(0, "<Select Scheme>");
m_LabelingSchemesPopup = new PopupField<string>(descriptions, 0) {label = "Labeling Scheme"};
m_LabelingSchemesPopup.style.marginLeft = 0;
parent.Add(m_LabelingSchemesPopup);
m_LabelingSchemesPopup.RegisterValueChangedCallback(evt => AssignAutomaticLabelToSelectedAssets());
void OnRemove(ReorderableList list)
void AutoLabelToggleChanged()
if (list.index != -1)
UpdateUiAspects();
if (!m_AutoLabelingToggle.value)
{
m_ItIsPossibleToAddMultipleAutoLabelsToConfig = false;
foreach (var targetObj in serializedObject.targetObjects)
{
var serObj = new SerializedObject(targetObj);
serObj.FindProperty(nameof(Labeling.useAutoLabeling)).boolValue = false;
if (SerializedObjectHasValidLabelingScheme(serObj))
{
//asset already had a labeling scheme before auto labeling was disabled, which means it has auto label(s) attached. these should be cleared now.
serObj.FindProperty(nameof(Labeling.labels)).ClearArray();
}
serObj.FindProperty(nameof(Labeling.autoLabelingSchemeType)).stringValue = string.Empty;
m_LabelingSchemesPopup.index = 0;
serObj.ApplyModifiedProperties();
serObj.SetIsDifferentCacheDirty();
}
}
ChangesHappeningInForeground = true;
RefreshManualLabelingData();
}
void AssignAutomaticLabelToSelectedAssets()
{
//the 0th index of this popup is "<Select Scheme>" and should not do anything
if (m_LabelingSchemesPopup.index == 0)
{
return;
}
m_ItIsPossibleToAddMultipleAutoLabelsToConfig = true;
var labelingScheme = m_LabelingSchemes[m_LabelingSchemesPopup.index - 1];
foreach (var targetObj in serializedObject.targetObjects)
{
var serObj = new SerializedObject(targetObj);
serObj.FindProperty(nameof(Labeling.useAutoLabeling)).boolValue = true; //only set this flag once the user has actually chosen a scheme, otherwise, we will not touch the flag
serObj.FindProperty(nameof(Labeling.autoLabelingSchemeType)).stringValue = labelingScheme.GetType().Name;
var serLabelsArray = serObj.FindProperty(nameof(Labeling.labels));
serLabelsArray.ClearArray();
serLabelsArray.InsertArrayElementAtIndex(0);
var label = labelingScheme.GenerateLabel(targetObj);
serLabelsArray.GetArrayElementAtIndex(0).stringValue = label;
if (targetObj == serializedObject.targetObjects[0] && serializedObject.targetObjects.Length == 1)
{
UpdateCurrentAutoLabelValue(serObj);
}
serObj.ApplyModifiedProperties();
serObj.SetIsDifferentCacheDirty();
}
UpdateUiAspects();
ChangesHappeningInForeground = true;
RefreshManualLabelingData();
}
void AssesAutoLabelingStatus()
{
var enabledOrNot = true;
if (serializedObject.targetObjects.Length == 1)
{
var serObj = new SerializedObject(serializedObject.targetObjects[0]);
var enabled = serObj.FindProperty(nameof(Labeling.useAutoLabeling)).boolValue;
m_AutoLabelingToggle.value = enabled;
var currentLabelingSchemeName = serObj.FindProperty(nameof(Labeling.autoLabelingSchemeType)).stringValue;
if (IsValidLabelingSchemeName(currentLabelingSchemeName))
{
m_LabelingSchemesPopup.index =
m_LabelingSchemes.FindIndex(scheme => scheme.GetType().Name.ToString() == currentLabelingSchemeName) + 1;
}
UpdateCurrentAutoLabelValue(serObj);
}
else
{
string unifiedLabelingScheme = null;
var allAssetsUseSameLabelingScheme = true;
foreach (var targetObj in serializedObject.targetObjects)
{
var serObj = new SerializedObject(targetObj);
var enabled = serObj.FindProperty(nameof(Labeling.useAutoLabeling)).boolValue;
enabledOrNot &= enabled;
var schemeName = serObj.FindProperty(nameof(Labeling.autoLabelingSchemeType)).stringValue;
if (schemeName == string.Empty)
{
//if any of the selected assets does not have a labeling scheme, they can't all have the same valid scheme
allAssetsUseSameLabelingScheme = false;
}
if (allAssetsUseSameLabelingScheme)
{
if (unifiedLabelingScheme == null)
{
unifiedLabelingScheme = schemeName;
}
else if (unifiedLabelingScheme != schemeName)
{
allAssetsUseSameLabelingScheme = false;
}
}
}
m_AutoLabelingToggle.value = enabledOrNot;
if (allAssetsUseSameLabelingScheme)
{
//all selected assets have the same scheme recorded in their serialized objects
m_LabelingSchemesPopup.index =
m_LabelingSchemes.FindIndex(scheme => scheme.GetType().Name.ToString() == unifiedLabelingScheme) + 1;
m_ItIsPossibleToAddMultipleAutoLabelsToConfig = enabledOrNot;
//if all selected assets have the same scheme recorded in their serialized objects, and they all
//have auto labeling enabled, we can now add all auto labels to a config
}
else
{
//the selected DO NOT have the same scheme recorded in their serialized objects
m_LabelingSchemesPopup.index = 0;
}
}
UpdateUiAspects();
}
HashSet<string> CreateUnionOfAllLabels()
{
HashSet<String> result = new HashSet<string>();
foreach (var obj in targets)
{
if (obj is Labeling labeling)
{
result.UnionWith(labeling.labels);
}
}
return result;
}
string FindNewLabelValue(HashSet<string> labels)
{
string baseLabel = "New Label";
string label = baseLabel;
int count = 1;
while (labels.Contains(label))
{
label = baseLabel + "_" + count++;
}
return label;
}
public override VisualElement CreateInspectorGUI()
{
serializedObject.Update();
m_Labeling = serializedObject.targetObject as Labeling;
RefreshCommonLabels();
RefreshSuggestedLabelLists();
RefreshLabelConfigsList();
SetupListsAndScrollers();
return m_Root;
}
void RefreshLabelConfigsList()
{
List<string> labelConfigGuids = new List<string>();
foreach (var type in m_LabelConfigTypes)
{
labelConfigGuids.AddRange(AssetDatabase.FindAssets("t:"+type.Name));
}
m_AllLabelConfigsInProject.Clear();
foreach (var configGuid in labelConfigGuids)
{
var asset = AssetDatabase.LoadAssetAtPath<ScriptableObject>(AssetDatabase.GUIDToAssetPath(configGuid));
m_AllLabelConfigsInProject.Add(asset);
}
}
void RemoveAddedLabelsFromSuggestedLists()
{
m_SuggestedLabelsBasedOnName.RemoveAll(s => CommonLabels.Contains(s));
m_SuggestedLabelsBasedOnPath.RemoveAll(s => CommonLabels.Contains(s));
}
void RefreshSuggestedLabelLists()
{
m_SuggestedLabelsBasedOnName.Clear();
m_SuggestedLabelsBasedOnPath.Clear();
//based on name
if (serializedObject.targetObjects.Length == 1)
{
string assetName = serializedObject.targetObject.name;
var pieces = assetName.Split(NameSeparators, StringSplitOptions.RemoveEmptyEntries).ToList();
if (pieces.Count > 1)
{
//means the asset name was actually split
m_SuggestedLabelsBasedOnName.Add(assetName);
}
m_SuggestedLabelsBasedOnName.AddRange(pieces);
}
//based on path
string assetPath = GetAssetOrPrefabPath(m_Labeling.gameObject);
//var prefabObject = PrefabUtility.GetCorrespondingObjectFromSource(m_Labeling.gameObject);
if (assetPath != null)
{
var stringList = assetPath.Split(PathSeparators, StringSplitOptions.RemoveEmptyEntries).ToList();
stringList.Reverse();
m_SuggestedLabelsBasedOnPath.AddRange(stringList);
}
foreach (var targetObject in targets)
var value = labeling.labels[list.index];
foreach (var t in targets)
if (targetObject == target)
continue; //we have already taken care of this one above
assetPath = GetAssetOrPrefabPath(((Labeling)targetObject).gameObject);
//prefabObject = PrefabUtility.GetCorrespondingObjectFromSource(((Labeling)targetObject).gameObject);
if (assetPath != null)
((Labeling)t).labels.Remove(value);
var stringList = assetPath.Split(PathSeparators, StringSplitOptions.RemoveEmptyEntries).ToList();
m_SuggestedLabelsBasedOnPath = m_SuggestedLabelsBasedOnPath.Intersect(stringList).ToList();
RemoveAddedLabelsFromSuggestedLists();
//Debug.Log("list update, source list count is:" + m_SuggestedLabelsBasedOnPath.Count);
Labeling labeling => (Labeling)target;
public void RefreshManualLabelingData()
{
serializedObject.SetIsDifferentCacheDirty();
serializedObject.Update();
RefreshCommonLabels();
RefreshSuggestedLabelLists();
SetupSuggestedLabelsListViews();
SetupCurrentLabelsListView();
UpdateSuggestedPanelVisibility();
}
void OnAdd(ReorderableList list)
void SetupListsAndScrollers()
foreach (var t in targets)
//Labels that have already been added to the target Labeling component
SetupCurrentLabelsListView();
//Labels suggested by the system, which the user can add
SetupSuggestedLabelsListViews();
//Add labels from Label Configs present in project
SetupLabelConfigsScrollView();
UpdateSuggestedPanelVisibility();
}
void UpdateSuggestedPanelVisibility()
{
m_SuggestedOnNamePanel.style.display = m_SuggestedLabelsBasedOnName.Count == 0 ? DisplayStyle.None : DisplayStyle.Flex;
m_SuggestedOnPathPanel.style.display = m_SuggestedLabelsBasedOnPath.Count == 0 ? DisplayStyle.None : DisplayStyle.Flex;
if (m_SuggestedLabelsBasedOnPath.Count == 0 && m_SuggestedLabelsBasedOnName.Count == 0)
var castedTarget = ((Labeling)t);
castedTarget.labels.Add("");
EditorUtility.SetDirty(castedTarget);
m_SuggestedLabelsContainer.style.display = DisplayStyle.None;
void OnReordered(ReorderableList list, int oldIndex, int newIndex)
void RefreshCommonLabels()
var label = labeling.labels[newIndex];
CommonLabels.Clear();
CommonLabels.AddRange(((Labeling)serializedObject.targetObjects[0]).labels);
foreach (var t in targets)
foreach (var obj in serializedObject.targetObjects)
var l = (Labeling)t;
if (this.labeling == l) continue;
CommonLabels = CommonLabels.Intersect(((Labeling) obj).labels).ToList();
}
}
ReorderLabels(l, label, newIndex);
void SetupCurrentLabelsListView()
{
m_CurrentLabelsListView.itemsSource = CommonLabels;
VisualElement MakeItem() =>
new AddedLabelEditor(this, m_CurrentLabelsListView);
void BindItem(VisualElement e, int i)
{
if (e is AddedLabelEditor addedLabel)
{
addedLabel.indexInList = i;
addedLabel.labelTextField.value = CommonLabels[i];
}
const int itemHeight = 35;
m_CurrentLabelsListView.bindItem = BindItem;
m_CurrentLabelsListView.makeItem = MakeItem;
m_CurrentLabelsListView.itemHeight = itemHeight;
m_CurrentLabelsListView.itemsSource = CommonLabels;
m_CurrentLabelsListView.selectionType = SelectionType.None;
static void ReorderLabels(Labeling labeling, string label, int newIndex)
void SetupSuggestedLabelsListViews()
if (labeling.labels.Contains(label))
SetupSuggestedLabelsBasedOnFlatList(m_SuggestedLabelsListViewFromName, m_SuggestedLabelsBasedOnName);
SetupSuggestedLabelsBasedOnFlatList(m_SuggestedLabelsListViewFromPath, m_SuggestedLabelsBasedOnPath);
}
void SetupSuggestedLabelsBasedOnFlatList(ListView labelsListView, List<string> stringList)
{
labelsListView.itemsSource = stringList;
VisualElement MakeItem() => new SuggestedLabelElement(this);
void BindItem(VisualElement e, int i)
labeling.labels.Remove(label);
if (newIndex < labeling.labels.Count)
labeling.labels.Insert(newIndex, label);
else
labeling.labels.Add(label);
if (e is SuggestedLabelElement suggestedLabel)
{
suggestedLabel.label.text = stringList[i];
}
}
const int itemHeight = 32;
labelsListView.bindItem = BindItem;
labelsListView.makeItem = MakeItem;
labelsListView.itemHeight = itemHeight;
labelsListView.selectionType = SelectionType.None;
}
void SetupLabelConfigsScrollView()
{
m_LabelConfigsScrollView.Clear();
foreach (var config in m_AllLabelConfigsInProject)
{
VisualElement configElement = new LabelConfigElement(this, config);
m_LabelConfigsScrollView.Add(configElement);
static void ReplaceLabel(Labeling labeling, string oldLabel, string newLabel)
/// <summary>
/// Get the path of the given asset in the project, or get the path of the given Scene GameObject's source prefab if any
/// </summary>
/// <param name="obj"></param>
/// <returns></returns>
public static string GetAssetOrPrefabPath(UnityEngine.Object obj)
var idx = labeling.labels.IndexOf(oldLabel);
if (idx == -1) return;
labeling.labels[idx] = newLabel;
string assetPath = AssetDatabase.GetAssetPath(obj);
if (assetPath == string.Empty)
{
//this indicates that gObj is a scene object and not a prefab directly selected from the Project tab
var prefabObject = PrefabUtility.GetCorrespondingObjectFromSource(obj);
if (prefabObject)
{
assetPath = AssetDatabase.GetAssetPath(prefabObject);
}
}
return assetPath;
}
private void ReplaceLabel(int index, string newLabel)
class AddedLabelEditor : VisualElement
{
string m_UxmlDir = "Packages/com.unity.perception/Editor/GroundTruth/Uxml/";
public TextField labelTextField;
public int indexInList;
public AddedLabelEditor(LabelingEditor editor, ListView listView)
labeling.labels[index] = newLabel;
var uxmlPath = m_UxmlDir + "AddedLabelElement.uxml";
AssetDatabase.LoadAssetAtPath<VisualTreeAsset>(uxmlPath).CloneTree(this);
labelTextField = this.Q<TextField>("label-value");
var removeButton = this.Q<Button>("remove-button");
var addToConfigButton = this.Q<Button>("add-to-config-button");
labelTextField.isDelayed = true;
labelTextField.RegisterValueChangedCallback((cEvent) =>
{
//Do not let the user define a duplicate label
if (editor.CommonLabels.Contains(cEvent.newValue) && editor.CommonLabels.IndexOf(cEvent.newValue) != indexInList)
{
//The listview recycles child visual elements and that causes the RegisterValueChangedCallback event to be called when scrolling.
//Therefore, we need to make sure we are not in this code block just because of scrolling, but because the user is actively changing one of the labels.
//The editor.CommonLabels.IndexOf(cEvent.newValue) != m_IndexInList check is for this purpose.
Debug.LogError("A label with the string " + cEvent.newValue + " has already been added to selected objects.");
editor.ChangesHappeningInForeground = true;
editor.RefreshManualLabelingData();
return;
}
bool shouldRefresh = false;
foreach (var targetObject in editor.targets)
{
if (targetObject is Labeling labeling)
{
var indexToModifyInTargetLabelList =
labeling.labels.IndexOf(editor.CommonLabels[indexInList]);
var serializedLabelingObject2 = new SerializedObject(labeling);
var serializedLabelArray2 = serializedLabelingObject2.FindProperty(nameof(Labeling.labels));
serializedLabelArray2.GetArrayElementAtIndex(indexToModifyInTargetLabelList).stringValue = cEvent.newValue;
shouldRefresh = shouldRefresh || serializedLabelArray2.serializedObject.hasModifiedProperties;
serializedLabelingObject2.ApplyModifiedProperties();
serializedLabelingObject2.SetIsDifferentCacheDirty();
}
}
//The value change event is also raised when the ListView recycles its child elements for reuse during scrolling. Therefore, we first check whether any properties were actually modified; otherwise we would refresh for no reason and reduce scrolling performance.
if (shouldRefresh)
{
editor.ChangesHappeningInForeground = true;
editor.RefreshManualLabelingData();
}
});
addToConfigButton.clicked += () =>
{
AddToConfigWindow.ShowWindow(labelTextField.value);
};
removeButton.clicked += () =>
{
List<string> commonLabels = new List<string>();
commonLabels.Clear();
var firstTarget = editor.targets[0] as Labeling;
if (firstTarget != null)
{
commonLabels.AddRange(firstTarget.labels);
foreach (var obj in editor.targets)
{
commonLabels = commonLabels.Intersect(((Labeling) obj).labels).ToList();
}
foreach (var targetObject in editor.targets)
{
if (targetObject is Labeling labeling)
{
RemoveLabelFromLabelingSerObj(labeling, commonLabels);
}
}
editor.serializedObject.SetIsDifferentCacheDirty();
editor.RefreshManualLabelingData();
}
};
void ReplaceLabelAll(int index, string currentLabel)
void RemoveLabelFromLabelingSerObj(Labeling labeling, List<string> commonLabels)
var oldLabel = labeling.labels[index];
ReplaceLabel(index, currentLabel);
Dictionary<int, int> commonsIndexToLabelsIndex = new Dictionary<int, int>();
foreach (var t in targets)
for (int i = 0; i < labeling.labels.Count; i++)
var l = (Labeling)t;
string label = labeling.labels[i];
if (this.labeling == l) continue;
for (int j = 0; j < commonLabels.Count; j++)
{
string label2 = commonLabels[j];
ReplaceLabel(l, oldLabel, currentLabel);
if (string.Equals(label, label2) && !commonsIndexToLabelsIndex.ContainsKey(j))
{
commonsIndexToLabelsIndex.Add(j, i);
}
}
var serializedLabelingObject2 = new SerializedObject(labeling);
var serializedLabelArray2 = serializedLabelingObject2.FindProperty("labels");
serializedLabelArray2.DeleteArrayElementAtIndex(commonsIndexToLabelsIndex[indexInList]);
serializedLabelingObject2.ApplyModifiedProperties();
serializedLabelingObject2.SetIsDifferentCacheDirty();
}
void DrawElement(Rect rect, int index, bool isactive, bool isfocused)
class SuggestedLabelElement : VisualElement
{
string m_UxmlDir = "Packages/com.unity.perception/Editor/GroundTruth/Uxml/";
public Label label;
public SuggestedLabelElement(LabelingEditor editor)
using (var change = new EditorGUI.ChangeCheckScope())
var uxmlPath = m_UxmlDir + "SuggestedLabelElement.uxml";
AssetDatabase.LoadAssetAtPath<VisualTreeAsset>(uxmlPath).CloneTree(this);
label = this.Q<Label>("label-value");
var addButton = this.Q<Button>("add-button");
addButton.clicked += () =>
var contentRect = new Rect(rect.x, rect.y, rect.width, rect.height);
foreach (var targetObject in editor.serializedObject.targetObjects)
{
if (targetObject is Labeling labeling)
{
if (labeling.labels.Contains(label.text))
continue; //Do not allow duplicate labels in one asset. Duplicate labels serve no purpose and break other operations, especially multi-object editing.
var serializedLabelingObject2 = new SerializedObject(targetObject);
var serializedLabelArray2 = serializedLabelingObject2.FindProperty("labels");
serializedLabelArray2.InsertArrayElementAtIndex(serializedLabelArray2.arraySize);
serializedLabelArray2.GetArrayElementAtIndex(serializedLabelArray2.arraySize-1).stringValue = label.text;
serializedLabelingObject2.ApplyModifiedProperties();
serializedLabelingObject2.SetIsDifferentCacheDirty();
editor.serializedObject.SetIsDifferentCacheDirty();
}
}
editor.ChangesHappeningInForeground = true;
editor.RefreshManualLabelingData();
};
}
}
var value = EditorGUI.DelayedTextField(contentRect, labeling.labels[index]);
class LabelConfigElement : VisualElement
{
string m_UxmlDir = "Packages/com.unity.perception/Editor/GroundTruth/Uxml/";
bool m_Collapsed = true;
ListView m_LabelsListView;
VisualElement m_CollapseToggle;
public LabelConfigElement(LabelingEditor editor, ScriptableObject config)
{
var uxmlPath = m_UxmlDir + "ConfigElementForAddingLabelsFrom.uxml";
AssetDatabase.LoadAssetAtPath<VisualTreeAsset>(uxmlPath).CloneTree(this);
m_LabelsListView = this.Q<ListView>("label-config-contents-listview");
var openButton = this.Q<Button>("open-config-button");
var configName = this.Q<Label>("config-name");
configName.text = config.name;
m_CollapseToggle = this.Q<VisualElement>("collapse-toggle");
if (change.changed)
openButton.clicked += () =>
{
Selection.SetActiveObjectWithContext(config, null);
};
var propertyInfo = config.GetType().GetProperty(IdLabelConfig.publicLabelEntriesFieldName);
if (propertyInfo != null)
{
var objectList = (IEnumerable) propertyInfo.GetValue(config);
var labelEntryList = objectList.Cast<ILabelEntry>().ToList();
var labelList = labelEntryList.Select(entry => entry.label).ToList();
m_LabelsListView.itemsSource = labelList;
VisualElement MakeItem()
ReplaceLabelAll(index, value);
var element = new SuggestedLabelElement(editor);
element.AddToClassList("label_add_from_config");
return element;
}
if (PrefabUtility.IsPartOfAnyPrefab(target))
void BindItem(VisualElement e, int i)
{
if (e is SuggestedLabelElement suggestedLabel)
EditorUtility.SetDirty(target);
suggestedLabel.label.text = labelList[i];
const int itemHeight = 27;
m_LabelsListView.bindItem = BindItem;
m_LabelsListView.makeItem = MakeItem;
m_LabelsListView.itemHeight = itemHeight;
m_LabelsListView.selectionType = SelectionType.None;
m_CollapseToggle.RegisterCallback<MouseUpEvent>(evt =>
{
m_Collapsed = !m_Collapsed;
ApplyCollapseState();
});
ApplyCollapseState();
public override void OnInspectorGUI()
void ApplyCollapseState()
m_LabelsList.DoLayoutList();
if (m_Collapsed)
{
m_CollapseToggle.AddToClassList("collapsed-toggle-state");
m_LabelsListView.AddToClassList("collapsed");
}
else
{
m_CollapseToggle.RemoveFromClassList("collapsed-toggle-state");
m_LabelsListView.RemoveFromClassList("collapsed");
}
}
}
/// <summary>
/// A scheme for automatically producing a label for a given asset, e.g. based on the asset's name or path.
/// </summary>
abstract class AssetLabelingScheme
{
/// <summary>
/// The description of how this scheme generates labels. Used in the dropdown menu in the UI.
/// </summary>
public abstract string Description { get; }
/// <summary>
/// Generate a label for the given asset
/// </summary>
/// <param name="asset"></param>
/// <returns></returns>
public abstract string GenerateLabel(UnityEngine.Object asset);
}
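As a hedged illustration of the contract above, a new scheme could be added alongside the ones below by deriving from AssetLabelingScheme; the class here is a hypothetical example, not part of this change:

// Hypothetical scheme: label every asset with a fixed prefix plus its name.
class PrefixedAssetNameLabelingScheme : AssetLabelingScheme
{
    ///<inheritdoc/>
    public override string Description => "Use \"prop_\" + asset name";

    ///<inheritdoc/>
    public override string GenerateLabel(UnityEngine.Object asset)
    {
        // Same idea as AssetNameLabelingScheme, with a constant prefix prepended.
        return "prop_" + asset.name;
    }
}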
/// <summary>
/// Asset labeling scheme that outputs the given asset's name as its automatic label
/// </summary>
class AssetNameLabelingScheme : AssetLabelingScheme
{
///<inheritdoc/>
public override string Description => "Use asset name";
///<inheritdoc/>
public override string GenerateLabel(UnityEngine.Object asset)
{
return asset.name;
}
}
/// <summary>
/// Asset labeling scheme that outputs the given asset's file name, including extension, as its automatic label
/// </summary>
class AssetFileNameLabelingScheme : AssetLabelingScheme
{
///<inheritdoc/>
public override string Description => "Use file name with extension";
///<inheritdoc/>
public override string GenerateLabel(UnityEngine.Object asset)
{
string assetPath = LabelingEditor.GetAssetOrPrefabPath(asset);
var stringList = assetPath.Split(LabelingEditor.PathSeparators, StringSplitOptions.RemoveEmptyEntries)
.ToList();
return stringList.Count > 0 ? stringList.Last() : null;
}
}
/// <summary>
/// Asset labeling scheme that outputs the given asset's folder name as its automatic label
/// </summary>
class CurrentOrParentsFolderNameLabelingScheme : AssetLabelingScheme
{
///<inheritdoc/>
public override string Description => "Use the asset's folder name";
///<inheritdoc/>
public override string GenerateLabel(UnityEngine.Object asset)
{
string assetPath = LabelingEditor.GetAssetOrPrefabPath(asset);
var stringList = assetPath.Split(LabelingEditor.PathSeparators, StringSplitOptions.RemoveEmptyEntries)
.ToList();
return stringList.Count > 1 ? stringList[stringList.Count-2] : null;
}
}
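For example (hypothetical paths, following the same splitting logic as GenerateLabel above):

// "Assets/Props/Chairs/WoodenChair.prefab" -> "Chairs" (the parent folder of the asset file)
// "Assets/WoodenChair.prefab"              -> "Assets"
// A path with a single segment yields null, since there is no second-to-last segment.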
}

2
com.unity.perception/Editor/GroundTruth/LabelingEditor.cs.meta


fileFormatVersion: 2
guid: 2e725508a34c40a0938c8d891b371980
guid: 387b8732b87094321af57795df93aec4
timeCreated: 1585933334

141
com.unity.perception/Editor/GroundTruth/SemanticSegmentationLabelConfigEditor.cs


using System;
using System.Collections.Generic;
using UnityEditorInternal;
using UnityEditor.UIElements;
using UnityEngine.UIElements;
class SemanticSegmentationLabelConfigEditor : Editor
class SemanticSegmentationLabelConfigEditor : LabelConfigEditor<SemanticSegmentationLabelEntry>
ReorderableList m_LabelsList;
const float k_Margin = 5f;
protected override void InitUiExtended()
{
m_MoveButtons.style.display = DisplayStyle.None;
m_IdSpecificUi.style.display = DisplayStyle.None;
}
static List<Color> s_StandardColors = new List<Color>()
public override void PostRemoveOperations()
{ }
protected override void SetupPresentLabelsListView()
Color.blue,
Color.green,
Color.red,
Color.white,
Color.yellow,
Color.gray
};
base.SetupPresentLabelsListView();
VisualElement MakeItem() =>
new ColoredLabelElementInLabelConfig(this, m_SerializedLabelsArray);
void BindItem(VisualElement e, int i)
{
if (e is ColoredLabelElementInLabelConfig addedLabel)
{
addedLabel.indexInList = i;
addedLabel.labelTextField.BindProperty(m_SerializedLabelsArray.GetArrayElementAtIndex(i)
.FindPropertyRelative(nameof(SemanticSegmentationLabelEntry.label)));
addedLabel.colorField.BindProperty(m_SerializedLabelsArray.GetArrayElementAtIndex(i)
.FindPropertyRelative(nameof(SemanticSegmentationLabelEntry.color)));
}
}
public void OnEnable()
{
m_LabelsList = new ReorderableList(this.serializedObject, this.serializedObject.FindProperty(IdLabelConfig.labelEntriesFieldName), true, false, true, true);
m_LabelsList.elementHeight = EditorGUIUtility.singleLineHeight * 2 + k_Margin;
m_LabelsList.drawElementCallback = DrawElement;
m_LabelsList.onAddCallback += OnAdd;
m_LabelListView.bindItem = BindItem;
m_LabelListView.makeItem = MakeItem;
void OnAdd(ReorderableList list)
protected override SemanticSegmentationLabelEntry CreateLabelEntryFromLabelString(SerializedProperty serializedArray, string labelToAdd)
var standardColorList = new List<Color>(s_StandardColors);
for (int i = 0; i < list.serializedProperty.arraySize; i++)
var standardColorList = new List<Color>(SemanticSegmentationLabelConfig.s_StandardColors);
for (int i = 0; i < serializedArray.arraySize; i++)
var item = list.serializedProperty.GetArrayElementAtIndex(i);
var item = serializedArray.GetArrayElementAtIndex(i);
var index = list.serializedProperty.arraySize;
list.serializedProperty.InsertArrayElementAtIndex(index);
var element = list.serializedProperty.GetArrayElementAtIndex(index);
var labelProperty = element.FindPropertyRelative(nameof(SemanticSegmentationLabelEntry.label));
labelProperty.stringValue = "";
var colorProperty = element.FindPropertyRelative(nameof(SemanticSegmentationLabelEntry.color));
if (standardColorList.Any())
colorProperty.colorValue = standardColorList.First();
else
colorProperty.colorValue = Random.ColorHSV(0, 1, .5f, 1, 1, 1);
var foundColor = standardColorList.Any() ? standardColorList.First() : Random.ColorHSV(0, 1, .5f, 1, 1, 1);
serializedObject.ApplyModifiedProperties();
EditorUtility.SetDirty(target);
return new SemanticSegmentationLabelEntry
{
color = foundColor,
label = labelToAdd
};
void DrawElement(Rect rect, int index, bool isactive, bool isfocused)
protected override void AppendLabelEntryToSerializedArray(SerializedProperty serializedArray, SemanticSegmentationLabelEntry semanticSegmentationLabelEntry)
var element = m_LabelsList.serializedProperty.GetArrayElementAtIndex(index);
var index = serializedArray.arraySize;
serializedArray.InsertArrayElementAtIndex(index);
var element = serializedArray.GetArrayElementAtIndex(index);
var labelProperty = element.FindPropertyRelative(nameof(SemanticSegmentationLabelEntry.label));
using (var change = new EditorGUI.ChangeCheckScope())
colorProperty.colorValue = semanticSegmentationLabelEntry.color;
var labelProperty = element.FindPropertyRelative(nameof(ILabelEntry.label));
labelProperty.stringValue = semanticSegmentationLabelEntry.label;
}
public int IndexOfGivenColorInSerializedLabelsArray(Color color)
{
for (int i = 0; i < m_SerializedLabelsArray.arraySize; i++)
var contentRect = new Rect(rect.position, new Vector2(rect.width, EditorGUIUtility.singleLineHeight));
var newLabel = EditorGUI.TextField(contentRect, nameof(SemanticSegmentationLabelEntry.label), labelProperty.stringValue);
if (change.changed)
{
labelProperty.stringValue = newLabel;
}
}
using (var change = new EditorGUI.ChangeCheckScope())
{
var contentRect = new Rect(rect.position + new Vector2(0, EditorGUIUtility.singleLineHeight), new Vector2(rect.width, EditorGUIUtility.singleLineHeight));
var newLabel = EditorGUI.ColorField(contentRect, nameof(SemanticSegmentationLabelEntry.color), colorProperty.colorValue);
if (change.changed)
var element = m_SerializedLabelsArray.GetArrayElementAtIndex(i).FindPropertyRelative(nameof(SemanticSegmentationLabelEntry.color));
if (element.colorValue == color)
colorProperty.colorValue = newLabel;
return i;
return -1;
}
public override void OnInspectorGUI()
class ColoredLabelElementInLabelConfig : LabelElementInLabelConfig<SemanticSegmentationLabelEntry>
{
protected override string UxmlPath => k_UxmlDir + "ColoredLabelElementInLabelConfig.uxml";
public ColorField colorField;
public ColoredLabelElementInLabelConfig(LabelConfigEditor<SemanticSegmentationLabelEntry> editor, SerializedProperty labelsArray) : base(editor, labelsArray)
{ }
protected override void InitExtended()
serializedObject.Update();
colorField = this.Q<ColorField>("label-color-value");
colorField.RegisterValueChangedCallback((cEvent) =>
{
var index = ((SemanticSegmentationLabelConfigEditor)m_LabelConfigEditor).IndexOfGivenColorInSerializedLabelsArray(cEvent.newValue);
if (index != -1 && index != indexInList)
{
//The ListView recycles child visual elements, which causes this RegisterValueChangedCallback to fire while scrolling.
//Therefore, we need to make sure we reached this code block because the user actively changed a color, not merely because of scrolling.
//The index check serves this purpose.
m_LabelsList.DoLayoutList();
this.serializedObject.ApplyModifiedProperties();
Debug.LogWarning("A label with the chosen color " + cEvent.newValue + " has already been added to this label configuration.");
}
});
}
}
}

4
com.unity.perception/Editor/GroundTruth/SemanticSegmentationLabelConfigEditor.cs.meta


fileFormatVersion: 2
guid: e8cb4fead5b34d41884c1c9a77308c72
timeCreated: 1593454492
guid: c8e809a6323844c71aa3cb751a6ae9a1
timeCreated: 1585940009

14
com.unity.perception/Editor/Randomization/Editors.meta


fileFormatVersion: 2
<<<<<<< HEAD
fileFormatVersion: 2
timeCreated: 1600754567
=======
guid: 0b17046409af4c22bf74eec2a5965984
timeCreated: 1598135707
>>>>>>> 86d25d2... implemented parameter behaviours
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

17
com.unity.perception/Editor/Randomization/Editors/ScenarioBaseEditor.cs.meta


fileFormatVersion: 2
<<<<<<< HEAD
fileFormatVersion: 2
timeCreated: 1600754583
=======
guid: face5e97e23d402cbf6fafadb39fa0c3
timeCreated: 1596213301
>>>>>>> 86d25d2... implemented parameter behaviours
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

30
com.unity.perception/Editor/Randomization/Editors/RunInUnitySimulationWindow.cs


using Unity.Simulation.Client;
using UnityEditor;
using UnityEditor.Build.Reporting;
using UnityEditor.SceneManagement;
using UnityEditor.UIElements;
using UnityEngine.Experimental.Perception.Randomization.Editor;
using UnityEngine.Experimental.Perception.Randomization.Scenarios;

namespace UnityEngine.Perception.Randomization.Editor
{
class RunInUSimWindow : EditorWindow
class RunInUnitySimulationWindow : EditorWindow
{
string m_BuildDirectory;

ObjectField m_ScenarioField;
Button m_RunButton;
[MenuItem("Window/Run in USim")]
[MenuItem("Window/Run in Unity Simulation")]
var window = GetWindow<RunInUSimWindow>();
var window = GetWindow<RunInUnitySimulationWindow>();
window.titleContent = new GUIContent("Run In Unity Simulation");
window.minSize = new Vector2(250, 50);
window.Show();

}
else
{
CreateRunInUSimUI();
CreateRunInUnitySimulationUI();
}
}

/// <param name="element">The visual element to enable view data for</param>
static void SetViewDataKey(VisualElement element)
{
element.viewDataKey = $"RunInUSim_{element.name}";
element.viewDataKey = $"RunInUnitySimulation_{element.name}";
void CreateRunInUSimUI()
void CreateRunInUnitySimulationUI()
$"{StaticData.uxmlDir}/RunInUSimWindow.uxml").CloneTree(root);
$"{StaticData.uxmlDir}/RunInUnitySimulationWindow.uxml").CloneTree(root);
m_RunNameField = root.Q<TextField>("run-name");
SetViewDataKey(m_RunNameField);

m_SysParam = sysParamDefinitions[0];
m_RunButton = root.Q<Button>("run-button");
m_RunButton.clicked += RunInUSim;
m_RunButton.clicked += RunInUnitySimulation;
async void RunInUSim()
async void RunInUnitySimulation()
await StartUSimRun();
await StartUnitySimulationRun();
}
void ValidateSettings()

if (m_ScenarioField.value == null)
throw new MissingFieldException("Scenario unselected");
var scenario = (ScenarioBase)m_ScenarioField.value;
if (!StaticData.IsSubclassOfRawGeneric(typeof(USimScenario<>), scenario.GetType()))
throw new NotSupportedException("Scenario class must be derived from USimScenario to run in USim");
if (!StaticData.IsSubclassOfRawGeneric(typeof(UnitySimulationScenario<>), scenario.GetType()))
throw new NotSupportedException(
"Scenario class must be derived from UnitySimulationScenario to run in Unity Simulation");
}
void CreateLinuxBuildAndZip()

if (token.IsCancellationRequested)
return null;
var appParamName = $"{m_RunNameField.value}_{i}";
var appParamId = API.UploadAppParam(appParamName, new USimConstants
var appParamId = API.UploadAppParam(appParamName, new UnitySimulationConstants
{
totalIterations = m_TotalIterationsField.value,
instanceCount = m_InstanceCountField.value,

return appParamIds;
}
async Task StartUSimRun()
async Task StartUnitySimulationRun()
{
m_RunButton.SetEnabled(false);
var cancellationTokenSource = new CancellationTokenSource();

14
com.unity.perception/Editor/Randomization/PropertyDrawers.meta


fileFormatVersion: 2
<<<<<<< HEAD
fileFormatVersion: 2
timeCreated: 1600754588
=======
guid: d3107e026b2943c1868c9b3f8c6480d3
timeCreated: 1598135730
>>>>>>> 86d25d2... implemented parameter behaviours
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

17
com.unity.perception/Editor/Randomization/PropertyDrawers/ColorHsvaDrawer.cs.meta


fileFormatVersion: 2
<<<<<<< HEAD
fileFormatVersion: 2
timeCreated: 1600754588
=======
guid: 5e8094c28dd142a09fbbd38ca560164b
timeCreated: 1598250942
>>>>>>> 86d25d2... implemented parameter behaviours
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

17
com.unity.perception/Editor/Randomization/PropertyDrawers/ParameterDrawer.cs.meta


fileFormatVersion: 2
<<<<<<< HEAD
fileFormatVersion: 2
timeCreated: 1600754588
=======
guid: d389620d3aa3471ca1877eb59cdfb465
timeCreated: 1598135745
>>>>>>> 86d25d2... implemented parameter behaviours
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

12
com.unity.perception/Editor/Randomization/StaticData.cs.meta


fileFormatVersion: 2
fileFormatVersion: 2
timeCreated: 1595278931
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

9
com.unity.perception/Editor/Randomization/Uss.meta


fileFormatVersion: 2
fileFormatVersion: 2
timeCreated: 1590479034
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

20
com.unity.perception/Editor/Randomization/Uss/Styles.uss


.randomization__remove-item-button {
width: 12px;
height: 14px;
background-image: resource("Packages/com.unity.perception/Editor/Randomization/Icons/X.png");
background-image: resource("Packages/com.unity.perception/Editor/Icons/X.png");
}
.randomization__collapse-toggle {

width: 10px;
height: 10px;
background-image: resource("Packages/com.unity.perception/Editor/Randomization/Icons/FoldoutOpen.png");
background-image: resource("Packages/com.unity.perception/Editor/Icons/FoldoutOpen.png");
background-image: resource("Packages/com.unity.perception/Editor/Randomization/Icons/FoldoutClosed.png");
background-image: resource("Packages/com.unity.perception/Editor/Icons/FoldoutClosed.png");
}
.randomization__collapse-toggle:hover {

.randomization__chevron-left {
height: 12px;
width: 12px;
background-image: resource("Packages/com.unity.perception/Editor/Randomization/Icons/ChevronLeft.png");
background-image: resource("Packages/com.unity.perception/Editor/Icons/ChevronLeft.png");
background-image: resource("Packages/com.unity.perception/Editor/Randomization/Icons/ChevronRight.png");
background-image: resource("Packages/com.unity.perception/Editor/Icons/ChevronRight.png");
}

border-color: #191919;
padding: 2px 4px 2px 4px;
border-color: #808080;
padding: 7px;
border-radius: 4px;
white-space: normal;
margin-top: 4px;
margin-bottom: 4px;

padding: 2px;
}
/* Randomizer classes */
.randomizer__drag-bar {
width: 100px;

width: 16px;
height: 100%;
min-height: 20px;
background-image: resource("Packages/com.unity.perception/Editor/Randomization/Icons/DragHandle.png");
background-image: resource("Packages/com.unity.perception/Editor/Icons/DragHandle.png");
}
.randomizer__drag-handle:hover {

top: 9px;
width: 10px;
height: 10px;
background-image: resource("Packages/com.unity.perception/Editor/Randomization/Icons/Search.png");
background-image: resource("Packages/com.unity.perception/Editor/Icons/Search.png");
}
.randomizer__menu-search-bar .unity-base-text-field__input {

9
com.unity.perception/Editor/Randomization/Uxml.meta


fileFormatVersion: 2
fileFormatVersion: 2
timeCreated: 1590479019
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

16
com.unity.perception/Editor/Randomization/Uxml/Parameter/ParameterDrawer.uxml.meta


fileFormatVersion: 2
<<<<<<< HEAD
fileFormatVersion: 2
timeCreated: 1600754588
=======
guid: 6a4bb3efae29429292ccdfa63e661872
timeCreated: 1598240583
>>>>>>> 86d25d2... implemented parameter behaviours
ScriptedImporter:
internalIDToNameTable: []
externalObjects: {}
serializedVersion: 2
userData:
assetBundleName:
assetBundleVariant:
script: {fileID: 13804, guid: 0000000000000000e000000000000000, type: 0}

2
com.unity.perception/Editor/Randomization/Uxml/RunInUnitySimulationWindow.uxml


<editor:ObjectField name="main-scene" label="Main Scene" allow-scene-objects="false"/>
<editor:ObjectField name="scenario" label="Scenario"/>
<VisualElement class="unity-base-field">
<Label text="USim worker config" class="unity-base-field__label"/>
<Label text="Sys-Param" class="unity-base-field__label"/>
<editor:ToolbarMenu name="sys-param" class="unity-base-field__input" style="border-width: 1px;"/>
</VisualElement>
<VisualElement style="align-items: center;">

14
com.unity.perception/Editor/Randomization/VisualElements.meta


fileFormatVersion: 2
<<<<<<< HEAD
fileFormatVersion: 2
timeCreated: 1600754567
=======
guid: 7f8f95a1bb144a96b9310164f5560387
timeCreated: 1598135666
>>>>>>> 86d25d2... implemented parameter behaviours
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

9
com.unity.perception/Editor/Randomization/VisualElements/Parameter.meta


fileFormatVersion: 2
fileFormatVersion: 2
timeCreated: 1601669088
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

17
com.unity.perception/Editor/Randomization/VisualElements/Parameter/CategoricalOptionElement.cs.meta


fileFormatVersion: 2
<<<<<<< HEAD
fileFormatVersion: 2
timeCreated: 1600754567
=======
guid: 3066f77d411047baafb6cc454adc6e37
timeCreated: 1595535184
>>>>>>> 86d25d2... implemented parameter behaviours
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

17
com.unity.perception/Editor/Randomization/VisualElements/Parameter/ColorHsvaField.cs.meta


fileFormatVersion: 2
<<<<<<< HEAD
fileFormatVersion: 2
timeCreated: 1600754567
=======
guid: 103b163a2467415ab86b0df8175b12a6
timeCreated: 1598254290
>>>>>>> 86d25d2... implemented parameter behaviours
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

17
com.unity.perception/Editor/Randomization/VisualElements/Parameter/DrawerParameterElement.cs.meta


fileFormatVersion: 2
<<<<<<< HEAD
fileFormatVersion: 2
timeCreated: 1600754567
=======
guid: e2eb905ca8c14b5cbe43e48418948be0
timeCreated: 1598255728
>>>>>>> 86d25d2... implemented parameter behaviours
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

17
com.unity.perception/Editor/Randomization/VisualElements/Parameter/ParameterElement.cs.meta


fileFormatVersion: 2
<<<<<<< HEAD
fileFormatVersion: 2
timeCreated: 1600754567
=======
guid: ea72d77c64d1447aa195e2068f02cf74
timeCreated: 1595279847
>>>>>>> 86d25d2... implemented parameter behaviours
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

9
com.unity.perception/Editor/Randomization/VisualElements/Randomizer.meta


fileFormatVersion: 2
fileFormatVersion: 2
timeCreated: 1601054944
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

12
com.unity.perception/Editor/Randomization/VisualElements/Randomizer/AddRandomizerMenu.cs.meta


fileFormatVersion: 2
fileFormatVersion: 2
timeCreated: 1600836688
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

12
com.unity.perception/Editor/Randomization/VisualElements/Randomizer/RandomizerElement.cs.meta


fileFormatVersion: 2
fileFormatVersion: 2
timeCreated: 1600290125
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

11
com.unity.perception/Editor/Randomization/VisualElements/Randomizer/RandomizerList.cs


ToolbarMenu m_AddRandomizerMenu;
public HashSet<Type> randomizerTypeSet = new HashSet<Type>();
int m_PreviousListSize;
ScenarioBase scenario => (ScenarioBase)m_Property.serializedObject.targetObject;
VisualElement inspectorContainer

collapseAllButton.clicked += () => CollapseRandomizers(true);
RefreshList();
Undo.undoRedoPerformed += () =>
{
m_Property.serializedObject.Update();
RefreshList();
};
}
void RefreshList()

randomizerTypeSet.Clear();
foreach (var randomizer in scenario.randomizers)
randomizerTypeSet.Add(randomizer.GetType());
m_PreviousListSize = m_Property.arraySize;
Undo.RegisterCompleteObjectUndo(m_Property.serializedObject.targetObject, "Add Randomizer");
var newRandomizer = scenario.CreateRandomizer(randomizerType);
newRandomizer.RandomizeParameterSeeds();
m_Property.serializedObject.Update();

public void RemoveRandomizer(RandomizerElement element)
{
Undo.RegisterCompleteObjectUndo(m_Property.serializedObject.targetObject, "Remove Randomizer");
scenario.RemoveRandomizer(element.randomizerType);
m_Property.serializedObject.Update();
RefreshList();

{
if (currentIndex == nextIndex)
return;
Undo.RegisterCompleteObjectUndo(m_Property.serializedObject.targetObject, "Reorder Randomizer");
scenario.ReorderRandomizer(currentIndex, nextIndex);
m_Property.serializedObject.Update();
RefreshList();

12
com.unity.perception/Editor/Randomization/VisualElements/Randomizer/RandomizerList.cs.meta


fileFormatVersion: 2
fileFormatVersion: 2
timeCreated: 1600366159
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

13
com.unity.perception/Editor/Randomization/VisualElements/Randomizer/RandomizerReorderingIndicator.cs.meta


<<<<<<< HEAD
fileFormatVersion: 2
<<<<<<< HEAD
guid: 61021c66c33e40e98de0702ae0aa4449
timeCreated: 1600754567
=======
guid: 7c1e08b02e5a4c55875f34baf32f8e76
timeCreated: 1596143672
>>>>>>> 86d25d2... implemented parameter behaviours
=======
guid: 7c1e08b02e5a4c55875f34baf32f8e76
guid: e43353090a445024ab8110b73630525a
MonoImporter:
externalObjects: {}
serializedVersion: 2

userData:
assetBundleName:
assetBundleVariant:
>>>>>>> 750f255... working on new workflow

9
com.unity.perception/Editor/Randomization/VisualElements/Sampler.meta


fileFormatVersion: 2
fileFormatVersion: 2
timeCreated: 1601669132
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

17
com.unity.perception/Editor/Randomization/VisualElements/Sampler/FloatRangeElement.cs.meta


fileFormatVersion: 2
<<<<<<< HEAD
fileFormatVersion: 2
timeCreated: 1600754567
=======
guid: e37f169c618d471d8ed9614a41096437
timeCreated: 1595281335
>>>>>>> 86d25d2... implemented parameter behaviours
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

17
com.unity.perception/Editor/Randomization/VisualElements/Sampler/RandomSeedField.cs.meta


fileFormatVersion: 2
<<<<<<< HEAD
fileFormatVersion: 2
timeCreated: 1600754567
=======
guid: b4fa54f5ed5d4d67a278fa8b42dc55cb
timeCreated: 1596171029
>>>>>>> 86d25d2... implemented parameter behaviours
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

17
com.unity.perception/Editor/Randomization/VisualElements/Sampler/SamplerElement.cs.meta


fileFormatVersion: 2
<<<<<<< HEAD
fileFormatVersion: 2
timeCreated: 1600754567
=======
guid: b367f8f2cb8e465ca2d60ccbd5414a14
timeCreated: 1595277943
>>>>>>> 86d25d2... implemented parameter behaviours
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

1
com.unity.perception/Runtime/AssemblyInfo.cs


[assembly: InternalsVisibleTo("Unity.Perception.Runtime.Tests")]
[assembly: InternalsVisibleTo("Unity.Perception.Runtime")]
[assembly: InternalsVisibleTo("Unity.Perception.TestProject")]
[assembly: InternalsVisibleTo("Unity.Perception.Performance.Tests")]

20
com.unity.perception/Runtime/GroundTruth/DatasetCapture.cs


static SimulationState CreateSimulationData()
{
//TODO: Remove the Guid path when we have proper dataset merging in USim/Thea
//TODO: Remove the Guid path when we have proper dataset merging in Unity Simulation and Thea
return new SimulationState($"Dataset{k_DatasetGuid}");
}

throw new ArgumentNullException(nameof(path));
m_SimulationState.ReportAsyncAnnotationResult<object>(this, path);
}
/// <summary>
/// Report file-based and value-based data for this annotation.
/// </summary>
/// <param name="path">The path to the file containing the annotation data.</param>
/// <param name="values">The annotation data.</param>
/// <typeparam name="T">The type of the data.</typeparam>
/// <exception cref="ArgumentNullException">Thrown if path or values is null</exception>
public void ReportFileAndValues<T>(string path, IEnumerable<T> values)
{
if (path == null)
throw new ArgumentNullException(nameof(path));
if (values == null)
throw new ArgumentNullException(nameof(values));
m_SimulationState.ReportAsyncAnnotationResult(this, path, values);
}
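A hedged usage sketch for the method above; the annotation handle, file path, and value collection are placeholders and not part of this change:

// Hypothetical caller code: 'annotation' is an annotation handle obtained from DatasetCapture,
// and 'boxValues' is an IEnumerable of serializable annotation values.
annotation.ReportFileAndValues("annotations/bounding_boxes_0001.json", boxValues);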
/// <summary>

12
com.unity.perception/Runtime/GroundTruth/GroundTruthLabelSetupSystem.cs


void InitGameObjectRecursive(GameObject gameObject, MaterialPropertyBlock mpb, Labeling labeling, uint instanceId)
{
var terrain = gameObject.GetComponent<Terrain>();
if (terrain != null)
{
terrain.GetSplatMaterialPropertyBlock(mpb);
foreach (var pass in m_ActiveGenerators)
pass.SetupMaterialProperties(mpb, null, labeling, instanceId);
terrain.SetSplatMaterialPropertyBlock(mpb);
}
var renderer = (Renderer)gameObject.GetComponent<MeshRenderer>();
if (renderer == null)
renderer = gameObject.GetComponent<SkinnedMeshRenderer>();

14
com.unity.perception/Runtime/GroundTruth/InstanceSegmentationCrossPipelinePass.cs


Shader m_SegmentationShader;
Material m_OverrideMaterial;
int m_NextObjectIndex;
Dictionary<uint, uint> m_Ids;
/// <summary>
/// Create a new <see cref="InstanceSegmentationCrossPipelinePass"/> referencing the given

{
using (s_ExecuteMarker.Auto())
{
cmd.ClearRenderTarget(true, true, Color.clear);
cmd.ClearRenderTarget(true, true, Color.black);
var result = CreateRendererListDesc(camera, cullingResult, "FirstPass", 0, m_OverrideMaterial, layerMask);
DrawRendererList(renderContext, cmd, RendererList.Create(result));

public override void SetupMaterialProperties(MaterialPropertyBlock mpb, Renderer renderer, Labeling labeling, uint instanceId)
{
mpb.SetInt(k_SegmentationIdProperty, (int)instanceId);
var found = InstanceIdToColorMapping.TryGetColorFromInstanceId(instanceId, out var color);
if (!found)
{
Debug.LogError($"Could not get a unique color for {instanceId}");
}
mpb.SetVector(k_SegmentationIdProperty, (Color)color);
#if PERCEPTION_DEBUG
Debug.Log($"Assigning id. Frame {Time.frameCount} id {id}");
#endif

82
com.unity.perception/Runtime/GroundTruth/Labelers/BoundingBox3DLabeler.cs


public class BoundingBox3DLabeler : CameraLabeler
{
EntityQuery m_EntityQuery;
///<inheritdoc/>
public override string description
{

/// <summary>
/// Each 3D bounding box data record maps a tuple of (instance, label) to the translation, size, and rotation that define a 3D bounding box,
/// as well as the optional velocity and acceleration of the 3D bounding box. All location data is given with respect to the sensor coordinate system.
///
/// bounding_box_3d
/// label_id (int): Integer identifier of the label
/// label_name (str): String identifier of the label
/// instance_id (str): UUID of the instance.
/// translation (float, float, float): 3d bounding box's center location in meters as center_x, center_y, center_z with respect to global coordinate system.
/// size (float, float, float): 3d bounding box size in meters as width, length, height.
/// rotation (float, float, float, float): 3d bounding box orientation as quaternion: w, x, y, z.
/// velocity (float, float, float) [optional]: 3d bounding box velocity in meters per second as v_x, v_y, v_z.
/// acceleration (float, float, float) [optional]: 3d bounding box acceleration in meters per second^2 as a_x, a_y, a_z.
/// </summary>
/// <remarks>
/// Exporting velocity and acceleration is not currently supported; both values will be null.

public struct BoxData
{
/// <summary>
/// Integer identifier of the label
/// </summary>
/// <summary>
/// String identifier of the label
/// </summary>
/// <summary>
/// UUID of the instance
/// </summary>
/// <summary>
/// 3d bounding box's center location in meters as center_x, center_y, center_z with respect to global coordinate system
/// </summary>
/// <summary>
/// 3d bounding box size in meters as width, length, height
/// </summary>
/// <summary>
/// 3d bounding box orientation as quaternion: w, x, y, z
/// </summary>
/// <summary>
/// [optional]: 3d bounding box velocity in meters per second as v_x, v_y, v_z
/// </summary>
/// <summary>
/// [optional]: 3d bounding box acceleration in meters per second^2 as a_x, a_y, a_z
/// </summary>
int m_CurrentFrame;
/// <inheritdoc/>

"Bounding box for each labeled object visible to the sensor", id: new Guid(annotationId));
perceptionCamera.RenderedObjectInfosCalculated += OnRenderObjectInfosCalculated;
m_AsyncAnnotations = new Dictionary<int, AsyncAnnotation>();
m_BoundingBoxValues = new Dictionary<int, Dictionary<uint, BoxData>>();
m_ToReport = new List<BoxData>();

right = rotation * right;
up = rotation * up;
forward = rotation * forward;
corners[3] = corners[0] + doubleRight;
for (var i = 0; i < 4; i++)
{
corners[i + 4] = corners[i] + doubleForward;

protected override void OnBeginRendering()
{
m_CurrentFrame = Time.frameCount;
foreach (var entity in entities)
{
ProcessEntity(entityManager.GetComponentObject<Labeling>(entity));

if (!m_BoundingBoxValues.TryGetValue(frameCount, out var boxes))
return;
m_AsyncAnnotations.Remove(frameCount);
m_BoundingBoxValues.Remove(frameCount);

for (var i = 0; i < renderedObjectInfos.Length; i++)
{
var objectInfo = renderedObjectInfos[i];

m_ToReport.Add(box);
}
}
BoundingBoxComputed?.Invoke(frameCount, m_ToReport);
asyncAnnotation.ReportValues(m_ToReport);
}

if (idLabelConfig.TryGetLabelEntryFromInstanceId(labeledEntity.instanceId, out var labelEntry))
{
var entityGameObject = labeledEntity.gameObject;
var labelTransform = entityGameObject.transform;
var cameraTransform = perceptionCamera.transform;
var combinedBounds = new Bounds(Vector3.zero, Vector3.zero);

// they are axis-aligned with respect to the current component's coordinate space. In theory this
// can still produce loosely fitting bounds if the model is authored strangely, but garbage in, garbage out.
var meshBounds = mesh.mesh.bounds;
// Apply the transformations on this object until we reach the labeled transform
while (currentTransform != labelTransform)
{

// need to calculate all 8 corners of the bounds and combine them with the current combined
// bounds
var corners = GetBoxCorners(transformedBounds, transformedRotation);
// If this is the first time, create a new bounds struct
if (areBoundsUnset)
{

combinedBounds.Encapsulate(c2);
}
}
// Convert the combined bounds into world space
combinedBounds.center = labelTransform.TransformPoint(combinedBounds.center);
combinedBounds.extents = Vector3.Scale(combinedBounds.extents, labelTransform.localScale);

cameraCenter = Vector3.Scale(cameraTransform.localScale, cameraCenter);
m_BoundingBoxValues[m_CurrentFrame][labeledEntity.instanceId] = converted;
}
}

9
com.unity.perception/Runtime/GroundTruth/Labelers/BoundingBoxLabeler.cs


return;
m_AsyncAnnotations.Remove(frameCount);
using (s_BoundingBoxCallback.Auto())
{
m_BoundingBoxValues.Clear();

// The player screen can be resized dynamically during play, so the bounding boxes need to be
// scaled appropriately from the original screen size.
float screenRatioWidth = Screen.width / m_OriginalScreenSize.x;
float screenRatioHeight = Screen.height / m_OriginalScreenSize.y;
var screenRatioWidth = Screen.width / m_OriginalScreenSize.x;
var screenRatioHeight = Screen.height / m_OriginalScreenSize.y;
float x = box.x * screenRatioWidth;
float y = box.y * screenRatioHeight;
var x = box.x * screenRatioWidth;
var y = box.y * screenRatioHeight;
var boxRect = new Rect(x, y, box.width * screenRatioWidth, box.height * screenRatioHeight);
var labelWidth = Math.Min(120, box.width * screenRatioWidth);

7
com.unity.perception/Runtime/GroundTruth/Labelers/CameraLabeler.cs


public HUDPanel hudPanel => perceptionCamera != null ? perceptionCamera.hudPanel : null;
/// <summary>
/// The overlay panel. Used to control which full screen image visual is displayed.
/// </summary>
public OverlayPanel overlayPanel => perceptionCamera != null ? perceptionCamera.overlayPanel : null;
/// <summary>
/// The <see cref="PerceptionCamera"/> that contains this labeler.
/// </summary>
protected PerceptionCamera perceptionCamera { get; private set; }

internal void VisualizeUI()
{
if (supportsVisualization)
if (supportsVisualization && !(this is IOverlayPanelProvider))
{
GUILayout.Label(GetType().Name);
GUILayout.BeginHorizontal();

8
com.unity.perception/Runtime/GroundTruth/Labelers/RenderedObjectInfoLabeler.cs


[UsedImplicitly]
public uint instance_id;
[UsedImplicitly]
public Color32 instance_color;
[UsedImplicitly]
}
// ReSharper restore InconsistentNaming

// Clear out all of the old entries...
hudPanel.RemoveEntries(this);
}
for (var i = 0; i < renderedObjectInfos.Length; i++)
{
var objectInfo = renderedObjectInfos[i];

{
label_id = labelEntry.id,
instance_id = objectInfo.instanceId,
visible_pixels = objectInfo.pixelCount
visible_pixels = objectInfo.pixelCount,
instance_color = objectInfo.instanceColor
};
if (visualize)

103
com.unity.perception/Runtime/GroundTruth/Labelers/SemanticSegmentationLabeler.cs


/// Only one SemanticSegmentationLabeler can render at once across all cameras.
/// </summary>
[Serializable]
public sealed class SemanticSegmentationLabeler : CameraLabeler
public sealed class SemanticSegmentationLabeler : CameraLabeler, IOverlayPanelProvider
{
///<inheritdoc/>
public override string description

/// </summary>
public RenderTexture targetTexture => m_TargetTextureOverride;
/// <inheritdoc cref="IOverlayPanelProvider"/>
public Texture overlayImage=> targetTexture;
/// <inheritdoc cref="IOverlayPanelProvider"/>
public string label => "SemanticSegmentation";
[Tooltip("(Optional) The RenderTexture on which semantic segmentation images will be drawn. Will be reformatted on startup.")]
[SerializeField]
RenderTexture m_TargetTextureOverride;

#endif
Dictionary<int, AsyncAnnotation> m_AsyncAnnotations;
private float segmentTransparency = 0.8f;
private float backgroundTransparency = 0.0f;
/// <summary>
/// Creates a new SemanticSegmentationLabeler. Be sure to assign <see cref="labelConfig"/> before adding to a <see cref="PerceptionCamera"/>.

public string path;
}
int camWidth = 0;
int camHeight = 0;
private GameObject segCanvas;
private GameObject segVisual = null;
private RawImage segImage = null;
GUIStyle labelStyle = null;
GUIStyle sliderStyle = null;
/// <inheritdoc/>
protected override bool supportsVisualization => true;

var myCamera = perceptionCamera.GetComponent<Camera>();
camWidth = myCamera.pixelWidth;
camHeight = myCamera.pixelHeight;
var camWidth = myCamera.pixelWidth;
var camHeight = myCamera.pixelHeight;
if (labelConfig == null)
{

visualizationEnabled = supportsVisualization;
}
private void SetupVisualizationElements()
{
segmentTransparency = 0.8f;
backgroundTransparency = 0.0f;
segVisual = GameObject.Instantiate(Resources.Load<GameObject>("SegmentTexture"));
segImage = segVisual.GetComponent<RawImage>();
segImage.material.SetFloat("_SegmentTransparency", segmentTransparency);
segImage.material.SetFloat("_BackTransparency", backgroundTransparency);
segImage.texture = targetTexture;
var rt = segVisual.transform as RectTransform;
rt.SetSizeWithCurrentAnchors(RectTransform.Axis.Horizontal, camWidth);
rt.SetSizeWithCurrentAnchors(RectTransform.Axis.Vertical, camHeight);
if (segCanvas == null)
{
segCanvas = new GameObject(perceptionCamera.gameObject.name + "_segmentation_canvas");
segCanvas.AddComponent<RectTransform>();
var canvas = segCanvas.AddComponent<Canvas>();
canvas.renderMode = RenderMode.ScreenSpaceOverlay;
segCanvas.AddComponent<CanvasScaler>();
segVisual.transform.SetParent(segCanvas.transform, false);
}
labelStyle = new GUIStyle(GUI.skin.label) {padding = {left = 10}};
sliderStyle = new GUIStyle(GUI.skin.horizontalSlider) {margin = {left = 12}};
}
void OnSemanticSegmentationImageRead(int frameCount, NativeArray<Color32> data)
{
if (!m_AsyncAnnotations.TryGetValue(frameCount, out var annotation))

m_SemanticSegmentationTextureReader?.Dispose();
m_SemanticSegmentationTextureReader = null;
Object.Destroy(segCanvas);
segCanvas = null;
}
/// <inheritdoc/>
override protected void OnVisualizerEnabledChanged(bool enabled)
{
if (segVisual != null)
segVisual.SetActive(enabled);
}
/// <inheritdoc/>
protected override void OnVisualizeAdditionalUI()
{
if (segImage == null)
{
SetupVisualizationElements();
}
var rt = segVisual.transform as RectTransform;
if (rt != null && camHeight != Screen.height)
{
camHeight = Screen.height;
rt.SetSizeWithCurrentAnchors(RectTransform.Axis.Vertical, camHeight);
}
if (rt != null && camWidth != Screen.width)
{
camWidth = Screen.width;
rt.SetSizeWithCurrentAnchors(RectTransform.Axis.Horizontal, Screen.width);
}
GUILayout.Space(4);
GUILayout.Label("Object Alpha:", labelStyle);
segmentTransparency = GUILayout.HorizontalSlider(segmentTransparency, 0.0f, 1.0f, sliderStyle, GUI.skin.horizontalSliderThumb);
GUILayout.Space(4);
GUILayout.Label("Background Alpha:", labelStyle);
backgroundTransparency = GUILayout.HorizontalSlider(backgroundTransparency, 0.0f, 1.0f, sliderStyle, GUI.skin.horizontalSliderThumb);
GUI.skin.label.padding.left = 0;
if (!GUI.changed) return;
segImage.material.SetFloat("_SegmentTransparency", segmentTransparency);
segImage.material.SetFloat("_BackTransparency", backgroundTransparency);
}
}
}

1
com.unity.perception/Runtime/GroundTruth/Labelers/Visualization/Materials/SegmentationMaterial.mat


- _ReplaceColor: {r: 1, g: 1, b: 1, a: 0}
- _SoftParticleFadeParams: {r: 0, g: 0, b: 0, a: 0}
- _SpecColor: {r: 0.19999996, g: 0.19999996, b: 0.19999996, a: 1}
m_BuildTextureStacks: []
--- !u!114 &3089368426582508830
MonoBehaviour:
m_ObjectHideFlags: 11

3
com.unity.perception/Runtime/GroundTruth/Labeling/IdLabelConfig.cs


using System.Diagnostics.CodeAnalysis;
using System.Linq;
using JetBrains.Annotations;
using Newtonsoft.Json;
namespace UnityEngine.Perception.GroundTruth {
/// <summary>

public class IdLabelConfig : LabelConfig<IdLabelEntry>
{
/// <summary>
/// Whether the inspector will auto-assign ids based on the id of the first element.
/// </summary>

25
com.unity.perception/Runtime/GroundTruth/Labeling/LabelConfig.cs


using System;
using System.Collections.Generic;
using System.Linq;
using UnityEngine.Serialization;
namespace UnityEngine.Perception.GroundTruth

/// </summary>
public const string labelEntriesFieldName = nameof(m_LabelEntries);
/// <summary>
/// List of LabelEntry items added to this label configuration
/// </summary>
List<T> m_LabelEntries = new List<T>();
protected List<T> m_LabelEntries = new List<T>();
/// Name of the public accessor for the list of label entries, used for reflection purposes.
/// </summary>
public const string publicLabelEntriesFieldName = nameof(labelEntries);
/// <summary>
/// A sequence of <see cref="ILabelEntry"/> which defines the labels relevant for this configuration and their values.
/// </summary>
public IReadOnlyList<T> labelEntries => m_LabelEntries;

public bool TryGetMatchingConfigurationEntry(Labeling labeling, out T labelEntry)
{
return TryGetMatchingConfigurationEntry(labeling, out labelEntry, out int _);
}
/// <summary>
/// Name of the function that checks whether a given string matches any of the label entries in this label configuration, used for reflection purposes.
/// </summary>
public const string DoesLabelMatchAnEntryName = nameof(DoesLabelMatchAnEntry);
/// <summary>
/// Does the given string match any of the label entries added to this label configuration.
/// </summary>
/// <param name="label"></param>
/// <returns></returns>
public bool DoesLabelMatchAnEntry(string label)
{
return m_LabelEntries.Any(entry => string.Equals(entry.label, label));
}
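A minimal, hypothetical sketch of how this check might be used by calling code; 'labelConfig' and 'candidateLabel' are placeholder names:

// Skip labels that are not part of the active label configuration.
if (!labelConfig.DoesLabelMatchAnEntry(candidateLabel))
    return;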
/// <summary>

7
com.unity.perception/Runtime/GroundTruth/Labeling/LabelEntryMatchCache.cs


const int k_StartingObjectCount = 1 << 8;
NativeList<ushort> m_InstanceIdToLabelEntryIndexLookup;
IdLabelConfig m_IdLabelConfig;
ushort m_DefaultValue;
ushort m_DefaultValue = ushort.MaxValue;
public LabelEntryMatchCache(IdLabelConfig idLabelConfig)
{

{
if (m_IdLabelConfig.TryGetMatchingConfigurationEntry(labeling, out _, out var index))
{
m_DefaultValue = ushort.MaxValue;
Debug.Assert(index < m_DefaultValue, "Too many entries in the label config");
if (m_InstanceIdToLabelEntryIndexLookup.Length <= instanceId)
{

m_InstanceIdToLabelEntryIndexLookup[i] = m_DefaultValue;
}
m_InstanceIdToLabelEntryIndexLookup[(int)instanceId] = (ushort)index;
}
else if (m_InstanceIdToLabelEntryIndexLookup.Length > (int)instanceId)
{
m_InstanceIdToLabelEntryIndexLookup[(int)instanceId] = m_DefaultValue;
}
}

37
com.unity.perception/Runtime/GroundTruth/Labeling/Labeling.cs


using System;
using UnityEngine;
using UnityEditor;
using UnityEngine.Serialization;
namespace UnityEngine.Perception.GroundTruth

/// The label names to associate with the GameObject. Modifications to this list after the Update() step of the frame the object is created in are
/// not guaranteed to be reflected by labelers.
/// </summary>
[FormerlySerializedAs("classes")]
public List<string> labels = new List<string>();
[FormerlySerializedAs("classes")] public List<string> labels = new List<string>();
// /// <summary>
// /// A list for backing up the asset's manually added labels, so that if the user switches to auto labeling and back, the previously added labels can be revived
// /// </summary>
// public List<string> manualLabelsBackup = new List<string>();
/// <summary>
/// Whether this labeling component is currently using an automatic labeling scheme. When this is enabled, the asset can have only one label (the automatic one) and the user cannot add more labels.
/// </summary>
public bool useAutoLabeling;
/// <summary>
/// The specific subtype of AssetLabelingScheme that this component is using, if useAutoLabeling is enabled.
/// </summary>
public string autoLabelingSchemeType = string.Empty;
/// <summary>
/// The unique id of this labeling component instance

{
this.instanceId = instanceId;
}
void Awake()
{
m_Entity = World.DefaultGameObjectInjectionWorld.EntityManager.CreateEntity();

World.DefaultGameObjectInjectionWorld.EntityManager.DestroyEntity(m_Entity);
}
void Reset()
{
labels.Clear();
useAutoLabeling = false;
autoLabelingSchemeType = string.Empty;
#if UNITY_EDITOR
EditorUtility.SetDirty(gameObject);
#endif
}
/// <summary>
/// Refresh ground truth generation for the labeling of the attached GameObject. This is necessary when the
/// list of labels changes or when renderers or materials change on objects in the hierarchy.

World.DefaultGameObjectInjectionWorld.GetOrCreateSystem<GroundTruthLabelSetupSystem>().RefreshLabeling(m_Entity);
World.DefaultGameObjectInjectionWorld.GetOrCreateSystem<GroundTruthLabelSetupSystem>()
.RefreshLabeling(m_Entity);
}
}
}

13
com.unity.perception/Runtime/GroundTruth/Labeling/SemanticSegmentationLabelConfig.cs


using System;
using System.Collections.Generic;
namespace UnityEngine.Perception.GroundTruth {
/// <summary>

public class SemanticSegmentationLabelConfig : LabelConfig<SemanticSegmentationLabelEntry>
{
/// <summary>
/// List of standard colors from which this type of label configuration assigns colors to newly added labels.
/// </summary>
public static readonly List<Color> s_StandardColors = new List<Color>()
{
Color.blue,
Color.green,
Color.red,
Color.white,
Color.yellow,
Color.gray
};
}
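As a hedged sketch of how this list is typically consumed (mirroring the editor logic shown earlier in this diff): pick the first standard color not already used, otherwise fall back to a random color. The helper class below is hypothetical and not part of this change:

using System.Collections.Generic;
using UnityEngine;
using UnityEngine.Perception.GroundTruth;

static class SegmentationColorPicker
{
    // 'usedColors' is a placeholder for the colors already present in a label configuration.
    public static Color PickNextColor(ICollection<Color> usedColors)
    {
        foreach (var color in SemanticSegmentationLabelConfig.s_StandardColors)
        {
            if (!usedColors.Contains(color))
                return color;
        }
        // All standard colors are taken: fall back to a random, reasonably saturated color.
        return Random.ColorHSV(0, 1, .5f, 1, 1, 1);
    }
}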
/// <summary>

13
com.unity.perception/Runtime/GroundTruth/PerceptionCamera.cs


[RequireComponent(typeof(Camera))]
public partial class PerceptionCamera : MonoBehaviour
{
//TODO: Remove the Guid path when we have proper dataset merging in USim/Thea
//TODO: Remove the Guid path when we have proper dataset merging in Unity Simulation and Thea
internal static string RgbDirectory { get; } = $"RGB{Guid.NewGuid()}";
static string s_RgbFilePrefix = "rgb_";

}
internal HUDPanel hudPanel = null;
internal OverlayPanel overlayPanel = null;
void SetupVisualizationCamera(Camera cam)
{

s_VisualizedPerceptionCamera = this;
hudPanel = gameObject.AddComponent<HUDPanel>();
overlayPanel = gameObject.AddComponent<OverlayPanel>();
overlayPanel.perceptionCamera = this;
#endif
}

GUI.skin.label.padding = new RectOffset(0, 0, 1, 1);
GUI.skin.label.margin = new RectOffset(0, 0, 1, 1);
GUI.skin.label.wordWrap = true;
GUI.skin.label.alignment = TextAnchor.MiddleLeft;
GUI.skin.box.padding = new RectOffset(5, 5, 5, 5);
GUI.skin.toggle.margin = new RectOffset(0, 0, 0, 0);
GUI.skin.horizontalSlider.margin = new RectOffset(0, 0, 0, 0);

foreach (var labeler in m_Labelers.Where(labeler => labeler.isInitialized))
{
labeler.VisualizeUI();
GUILayout.Space(4);
// This needs to happen here so that the overlay panel controls
// are placed in the controls panel
overlayPanel.OnDrawGUI(x, 10, panelWidth, height);
}
void OnValidate()

7
com.unity.perception/Runtime/GroundTruth/PerceptionCamera_InstanceSegmentation.cs


using System;
using System.Collections.Generic;
using Unity.Collections;
using UnityEngine;
using UnityEngine.Experimental.Rendering;

/// Invoked when instance segmentation images are read back from the graphics system. The first parameter is the
/// Time.frameCount at which the objects were rendered. May be invoked many frames after the objects were rendered.
/// </summary>
public event Action<int, NativeArray<uint>, RenderTexture> InstanceSegmentationImageReadback;
public event Action<int, NativeArray<Color32>, RenderTexture> InstanceSegmentationImageReadback;
/// <summary>
/// Invoked when RenderedObjectInfos are calculated. The first parameter is the Time.frameCount at which the

RenderedObjectInfoGenerator m_RenderedObjectInfoGenerator;
RenderTexture m_InstanceSegmentationTexture;
RenderTextureReader<uint> m_InstanceSegmentationReader;
RenderTextureReader<Color32> m_InstanceSegmentationReader;
internal bool m_fLensDistortionEnabled = false;

m_fLensDistortionEnabled = true;
#endif
m_InstanceSegmentationReader = new RenderTextureReader<uint>(m_InstanceSegmentationTexture, myCamera, (frameCount, data, tex) =>
m_InstanceSegmentationReader = new RenderTextureReader<Color32>(m_InstanceSegmentationTexture, myCamera, (frameCount, data, tex) =>
{
InstanceSegmentationImageReadback?.Invoke(frameCount, data, tex);
if (RenderedObjectInfosCalculated != null)

6
com.unity.perception/Runtime/GroundTruth/RenderedObjectInfo.cs


/// The number of pixels in the image matching this instance.
/// </summary>
public int pixelCount;
/// <summary>
/// The unique RGBA color for the instance.
/// </summary>
public Color32 instanceColor;
return $"{nameof(instanceId)}: {instanceId}, {nameof(boundingBox)}: {boundingBox}, {nameof(pixelCount)}: {pixelCount}";
return $"{nameof(instanceId)}: {instanceId}, {nameof(boundingBox)}: {boundingBox}, {nameof(pixelCount)}: {pixelCount}, {nameof(instanceColor)}: {instanceColor}";
}
/// <inheritdoc />

48
com.unity.perception/Runtime/GroundTruth/RenderedObjectInfoGenerator.cs


struct ComputeHistogramPerRowJob : IJob
{
[ReadOnly]
public NativeSlice<uint> segmentationImageData;
public NativeSlice<Color32> segmentationImageData;
public int width;
public int rows;
public int rowStart;

{
for (var row = 0; row < rows; row++)
{
var rowSlice = new NativeSlice<uint>(segmentationImageData, width * row, width);
var rowSlice = new NativeSlice<Color32>(segmentationImageData, width * row, width);
var currentBB = new Object1DSpan
{

for (var i = 0; i < rowSlice.Length; i++)
{
var value = rowSlice[i];
var packed = InstanceIdToColorMapping.GetPackedColorFromColor(rowSlice[i]);
// pixel color black (0,0,0,255) is reserved for no hit, so set it to id 0
var id = packed == 255 ? 0 : packed;
if (value != currentBB.instanceId)
if (id != currentBB.instanceId)
{
if (currentBB.instanceId > 0)
{

currentBB = new Object1DSpan
{
instanceId = value,
instanceId = id,
left = i,
row = row + rowStart
};

/// <param name="boundingBoxOrigin">Whether bounding boxes should be top-left or bottom-right-based.</param>
/// <param name="renderedObjectInfos">When this method returns, filled with RenderedObjectInfo entries for each object visible in the frame.</param>
/// <param name="allocator">The allocator to use for allocating renderedObjectInfos and perLabelEntryObjectCount.</param>
public void Compute(NativeArray<uint> instanceSegmentationRawData, int stride, BoundingBoxOrigin boundingBoxOrigin, out NativeArray<RenderedObjectInfo> renderedObjectInfos, Allocator allocator)
public void Compute(NativeArray<Color32> instanceSegmentationRawData, int stride, BoundingBoxOrigin boundingBoxOrigin, out NativeArray<RenderedObjectInfo> renderedObjectInfos, Allocator allocator)
{
const int jobCount = 24;
var height = instanceSegmentationRawData.Length / stride;

handles[jobIndex] = new ComputeHistogramPerRowJob
{
segmentationImageData = new NativeSlice<uint>(instanceSegmentationRawData, row * stride, stride * rowsThisJob),
segmentationImageData = new NativeSlice<Color32>(instanceSegmentationRawData, row * stride, stride * rowsThisJob),
width = stride,
rowStart = row,
rows = rowsThisJob,

renderedObjectInfos = new NativeArray<RenderedObjectInfo>(keyValueArrays.Keys.Length, allocator);
for (var i = 0; i < keyValueArrays.Keys.Length; i++)
{
var instanceId = keyValueArrays.Keys[i];
var renderedObjectInfo = keyValueArrays.Values[i];
var boundingBox = renderedObjectInfo.boundingBox;
if (boundingBoxOrigin == BoundingBoxOrigin.TopLeft)
var color = InstanceIdToColorMapping.GetColorFromPackedColor(keyValueArrays.Keys[i]);
if (InstanceIdToColorMapping.TryGetInstanceIdFromColor(color, out var instanceId))
var y = height - boundingBox.yMax;
boundingBox = new Rect(boundingBox.x, y, boundingBox.width, boundingBox.height);
var renderedObjectInfo = keyValueArrays.Values[i];
var boundingBox = renderedObjectInfo.boundingBox;
if (boundingBoxOrigin == BoundingBoxOrigin.TopLeft)
{
var y = height - boundingBox.yMax;
boundingBox = new Rect(boundingBox.x, y, boundingBox.width, boundingBox.height);
}
renderedObjectInfos[i] = new RenderedObjectInfo
{
instanceId = instanceId,
boundingBox = boundingBox,
pixelCount = renderedObjectInfo.pixelCount,
instanceColor = color
};
renderedObjectInfos[i] = new RenderedObjectInfo
else
instanceId = instanceId,
boundingBox = boundingBox,
pixelCount = renderedObjectInfo.pixelCount
};
Debug.LogError($"Could not generate instance ID for object, ID exceeded maximum ID");
}
}
keyValueArrays.Dispose();
}

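Editor's note: a hedged usage sketch of the Color32-based Compute signature shown above. The pixel buffer and row stride are assumptions supplied by the caller (for example, from the readback sketch earlier).

using Unity.Collections;
using UnityEngine;
using UnityEngine.Perception.GroundTruth;

static class ObjectInfoSketch
{
    // Editor's sketch, not part of the diff. `pixels` is assumed to hold a Color32
    // instance-segmentation readback and `width` its row stride.
    public static void LogObjectInfos(NativeArray<Color32> pixels, int width)
    {
        var generator = new RenderedObjectInfoGenerator();
        generator.Compute(pixels, width, BoundingBoxOrigin.TopLeft, out var objectInfos, Allocator.Temp);
        foreach (var info in objectInfos)
            Debug.Log(info); // RenderedObjectInfo.ToString() now reports instanceColor as well
        objectInfos.Dispose();
    }
}
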
11
com.unity.perception/Runtime/GroundTruth/Resources/InstanceSegmentation.shader


{
Properties
{
[PerObjectData] _SegmentationId("Segmentation ID", int) = 0
[PerObjectData] _SegmentationId("Segmentation ID", vector) = (0,0,0,1)
}
SubShader
{

float4 vertex : SV_POSITION;
};
uint _SegmentationId;
float4 _SegmentationId;
v2f vert (appdata v)
{

fixed4 frag (v2f i) : SV_Target
{
return fixed4(
UnpackUIntToFloat((uint)_SegmentationId, 0, 8),
UnpackUIntToFloat((uint)_SegmentationId, 8, 8),
UnpackUIntToFloat((uint)_SegmentationId, 16, 8),
UnpackUIntToFloat((uint)_SegmentationId, 24, 8)
);
return _SegmentationId;
}
ENDCG
}

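Editor's note: the shader now outputs the per-object color directly instead of unpacking a uint id. Purely as a hypothetical illustration of supplying a color property with this name to a renderer; the Perception render passes set this through their own per-object-data machinery, not necessarily like this.

using UnityEngine;
using UnityEngine.Perception.GroundTruth;

// Hypothetical illustration only; not how the package's render pass is implemented.
public class SegmentationColorIllustration : MonoBehaviour
{
    void Start()
    {
        // Ask the id-to-color mapping used elsewhere in this diff for the color of instance id 1
        InstanceIdToColorMapping.TryGetColorFromInstanceId(1, out var color);
        var block = new MaterialPropertyBlock();
        block.SetColor("_SegmentationId", color);
        GetComponent<Renderer>().SetPropertyBlock(block);
    }
}
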
21
com.unity.perception/Runtime/GroundTruth/SimulationState.cs


string m_OutputDirectoryPath;
JsonSerializer m_AnnotationSerializer;
public bool IsRunning { get; private set; }
public string OutputDirectory

m_AnnotationSerializer = JsonSerializer.CreateDefault();
m_AnnotationSerializer.Converters.Add(new Vector3Converter());
m_AnnotationSerializer.Converters.Add(new QuaternionConverter());
m_OutputDirectoryName = outputDirectory;
IsRunning = true;
}

{
var q = Quaternion.identity;
reader.Read(); // open [ token
q.x = (float)reader.ReadAsDecimal();
q.x = (float)reader.ReadAsDecimal();
q.y = (float)reader.ReadAsDecimal();
q.z = (float)reader.ReadAsDecimal();
q.w = (float)reader.ReadAsDecimal();

}
[SuppressMessage("ReSharper", "PossibleInvalidOperationException")]
public class Vector3Converter : JsonConverter<Vector3>
{

return outVector;
}
}
var jArray = values == null ? null : JArray.FromObject(values, m_AnnotationSerializer);
JArray jArray = null;
if (values != null)
{
jArray = new JArray();
foreach (var value in values)
{
if (value != null)
jArray.Add(new JRaw(DatasetJsonUtility.ToJToken(value)));
}
}
ReportAsyncAnnotationResult<T>(asyncAnnotation, filename, jArray);
}

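Editor's note: a small hedged sketch exercising the converters registered above. JsonSerializer, JToken, and Formatting come from Newtonsoft.Json; the sketch assumes Vector3Converter and QuaternionConverter (shown above) are accessible from the importing namespace.

using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
using UnityEngine;

static class ConverterSketch
{
    // Editor's sketch, not part of the diff: serializing a Quaternion with the configured serializer.
    public static void Demo()
    {
        var serializer = JsonSerializer.CreateDefault();
        serializer.Converters.Add(new Vector3Converter());
        serializer.Converters.Add(new QuaternionConverter());

        var token = JToken.FromObject(Quaternion.Euler(0f, 90f, 0f), serializer);
        Debug.Log(token.ToString(Formatting.None)); // expected to be a four-element [x, y, z, w] array
    }
}
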
2
com.unity.perception/Runtime/Randomization/Parameters/CategoricalParameter.cs


get
{
var catOptions = new List<(T, float)>(m_Categories.Count);
for (var i = 0; i < catOptions.Count; i++)
for (var i = 0; i < m_Categories.Count; i++)
catOptions.Add((m_Categories[i], probabilities[i]));
return catOptions;
}

14
com.unity.perception/Runtime/Randomization/Parameters/ParameterTypes/NumericParameters/ColorParameters.meta


fileFormatVersion: 2
<<<<<<< HEAD
fileFormatVersion: 2
timeCreated: 1600754588
=======
guid: ce91e289cdaa4ccc849a0c287aefd34d
timeCreated: 1598326361
>>>>>>> 50f2c39... Added xml documentation
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

14
com.unity.perception/Runtime/Randomization/Randomizers.meta


fileFormatVersion: 2
<<<<<<< HEAD
fileFormatVersion: 2
timeCreated: 1600754588
=======
guid: ae6aad06c0e14f67aa7a9ad9004a1828
timeCreated: 1600274594
>>>>>>> c653d18... Implemented randomizer class. Ran into SerializeReference issue 1193322.
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

2
com.unity.perception/Runtime/Randomization/Randomizers/RandomizerExamples/Randomizers/BackgroundObjectPlacementRandomizer.cs


for (var i = 0; i < layerCount; i++)
{
var seed = scenario.GenerateIterativeRandomSeed(i);
var seed = scenario.GenerateRandomSeedFromIndex(i);
var placementSamples = PoissonDiskSampling.GenerateSamples(
placementArea.x, placementArea.y, separationDistance, seed);
var offset = new Vector3(placementArea.x, placementArea.y, 0f) * -0.5f;

5
com.unity.perception/Runtime/Randomization/Samplers/SamplerTypes/NormalSampler.cs


[Serializable]
public struct NormalSampler : ISampler
{
Unity.Mathematics.Random m_Random;
[SerializeField, HideInInspector] Unity.Mathematics.Random m_Random;
/// <summary>
/// The mean of the normal distribution to sample from

/// <summary>
/// The base seed used to initialize this sampler's state
/// </summary>
[field: SerializeField]
public uint baseSeed { get; set; }
[field: SerializeField] public uint baseSeed { get; set; }
/// <summary>
/// The current random state of this sampler

5
com.unity.perception/Runtime/Randomization/Samplers/SamplerTypes/UniformSampler.cs


[Serializable]
public struct UniformSampler : ISampler
{
Unity.Mathematics.Random m_Random;
[SerializeField, HideInInspector] Unity.Mathematics.Random m_Random;
[field: SerializeField]
public uint baseSeed { get; set; }
[field: SerializeField] public uint baseSeed { get; set; }
/// <summary>
/// The current random state of this sampler

4
com.unity.perception/Runtime/Randomization/Scenarios/FixedLengthScenario.cs


/// A scenario that runs for a fixed number of frames during each iteration
/// </summary>
[AddComponentMenu("Perception/Randomization/Scenarios/Fixed Length Scenario")]
public class FixedLengthScenario: USimScenario<FixedLengthScenario.Constants>
public class FixedLengthScenario: UnitySimulationScenario<FixedLengthScenario.Constants>
public class Constants : USimConstants
public class Constants : UnitySimulationConstants
{
/// <summary>
/// The number of frames to generate per iteration

14
com.unity.perception/Runtime/Randomization/Scenarios/ScenarioBase.cs


foreach (var randomizer in m_Randomizers)
randomizer.Create();
ValidateParameters();
// Don't skip the first frame if executing on Unity Simulation
if (Configuration.Instance.IsSimulationRunningInCloud())
m_SkipFrame = false;
}
void OnEnable()

}
/// <summary>
/// Generates a random seed by hashing three values together: an arbitrary iteration value,
/// the current scenario iteration, and a base random seed
/// Generates a random seed by hashing three values together: an arbitrary index value,
/// the current scenario iteration, and a base random seed. This method is useful for deterministically
/// generating random seeds from within a for-loop.
public uint GenerateIterativeRandomSeed(int iteration, uint baseSeed = SamplerUtility.largePrime)
public uint GenerateRandomSeedFromIndex(int iteration, uint baseSeed = SamplerUtility.largePrime)
return SamplerUtility.IterateSeed((uint)iteration, baseSeed);
var seed = SamplerUtility.IterateSeed((uint)iteration, baseSeed);
return SamplerUtility.IterateSeed((uint)currentIteration, seed);
}
void ValidateParameters()

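Editor's note: a hedged sketch of the for-loop use case the renamed method's summary describes. The scenario reference and the enclosing class are assumptions; the randomization namespace using directive is omitted because it is not visible in this hunk.

using UnityEngine;

static class SeedSketch
{
    // Editor's sketch, not part of the diff; `scenario` is assumed to be the active scenario.
    public static void LogSeeds(ScenarioBase scenario)
    {
        for (var i = 0; i < 3; i++)
        {
            // The same (index, current iteration, base seed) triple always hashes to the same seed;
            // a different index or a different scenario iteration yields a different seed.
            var seed = scenario.GenerateRandomSeedFromIndex(i);
            Debug.Log($"index {i}: seed {seed}");
        }
    }
}
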
12
com.unity.perception/Tests/Runtime/GroundTruthTests/DatasetCaptureTests.cs


""annotation_definition"": <guid>,
""values"": [
{{
""a"": ""a string"",
""b"": 10
}},
""a"": ""a string"",
""b"": 10
}},
""a"": ""a second string"",
""b"": 20
}}
""a"": ""a second string"",
""b"": 20
}}
]
}}
]";

41
com.unity.perception/Tests/Runtime/GroundTruthTests/LabelEntryMatchCacheTests.cs


using System.Collections;
using NUnit.Framework;
using Unity.Entities;
using UnityEngine;
using UnityEngine.Perception.GroundTruth;
using UnityEngine.TestTools;

//allow label to be registered
yield return null;
Assert.IsFalse(cache.TryGetLabelEntryFromInstanceId(labeledPlane.GetComponent<Labeling>().instanceId, out var labelEntry, out var index));
Assert.AreEqual(-1, index);
Assert.AreEqual(default(IdLabelEntry), labelEntry);
}
}
[UnityTest]
public IEnumerator TryGet_ReturnsFalse_ForNonMatchingLabel_WhenAllObjectsAreDestroyedAndNewOnesAreCreated()
{
//only way to guarantee registration order is to run frames.
var labeledPlane = TestHelper.CreateLabeledPlane(label: "foo");
var config = ScriptableObject.CreateInstance<IdLabelConfig>();
config.Init(new[]
{
new IdLabelEntry()
{
id = 1,
label = "foo"
},
});
using (var cache = new LabelEntryMatchCache(config))
{
//allow label to be registered
yield return null;
//delete all labeled objects and run a frame so that instance ids of labeled entities reset
DestroyTestObject(labeledPlane);
yield return null;
//this new object has a label that is not included in our label config
var labeledPlane2 = TestHelper.CreateLabeledPlane(label: "bar");
AddTestObjectForCleanup(labeledPlane2);
//let labeledPlane2 be assigned a recycled instance id (1) previously belonging to labeledPlane
yield return null;
Assert.IsFalse(cache.TryGetLabelEntryFromInstanceId(labeledPlane2.GetComponent<Labeling>().instanceId, out var labelEntry, out var index));
Assert.AreEqual(-1, index);
Assert.AreEqual(default(IdLabelEntry), labelEntry);
}

71
com.unity.perception/Tests/Runtime/GroundTruthTests/RenderedObjectInfoTests.cs


public class ProducesCorrectObjectInfoData
{
public RenderedObjectInfo[] renderedObjectInfosExpected;
public uint[] data;
public Color32[] data;
public ProducesCorrectObjectInfoData(uint[] data, RenderedObjectInfo[] renderedObjectInfosExpected, int stride, BoundingBoxOrigin boundingBoxOrigin, string name)
public ProducesCorrectObjectInfoData(Color32[] data, RenderedObjectInfo[] renderedObjectInfosExpected, int stride, BoundingBoxOrigin boundingBoxOrigin, string name)
{
this.data = data;
this.renderedObjectInfosExpected = renderedObjectInfosExpected;

}
public static IEnumerable ProducesCorrectBoundingBoxesTestCases()
{
InstanceIdToColorMapping.TryGetColorFromInstanceId(1, out var color1);
InstanceIdToColorMapping.TryGetColorFromInstanceId(2, out var color2);
var empty = Color.black;
new uint[]
new Color32[]
1, 1,
1, 1
color1, color1,
color1, color1
}, new[]
{
new RenderedObjectInfo()

pixelCount = 4
pixelCount = 4,
instanceColor = color1
}
},
2,

new uint[]
new Color32[]
1, 0, 2,
1, 0, 0
color1, empty, color2,
color1, empty, empty
boundingBox = new Rect(0, 0, 1, 2),
instanceId = 1,
pixelCount = 2
boundingBox = new Rect(2, 0, 1, 1),
instanceId = 2,
pixelCount = 1,
instanceColor = color2
boundingBox = new Rect(2, 0, 1, 1),
instanceId = 2,
pixelCount = 1
boundingBox = new Rect(0, 0, 1, 2),
instanceId = 1,
pixelCount = 2,
instanceColor = color1
}
},
3,

new uint[]
new Color32[]
1, 2, 1,
1, 2, 1
color1, color2, color1,
color1, color2, color1
boundingBox = new Rect(0, 0, 3, 2),
instanceId = 1,
pixelCount = 4
boundingBox = new Rect(1, 0, 1, 2),
instanceId = 2,
pixelCount = 2,
instanceColor = color2
boundingBox = new Rect(1, 0, 1, 2),
instanceId = 2,
pixelCount = 2
boundingBox = new Rect(0, 0, 3, 2),
instanceId = 1,
pixelCount = 4,
instanceColor = color1
}
},
3,

new uint[]
new Color32[]
0, 0,
0, 0,
0, 1
empty, empty,
empty, empty,
empty, color1
}, new[]
{
new RenderedObjectInfo()

pixelCount = 1
pixelCount = 1,
instanceColor = color1
},
},
2,

AddTestObjectForCleanup(TestHelper.CreateLabeledPlane(.1f, label2));
yield return null;
var dataNativeArray = new NativeArray<uint>(producesCorrectObjectInfoData.data, Allocator.Persistent);
var dataNativeArray = new NativeArray<Color32>(producesCorrectObjectInfoData.data, Allocator.Persistent);
renderedObjectInfoGenerator.Compute(dataNativeArray, producesCorrectObjectInfoData.stride, producesCorrectObjectInfoData.boundingBoxOrigin, out var boundingBoxes, Allocator.Temp);

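Editor's note: the test data above is built from InstanceIdToColorMapping. Below is a short hedged round-trip sketch combining the mapping calls that appear in this change set.

using UnityEngine;
using UnityEngine.Perception.GroundTruth;

static class ColorMappingSketch
{
    // Editor's sketch, not part of the diff: id -> color -> packed -> color -> id round trip.
    public static void RoundTrip()
    {
        InstanceIdToColorMapping.TryGetColorFromInstanceId(1, out var color);
        var packed = InstanceIdToColorMapping.GetPackedColorFromColor(color);
        var unpacked = InstanceIdToColorMapping.GetColorFromPackedColor(packed);
        InstanceIdToColorMapping.TryGetInstanceIdFromColor(unpacked, out var instanceId);
        Debug.Assert(instanceId == 1); // expected to recover the original id
    }
}
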
97
com.unity.perception/Tests/Runtime/GroundTruthTests/SegmentationGroundTruthTests.cs


{
public RenderTexture source;
public Camera cameraSource;
RenderTextureReader<uint> m_Reader;
RenderTextureReader<Color32> m_Reader;
public event Action<int, NativeArray<uint>> SegmentationImageReceived;
public event Action<int, NativeArray<Color32>> SegmentationImageReceived;
m_Reader = new RenderTextureReader<uint>(source, cameraSource, ImageReadCallback);
m_Reader = new RenderTextureReader<Color32>(source, cameraSource, ImageReadCallback);
void ImageReadCallback(int frameCount, NativeArray<uint> data, RenderTexture renderTexture)
void ImageReadCallback(int frameCount, NativeArray<Color32> data, RenderTexture renderTexture)
{
if (SegmentationImageReceived != null)
SegmentationImageReceived(frameCount, data);

}
}
public enum RendererType
{
MeshRenderer,
SkinnedMeshRenderer,
Terrain
}
//Graphics issues with OpenGL Linux Editor. https://jira.unity3d.com/browse/AISV-422
[UnityPlatform(exclude = new[] {RuntimePlatform.LinuxEditor, RuntimePlatform.LinuxPlayer})]
public class SegmentationPassTests : GroundTruthTestBase

// `yield return null;` to skip a frame.
[UnityTest]
public IEnumerator SegmentationPassTestsWithEnumeratorPasses(
[Values(false, true)] bool useSkinnedMeshRenderer,
[Values(RendererType.MeshRenderer, RendererType.SkinnedMeshRenderer, RendererType.Terrain)] RendererType rendererType,
[Values(SegmentationKind.Instance, SegmentationKind.Semantic)] SegmentationKind segmentationKind)
{
int timesSegmentationImageReceived = 0;

switch (segmentationKind)
{
case SegmentationKind.Instance:
expectedPixelValue = 1;
//expectedPixelValue = new Color32(0, 74, 255, 255);
expectedPixelValue = new Color32(255,0,0, 255);
cameraObject = SetupCameraInstanceSegmentation(OnSegmentationImageReceived);
break;
case SegmentationKind.Semantic:

}
//
// // Arbitrary wait for 5 frames for shaders to load. Workaround for issue with Shader.WarmupAllShaders()
// for (int i=0 ; i<5 ; ++i)
// yield return new WaitForSeconds(1);
frameStart = Time.frameCount;
var planeObject = GameObject.CreatePrimitive(PrimitiveType.Plane);
if (useSkinnedMeshRenderer)
GameObject planeObject;
if (rendererType == RendererType.Terrain)
var oldObject = planeObject;
planeObject = new GameObject();
var terrainData = new TerrainData();
AddTestObjectForCleanup(terrainData);
//look down because terrains cannot be rotated
cameraObject.transform.rotation = Quaternion.LookRotation(Vector3.down, Vector3.forward);
planeObject = Terrain.CreateTerrainGameObject(terrainData);
planeObject.transform.SetPositionAndRotation(new Vector3(-10, -10, -10), Quaternion.identity);
}
else
{
planeObject = GameObject.CreatePrimitive(PrimitiveType.Plane);
if (rendererType == RendererType.SkinnedMeshRenderer)
{
var oldObject = planeObject;
planeObject = new GameObject();
var meshFilter = oldObject.GetComponent<MeshFilter>();
var meshRenderer = oldObject.GetComponent<MeshRenderer>();
var skinnedMeshRenderer = planeObject.AddComponent<SkinnedMeshRenderer>();
skinnedMeshRenderer.sharedMesh = meshFilter.sharedMesh;
skinnedMeshRenderer.material = meshRenderer.material;
var meshFilter = oldObject.GetComponent<MeshFilter>();
var meshRenderer = oldObject.GetComponent<MeshRenderer>();
var skinnedMeshRenderer = planeObject.AddComponent<SkinnedMeshRenderer>();
skinnedMeshRenderer.sharedMesh = meshFilter.sharedMesh;
skinnedMeshRenderer.material = meshRenderer.material;
Object.DestroyImmediate(oldObject);
Object.DestroyImmediate(oldObject);
}
planeObject.transform.SetPositionAndRotation(new Vector3(0, 0, 10), Quaternion.Euler(90, 0, 0));
planeObject.transform.localScale = new Vector3(10, -1, 10);
planeObject.transform.SetPositionAndRotation(new Vector3(0, 0, 10), Quaternion.Euler(90, 0, 0));
planeObject.transform.localScale = new Vector3(10, -1, 10);
frameStart = Time.frameCount;
AddTestObjectForCleanup(planeObject);

{
GameObject cameraObject = null;
PerceptionCamera perceptionCamera;
bool fLensDistortionEnabled = false;
bool fDone = false;
int frames = 0;
var dataBBox = new uint[]
var fLensDistortionEnabled = false;
var fDone = false;
var frames = 0;
#if false
var dataBBox = new Color32[]
1, 1,
1, 1
Color.blue, Color.blue,
Color.blue, Color.blue
#endif
Rect boundingBoxWithoutLensDistortion = new Rect();
Rect boundingBoxWithLensDistortion = new Rect();
var boundingBoxWithoutLensDistortion = new Rect();
var boundingBoxWithLensDistortion = new Rect();
void OnSegmentationImageReceived(int frameCount, NativeArray<uint> data, RenderTexture tex)
void OnSegmentationImageReceived(int frameCount, NativeArray<Color32> data, RenderTexture tex)
{
frames++;

else
{
var renderedObjectInfoGenerator = new RenderedObjectInfoGenerator();
renderedObjectInfoGenerator.Compute(data, tex.width, BoundingBoxOrigin.TopLeft, out var boundingBoxes, Allocator.Temp);
boundingBoxWithLensDistortion = boundingBoxes[0].boundingBox;

}
var cameraObject = segmentationKind == SegmentationKind.Instance ?
SetupCameraInstanceSegmentation(OnSegmentationImageReceived<uint>) :
SetupCameraInstanceSegmentation(OnSegmentationImageReceived<Color32>) :
object expectedPixelValue = segmentationKind == SegmentationKind.Instance ? (object) 1 : k_SemanticPixelValue;
//object expectedPixelValue = segmentationKind == SegmentationKind.Instance ? (object) new Color32(0, 74, 255, 255) : k_SemanticPixelValue;
object expectedPixelValue = segmentationKind == SegmentationKind.Instance ? (object) new Color32(255, 0, 0, 255) : k_SemanticPixelValue;
expectedLabelAtFrame = new Dictionary<int, object>
{
{Time.frameCount , expectedPixelValue},

Assert.AreEqual(3, timesSegmentationImageReceived);
}
GameObject SetupCameraInstanceSegmentation(Action<int, NativeArray<uint>, RenderTexture> onSegmentationImageReceived)
GameObject SetupCameraInstanceSegmentation(Action<int, NativeArray<Color32>, RenderTexture> onSegmentationImageReceived)
{
var cameraObject = SetupCamera(out var perceptionCamera, false);
perceptionCamera.InstanceSegmentationImageReadback += onSegmentationImageReceived;

8
com.unity.perception/Tests/Runtime/GroundTruthTests/VisualizationTests.cs


// Need to wait to make sure a visualization call is made so that the canvas will be constructed
yield return null;
Assert.IsNotNull(GameObject.Find(nameof(VisualizedCamera_SetsUpCanvas) + "_segmentation_canvas"));
Assert.IsNotNull(GameObject.Find("overlay_canvas"));
}
[Test]
public void TwoCamerasVisualizing_CausesWarningAndDisablesVisualization()

AddTestObjectForCleanup(object1);
//wait a frame to make sure visualize is called once
yield return null;
Assert.IsNotNull(GameObject.Find(nameof(DestroyCamera_RemovesVisualization) + "_segmentation_canvas"));
Assert.IsNotNull(GameObject.Find("overlay_canvas"));
Assert.IsNull(GameObject.Find(nameof(DestroyCamera_RemovesVisualization) + "_segmentation_canvas"));
Assert.IsNull(GameObject.Find("overlay_segmentation_canvas"));
}
[UnityTest]
public IEnumerator DestroyAndRecreateCamera_ProperlyVisualizes()

//wait a frame to make sure visualize is called once
yield return null;
Assert.IsNotNull(GameObject.Find(nameof(DestroyAndRecreateCamera_ProperlyVisualizes) + "2_segmentation_canvas"));
Assert.IsNotNull("overlay_canvas");
}
[UnityTest]

14
com.unity.perception/Tests/Runtime/Randomization/RandomizerTests.meta


fileFormatVersion: 2
<<<<<<< HEAD
fileFormatVersion: 2
timeCreated: 1600754588
=======
guid: f9e02c502b7845229d26d377a0d871f1
timeCreated: 1600744200
>>>>>>> cb407fb... added randomizer tests
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

15
com.unity.perception/Tests/Runtime/Randomization/ScenarioTests.cs


Assert.AreEqual(DatasetCapture.SimulationState.SequenceTime, 0);
}
[UnityTest]
public IEnumerator GeneratedRandomSeedsChangeWithScenarioIteration()
{
yield return CreateNewScenario(3, 1);
var seed = m_Scenario.GenerateRandomSeed();
var seeds = new uint[3];
for (var i = 0; i < 3; i++)
seeds[i] = m_Scenario.GenerateRandomSeedFromIndex(i);
yield return null;
Assert.AreNotEqual(seed, m_Scenario.GenerateRandomSeed());
for (var i = 0; i < 3; i++)
Assert.AreNotEqual(seeds[i], m_Scenario.GenerateRandomSeedFromIndex(i));
}
PerceptionCamera SetupPerceptionCamera()
{
m_TestObject.SetActive(false);

7
com.unity.perception/package.json


"dependencies": {
"com.unity.nuget.newtonsoft-json": "1.1.2",
"com.unity.render-pipelines.core": "7.1.6",
"com.unity.burst": "1.3.9",
"com.unity.simulation.capture": "0.0.10-preview.13",
"com.unity.simulation.core": "0.0.10-preview.19"
"com.unity.simulation.capture": "0.0.10-preview.14",
"com.unity.simulation.core": "0.0.10-preview.20"
"version": "0.5.0-preview.1",
"version": "0.6.0-preview.1",
"samples":[
{
"displayName": "Tutorial Files",

Some files were not shown because too many files changed in this diff.
