
Revert "Merge from master"

This reverts commit c0828e485298b9f68da2fa99db015d21dcd904cf.
/schema-0.1.0
You-Cyuan Jhang, 4 years ago
Current commit 66a2d04d
305 files changed, with 3,604 insertions and 4,538 deletions
  1. 17  .github/pull_request_template.md
  2. 6  .gitignore
  3. 143  .yamato/promotion.yml
  4. 43  .yamato/upm-ci-testprojects.yml
  5. 125  .yamato/upm-ci-full.yml
  6. 2  LICENSE.md
  7. 87  README.md
  8. 74  TestProjects/PerceptionHDRP/Assets/HDRenderPipelineAsset.asset
  9. 492  TestProjects/PerceptionHDRP/Assets/Scenes/SampleScene.unity
  10. 11  TestProjects/PerceptionHDRP/Packages/manifest.json
  11. 5  TestProjects/PerceptionHDRP/ProjectSettings/EditorBuildSettings.asset
  12. 2  TestProjects/PerceptionHDRP/ProjectSettings/EditorSettings.asset
  13. 31  TestProjects/PerceptionHDRP/ProjectSettings/ProjectSettings.asset
  14. 4  TestProjects/PerceptionHDRP/ProjectSettings/ProjectVersion.txt
  15. 559  TestProjects/PerceptionURP/Assets/Scenes/SampleScene.unity
  16. 3  TestProjects/PerceptionURP/Assets/Settings/ForwardRenderer.asset
  17. 2  TestProjects/PerceptionURP/Assets/Settings/ForwardRenderer.asset.meta
  18. 5  TestProjects/PerceptionURP/Assets/Settings/UniversalRP-HighQuality.asset
  19. 9  TestProjects/PerceptionURP/Packages/manifest.json
  20. 2  TestProjects/PerceptionURP/ProjectSettings/EditorSettings.asset
  21. 2  TestProjects/PerceptionURP/ProjectSettings/GraphicsSettings.asset
  22. 49  TestProjects/PerceptionURP/ProjectSettings/ProjectSettings.asset
  23. 4  TestProjects/PerceptionURP/ProjectSettings/UnityConnectSettings.asset
  24. 4  TestProjects/PerceptionURP/ProjectSettings/ProjectVersion.txt
  25. 414  com.unity.perception/CHANGELOG.md
  26. 173  com.unity.perception/Documentation~/Schema/Synthetic_Dataset_Schema.md
  27. 35  com.unity.perception/Documentation~/SetupSteps.md
  28. 125  com.unity.perception/Documentation~/images/LabelingConfigurationFinished.PNG
  29. 366  com.unity.perception/Documentation~/images/MainCameraConfig.PNG
  30. 28  com.unity.perception/Editor/Unity.Perception.Editor.asmdef
  31. 3  com.unity.perception/Editor/GroundTruth/InstanceSegmentationPassEditor.cs
  32. 2  com.unity.perception/Editor/GroundTruth/LabelingConfigurationEditor.cs.meta
  33. 934  com.unity.perception/Editor/GroundTruth/LabelingEditor.cs
  34. 2  com.unity.perception/Editor/GroundTruth/LabelingEditor.cs.meta
  35. 2  com.unity.perception/Editor/GroundTruth/SemanticSegmentationPassEditor.cs
  36. 2  com.unity.perception/LICENSE.md
  37. 1  com.unity.perception/Runtime/AssemblyInfo.cs
  38. 16  com.unity.perception/Runtime/Unity.Perception.Runtime.asmdef
  39. 45  com.unity.perception/Runtime/GroundTruth/GroundTruthRendererFeature.cs
  40. 23  com.unity.perception/Runtime/GroundTruth/IGroundTruthGenerator.cs
  41. 64  com.unity.perception/Runtime/GroundTruth/Labeling/Labeling.cs
  42. 2  com.unity.perception/Runtime/GroundTruth/Labeling/StartingLabelId.cs
  43. 69  com.unity.perception/Runtime/GroundTruth/RenderTextureReader.cs
  44. 25  com.unity.perception/Runtime/GroundTruth/RenderedObjectInfo.cs
  45. 121  com.unity.perception/Runtime/GroundTruth/RenderedObjectInfoGenerator.cs
  46. 10  com.unity.perception/Runtime/GroundTruth/Resources/InstanceSegmentation.shader
  47. 18  com.unity.perception/Runtime/GroundTruth/Resources/LabeledObjectHistogram.compute
  48. 29  com.unity.perception/Runtime/GroundTruth/Resources/SemanticSegmentation.shader
  49. 123  com.unity.perception/Runtime/GroundTruth/SimulationManager.cs
  50. 5  com.unity.perception/Runtime/GroundTruth/Ego.cs
  51. 871  com.unity.perception/Runtime/GroundTruth/PerceptionCamera.cs
  52. 210  com.unity.perception/Runtime/GroundTruth/SimulationState.cs
  53. 18  com.unity.perception/Runtime/GroundTruth/SimulationState_Json.cs
  54. 6  com.unity.perception/Tests/Editor/BuildPerceptionPlayer.cs
  55. 16  com.unity.perception/Tests/Editor/Unity.Perception.Editor.Tests.asmdef
  56. 175  com.unity.perception/Tests/Editor/PerceptionCameraEditorTests.cs
  57. 46  com.unity.perception/Tests/Editor/SimulationManagerEditorTests.cs
  58. 2  com.unity.perception/Tests/Runtime/Unity.Perception.Runtime.Tests.asmdef
  59. 11  com.unity.perception/Tests/Runtime/GroundTruthTests/DatasetJsonUtilityTests.cs
  60. 36  com.unity.perception/Tests/Runtime/GroundTruthTests/TestHelper.cs
  61. 40  com.unity.perception/Tests/Runtime/GroundTruthTests/GroundTruthTestBase.cs
  62. 122  com.unity.perception/Tests/Runtime/GroundTruthTests/PerceptionCameraIntegrationTests.cs
  63. 520  com.unity.perception/Tests/Runtime/GroundTruthTests/SegmentationGroundTruthTests.cs
  64. 32  com.unity.perception/package.json
  65. 2  com.unity.perception/Editor/GroundTruth/ObjectCountPassEditor.cs.meta
  66. 2  com.unity.perception/Runtime/GroundTruth/GroundTruthInfo.cs.meta
  67. 8  com.unity.perception/Runtime/GroundTruth/GroundTruthLabelSetupSystem.cs.meta
  68. 8  com.unity.perception/Runtime/GroundTruth/GroundTruthPass.cs.meta
  69. 2  com.unity.perception/Runtime/GroundTruth/ObjectCountPass.cs.meta
  70. 2  com.unity.perception/Runtime/GroundTruth/SimulationManagementComponentSystem.cs.meta
  71. 2  com.unity.perception/Runtime/GroundTruth/Labeling/LabelingConfiguration.cs.meta
  72. 35  com.unity.perception/Runtime/GroundTruth/GroundTruthPass.cs
  73. 14  com.unity.perception/Runtime/GroundTruth/InstanceSegmentationPass.cs
  74. 25  com.unity.perception/Runtime/GroundTruth/SemanticSegmentationPass.cs
  75. 2  com.unity.perception/Runtime/GroundTruth/SemanticSegmentationPass.cs.meta
  76. 2  com.unity.perception/Runtime/GroundTruth/GroundTruthCrossPipelinePass.cs.meta
  77. 2  com.unity.perception/Runtime/GroundTruth/InstanceSegmentationCrossPipelinePass.cs.meta
  78. 2  com.unity.perception/Runtime/GroundTruth/SemanticSegmentationCrossPipelinePass.cs.meta
  79. 8  TestProjects/PerceptionHDRP/Assets/LabelingConfiguration.asset.meta
  80. 26  TestProjects/PerceptionHDRP/Assets/LabelingConfiguration.asset
  81. 8  TestProjects/PerceptionURP/Assets/ExampleLabelingConfiguration.asset.meta
  82. 8  TestProjects/PerceptionURP/Assets/Settings/UniversalRP-HighQuality.asset.meta
  83. 26  TestProjects/PerceptionURP/Assets/ExampleLabelingConfiguration.asset
  84. 48  com.unity.perception/Documentation~/GettingStarted.md
  85. 27  com.unity.perception/Documentation~/GroundTruth-Labeling.md
  86. 15  com.unity.perception/Documentation~/index.md
  87. 21  com.unity.perception/Editor/GroundTruth/ObjectCountPassEditor.cs
  88. 157  com.unity.perception/Editor/GroundTruth/LabelingConfigurationEditor.cs
  89. 107  com.unity.perception/Runtime/GroundTruth/GroundTruthCrossPipelinePass.cs
  90. 15  com.unity.perception/Runtime/GroundTruth/GroundTruthInfo.cs
  91. 106  com.unity.perception/Runtime/GroundTruth/GroundTruthLabelSetupSystem.cs
  92. 74  com.unity.perception/Runtime/GroundTruth/InstanceSegmentationCrossPipelinePass.cs
  93. 108  com.unity.perception/Runtime/GroundTruth/Labeling/LabelingConfiguration.cs
  94. 181  com.unity.perception/Runtime/GroundTruth/ObjectCountPass.cs
  95. 64  com.unity.perception/Runtime/GroundTruth/SemanticSegmentationCrossPipelinePass.cs
  96. 13  com.unity.perception/Runtime/GroundTruth/SimulationManagementComponentSystem.cs
  97. 192  com.unity.perception/Tests/Runtime/GroundTruthTests/BoundingBox2DTests.cs
  98. 253  com.unity.perception/Tests/Runtime/GroundTruthTests/Main Camera.prefab
  99. 7  com.unity.perception/Tests/Runtime/GroundTruthTests/Main Camera.prefab.meta
  100. 147  com.unity.perception/Tests/Runtime/GroundTruthTests/ObjectCountTests.cs

17  .github/pull_request_template.md


# Peer Review Information:
Information on any code, feature, documentation changes here
**Editor Version Target**: 2019.4
**Editor Version Target (e.g. 19.3, 20.1)**: 2019.3
<br>
**Package Tests (Pass/Fail)**:
[X] - Make sure automation passes
<br>
<br>
## Checklist
- [ ] - Updated docs
- [ ] - Updated changelog
- [ ] - Updated test rail
<br>
**Notes + Expectations**:

6  .gitignore


.npmrc
!Documentation~
!.Documentation
!Samples~
npm-debug.log
build.sh.meta
build.bat.meta

**/.bin
CodeCoverage
/.download
**/Build/**
**/Builds/**
/utr
*.user
/TestProjects/PerceptionURP/Build

143  .yamato/promotion.yml


{% metadata_file .yamato/environments.yml %}
---
{% for variant in package_variants %}
{% for editor in complete_editors %}
{% for platform in test_platforms %}
promotion_test_{{ platform.name }}_{{ editor.version }}:
name : Package promotion tests ({{variant.name}} pkg, {{ editor.version }}, {{ platform.name }})
agent:
type: {{ platform.type }}
image: {{ platform.image }}
flavor: {{ platform.flavor}}
variables:
UPMCI_PROMOTION: 1
commands:
- git submodule update --init --recursive
- npm install upm-ci-utils@stable -g --registry {{ upmci_registry }}
- upm-ci package test -u {{ editor.version }} --package-path ./com.unity.perception --type vetting-tests
artifacts:
logs:
paths:
- "upm-ci~/test-results/**/*"
dependencies:
- .yamato/upm-ci-full.yml#pack_{{ variant.name }}
{% endfor %}
{% endfor %}
{% endfor %}
promotion_test_trigger:
name: Promotion Tests Trigger
dependencies:
{% for editor in complete_editors %}
{% for platform in publish_platforms %}
{% for suite in suites %}
{% for project in projects %}
- .yamato/promote.yml#promotion_test_{{platform.name}}_{{editor.version}}
- .yamato/upm-ci-full.yml#pkg_test_Perception_{{platform.name}}_{{editor.version}}
- .yamato/upm-ci-testprojects.yml#{{project.name}}_windows_{{suite.name}}_{{editor.version}}
{% endfor %}
{% endfor %}
{% endfor %}
{% endfor %}
promote:
name: Promote to Production
agent:
type: Unity::VM
image: package-ci/win10:stable
flavor: b1.large
variables:
UPMCI_PROMOTION: 1
commands:
- npm install upm-ci-utils@stable -g --registry {{ upmci_registry }}
- upm-ci package promote --package-path ./com.unity.perception
# The Yamato build step `publish` will publish the com.unity.perception package to the `upm-candidates` registry. To see which versions of the package have been published, see
# * https://artifactory.prd.cds.internal.unity3d.com/artifactory/webapp/#/artifacts/browse/tree/General/upm-candidates/com.unity.perception
# * https://bintray.com/unity
#test_editors:
# - version: 2019.1
#test_platforms:
# - name: win
# type: Unity::VM
# image: package-ci/win10:stable
# flavor: b1.large
#---
#{% for editor in test_editors %}
#{% for platform in test_platforms %}
#promotion_test_{{ platform.name }}_{{ editor.version }}:
# name : Promotion Test {{ editor.version }} on {{ platform.name }}
# agent:
# type: {{ platform.type }}
# image: {{ platform.image }}
# flavor: {{ platform.flavor}}
# variables:
# UPMCI_PROMOTION: 1
# commands:
# - npm install upm-ci-utils@latest -g --registry https://artifactory.prd.cds.internal.unity3d.com/artifactory/api/npm/upm-npm
# - upm-ci package test --unity-version {{ editor.version }}
# artifacts:
# logs:
# paths:
# - "upm-ci~/test-results/**/*"
# dependencies:
# - .yamato/upm-ci.yml#pack
#{% endfor %}
#{% endfor %}
#
#promotion_test_trigger:
# name: Promotion Tests Trigger
# agent:
# type: Unity::VM
# image: package-ci/win10:stable
# flavor: b1.large
# artifacts:
# logs:
# paths:
# - "upm-ci~/test-results/**/*"
# packages:
# paths:
# - "upm-ci~/packages/**/*"
# dependencies:
#{% for editor in test_editors %}
#{% for platform in test_platforms %}
# - .yamato/promotion.yml#promotion_test_{{platform.name}}_{{editor.version}}
#{% endfor %}
#{% endfor %}
#
#promote:
# name: Promote to Production
# agent:
# type: Unity::VM
# image: package-ci/win10:stable
# flavor: b1.large
# variables:
# UPMCI_PROMOTION: 1
# commands:
# - npm install upm-ci-utils@latest -g --registry https://artifactory.prd.cds.internal.unity3d.com/artifactory/api/npm/upm-npm
# - upm-ci package promote
artifacts:
artifacts:
paths:
- "upm-ci~/packages/*.tgz"
dependencies:
{% for variant in package_variants %}
{% for editor in complete_editors %}
{% for platform in publish_platforms %}
- .yamato/upm-ci-full.yml#pack_{{ variant.name }}
- .yamato/promote.yml#promotion_test_{{ platform.name }}_{{ editor.version }}
{% endfor %}
{% endfor %}
{% endfor %}
# artifacts:
# artifacts:
# paths:
# - "upm-ci~/packages/*.tgz"
# dependencies:
# - .yamato/upm-ci.yml#pack
#{% for editor in test_editors %}
#{% for platform in test_platforms %}
# - .yamato/promotion.yml#promotion_test_{{ platform.name }}_{{ editor.version }}
#{% endfor %}
#{% endfor %}

43  .yamato/upm-ci-testprojects.yml


{% metadata_file .yamato/environments.yml %}
test_editors:
- version: 2019.3.7f1
suites:
- name: standalone
display_name: standalone
args: --suite=playmode --platform=
- name: editmode
display_name: editmode
args: --suite=playmode --suite=editor --platform=Editor
projects:
- name: PerceptionHDRP
- name: PerceptionURP
{% for editor in complete_editors %}
{% for editor in test_editors %}
{% for suite in suites %}
{% for project in projects %}
{{project.name}}_windows_{{suite.name}}_{{editor.version}}:

commands:
- git submodule update --init --recursive
- git clone git@github.cds.internal.unity3d.com:unity/utr.git utr
- pip install unity-downloader-cli --index-url https://artifactory.prd.it.unity3d.com/artifactory/api/pypi/pypi/simple --upgrade
- pip install unity-downloader-cli --extra-index-url https://artifactory.internal.unity3d.com/api/pypi/common-python/simple
- utr/utr --testproject=./TestProjects/{{project.name}} --editor-location=./.Editor --reruncount=2 --artifacts_path=test-results --stdout-filter=minimal --extra-editor-arg="--force-d3d11" {{suite.args}}StandaloneWindows64
- utr/utr --testproject=./TestProjects/{{project.name}} --editor-location=./.Editor --artifacts_path=test-results --stdout-filter=minimal --extra-editor-arg="--force-d3d11" {{suite.args}}StandaloneWindows64
- utr/utr --testproject=./TestProjects/{{project.name}} --editor-location=./.Editor --reruncount=2 --artifacts_path=test-results --stdout-filter=minimal --extra-editor-arg="--force-d3d11" {{suite.args}}
- utr/utr --testproject=./TestProjects/{{project.name}} --editor-location=./.Editor --artifacts_path=test-results --stdout-filter=minimal --extra-editor-arg="--force-d3d11" {{suite.args}}
{% endif %}
artifacts:
logs:

{% endfor %}
{% endfor %}
{% for editor in coverage_editors %}
{% for editor in test_editors %}
{% for suite in suites %}
codecoverage_windows_{{suite.name}}_{{editor.version}}:
name : CodeCoverage {{ suite.display_name }} tests ({{ editor.version }}, Windows)

{% endfor %}
{% endfor %}
{% for editor in complete_editors %}
{% for editor in test_editors %}
{% for suite in suites %}
{% for project in projects %}
{{project.name}}_linux_{{suite.name}}_{{editor.version}}:

model: rtx2080
image: cds-ops/ubuntu-18.04-base:stable
image: cds-ops/ubuntu-18.04-base:latest
variables:
PATH: /root/.local/bin:/home/bokken/bin:/home/bokken/.local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/games:/usr/local/games:/snap/bin:/sbin:/home/bokken/.npm-global/bin
- pip config set global.index-url https://artifactory.prd.it.unity3d.com/artifactory/api/pypi/pypi/simple
- pip install unity-downloader-cli --index-url https://artifactory.prd.it.unity3d.com/artifactory/api/pypi/pypi/simple --upgrade --user
- sudo -H pip install --upgrade pip
- sudo -H pip install unity-downloader-cli --extra-index-url https://artifactory.internal.unity3d.com/api/pypi/common-python/simple
- unity-downloader-cli -u {{ editor.version }} -c editor -c StandaloneSupport-IL2CPP -c Linux --wait --published
- sudo unity-downloader-cli -u {{ editor.version }} -c editor -c StandaloneSupport-IL2CPP -c Linux --wait --published
- DISPLAY=:0.0 utr/utr --testproject=./TestProjects/{{project.name}} --editor-location=.Editor --reruncount=2 --artifacts_path=test-results --stdout-filter=minimal --extra-editor-arg="--force-vulkan" {{suite.args}}StandaloneLinux64
- DISPLAY=:0.0 utr/utr --testproject=./TestProjects/{{project.name}} --editor-location=.Editor --artifacts_path=test-results --stdout-filter=minimal --extra-editor-arg="--force-vulkan" {{suite.args}}StandaloneLinux64
- DISPLAY=:0.0 utr/utr --testproject=./TestProjects/{{project.name}} --editor-location=.Editor --reruncount=2 --artifacts_path=test-results --stdout-filter=minimal --extra-editor-arg="--force-vulkan" {{suite.args}}
- DISPLAY=:0.0 utr/utr --testproject=./TestProjects/{{project.name}} --editor-location=.Editor --artifacts_path=test-results --stdout-filter=minimal --extra-editor-arg="--force-vulkan" {{suite.args}}
{% endif %}
artifacts:
logs:

# Not including OSX because the only OSX platform on Bokken with a GPU is a Mac Mini, of which there are only a few, and setting up Yamato jobs for them is very complicated.
# {% for variant in package_variants %}
# {% for editor in complete_editors %}
# {% for editor in test_editors %}
# code_coverage_win_{{editor.version}}:
# name: Code Coverage Report - Windows
# agent:

125  .yamato/upm-ci-full.yml


{% metadata_file .yamato/environments.yml %}
test_editors:
- version: 2019.3.7f1
test_platforms:
- name: win
type: Unity::VM
image: package-ci/win10:stable
flavor: b1.large
extra-args: --force-d3d11
standalone-platform: StandaloneWindows64
- name: mac
type: Unity::VM::osx
image: package-ci/mac:stable
flavor: m1.mac
extra-args: --force-metal
standalone-platform: StandaloneOSX
- name: ubuntu
type: Unity::VM
image: package-ci/ubuntu:latest
flavor: b1.large
suites:
- name: standalone
display_name: standalone
args: --suite=playmode --platform=
- name: editmode
display_name: editmode
args: --suite=playmode --suite=editor --platform=editmode
package_variants:
- name: Perception
projects:
- name: PerceptionHDRP
- name: PerceptionURP
---
{% for variant in package_variants %}
pack_{{ variant.name }}:

flavor: b1.large
commands:
- git submodule update --init --recursive
- npm install upm-ci-utils@stable -g --registry {{ upmci_registry }}
- npm install upm-ci-utils -g --registry https://artifactory.prd.cds.internal.unity3d.com/artifactory/api/npm/upm-npm
- upm-ci package pack --package-path ./com.unity.perception/
artifacts:
packages:

{% for variant in package_variants %}
{% for editor in complete_editors %}
{% for editor in test_editors %}
{% for platform in test_platforms %}
pkg_test_{{variant.name}}_{{ platform.name }}_{{ editor.version }}:
name : Package tests ({{variant.name}} pkg, {{ editor.version }}, {{ platform.name }})

flavor: {{ platform.flavor}}
commands:
- git submodule update --init --recursive
- npm install upm-ci-utils@stable -g --registry {{ upmci_registry }}
- npm install upm-ci-utils -g --registry https://artifactory.prd.cds.internal.unity3d.com/artifactory/api/npm/upm-npm
- upm-ci package test -u {{ editor.version }} --package-path ./com.unity.perception --type vetting-tests
artifacts:
logs:

{% endfor %}
test_trigger:
name: Per Commit Tests
name: All Tests
agent:
type: Unity::VM
image: package-ci/win10:stable

branches:
only:
- "/.*/"
cancel_old_ci: true
artifacts:
logs:
paths:
- "upm-ci~/test-results/**/*"
packages:
paths:
- "upm-ci~/packages/**/*"
dependencies:
{% for variant in package_variants %}
{% for editor in per_commit_editors %}
{% for platform in test_platforms %}
- .yamato/upm-ci-full.yml#pkg_test_{{variant.name}}_{{platform.name}}_{{editor.version}}
{% endfor %}
{% endfor %}
{% endfor %}
{% for editor in per_commit_editors %}
{% for project in projects %}
- .yamato/upm-ci-testprojects.yml#{{project.name}}_windows_standalone_{{editor.version}}
- .yamato/upm-ci-testprojects.yml#{{project.name}}_linux_standalone_{{editor.version}}
- .yamato/upm-ci-testprojects.yml#{{project.name}}_windows_editmode_{{editor.version}}
- .yamato/upm-ci-testprojects.yml#{{project.name}}_linux_editmode_{{editor.version}}
{% endfor %}
{% endfor %}
all_tests:
name: Complete Tests
agent:
type: Unity::VM
image: package-ci/win10:stable
flavor: b1.small
commands:
- dir
triggers:
cancel_old_ci: true
artifacts:
logs:
paths:

- "upm-ci~/packages/**/*"
dependencies:
{% for variant in package_variants %}
{% for editor in complete_editors %}
{% for editor in test_editors %}
{% for platform in test_platforms %}
- .yamato/upm-ci-full.yml#pkg_test_{{variant.name}}_{{platform.name}}_{{editor.version}}
{% endfor %}

{% for editor in coverage_editors %}
# Disabling trigger of standalone tests which aren't working at the moment
{% for editor in test_editors %}
{% endfor %}
{% for editor in complete_editors %}
{% for project in projects %}
- .yamato/upm-ci-testprojects.yml#{{project.name}}_windows_standalone_{{editor.version}}
- .yamato/upm-ci-testprojects.yml#{{project.name}}_linux_standalone_{{editor.version}}

{% endfor %}
{% for editor in complete_editors %}
{% for project in projects %}
- .yamato/upm-ci-performance.yml#{{project.name}}_windows_standalone_{{editor.version}}
- .yamato/upm-ci-performance.yml#{{project.name}}_linux_standalone_{{editor.version}}
{% endfor %}
{% endfor %}
all_tests_nightly_perf:
name: Nightly Performance Tests
{% for variant in package_variants %}
publish:
name: Publish Perception package to internal registry
flavor: b1.small
flavor: b1.large
- dir
- npm install upm-ci-utils@stable -g --registry https://artifactory.prd.cds.internal.unity3d.com/artifactory/api/npm/upm-npm
- upm-ci package publish --package-path com.unity.perception
recurring:
- branch: performance_testing
frequency: daily
cancel_old_ci: true
tags:
only:
- /^(r|R)(c|C)-\d+\.\d+\.\d+(-preview(\.\d+)?)?$/
logs:
paths:
- "upm-ci~/test-results/**/*"
packages:
artifacts:
- "upm-ci~/packages/**/*"
- "upm-ci~/packages/*.tgz"
{% for editor in performance_editors %}
{% for project in projects %}
- .yamato/upm-ci-performance.yml#{{project.name}}_windows_standalone_{{editor.version}}
- .yamato/upm-ci-performance.yml#{{project.name}}_linux_standalone_{{editor.version}}
- .yamato/upm-ci-full.yml#pack_{{ variant.name }}
{% for editor in test_editors %}
{% for platform in test_platforms %}
- .yamato/upm-ci-full.yml#test_perception_{{ platform.name }}_{{ editor.version }}
{% endfor %}

2  LICENSE.md


com.unity.perception copyright © 2021 Unity Technologies ApS
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/

87  README.md


<img src="com.unity.perception/Documentation~/images/unity-wide-whiteback.png" align="middle" width="3000"/>
<img src="com.unity.perception/Documentation~/images/unity-wide.png" align="middle" width="3000"/>
![ReleaseBadge](https://badge-proxy.cds.internal.unity3d.com/5ab9a162-9dd0-4ba1-ba41-cf25378a927a)
# Perception
The Perception package provides a toolkit for generating large-scale datasets for perception-based machine learning training and validation. It is focused on a handful of camera-based use cases for now and will ultimately expand to other forms of sensors and machine learning tasks.
<img src="https://img.shields.io/badge/unity-2019.4-green.svg?style=flat-square" alt="unity 2019.4">
<img src="https://img.shields.io/badge/unity-2020.2-green.svg?style=flat-square" alt="unity 2020.2">
> com.unity.perception is in active development. Its features and API are subject to significant change as development progresses.
# Perception Package ([Unity Computer Vision](https://unity.com/computer-vision))
The Perception package provides a toolkit for generating large-scale datasets for computer vision training and validation. It is focused on a handful of camera-based use cases for now and will ultimately expand to other forms of sensors and machine learning tasks.
Visit the [Unity Computer Vision](https://unity.com/computer-vision) page for more information on our tools and offerings!
## Getting Started
**[Quick Installation Instructions](com.unity.perception/Documentation~/SetupSteps.md)**
Get your local Perception workspace up and running quickly. Recommended for users with prior Unity experience.
**[Perception Tutorial](com.unity.perception/Documentation~/Tutorial/TUTORIAL.md)**
Detailed instructions covering all the important steps, from installing the Unity Editor to creating your first computer vision data generation project, building a randomized Scene, and generating large-scale synthetic datasets by leveraging the power of Unity Simulation. No prior Unity experience required.
**[Human Pose Labeling and Randomization Tutorial](com.unity.perception/Documentation~/HPTutorial/TUTORIAL.md)**
Step-by-step instructions for using the keypoint, pose, and animation randomization tools included in the Perception package. It is recommended that you finish Phase 1 of the Perception Tutorial above before starting this tutorial.
## Documentation
In-depth documentation on individual components of the package.
|Feature|Description|
|---|---|
|[Labeling](com.unity.perception/Documentation~/GroundTruthLabeling.md)|A component that marks a GameObject and its descendants with a set of labels|
|[Label Config](com.unity.perception/Documentation~/GroundTruthLabeling.md#label-config)|An asset that defines a taxonomy of labels for ground truth generation|
|[Perception Camera](com.unity.perception/Documentation~/PerceptionCamera.md)|Captures RGB images and ground truth from a [Camera](https://docs.unity3d.com/Manual/class-Camera.html).|
|[Dataset Capture](com.unity.perception/Documentation~/DatasetCapture.md)|Ensures sensors are triggered at proper rates and accepts data for the JSON dataset.|
|[Randomization](com.unity.perception/Documentation~/Randomization/Index.md)|The Randomization tool set lets you integrate domain randomization principles into your simulation.|
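To make the table above concrete, here is a minimal, hypothetical C# sketch of wiring these components together from a script: it labels a cube and attaches a Perception Camera with a 2D bounding box labeler. The `IdLabelConfig` field, the `AddLabeler` call, and the labeler constructor are assumptions based on the package's public API and may differ between Perception versions, so treat this as an illustration rather than the canonical setup.

```csharp
// Hypothetical sketch only: assumes the com.unity.perception package is installed
// and an IdLabelConfig asset exists in the project. API names may vary by version.
using UnityEngine;
using UnityEngine.Perception.GroundTruth;

public class PerceptionSceneSetup : MonoBehaviour
{
    // Assign an existing IdLabelConfig asset in the Inspector.
    public IdLabelConfig labelConfig;

    void Start()
    {
        // Mark a cube with a label so the ground truth generators can identify it.
        var cube = GameObject.CreatePrimitive(PrimitiveType.Cube);
        var labeling = cube.AddComponent<Labeling>();
        labeling.labels.Add("Cube");

        // Capture RGB images and ground truth from the main camera, reporting
        // 2D bounding boxes against the label config.
        var perceptionCamera = Camera.main.gameObject.AddComponent<PerceptionCamera>();
        perceptionCamera.AddLabeler(new BoundingBox2DLabeler(labelConfig));
    }
}
```

With a script like this in a Perception-enabled project, entering Play mode would typically produce RGB captures plus the corresponding JSON annotations in the project's dataset output folder.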
## Documentation
**Click [here](com.unity.perception/Documentation~/SetupSteps.md) to set up a Perception project**
## Community and Support
**Click [here](com.unity.perception/Documentation~/GettingStarted.md) to get started with Perception**
For setup problems or discussions about leveraging the Perception package in your project, please create a new thread on the **[Unity Computer Vision forum](https://forum.unity.com/forums/computer-vision.626/)** and make sure to include as much detail as possible. If you run into any other problems with the Perception package or have a specific feature request, please submit a **[GitHub issue](https://github.com/Unity-Technologies/com.unity.perception/issues)**.
For any other questions or feedback, connect directly with the Computer Vision team at [computer-vision@unity3d.com](mailto:computer-vision@unity3d.com).
## Example Projects
### SynthDet
<img src="com.unity.perception/Documentation~/images/synthdet.png"/>
[SynthDet](https://github.com/Unity-Technologies/SynthDet) is an end-to-end solution for training a 2D object detection model using synthetic data.
### Unity Simulation Smart Camera example
<img src="com.unity.perception/Documentation~/images/smartcamera.png"/>
The [Unity Simulation Smart Camera Example](https://github.com/Unity-Technologies/Unity-Simulation-Smart-Camera-Outdoor) illustrates how the Perception package could be used in a smart city or autonomous vehicle simulation. You can generate datasets locally or at scale in [Unity Simulation](https://unity.com/products/unity-simulation).
### Robotics Object Pose Estimation Demo
<img src="com.unity.perception/Documentation~/images/robotics_pose.png"/>
The [Robotics Object Pose Estimation Demo & Tutorial](https://github.com/Unity-Technologies/Robotics-Object-Pose-Estimation) demonstrates pick-and-place with a robot arm in Unity. It includes using ROS with Unity, importing URDF models, collecting labeled training data using the Perception package, and training and deploying a deep learning model.
## Local development
The repository includes two projects for local development in the `TestProjects` folder, one set up for HDRP and the other for URP.
### Suggested IDE Setup
## Suggested IDE Setup
## Known issues
* Linux Editor versions 2019.4.7f1 and 2019.4.8f1 might hang when importing HDRP-based Perception projects. For Linux Editor support, use 2019.4.6f1 or 2020.1.
* To get automatic feedback and fixups on formatting and naming convention violations, set up Rider/JetBrains with our Unity standard .dotsettings file by following [these instructions](https://github.cds.internal.unity3d.com/unity/com.unity.coding/tree/master/UnityCoding/Packages/com.unity.coding/Coding~/Configs/JetBrains).
* If you use VS Code, install the EditorConfig extension to get automatic code formatting according to our convention.
## Citation
If you find this package useful, consider citing it using:
```
@misc{com.unity.perception2021,
title={Unity {P}erception Package},
author={{Unity Technologies}},
howpublished={\url{https://github.com/Unity-Technologies/com.unity.perception}},
year={2020}
}
```

74  TestProjects/PerceptionHDRP/Assets/HDRenderPipelineAsset.asset


m_Script: {fileID: 11500000, guid: 0cf1dab834d4ec34195b920ea7bbf9ec, type: 3}
m_Name: HDRenderPipelineAsset
m_EditorClassIdentifier:
m_Version: 14
m_Version: 9
m_ObsoleteFrameSettings:
overrides: 0
enableShadow: 0

type: 2}
m_RenderPipelineRayTracingResources: {fileID: 0}
m_DefaultVolumeProfile: {fileID: 0}
m_DefaultLookDevProfile: {fileID: 0}
data1: 70280697347933
data2: 4539628425463136256
data1: 69730941533981
data2: 4539628424926265344
lodBias: 1
lodBiasMode: 0
lodBiasQualityLevel: 0

supportVolumetrics: 1
increaseResolutionOfVolumetrics: 0
supportLightLayers: 0
lightLayerName0: Light Layer default
lightLayerName1: Light Layer 1
lightLayerName2: Light Layer 2
lightLayerName3: Light Layer 3
lightLayerName4: Light Layer 4
lightLayerName5: Light Layer 5
lightLayerName6: Light Layer 6
lightLayerName7: Light Layer 7
supportCustomPass: 1
customBufferFormat: 12
supportedLitShaderMode: 3
supportedLitShaderMode: 2
supportDecals: 1
msaaSampleCount: 1
supportMotionVectors: 1

supportRayTracing: 0
supportedRaytracingTier: 2
cookieAtlasSize: 512
cookieFormat: 74
cookieSize: 128
cookieTexArraySize: 16
cookieAtlasLastValidMip: 0
cookieTexArraySize: 16
planarReflectionAtlasSize: 4096
planarReflectionProbeCacheSize: 2
planarReflectionTextureSize: 1024
reflectionProbeCacheSize: 64
reflectionCubemapSize: 256
reflectionCacheCompressed: 0

maxAreaLightsOnScreen: 64
maxEnvLightsOnScreen: 64
maxDecalsOnScreen: 512
maxPlanarReflectionOnScreen: 16
hdShadowInitParams:
maxShadowRequests: 128
directionalShadowsDepthBits: 32

useDynamicViewportRescale: 1
shadowResolutionDirectional:
m_Values: 00010000000200000004000000080000
m_SchemaId:
m_Id: With4Levels
m_SchemaId:
m_Id: With4Levels
m_SchemaId:
m_Id: With4Levels
maxScreenSpaceShadowSlots: 4
screenSpaceShadowBufferFormat: 48
maxScreenSpaceShadows: 2
decalSettings:
drawDistance: 1000
atlasWidth: 4096

m_LutSize: 32
lutFormat: 48
bufferFormat: 74
dynamicResolutionSettings:
enabled: 0
maxPercentage: 100

checkerboardDepthBuffer: 1
upsampleType: 1
xrSettings:
singlePass: 1
occlusionMesh: 1
postProcessQualitySettings:
NearBlurSampleCount: 030000000500000008000000

ChromaticAberrationMaxSamples: 03000000060000000c000000
lightSettings:
useContactShadow:
m_Values:
m_SchemaId:
m_Id:
m_Low: 0
m_Medium: 0
m_High: 0
m_Values: 000000000000000000000000
m_SchemaId:
m_Id: With3Levels
m_Low: 0
m_Medium: 0
m_High: 0
m_Values:
- 1
- 1
- 1
m_SchemaId:
m_Id: With3Levels
lightingQualitySettings:
AOStepCount: 040000000600000010000000
AOFullRes: 000001
AOMaximumRadiusPixels: 200000002800000050000000
AOBilateralUpsample: 000101
AODirectionCount: 010000000200000004000000
ContactShadowSampleCount: 060000000a00000010000000
SSRMaxRaySteps: 100000002000000040000000
m_Low: 1
m_Medium: 1
m_High: 1
availableMaterialQualityLevels: -1
m_DefaultMaterialQualityLevel: 4
materialQualityLevels: -1
m_CurrentMaterialQualityLevel: 4
diffusionProfileSettings: {fileID: 0}
diffusionProfileSettingsList: []
beforeTransparentCustomPostProcesses: []

492  TestProjects/PerceptionHDRP/Assets/Scenes/SampleScene.unity


m_IndirectOutputScale: 1
m_AlbedoBoost: 1
m_EnvironmentLightingMode: 0
m_EnableBakedLightmaps: 0
m_EnableBakedLightmaps: 1
m_EnableRealtimeLightmaps: 1
m_LightmapEditorSettings:
serializedVersion: 12

debug:
m_Flags: 0
m_NavMeshData: {fileID: 0}
--- !u!1 &117484506
--- !u!1 &4662619
m_ObjectHideFlags: 1
m_ObjectHideFlags: 0
- component: {fileID: 117484508}
- component: {fileID: 117484507}
- component: {fileID: 4662620}
m_Name: StaticLightingSky
m_Name: GameObject
--- !u!114 &117484507
MonoBehaviour:
m_ObjectHideFlags: 1
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 117484506}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: 441482e8936e35048a1dffac814e3ef8, type: 3}
m_Name:
m_EditorClassIdentifier:
m_Profile: {fileID: 0}
m_StaticLightingSkyUniqueID: 0
m_SkySettings: {fileID: 0}
m_SkySettingsFromProfile: {fileID: 0}
--- !u!4 &117484508
--- !u!4 &4662620
m_ObjectHideFlags: 1
m_ObjectHideFlags: 0
m_GameObject: {fileID: 117484506}
m_GameObject: {fileID: 4662619}
m_LocalPosition: {x: 0, y: 0, z: 0}
m_LocalScale: {x: 1, y: 1, z: 1}
m_Children: []
m_LocalPosition: {x: -2.7286716, y: -2.380882, z: 5.498973}
m_LocalScale: {x: 36.249973, y: 36.249973, z: 36.249973}
m_Children:
- {fileID: 963194228}
- {fileID: 705507995}
- {fileID: 1640252283}
- {fileID: 464025709}
- {fileID: 411238281}
m_RootOrder: 6
m_RootOrder: 0
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
--- !u!1 &411238276
GameObject:

- component: {fileID: 411238277}
- component: {fileID: 411238282}
m_Layer: 0
m_Name: Crate
m_Name: Cube (2)
m_TagString: Untagged
m_Icon: {fileID: 0}
m_NavMeshLayer: 0

m_Script: {fileID: 11500000, guid: 8b33f0bc2b78db642a758f07826d0dd0, type: 3}
m_Name:
m_EditorClassIdentifier:
labels:
classes:
useAutoLabeling: 0
autoLabelingSchemeType:
--- !u!65 &411238278
BoxCollider:
m_ObjectHideFlags: 0

m_RenderingLayerMask: 1
m_RendererPriority: 0
m_Materials:
- {fileID: 2100000, guid: ddfd18df3e5ef3043b7889c5a070d8ca, type: 2}
- {fileID: 2100000, guid: 73c176f402d2c2f4d929aa5da7585d17, type: 2}
m_StaticBatchInfo:
firstSubMesh: 0
subMeshCount: 0

m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 411238276}
m_LocalRotation: {x: -0, y: -0, z: -0, w: 1}
m_LocalPosition: {x: 121.42, y: 72.4, z: -161.73}
m_LocalScale: {x: 15, y: 15, z: 15}
m_LocalPosition: {x: 4.3786716, y: 2.380882, z: -4.288973}
m_LocalScale: {x: 1, y: 1, z: 1}
m_Father: {fileID: 0}
m_RootOrder: 0
m_Father: {fileID: 4662620}
m_RootOrder: 4
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
--- !u!114 &411238282
MonoBehaviour:

m_Name:
m_EditorClassIdentifier:
yDegreesPerSecond: 180
--- !u!1 &464025704
GameObject:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
serializedVersion: 6
m_Component:
- component: {fileID: 464025709}
- component: {fileID: 464025708}
- component: {fileID: 464025707}
- component: {fileID: 464025706}
- component: {fileID: 464025705}
- component: {fileID: 464025710}
m_Layer: 0
m_Name: Cube (1)
m_TagString: Untagged
m_Icon: {fileID: 0}
m_NavMeshLayer: 0
m_StaticEditorFlags: 0
m_IsActive: 1
--- !u!114 &464025705
MonoBehaviour:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 464025704}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: 8b33f0bc2b78db642a758f07826d0dd0, type: 3}
m_Name:
m_EditorClassIdentifier:
classes:
- Cube
--- !u!65 &464025706
BoxCollider:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 464025704}
m_Material: {fileID: 0}
m_IsTrigger: 0
m_Enabled: 1
serializedVersion: 2
m_Size: {x: 1, y: 1, z: 1}
m_Center: {x: 0, y: 0, z: 0}
--- !u!23 &464025707
MeshRenderer:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 464025704}
m_Enabled: 1
m_CastShadows: 1
m_ReceiveShadows: 1
m_DynamicOccludee: 1
m_MotionVectors: 1
m_LightProbeUsage: 1
m_ReflectionProbeUsage: 1
m_RayTracingMode: 2
m_RenderingLayerMask: 1
m_RendererPriority: 0
m_Materials:
- {fileID: 2100000, guid: 73c176f402d2c2f4d929aa5da7585d17, type: 2}
m_StaticBatchInfo:
firstSubMesh: 0
subMeshCount: 0
m_StaticBatchRoot: {fileID: 0}
m_ProbeAnchor: {fileID: 0}
m_LightProbeVolumeOverride: {fileID: 0}
m_ScaleInLightmap: 1
m_ReceiveGI: 1
m_PreserveUVs: 0
m_IgnoreNormalsForChartDetection: 0
m_ImportantGI: 0
m_StitchLightmapSeams: 1
m_SelectedEditorRenderState: 3
m_MinimumChartSize: 4
m_AutoUVMaxDistance: 0.5
m_AutoUVMaxAngle: 89
m_LightmapParameters: {fileID: 0}
m_SortingLayerID: 0
m_SortingLayer: 0
m_SortingOrder: 0
--- !u!33 &464025708
MeshFilter:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 464025704}
m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0}
--- !u!4 &464025709
Transform:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 464025704}
m_LocalRotation: {x: -0, y: -0, z: -0, w: 1}
m_LocalPosition: {x: 2.6386716, y: 2.380882, z: -3.9089727}
m_LocalScale: {x: 1, y: 1, z: 1}
m_Children: []
m_Father: {fileID: 4662620}
m_RootOrder: 3
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
--- !u!114 &464025710
MonoBehaviour:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 464025704}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: 673a227032a8e4940b9828c5b6f852ab, type: 3}
m_Name:
m_EditorClassIdentifier:
yDegreesPerSecond: 180
--- !u!1 &705507993
GameObject:
m_ObjectHideFlags: 0

m_BounceIntensity: 1
m_ColorTemperature: 6570
m_UseColorTemperature: 0
m_BoundingSphereOverride: {x: 0, y: 1.1418e-41, z: 0, w: 0}
m_BoundingSphereOverride: {x: 6.25e-43, y: 1.0156355e+12, z: 6.25e-43, w: 2.8676e-41}
m_UseBoundingSphereOverride: 0
m_ShadowRadius: 0
m_ShadowAngle: 0

m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 705507993}
m_LocalRotation: {x: 0.40821788, y: -0.23456968, z: 0.10938163, w: 0.8754261}
m_LocalPosition: {x: 96.1856, y: 192.67596, z: -193.83864}
m_LocalScale: {x: 36.249973, y: 36.249977, z: 36.249973}
m_LocalPosition: {x: 2.7286716, y: 5.3808823, z: -5.498973}
m_LocalScale: {x: 1, y: 1, z: 1}
m_Father: {fileID: 0}
m_RootOrder: 3
m_Father: {fileID: 4662620}
m_RootOrder: 1
m_LocalEulerAnglesHint: {x: 50, y: -30, z: 0}
--- !u!114 &705507996
MonoBehaviour:

m_Script: {fileID: 11500000, guid: 7a68c43fe1f2a47cfa234b5eeaa98012, type: 3}
m_Name:
m_EditorClassIdentifier:
m_Version: 10
m_Version: 9
m_ObsoleteShadowResolutionTier: 1
m_ObsoleteUseShadowQualitySettings: 0
m_ObsoleteCustomShadowResolution: 512

m_FilterSizeTraced: 16
m_SunLightConeAngle: 0.5
m_LightShadowRadius: 0.5
m_SemiTransparentShadow: 0
m_ColorShadow: 1
m_EvsmExponent: 15
m_EvsmLightLeakBias: 0
m_EvsmVarianceBias: 0.00001

useVolumetric: 1
featuresFoldout: 1
showAdditionalSettings: 0
m_AreaLightEmissiveMeshShadowCastingMode: 0
m_AreaLightEmissiveMeshMotionVectorGenerationMode: 0
--- !u!1 &934158981
GameObject:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
serializedVersion: 6
m_Component:
- component: {fileID: 934158987}
- component: {fileID: 934158986}
- component: {fileID: 934158985}
- component: {fileID: 934158984}
- component: {fileID: 934158983}
- component: {fileID: 934158982}
m_Layer: 0
m_Name: Cube
m_TagString: Untagged
m_Icon: {fileID: 0}
m_NavMeshLayer: 0
m_StaticEditorFlags: 0
m_IsActive: 1
--- !u!114 &934158982
MonoBehaviour:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 934158981}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: 673a227032a8e4940b9828c5b6f852ab, type: 3}
m_Name:
m_EditorClassIdentifier:
yDegreesPerSecond: 180
--- !u!114 &934158983
MonoBehaviour:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 934158981}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: 8b33f0bc2b78db642a758f07826d0dd0, type: 3}
m_Name:
m_EditorClassIdentifier:
labels:
- Cube
useAutoLabeling: 0
autoLabelingSchemeType:
--- !u!65 &934158984
BoxCollider:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 934158981}
m_Material: {fileID: 0}
m_IsTrigger: 0
m_Enabled: 1
serializedVersion: 2
m_Size: {x: 1, y: 1, z: 1}
m_Center: {x: 0, y: 0, z: 0}
--- !u!23 &934158985
MeshRenderer:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 934158981}
m_Enabled: 1
m_CastShadows: 1
m_ReceiveShadows: 1
m_DynamicOccludee: 1
m_MotionVectors: 1
m_LightProbeUsage: 1
m_ReflectionProbeUsage: 1
m_RayTracingMode: 2
m_RenderingLayerMask: 1
m_RendererPriority: 0
m_Materials:
- {fileID: 2100000, guid: 73c176f402d2c2f4d929aa5da7585d17, type: 2}
m_StaticBatchInfo:
firstSubMesh: 0
subMeshCount: 0
m_StaticBatchRoot: {fileID: 0}
m_ProbeAnchor: {fileID: 0}
m_LightProbeVolumeOverride: {fileID: 0}
m_ScaleInLightmap: 1
m_ReceiveGI: 1
m_PreserveUVs: 0
m_IgnoreNormalsForChartDetection: 0
m_ImportantGI: 0
m_StitchLightmapSeams: 1
m_SelectedEditorRenderState: 3
m_MinimumChartSize: 4
m_AutoUVMaxDistance: 0.5
m_AutoUVMaxAngle: 89
m_LightmapParameters: {fileID: 0}
m_SortingLayerID: 0
m_SortingLayer: 0
m_SortingOrder: 0
--- !u!33 &934158986
MeshFilter:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 934158981}
m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0}
--- !u!4 &934158987
Transform:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 934158981}
m_LocalRotation: {x: -0, y: -0, z: -0, w: 1}
m_LocalPosition: {x: 107.21, y: 73.4, z: -144.12}
m_LocalScale: {x: 15, y: 15, z: 15}
m_Children: []
m_Father: {fileID: 0}
m_RootOrder: 1
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
--- !u!1 &963194225
GameObject:
m_ObjectHideFlags: 0

height: 1
near clip plane: 0.3
far clip plane: 1000
field of view: 59.991566
field of view: 60
orthographic: 0
orthographic size: 5
m_Depth: -1

m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 963194225}
m_LocalRotation: {x: -0.22882307, y: 0.124303445, z: -0.029468497, w: -0.96504945}
m_LocalPosition: {x: 116.52855, y: 91.11087, z: -194.85445}
m_LocalScale: {x: 36.249973, y: 36.249973, z: 36.249973}
m_LocalRotation: {x: -0.17179534, y: 0.30667058, z: -0.056378223, w: -0.93448436}
m_LocalPosition: {x: 5.5378666, y: 3.5565922, z: -7.528791}
m_LocalScale: {x: 1, y: 1, z: 1}
m_Father: {fileID: 0}
m_RootOrder: 4
m_Father: {fileID: 4662620}
m_RootOrder: 0
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
--- !u!114 &963194229
MonoBehaviour:

m_Name:
m_EditorClassIdentifier:
description: The main camera
captureRgbImages: 1
firstCaptureFrame: 0
captureTriggerMode: 0
manualSensorAffectSimulationTiming: 0
simulationDeltaTime: 0.0166
framesBetweenCaptures: 0
m_Labelers:
- id: 0
- id: 1
- id: 2
- id: 3
showVisualizations: 1
references:
version: 1
00000000:
type: {class: BoundingBox2DLabeler, ns: UnityEngine.Perception.GroundTruth,
asm: Unity.Perception.Runtime}
data:
enabled: 1
annotationId: f9f22e05-443f-4602-a422-ebe4ea9b55cb
idLabelConfig: {fileID: 11400000, guid: 258de5b48703743468d34fc5bbdfa3aa,
type: 2}
00000001:
type: {class: SemanticSegmentationLabeler, ns: UnityEngine.Perception.GroundTruth,
asm: Unity.Perception.Runtime}
data:
enabled: 1
annotationId: 12f94d8d-5425-4deb-9b21-5e53ad957d66
labelConfig: {fileID: 11400000, guid: c140c5aa05dd09e4fadaa26de31b1f39, type: 2}
m_TargetTextureOverride: {fileID: 0}
00000002:
type: {class: ObjectCountLabeler, ns: UnityEngine.Perception.GroundTruth, asm: Unity.Perception.Runtime}
data:
enabled: 1
objectCountMetricId: 51da3c27-369d-4929-aea6-d01614635ce2
m_LabelConfig: {fileID: 11400000, guid: 258de5b48703743468d34fc5bbdfa3aa,
type: 2}
00000003:
type: {class: RenderedObjectInfoLabeler, ns: UnityEngine.Perception.GroundTruth,
asm: Unity.Perception.Runtime}
data:
enabled: 1
objectInfoMetricId: 5ba92024-b3b7-41a7-9d3f-c03a6a8ddd01
idLabelConfig: {fileID: 11400000, guid: 258de5b48703743468d34fc5bbdfa3aa,
type: 2}
--- !u!1 &1321518866
GameObject:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
serializedVersion: 6
m_Component:
- component: {fileID: 1321518869}
- component: {fileID: 1321518868}
- component: {fileID: 1321518867}
- component: {fileID: 1321518870}
m_Layer: 0
m_Name: Terrain
m_TagString: Untagged
m_Icon: {fileID: 0}
m_NavMeshLayer: 0
m_StaticEditorFlags: 4294967295
m_IsActive: 1
--- !u!154 &1321518867
TerrainCollider:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 1321518866}
m_Material: {fileID: 0}
m_Enabled: 1
m_TerrainData: {fileID: 15600000, guid: 15ded0116bd9f864f80b9813d4f4477f, type: 2}
m_EnableTreeColliders: 1
--- !u!218 &1321518868
Terrain:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 1321518866}
m_Enabled: 1
serializedVersion: 6
m_TerrainData: {fileID: 15600000, guid: 15ded0116bd9f864f80b9813d4f4477f, type: 2}
m_TreeDistance: 5000
m_TreeBillboardDistance: 50
m_TreeCrossFadeLength: 5
m_TreeMaximumFullLODCount: 50
m_DetailObjectDistance: 80
m_DetailObjectDensity: 1
m_HeightmapPixelError: 5
m_SplatMapDistance: 1000
m_HeightmapMaximumLOD: 0
m_ShadowCastingMode: 2
m_DrawHeightmap: 1
m_DrawInstanced: 0
m_DrawTreesAndFoliage: 1
m_ReflectionProbeUsage: 1
m_MaterialTemplate: {fileID: 2100000, guid: 22ff8771d87ef27429e670136399094b, type: 2}
m_BakeLightProbesForTrees: 1
m_PreserveTreePrototypeLayers: 0
m_DeringLightProbesForTrees: 1
m_ScaleInLightmap: 0.0256
m_LightmapParameters: {fileID: 15203, guid: 0000000000000000f000000000000000, type: 0}
m_GroupingID: 0
m_RenderingLayerMask: 1
m_AllowAutoConnect: 1
--- !u!4 &1321518869
Transform:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 1321518866}
m_LocalRotation: {x: -0, y: -0.07853227, z: -0, w: 0.9969116}
m_LocalPosition: {x: -604.5, y: 65.1, z: -902.3}
m_LocalScale: {x: 0.95858, y: 0.95858, z: 0.95858}
m_Children: []
m_Father: {fileID: 0}
m_RootOrder: 5
m_LocalEulerAnglesHint: {x: 0, y: -9.008, z: 0}
--- !u!114 &1321518870
MonoBehaviour:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 1321518866}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: 8b33f0bc2b78db642a758f07826d0dd0, type: 3}
m_Name:
m_EditorClassIdentifier:
labels:
- Terrain
useAutoLabeling: 0
autoLabelingSchemeType:
period: 0.0166
startTime: 0
produceSegmentationImages: 1
produceObjectCountAnnotations: 1
LabelingConfiguration: {fileID: 11400000, guid: be3971a848968144e8d07d9136a5bf49,
type: 2}
--- !u!1 &1640252278
GameObject:
m_ObjectHideFlags: 0

- component: {fileID: 1640252279}
- component: {fileID: 1640252284}
m_Layer: 0
m_Name: Box
m_Name: Cube
m_TagString: Untagged
m_Icon: {fileID: 0}
m_NavMeshLayer: 0

m_Script: {fileID: 11500000, guid: 8b33f0bc2b78db642a758f07826d0dd0, type: 3}
m_Name:
m_EditorClassIdentifier:
labels:
classes:
useAutoLabeling: 0
autoLabelingSchemeType:
--- !u!65 &1640252280
BoxCollider:
m_ObjectHideFlags: 0

m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 1640252278}
m_LocalRotation: {x: -0, y: -0, z: -0, w: 1}
m_LocalPosition: {x: 95.88, y: 73.33, z: -165.54}
m_LocalScale: {x: 15, y: 15, z: 15}
m_LocalPosition: {x: 2.7286716, y: 2.380882, z: -5.498973}
m_LocalScale: {x: 1, y: 1, z: 1}
m_Father: {fileID: 0}
m_Father: {fileID: 4662620}
m_RootOrder: 2
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
--- !u!114 &1640252284

11  TestProjects/PerceptionHDRP/Packages/manifest.json


{
"dependencies": {
"com.unity.collab-proxy": "1.2.16",
"com.unity.ext.nunit": "1.0.6",
"com.unity.ext.nunit": "1.0.0",
"com.unity.ide.vscode": "1.2.3",
"com.unity.ide.vscode": "1.2.0",
"com.unity.simulation.capture": "0.0.10-preview.16",
"com.unity.simulation.core": "0.0.10-preview.21",
"com.unity.test-framework": "1.1.20",
"com.unity.simulation.capture": "0.0.10-preview.6",
"com.unity.simulation.core": "0.0.10-preview.8",
"com.unity.test-framework": "1.1.13",
"nuget.moq": "1.0.0",
"com.unity.modules.ai": "1.0.0",
"com.unity.modules.androidjni": "1.0.0",
"com.unity.modules.animation": "1.0.0",

5  TestProjects/PerceptionHDRP/ProjectSettings/EditorBuildSettings.asset


EditorBuildSettings:
m_ObjectHideFlags: 0
serializedVersion: 2
m_Scenes:
- enabled: 1
path: Assets/Scenes/SampleScene.unity
guid: 9fc0d4010bbf28b4594072e72b8655ab
m_Scenes: []
m_configObjects: {}

2  TestProjects/PerceptionHDRP/ProjectSettings/EditorSettings.asset


inProgressEnabled: 1
m_EnableTextureStreamingInEditMode: 1
m_EnableTextureStreamingInPlayMode: 1
m_AsyncShaderCompilation: 0
m_AsyncShaderCompilation: 1
m_EnterPlayModeOptionsEnabled: 0
m_EnterPlayModeOptions: 3
m_ShowLightmapResolutionOverlay: 1

31  TestProjects/PerceptionHDRP/ProjectSettings/ProjectSettings.asset


useOnDemandResources: 0
accelerometerFrequency: 60
companyName: DefaultCompany
productName: PerceptionHDRP
productName: UnityTestFramework
defaultCursor: {fileID: 0}
cursorHotspot: {x: 0, y: 0}
m_SplashScreenBackgroundColor: {r: 0.13725491, g: 0.12156863, b: 0.1254902, a: 1}

switchNVNShaderPoolsGranularity: 33554432
switchNVNDefaultPoolsGranularity: 16777216
switchNVNOtherPoolsGranularity: 16777216
switchNVNMaxPublicTextureIDCount: 0
switchNVNMaxPublicSamplerIDCount: 0
stadiaPresentMode: 0
stadiaTargetFramerate: 0
vulkanEnableLateAcquireNextImage: 0
m_SupportedAspectRatios:
4:3: 1
5:4: 1

uIStatusBarHidden: 1
uIExitOnSuspend: 0
uIStatusBarStyle: 0
iPhoneSplashScreen: {fileID: 0}
iPhoneHighResSplashScreen: {fileID: 0}
iPhoneTallHighResSplashScreen: {fileID: 0}
iPhone47inSplashScreen: {fileID: 0}
iPhone55inPortraitSplashScreen: {fileID: 0}
iPhone55inLandscapeSplashScreen: {fileID: 0}
iPhone58inPortraitSplashScreen: {fileID: 0}
iPhone58inLandscapeSplashScreen: {fileID: 0}
iPadPortraitSplashScreen: {fileID: 0}
iPadHighResPortraitSplashScreen: {fileID: 0}
iPadLandscapeSplashScreen: {fileID: 0}
iPadHighResLandscapeSplashScreen: {fileID: 0}
iPhone65inPortraitSplashScreen: {fileID: 0}
iPhone65inLandscapeSplashScreen: {fileID: 0}
iPhone61inPortraitSplashScreen: {fileID: 0}
iPhone61inLandscapeSplashScreen: {fileID: 0}
appleTVSplashScreen: {fileID: 0}
appleTVSplashScreen2x: {fileID: 0}
tvOSSmallIconLayers: []

metalEditorSupport: 1
metalAPIValidation: 1
iOSRenderExtraFrameOnPause: 0
iosCopyPluginsCodeInsteadOfSymlink: 0
appleDeveloperTeamID:
iOSManualSigningProvisioningProfileID:
tvOSManualSigningProvisioningProfileID:

ps4ShareFilePath:
ps4ShareOverlayImagePath:
ps4PrivacyGuardImagePath:
ps4ExtraSceSysFile:
ps4NPtitleDatPath:
ps4RemotePlayKeyAssignment: -1
ps4RemotePlayKeyMappingDir:

ps4UseResolutionFallback: 0
ps4ReprojectionSupport: 0
ps4UseAudio3dBackend: 0
ps4UseLowGarlicFragmentationMode: 1
ps4SocialScreenEnabled: 0
ps4ScriptOptimizationLevel: 2
ps4Audio3dVirtualSpeakerCount: 14

ps4disableAutoHideSplash: 0
ps4videoRecordingFeaturesUsed: 0
ps4contentSearchFeaturesUsed: 0
ps4CompatibilityPS5: 0
ps4GPU800MHz: 1
ps4attribEyeToEyeDistanceSettingVR: 0
ps4IncludedModules: []
ps4attribVROutputEnabled: 0

additionalIl2CppArgs:
scriptingRuntimeVersion: 1
gcIncremental: 0
assemblyVersionValidation: 1
gcWBarrierValidation: 0
apiCompatibilityLevelPerPlatform:
Standalone: 6

XboxOneCapability: []
XboxOneGameRating: {}
XboxOneIsContentPackage: 0
XboxOneEnhancedXboxCompatibilityMode: 0
XboxOneEnableGPUVariability: 1
XboxOneSockets: {}
XboxOneSplashScreen: {fileID: 0}

XboxOneOverrideIdentityName:
XboxOneOverrideIdentityPublisher:
vrEditorSettings:
daydream:
daydreamIconForeground: {fileID: 0}

4  TestProjects/PerceptionHDRP/ProjectSettings/ProjectVersion.txt


m_EditorVersion: 2019.4.19f1
m_EditorVersionWithRevision: 2019.4.19f1 (ca5b14067cec)
m_EditorVersion: 2019.3.13f1
m_EditorVersionWithRevision: 2019.3.13f1 (d4ddf0d95db9)

559  TestProjects/PerceptionURP/Assets/Scenes/SampleScene.unity


debug:
m_Flags: 0
m_NavMeshData: {fileID: 0}
--- !u!1 &85886256
--- !u!1 &4662619
GameObject:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}

m_Component:
- component: {fileID: 85886258}
- component: {fileID: 85886257}
- component: {fileID: 4662620}
m_Name: Scenario
m_Name: GameObject
--- !u!114 &85886257
MonoBehaviour:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 85886256}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: d50076aff0af4515b4422166496fdd5e, type: 3}
m_Name:
m_EditorClassIdentifier:
m_Randomizers: []
quitOnComplete: 1
serializedConstantsFileName: constants
constants:
totalIterations: 100
instanceCount: 1
instanceIndex: 0
framesPerIteration: 1
references:
version: 1
--- !u!4 &85886258
--- !u!4 &4662620
m_GameObject: {fileID: 85886256}
m_GameObject: {fileID: 4662619}
m_LocalPosition: {x: 107.21, y: 72.77, z: -144.12}
m_LocalScale: {x: 1, y: 1, z: 1}
m_Children: []
m_LocalPosition: {x: -2.7286716, y: -2.380882, z: 5.498973}
m_LocalScale: {x: 36.249973, y: 36.249973, z: 36.249973}
m_Children:
- {fileID: 963194228}
- {fileID: 705507995}
- {fileID: 1640252283}
- {fileID: 464025709}
- {fileID: 411238281}
m_RootOrder: 6
m_RootOrder: 0
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
--- !u!1 &411238276
GameObject:

m_Script: {fileID: 11500000, guid: 8b33f0bc2b78db642a758f07826d0dd0, type: 3}
m_Name:
m_EditorClassIdentifier:
labels:
classes:
useAutoLabeling: 0
autoLabelingSchemeType:
--- !u!65 &411238278
BoxCollider:
m_ObjectHideFlags: 0

m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 411238276}
m_LocalRotation: {x: -0, y: -0, z: -0, w: 1}
m_LocalPosition: {x: 121.42, y: 72.4, z: -161.73}
m_LocalScale: {x: 15, y: 15, z: 15}
m_LocalPosition: {x: 4.3786716, y: 2.380882, z: -4.288973}
m_LocalScale: {x: 1, y: 1, z: 1}
m_Father: {fileID: 0}
m_RootOrder: 0
m_Father: {fileID: 4662620}
m_RootOrder: 4
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
--- !u!114 &411238282
MonoBehaviour:

m_Name:
m_EditorClassIdentifier:
yDegreesPerSecond: 180
--- !u!1 &464025704
GameObject:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
serializedVersion: 6
m_Component:
- component: {fileID: 464025709}
- component: {fileID: 464025708}
- component: {fileID: 464025707}
- component: {fileID: 464025706}
- component: {fileID: 464025705}
- component: {fileID: 464025710}
m_Layer: 0
m_Name: Cube
m_TagString: Untagged
m_Icon: {fileID: 0}
m_NavMeshLayer: 0
m_StaticEditorFlags: 0
m_IsActive: 1
--- !u!114 &464025705
MonoBehaviour:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 464025704}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: 8b33f0bc2b78db642a758f07826d0dd0, type: 3}
m_Name:
m_EditorClassIdentifier:
classes:
- Cube
--- !u!65 &464025706
BoxCollider:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 464025704}
m_Material: {fileID: 0}
m_IsTrigger: 0
m_Enabled: 1
serializedVersion: 2
m_Size: {x: 1, y: 1, z: 1}
m_Center: {x: 0, y: 0, z: 0}
--- !u!23 &464025707
MeshRenderer:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 464025704}
m_Enabled: 1
m_CastShadows: 1
m_ReceiveShadows: 1
m_DynamicOccludee: 1
m_MotionVectors: 1
m_LightProbeUsage: 1
m_ReflectionProbeUsage: 1
m_RayTracingMode: 2
m_RenderingLayerMask: 1
m_RendererPriority: 0
m_Materials:
- {fileID: 2100000, guid: 31321ba15b8f8eb4c954353edc038b1d, type: 2}
m_StaticBatchInfo:
firstSubMesh: 0
subMeshCount: 0
m_StaticBatchRoot: {fileID: 0}
m_ProbeAnchor: {fileID: 0}
m_LightProbeVolumeOverride: {fileID: 0}
m_ScaleInLightmap: 1
m_ReceiveGI: 1
m_PreserveUVs: 0
m_IgnoreNormalsForChartDetection: 0
m_ImportantGI: 0
m_StitchLightmapSeams: 1
m_SelectedEditorRenderState: 3
m_MinimumChartSize: 4
m_AutoUVMaxDistance: 0.5
m_AutoUVMaxAngle: 89
m_LightmapParameters: {fileID: 0}
m_SortingLayerID: 0
m_SortingLayer: 0
m_SortingOrder: 0
--- !u!33 &464025708
MeshFilter:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 464025704}
m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0}
--- !u!4 &464025709
Transform:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 464025704}
m_LocalRotation: {x: -0, y: -0, z: -0, w: 1}
m_LocalPosition: {x: 2.6386716, y: 2.380882, z: -3.9089727}
m_LocalScale: {x: 1, y: 1, z: 1}
m_Children: []
m_Father: {fileID: 4662620}
m_RootOrder: 3
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
--- !u!114 &464025710
MonoBehaviour:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 464025704}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: 673a227032a8e4940b9828c5b6f852ab, type: 3}
m_Name:
m_EditorClassIdentifier:
yDegreesPerSecond: 180
--- !u!1 &705507993
GameObject:
m_ObjectHideFlags: 0

m_Component:
- component: {fileID: 705507995}
- component: {fileID: 705507994}
- component: {fileID: 705507996}
m_Layer: 0
m_Name: Directional Light
m_TagString: Untagged

m_BounceIntensity: 1
m_ColorTemperature: 6570
m_UseColorTemperature: 0
m_BoundingSphereOverride: {x: 0, y: 1.1418e-41, z: 0, w: 0}
m_BoundingSphereOverride: {x: 6.25e-43, y: 1.0156355e+12, z: 6.25e-43, w: 2.8676e-41}
m_UseBoundingSphereOverride: 0
m_ShadowRadius: 0
m_ShadowAngle: 0

m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 705507993}
m_LocalRotation: {x: 0.40821788, y: -0.23456968, z: 0.10938163, w: 0.8754261}
m_LocalPosition: {x: 96.1856, y: 192.67596, z: -193.83864}
m_LocalScale: {x: 36.249973, y: 36.249977, z: 36.249973}
m_LocalPosition: {x: 2.7286716, y: 5.3808823, z: -5.498973}
m_LocalScale: {x: 1, y: 1, z: 1}
m_Father: {fileID: 0}
m_RootOrder: 3
m_Father: {fileID: 4662620}
m_RootOrder: 1
--- !u!1 &934158981
GameObject:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
serializedVersion: 6
m_Component:
- component: {fileID: 934158987}
- component: {fileID: 934158986}
- component: {fileID: 934158985}
- component: {fileID: 934158984}
- component: {fileID: 934158983}
- component: {fileID: 934158982}
m_Layer: 0
m_Name: Cube
m_TagString: Untagged
m_Icon: {fileID: 0}
m_NavMeshLayer: 0
m_StaticEditorFlags: 0
m_IsActive: 1
--- !u!114 &934158982
--- !u!114 &705507996
m_GameObject: {fileID: 934158981}
m_GameObject: {fileID: 705507993}
m_Script: {fileID: 11500000, guid: 673a227032a8e4940b9828c5b6f852ab, type: 3}
m_Script: {fileID: 11500000, guid: 7a68c43fe1f2a47cfa234b5eeaa98012, type: 3}
yDegreesPerSecond: 180
--- !u!114 &934158983
MonoBehaviour:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 934158981}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: 8b33f0bc2b78db642a758f07826d0dd0, type: 3}
m_Name:
m_EditorClassIdentifier:
labels:
- Cube
useAutoLabeling: 0
autoLabelingSchemeType:
--- !u!65 &934158984
BoxCollider:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 934158981}
m_Material: {fileID: 0}
m_IsTrigger: 0
m_Enabled: 1
serializedVersion: 2
m_Size: {x: 1, y: 1, z: 1}
m_Center: {x: 0, y: 0, z: 0}
--- !u!23 &934158985
MeshRenderer:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 934158981}
m_Enabled: 1
m_CastShadows: 1
m_ReceiveShadows: 1
m_DynamicOccludee: 1
m_MotionVectors: 1
m_LightProbeUsage: 1
m_ReflectionProbeUsage: 1
m_RayTracingMode: 2
m_RenderingLayerMask: 1
m_RendererPriority: 0
m_Materials:
- {fileID: 2100000, guid: 31321ba15b8f8eb4c954353edc038b1d, type: 2}
m_StaticBatchInfo:
firstSubMesh: 0
subMeshCount: 0
m_StaticBatchRoot: {fileID: 0}
m_ProbeAnchor: {fileID: 0}
m_LightProbeVolumeOverride: {fileID: 0}
m_ScaleInLightmap: 1
m_ReceiveGI: 1
m_PreserveUVs: 0
m_IgnoreNormalsForChartDetection: 0
m_ImportantGI: 0
m_StitchLightmapSeams: 1
m_SelectedEditorRenderState: 3
m_MinimumChartSize: 4
m_AutoUVMaxDistance: 0.5
m_AutoUVMaxAngle: 89
m_LightmapParameters: {fileID: 0}
m_SortingLayerID: 0
m_SortingLayer: 0
m_SortingOrder: 0
--- !u!33 &934158986
MeshFilter:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 934158981}
m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0}
--- !u!4 &934158987
Transform:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 934158981}
m_LocalRotation: {x: -0, y: -0, z: -0, w: 1}
m_LocalPosition: {x: 107.21, y: 72.77, z: -144.12}
m_LocalScale: {x: 15, y: 15, z: 15}
m_Children: []
m_Father: {fileID: 0}
m_RootOrder: 1
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
--- !u!1 &963194225
GameObject:
m_ObjectHideFlags: 0

serializedVersion: 6
m_Component:
- component: {fileID: 963194228}
- component: {fileID: 963194227}
- component: {fileID: 963194231}
- component: {fileID: 963194229}
- component: {fileID: 963194227}
m_Layer: 0
m_Name: Main Camera
m_TagString: MainCamera

m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 963194225}
m_Enabled: 1
--- !u!20 &963194227
--- !u!114 &963194227
MonoBehaviour:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 963194225}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: a79441f348de89743a2939f4d699eac1, type: 3}
m_Name:
m_EditorClassIdentifier:
m_RenderShadows: 1
m_RequiresDepthTextureOption: 2
m_RequiresOpaqueTextureOption: 2
m_CameraType: 0
m_CameraOutput: 0
m_Cameras: []
m_RendererIndex: -1
m_VolumeLayerMask:
serializedVersion: 2
m_Bits: 1
m_VolumeTrigger: {fileID: 0}
m_RenderPostProcessing: 0
m_Antialiasing: 0
m_AntialiasingQuality: 2
m_StopNaN: 0
m_Dithering: 0
m_RequiresDepthTexture: 0
m_RequiresColorTexture: 0
m_Version: 2
--- !u!4 &963194228
Transform:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 963194225}
m_LocalRotation: {x: -0.17179534, y: 0.30667058, z: -0.056378223, w: -0.93448436}
m_LocalPosition: {x: 5.5378666, y: 3.5565922, z: -7.528791}
m_LocalScale: {x: 1, y: 1, z: 1}
m_Children: []
m_Father: {fileID: 4662620}
m_RootOrder: 0
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
--- !u!20 &963194229
Camera:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}

height: 1
near clip plane: 0.3
far clip plane: 1000
field of view: 59.991566
field of view: 60
m_Depth: -1
m_Depth: 0
m_CullingMask:
serializedVersion: 2
m_Bits: 4294967295

m_TargetEye: 3
m_HDR: 0
m_AllowMSAA: 0
m_HDR: 1
m_AllowMSAA: 1
--- !u!4 &963194228
Transform:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 963194225}
m_LocalRotation: {x: -0.22882307, y: 0.124303445, z: -0.029468497, w: -0.96504945}
m_LocalPosition: {x: 116.52855, y: 91.11087, z: -194.85445}
m_LocalScale: {x: 36.249973, y: 36.249973, z: 36.249973}
m_Children: []
m_Father: {fileID: 0}
m_RootOrder: 4
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
--- !u!114 &963194230
MonoBehaviour:
m_ObjectHideFlags: 0

period: 0.0166
startTime: 0
captureRgbImages: 1
m_Labelers:
- id: 0
- id: 1
- id: 2
- id: 3
showVisualizations: 1
references:
version: 1
00000000:
type: {class: BoundingBox2DLabeler, ns: UnityEngine.Perception.GroundTruth,
asm: Unity.Perception.Runtime}
data:
enabled: 1
annotationId: f9f22e05-443f-4602-a422-ebe4ea9b55cb
idLabelConfig: {fileID: 11400000, guid: cedcacfb1d9beb34fbbb231166c472fe,
type: 2}
00000001:
type: {class: SemanticSegmentationLabeler, ns: UnityEngine.Perception.GroundTruth,
asm: Unity.Perception.Runtime}
data:
enabled: 1
annotationId: 12f94d8d-5425-4deb-9b21-5e53ad957d66
labelConfig: {fileID: 11400000, guid: c140c5aa05dd09e4fadaa26de31b1f39, type: 2}
m_TargetTextureOverride: {fileID: 0}
00000002:
type: {class: ObjectCountLabeler, ns: UnityEngine.Perception.GroundTruth, asm: Unity.Perception.Runtime}
data:
enabled: 1
objectCountMetricId: 51da3c27-369d-4929-aea6-d01614635ce2
m_LabelConfig: {fileID: 11400000, guid: cedcacfb1d9beb34fbbb231166c472fe,
type: 2}
00000003:
type: {class: RenderedObjectInfoLabeler, ns: UnityEngine.Perception.GroundTruth,
asm: Unity.Perception.Runtime}
data:
enabled: 1
objectInfoMetricId: 5ba92024-b3b7-41a7-9d3f-c03a6a8ddd01
idLabelConfig: {fileID: 11400000, guid: cedcacfb1d9beb34fbbb231166c472fe,
type: 2}
--- !u!114 &963194231
MonoBehaviour:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 963194225}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: a79441f348de89743a2939f4d699eac1, type: 3}
m_Name:
m_EditorClassIdentifier:
m_RenderShadows: 1
m_RequiresDepthTextureOption: 2
m_RequiresOpaqueTextureOption: 2
m_CameraType: 0
m_Cameras: []
m_RendererIndex: -1
m_VolumeLayerMask:
serializedVersion: 2
m_Bits: 1
m_VolumeTrigger: {fileID: 0}
m_RenderPostProcessing: 0
m_Antialiasing: 0
m_AntialiasingQuality: 2
m_StopNaN: 0
m_Dithering: 0
m_ClearDepth: 1
m_RequiresDepthTexture: 0
m_RequiresColorTexture: 0
m_Version: 2
produceSegmentationImages: 1
produceObjectCountAnnotations: 1
LabelingConfiguration: {fileID: 11400000, guid: e74234fe725079e4aa7ecd74797ceb79,
type: 2}
--- !u!1 &1640252278
GameObject:
m_ObjectHideFlags: 0

m_Script: {fileID: 11500000, guid: 8b33f0bc2b78db642a758f07826d0dd0, type: 3}
m_Name:
m_EditorClassIdentifier:
labels:
classes:
useAutoLabeling: 0
autoLabelingSchemeType:
--- !u!65 &1640252280
BoxCollider:
m_ObjectHideFlags: 0

m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 1640252278}
m_LocalRotation: {x: -0, y: -0, z: -0, w: 1}
m_LocalPosition: {x: 95.88, y: 73.33, z: -165.54}
m_LocalScale: {x: 15, y: 15, z: 15}
m_LocalPosition: {x: 2.7286716, y: 2.380882, z: -5.498973}
m_LocalScale: {x: 1, y: 1, z: 1}
m_Father: {fileID: 0}
m_Father: {fileID: 4662620}
m_RootOrder: 2
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
--- !u!114 &1640252284

m_Name:
m_EditorClassIdentifier:
yDegreesPerSecond: 180
--- !u!1 &1800622449
GameObject:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
serializedVersion: 6
m_Component:
- component: {fileID: 1800622452}
- component: {fileID: 1800622451}
- component: {fileID: 1800622450}
- component: {fileID: 1800622453}
m_Layer: 0
m_Name: Terrain
m_TagString: Untagged
m_Icon: {fileID: 0}
m_NavMeshLayer: 0
m_StaticEditorFlags: 4294967295
m_IsActive: 1
--- !u!154 &1800622450
TerrainCollider:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 1800622449}
m_Material: {fileID: 0}
m_Enabled: 1
m_TerrainData: {fileID: 15600000, guid: 627ddb42b637b9148bc53c50bf82faff, type: 2}
m_EnableTreeColliders: 1
--- !u!218 &1800622451
Terrain:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 1800622449}
m_Enabled: 1
serializedVersion: 6
m_TerrainData: {fileID: 15600000, guid: 627ddb42b637b9148bc53c50bf82faff, type: 2}
m_TreeDistance: 5000
m_TreeBillboardDistance: 50
m_TreeCrossFadeLength: 5
m_TreeMaximumFullLODCount: 50
m_DetailObjectDistance: 80
m_DetailObjectDensity: 1
m_HeightmapPixelError: 5
m_SplatMapDistance: 1000
m_HeightmapMaximumLOD: 0
m_ShadowCastingMode: 2
m_DrawHeightmap: 1
m_DrawInstanced: 0
m_DrawTreesAndFoliage: 1
m_ReflectionProbeUsage: 1
m_MaterialTemplate: {fileID: 2100000, guid: 594ea882c5a793440b60ff72d896021e, type: 2}
m_BakeLightProbesForTrees: 1
m_PreserveTreePrototypeLayers: 0
m_DeringLightProbesForTrees: 1
m_ScaleInLightmap: 0.0256
m_LightmapParameters: {fileID: 15203, guid: 0000000000000000f000000000000000, type: 0}
m_GroupingID: 0
m_RenderingLayerMask: 1
m_AllowAutoConnect: 1
--- !u!4 &1800622452
Transform:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 1800622449}
m_LocalRotation: {x: 0, y: 0, z: 0, w: 1}
m_LocalPosition: {x: -385, y: 64.6, z: -673}
m_LocalScale: {x: 1, y: 1, z: 1}
m_Children: []
m_Father: {fileID: 0}
m_RootOrder: 5
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
--- !u!114 &1800622453
MonoBehaviour:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 1800622449}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: 8b33f0bc2b78db642a758f07826d0dd0, type: 3}
m_Name:
m_EditorClassIdentifier:
labels:
- Terrain
useAutoLabeling: 0
autoLabelingSchemeType:

3
TestProjects/PerceptionURP/Assets/Settings/ForwardRenderer.asset


m_EditorClassIdentifier:
m_RendererFeatures:
- {fileID: 5741507687788441411}
m_RendererFeatureMap:
postProcessData: {fileID: 11400000, guid: 41439944d30ece34e96484bdb6645b55, type: 2}
shaders:
blitPS: {fileID: 4800000, guid: c17132b1f77d20942aa75f8429c0f8bc, type: 3}

passOperation: 0
failOperation: 0
zFailOperation: 0
m_ShadowTransparentReceive: 1
--- !u!114 &5741507687788441411
MonoBehaviour:
m_ObjectHideFlags: 0

m_Script: {fileID: 11500000, guid: 780f1bb8e775c4245b86116069a82828, type: 3}
m_Name: Segmentation
m_EditorClassIdentifier:
m_Active: 1

2
TestProjects/PerceptionURP/Assets/Settings/ForwardRenderer.asset.meta


fileFormatVersion: 2
guid: 86cfa956de1e1ca4f8d5f126f8f52b35
guid: 4a8e21d5c33334b11b34a596161b9360
NativeFormatImporter:
externalObjects: {}
mainObjectFileID: 0

5
TestProjects/PerceptionURP/Assets/Settings/UniversalRP-HighQuality.asset


m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: bf2edee5c58d82540a51f03df9d42094, type: 3}
m_Name: UniversalRPAsset
m_Name: UniversalRP-HighQuality
m_EditorClassIdentifier:
k_AssetVersion: 5
k_AssetPreviousVersion: 5

- {fileID: 11400000, guid: 86cfa956de1e1ca4f8d5f126f8f52b35, type: 2}
- {fileID: 11400000, guid: 4a8e21d5c33334b11b34a596161b9360, type: 2}
m_DefaultRendererIndex: 0
m_RequireDepthTexture: 0
m_RequireOpaqueTexture: 0

m_SupportsDynamicBatching: 0
m_MixedLightingSupported: 1
m_DebugLevel: 0
m_PostProcessingFeatureSet: 0
m_ColorGradingMode: 0
m_ColorGradingLutSize: 32
m_ShadowType: 1

9
TestProjects/PerceptionURP/Packages/manifest.json


"dependencies": {
"com.unity.collab-proxy": "1.2.16",
"com.unity.ide.rider": "1.1.4",
"com.unity.ide.vscode": "1.2.3",
"com.unity.ide.vscode": "1.2.0",
"com.unity.simulation.capture": "0.0.10-preview.16",
"com.unity.simulation.core": "0.0.10-preview.21",
"com.unity.test-framework": "1.1.19",
"com.unity.simulation.capture": "0.0.10-preview.6",
"com.unity.simulation.core": "0.0.10-preview.8",
"com.unity.test-framework": "1.1.13",
"nuget.moq": "1.0.0",
"com.unity.modules.ai": "1.0.0",
"com.unity.modules.androidjni": "1.0.0",
"com.unity.modules.animation": "1.0.0",

2
TestProjects/PerceptionURP/ProjectSettings/EditorSettings.asset


inProgressEnabled: 1
m_EnableTextureStreamingInEditMode: 1
m_EnableTextureStreamingInPlayMode: 1
m_AsyncShaderCompilation: 0
m_AsyncShaderCompilation: 1
m_EnterPlayModeOptionsEnabled: 0
m_EnterPlayModeOptions: 3
m_ShowLightmapResolutionOverlay: 1

2
TestProjects/PerceptionURP/ProjectSettings/GraphicsSettings.asset


m_PreloadedShaders: []
m_SpritesDefaultMaterial: {fileID: 10754, guid: 0000000000000000f000000000000000,
type: 0}
m_CustomRenderPipeline: {fileID: 11400000, guid: a6b7f179d262ade4fb856c528a71db82,
m_CustomRenderPipeline: {fileID: 11400000, guid: 19ba41d7c0026c3459d37c2fe90c55a0,
type: 2}
m_TransparencySortMode: 0
m_TransparencySortAxis: {x: 0, y: 0, z: 1}

49
TestProjects/PerceptionURP/ProjectSettings/ProjectSettings.asset


defaultCursor: {fileID: 0}
cursorHotspot: {x: 0, y: 0}
m_SplashScreenBackgroundColor: {r: 0.13725491, g: 0.12156863, b: 0.1254902, a: 1}
m_ShowUnitySplashScreen: 0
m_ShowUnitySplashScreen: 1
m_ShowUnitySplashLogo: 1
m_SplashScreenOverlayOpacity: 1
m_SplashScreenAnimation: 1

bakeCollisionMeshes: 0
forceSingleInstance: 0
useFlipModelSwapchain: 1
resizableWindow: 1
resizableWindow: 0
useMacAppStoreValidation: 0
macAppStoreCategory: public.app-category.games
gpuSkinning: 1

xboxEnableFitness: 0
visibleInBackground: 1
allowFullscreenSwitch: 1
fullscreenMode: 3
fullscreenMode: 1
xboxSpeechDB: 0
xboxEnableHeadOrientation: 0
xboxEnableGuest: 0

switchNVNShaderPoolsGranularity: 33554432
switchNVNDefaultPoolsGranularity: 16777216
switchNVNOtherPoolsGranularity: 16777216
switchNVNMaxPublicTextureIDCount: 0
switchNVNMaxPublicSamplerIDCount: 0
stadiaPresentMode: 0
stadiaTargetFramerate: 0
vulkanEnableLateAcquireNextImage: 0
m_SupportedAspectRatios:
4:3: 1
5:4: 1

AndroidMinSdkVersion: 19
AndroidTargetSdkVersion: 0
AndroidPreferredInstallLocation: 1
aotOptions: nimt-trampolines=1024
aotOptions:
stripEngineCode: 1
iPhoneStrippingLevel: 0
iPhoneScriptCallOptimization: 0

uIStatusBarHidden: 1
uIExitOnSuspend: 0
uIStatusBarStyle: 0
iPhoneSplashScreen: {fileID: 0}
iPhoneHighResSplashScreen: {fileID: 0}
iPhoneTallHighResSplashScreen: {fileID: 0}
iPhone47inSplashScreen: {fileID: 0}
iPhone55inPortraitSplashScreen: {fileID: 0}
iPhone55inLandscapeSplashScreen: {fileID: 0}
iPhone58inPortraitSplashScreen: {fileID: 0}
iPhone58inLandscapeSplashScreen: {fileID: 0}
iPadPortraitSplashScreen: {fileID: 0}
iPadHighResPortraitSplashScreen: {fileID: 0}
iPadLandscapeSplashScreen: {fileID: 0}
iPadHighResLandscapeSplashScreen: {fileID: 0}
iPhone65inPortraitSplashScreen: {fileID: 0}
iPhone65inLandscapeSplashScreen: {fileID: 0}
iPhone61inPortraitSplashScreen: {fileID: 0}
iPhone61inLandscapeSplashScreen: {fileID: 0}
appleTVSplashScreen: {fileID: 0}
appleTVSplashScreen2x: {fileID: 0}
tvOSSmallIconLayers: []

metalEditorSupport: 1
metalAPIValidation: 1
iOSRenderExtraFrameOnPause: 0
iosCopyPluginsCodeInsteadOfSymlink: 0
appleDeveloperTeamID:
iOSManualSigningProvisioningProfileID:
tvOSManualSigningProvisioningProfileID:

m_Automatic: 0
- m_BuildTarget: WebGLSupport
m_APIs: 0b000000
m_Automatic: 0
- m_BuildTarget: WindowsStandaloneSupport
m_APIs: 02000000
m_Automatic: 0
m_BuildTargetVRSettings: []
openGLRequireES31: 0

ps4ShareFilePath:
ps4ShareOverlayImagePath:
ps4PrivacyGuardImagePath:
ps4ExtraSceSysFile:
ps4NPtitleDatPath:
ps4RemotePlayKeyAssignment: -1
ps4RemotePlayKeyMappingDir:

ps4UseResolutionFallback: 0
ps4ReprojectionSupport: 0
ps4UseAudio3dBackend: 0
ps4UseLowGarlicFragmentationMode: 1
ps4SocialScreenEnabled: 0
ps4ScriptOptimizationLevel: 0
ps4Audio3dVirtualSpeakerCount: 14

ps4disableAutoHideSplash: 0
ps4videoRecordingFeaturesUsed: 0
ps4contentSearchFeaturesUsed: 0
ps4CompatibilityPS5: 0
ps4GPU800MHz: 1
ps4attribEyeToEyeDistanceSettingVR: 0
ps4IncludedModules: []
ps4attribVROutputEnabled: 0

webGLWasmStreaming: 0
scriptingDefineSymbols: {}
platformArchitecture: {}
scriptingBackend:
Standalone: 0
scriptingBackend: {}
il2cppCompilerConfiguration: {}
managedStrippingLevel: {}
incrementalIl2cppBuild: {}

gcIncremental: 0
assemblyVersionValidation: 1
gcWBarrierValidation: 0
apiCompatibilityLevelPerPlatform: {}
m_RenderingPath: 1

XboxOneCapability: []
XboxOneGameRating: {}
XboxOneIsContentPackage: 0
XboxOneEnhancedXboxCompatibilityMode: 0
XboxOneEnableGPUVariability: 1
XboxOneSockets: {}
XboxOneSplashScreen: {fileID: 0}

XboxOneOverrideIdentityName:
XboxOneOverrideIdentityPublisher:
vrEditorSettings:
daydream:
daydreamIconForeground: {fileID: 0}

m_VersionCode: 1
m_VersionName:
apiCompatibilityLevel: 6
cloudProjectId: 7f560aa0-0da3-47a1-88e7-3884d7922bdd
cloudProjectId:
projectName: New Unity Project
organizationId: jonhunity
projectName:
organizationId:
cloudEnabled: 0
enableNativePlatformBackendsForNewInputSystem: 0
disableOldInputManagerSupport: 0

4
TestProjects/PerceptionURP/ProjectSettings/UnityConnectSettings.asset


UnityConnectSettings:
m_ObjectHideFlags: 0
serializedVersion: 1
m_Enabled: 1
m_Enabled: 0
m_TestMode: 0
m_EventOldUrl: https://api.uca.cloud.unity3d.com/v1/events
m_EventUrl: https://cdp.cloud.unity3d.com/v1/events

m_EventUrl: https://perf-events.cloud.unity3d.com
m_Enabled: 1
m_Enabled: 0
m_LogBufferSize: 10
m_CaptureEditorExceptions: 1
UnityPurchasingSettings:

4
TestProjects/PerceptionURP/ProjectSettings/ProjectVersion.txt


m_EditorVersion: 2019.4.19f1
m_EditorVersionWithRevision: 2019.4.19f1 (ca5b14067cec)
m_EditorVersion: 2019.3.13f1
m_EditorVersionWithRevision: 2019.3.13f1 (d4ddf0d95db9)

414
com.unity.perception/CHANGELOG.md


# Changelog
## Unreleased
### Upgrade Notes
### Known Issues
### Added
Added support for the 'step' button in the editor.
Added a random seed field to the Run in Unity Simulation window.
Users can now choose the base folder location in which to store their generated data.
### Changed
Increased color variety in instance segmentation images
The PoissonDiskSampling utility now samples a larger region of points to then crop to size of the intended region to prevent edge case bias.
Upgraded capture package dependency to 0.0.10-preview.22 to fix an issue with URP where post processing effects were not included when capturing images.
Changed the JSON serialization key of the Normal Sampler's standard deviation property from "standardDeviation" to "stddev". Scenario JSON configurations that were generated using previous versions will need to be manually updated to reflect this change.
### Deprecated
### Removed
### Fixed
Fixed keypoint labeling bug when visualizations are disabled.
Fixed an issue where Simulation Delta Time values larger than 100 seconds (in Perception Camera) would cause incorrect capture scheduling behavior.
Fixed an issue where Categorical Parameters sometimes tried to fetch items at `i = categories.Count`, which caused an exception.
## [0.8.0-preview.3] - 2021-03-24
### Changed
Expanded documentation on the Keypoint Labeler
Updated Keypoint Labeler logic to only report keypoints for visible objects by default
Increased color variety in instance segmentation images
### Fixed
Fixed compiler warnings in projects with HDRP on 2020.1 and later
Fixed a bug in the Normal Sampler where it would return values less than the passed in minimum value, or greater than the passed in maximum value, for random values very close to 0 or 1 respectively.
## [0.8.0-preview.2] - 2021-03-15
### Upgrade Notes
All appearances of the term `KeyPoint` have been renamed to `Keypoint`. If you have code that relies on any renamed types or names, make sure to alter your code to reflect the new names.
`ScenarioBase`'s `Awake()`, `Start()`, and `Update()` functions are now private. If you previously used these, replace the usages with `OnAwake()`, `OnStart()`, and `OnUpdate()`.
The interface `IGroundTruthGenerator` now contains a new method named `ClearMaterialProperties` for disabling ground truth generation on a `Labeling` component or its associated `MaterialPropertyBlock`. Update your implementing classes to include this method.
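As a minimal sketch of the second note above (not taken from the package documentation), a custom scenario that previously overrode `Awake()`, `Start()`, or `Update()` would now override the new hooks instead. `MyScenario` is a hypothetical class, and this assumes the stock `FixedLengthScenario` can be subclassed and that the hooks are declared as `protected virtual` methods:
```
using UnityEngine.Perception.Randomization.Scenarios;

// Hypothetical scenario illustrating the lifecycle hook migration.
public class MyScenario : FixedLengthScenario
{
    // Previously: void Awake()
    protected override void OnAwake()
    {
        // one-time setup
    }

    // Previously: void Update()
    protected override void OnUpdate()
    {
        // per-frame logic
    }
}
```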
### Known Issues
### Added
Added error message when missing Randomizer scripts are detected.
Scenario serialization has been updated to include scalar values on Randomizers and Parameters.
Added new `ScenarioBase` virtual lifecycle hooks: `OnAwake()`, `OnStart()`, `OnUpdate()`, `OnComplete()`, and `OnIdle()`.
Keypoint occlusion has been added. No keypoint information will be recorded for a labeled asset completely out of the camera's frustum.
New keypoint tests have been added to test keypoint states.
The colors of keypoints and connections are now reported in the annotation definition JSON file for keypoint templates.
The `PerceptionScenario` abstract class has been added to abstract perception data capture specific functionality from the vanilla Scenario lifecycle.
The newly added `LabelManager` class now enables custom Labelers to access the list of registered `Labeling` Components present in the Scene.
Improved UI for `KeypointTemplate` and added useful default colors for keypoint and skeleton definitions.
Added the ability to switch ground truth generation on or off for an object at runtime by enabling or disabling its `Labeling` component. A new method named `ClearMaterialProperties()` in `IGroundTruthGenerator` handles this functionality.
### Changed
Renamed all appearances of the term `KeyPoint` within types and names to `Keypoint`.
ScenarioBase's `Awake()`, `Start()`, and `Update()` methods are now private. The newly added virtual lifecycle hooks are to be used as replacements.
Improved _Run in Unity Simulation_ window UI.
The _Run in Unity Simulation_ window now accepts an optional Scenario JSON configuration to override existing Scenario editor UI settings.
The `GetRandomizer()` and `CreateRandomizer()` methods of `ScenarioBase` have been augmented or replaced with more generic list index style accessors.
The Scenario inspector buttons for serialization and deserialization have been refactored to open a file explorer so that the user can choose where to save the generated JSON configuration or which file to import a configuration from.
RandomizerTags now use `OnEnable()` and `OnDisable()` to manage their lifecycle. This allows the user to toggle them on and off in the editor.
Upgraded `com.unity.simulation.capture` package dependency to integrate new changes that prevent the API updater from looping infinitely when opening the project settings window on new URP projects.
`CameraLabeler` methods `OnBeginRendering()` and `OnEndRendering()` now have an added `ScriptableRenderContext` parameter.
### Deprecated
The Randomizer methods `OnCreate()`, `OnStartRunning()`, and `OnStopRunning()` are now deprecated and have been replaced with `OnAwake()`, `OnEnable()` and `OnDisable()` respectively, so as to better reflect the existing MonoBehaviour lifecycle methods.
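A hedged sketch of the corresponding rename in a user-defined Randomizer follows; `MyRotationRandomizer` is hypothetical, and the hooks are assumed to remain `protected` overrides:
```
using System;
using UnityEngine.Perception.Randomization.Randomizers;

// Hypothetical randomizer showing the old-to-new lifecycle method names.
[Serializable]
public class MyRotationRandomizer : Randomizer
{
    // Previously OnCreate()
    protected override void OnAwake() { /* one-time initialization */ }

    // Previously OnStartRunning()
    protected override void OnEnable() { /* the scenario started running this randomizer */ }

    // Previously OnStopRunning()
    protected override void OnDisable() { /* the scenario stopped running this randomizer */ }
}
```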
### Removed
Removed the Entities package dependency.
### Fixed
Fixed a null reference error that appeared when adding options to Categorical Parameters.
Fixed ground truth not properly being produced when there are other disabled PerceptionCameras present. Note: this does not yet add support for multiple enabled PerceptionCameras.
Fixed an exception when rendering inspector for Randomizers with private serialized fields.
Fixed an issue preventing the user from adding more options to a Categorical Parameter's list of options with the _Add Folder_ button. _Add Folder_ now correctly appends the contents of the new folder to the existing list.
Fixed a bug where uniform probabilities were not properly reset upon adding or removing options from a Categorical Parameter's list of options.
Fixed keypoints being reported in wrong locations on the first frame in which an object is visible.
Fixed an out of range error that occurred when a keypoint template skeleton relied on a joint that was not available.
Fixed wrong labels on 2d bounding boxes when all labeled objects are deleted in a frame.
## [0.7.0-preview.2] - 2021-02-08
### Upgrade Notes
### Known Issues
### Added
Added Register() and Unregister() methods to the RandomizerTag API so users can implement RandomizerTag compatible GameObject caching
### Changed
Switched accessibility of scenario MonoBehaviour lifecycle functions (Awake, Start, Update) from private to protected to enable users to define their own overrides when deriving the Scenario class.
The GameObjectOneWayCache has been made public for users to cache GameObjects within their own custom Randomizers.
### Deprecated
### Removed
### Fixed
Fixed the math used to offset the iteration index of each Unity Simulation instance directly after it deserializes its app-params.
The RandomizerTagManager now uses a LinkedHashSet data structure to register tags to preserve insertion order determinism in Unity Simulation.
GameObjectOneWayCache now correctly registers and unregisters RandomizerTags on cached GameObjects.
## [0.7.0-preview.1] - 2021-02-01
### Upgrade Notes
#### Randomization Namespace Change
The Randomization toolset has been moved out of the Experimental namespace. After upgrading to this version of the Perception package, please follow these steps:
* Replace all references to `UnityEngine.Experimental.Perception.Randomization` with `UnityEngine.Perception.Randomization` in your C# code.
* Open your Unity Scene file in a text editor and replace all mentions of `UnityEngine.Experimental.Perception.Randomization` with `UnityEngine.Perception.Randomization`, and save the file.
#### Random Seed Generation
Replace usages of `ScenarioBase.GenerateRandomSeed()` with `SamplerState.NextRandomState()` in your custom Randomizer code.
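For example, a custom Randomizer that previously obtained seeds from the scenario could be updated along these lines. This is a sketch under the assumption that `SamplerState` lives in the `Randomization.Samplers` namespace and that `OnIterationStart()` is an overridable Randomizer method; `MySeedConsumer` is hypothetical:
```
using UnityEngine.Perception.Randomization.Randomizers;
using UnityEngine.Perception.Randomization.Samplers;

// Hypothetical randomizer showing the seed-generation migration.
public class MySeedConsumer : Randomizer
{
    protected override void OnIterationStart()
    {
        // Before: a seed was obtained from ScenarioBase.GenerateRandomSeed().
        var seed = SamplerState.NextRandomState();

        // SamplerState.CreateGenerator() (also mentioned in this changelog) can be used
        // when a ready-made random number generator is preferred over a raw seed.
        var rng = SamplerState.CreateGenerator();
        var value = rng.NextFloat(); // drive custom sampling from the shared random state
    }
}
```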
#### Sampler Ranges
Before upgrading a project to this version of the Perception package, make sure to keep a record of **all sampler ranges** in your added Randomizers. Due to a change in how sampler ranges are serialized, **after upgrading to this version, ranges for all stock Perception samplers (Uniform and Normal Samplers) will be reset**, and will need to be manually reverted by the user.
#### Tag Querying
The `RandomizerTagManager.Query<T>` function now returns the tag object itself instead of the GameObject it is attached to. You will need to slightly modify your custom Randomizers to accommodate this change. Please refer to the included sample Randomizers as examples.
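A sketch of how a custom Randomizer's query loop changes is shown below; `MyTag` and `MyPlacementRandomizer` are hypothetical, and this assumes the `Randomizer` base class exposes the tag manager via a `tagManager` property:
```
using UnityEngine.Perception.Randomization.Randomizers;

// Hypothetical tag and randomizer illustrating the Query<T> return type change.
public class MyTag : RandomizerTag { }

public class MyPlacementRandomizer : Randomizer
{
    protected override void OnIterationStart()
    {
        // Query<T>() now yields the tag components themselves rather than GameObjects,
        // so the GameObject is reached through the tag.
        foreach (var tag in tagManager.Query<MyTag>())
        {
            var taggedObject = tag.gameObject;
            // ... randomize taggedObject ...
        }
    }
}
```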
### Known Issues
The bounding box 3D labeler does not work with labeled assets that utilize a skinned mesh renderer. These are commonly used with animated models.
### Added
Added keypoint ground truth labeling
Added animation randomization
Added ScenarioConstants base class for all scenario constants objects
Added ScenarioBase.SerializeToConfigFile()
Randomizer tags now support inheritance
Added AnimationCurveSampler, which returns random values according to a range and probability distribution denoted by a user provided AnimationCurve.
Added ParameterUIElementsEditor class to allow custom ScriptableObjects and MonoBehaviours to render Parameter and Sampler typed public fields correctly in their inspector windows.
Added new capture options to Perception Camera:
* Can now render intermediate frames between captures.
* Capture can now be triggered manually using a function call, instead of automatic capturing on a schedule.
Added 3D bounding box visualizer
Categorical Parameters will now validate that their specified options are unique at runtime.
### Changed
Randomizers now access their parent scenario through the static activeScenario property.
Unique seeds per Sampler have been replaced with one global random seed configured via the ScenarioConstants of a Scenario
Samplers now derive their random state from the static SamplerState class instead of individual scenarios to allow parameters and samplers to be used outside of the context of a scenario
Replaced ScenarioBase.GenerateRandomSeed() with SamplerState.NextRandomState() and SamplerState.CreateGenerator()
ScenarioBase.Serialize() now directly returns the serialized scenario configuration JSON string instead of writing directly to a file (use SerializeToConfigFile() instead)
ScenarioBase.Serialize() now not only serializes scenario constants, but also all sampler member fields on randomizers attached to the scenario
RandomizerTagManager.Query<T>() now returns RandomizerTags directly instead of the GameObjects attached to said tags
Semantic Segmentation Labeler now places data in folders with randomized filenames.
The uniform toggle on Categorical Parameters will now reset the Parameter's probability weights to be uniform.
Reorganized Perception MonoBehaviour paths within the AddComponentMenu.
Upgraded the Unity Simulation Capture package dependency to 0.0.10-preview.18 and Unity Simulation Core to 0.0.10-preview.22
### Deprecated
### Removed
Removed ScenarioBase.GenerateRandomSeedFromIndex()
Removed native sampling (through jobs) capability from all samplers and parameters as it introduced additional complexity to the code and was not a common usage pattern
Removed `range` as a required ISampler interface property.
Removed randomization tooling from the "Experimental" namespace
### Fixed
Fixed an issue where the overlay panel would display a full screen semi-transparent image over the entire screen when the overlay panel is disabled in the UI
Fixed a bug in instance segmentation labeler that erroneously logged that object ID 255 was not supported
Fixed the simulation stopping while the editor/player is not focused
Fixed memory leak or crash occurring at the end of long simulations when using BackgroundObjectPlacementRandomizer or ForegroundObjectPlacementRandomizer
Randomizer.OnCreate() is no longer called in edit-mode when adding a randomizer to a scenario
Fixed a bug where removing all randomizers from a scenario caused the randomizer container UI element to overflow over the end of Scenario component UI
Semantic Segmentation Labeler now produces output in the proper form for distributed data generation on Unity Simulation by placing output in randomized directory names
Texture Randomizer is now compatible with HDRP.
Categorical Parameters no longer produce errors when deleting items from long options lists.
Parameter, ISampler, and non-generic Sampler class UIs now render properly in MonoBehaviours and ScriptableObjects.
Fixed an issue in the Perception tutorial sample assets where, when the editor was first opened and the user generated a dataset by clicking the play button, the first generated image had duplicated textures and hue offsets for all background objects. Enabling the "GPU instancing" option in the tutorial's sample material's inspector fixed this issue.
## [0.6.0-preview.1] - 2020-12-03
## [Unreleased]
Added support for labeling Terrain objects. Trees and details are not labeled but will occlude other objects.
Added analytics for Unity Simulation runs
Added instance segmentation labeler.
Added support for full screen visual overlays and overlay manager.
All-new editor interface for the Labeling component and Label Configuration assets. The new UI improves upon various parts of the label specification and configuration workflow, making it more efficient and less error-prone to set up a new Perception project.
Added Assets->Perception menu for current and future asset preparation and validation tools. Currently contains one function which lets the user create prefabs out of multiple selected models with one click, removing the need for going through all models individually.
Updated dependencies to com.unity.simulation.capture:0.0.10-preview.14, com.unity.simulation.core:0.0.10-preview.20, and com.unity.burst:1.3.9.
Changed InstanceSegmentationImageReadback event to provide a NativeArray\<Color32\> instead of NativeArray\<uint\>.
Expanded all Unity Simulation references from USim to Unity Simulation.
Uniform and Normal samplers now serialize their random seeds.
The ScenarioBase's GenerateIterativeRandomSeed() method has been renamed to GenerateRandomSeedFromIndex().
### Deprecated
### Removed

UnitySimulationScenario now correctly deserializes app-params before offsetting the current scenario iteration when executing on Unity Simulation.
Fixed Unity Simulation nodes generating one extra empty image before generating their share of the randomization scenario iterations.
Fixed enumeration in the CategoricalParameter.categories property.
The GenerateRandomSeedFromIndex method now correctly hashes the current scenario iteration into the random seed it generates.
Corrupted .meta files have been rebuilt and replaced.
The Randomizer list inspector UI now updates appropriately when a user clicks undo.
## [0.5.0-preview.1] - 2020-10-14
### Known Issues
Creating a new 2020.1.x project and adding the perception package to the project causes a memory error that is a [known issue in 2020.1 editors](https://issuetracker.unity3d.com/issues/wild-memory-leaks-leading-to-stackallocator-walkallocations-crashes). Users can remedy this issue by closing and reopening the editor.
### Added
Added Randomizers and RandomizerTags
Added support for generating 3D bounding box ground truth data
### Changed
### Deprecated
### Removed
Removed ParameterConfigurations (replaced with Randomizers)
### Fixed
Fixed visualization issue where the object count and pixel count labelers showed stale values
Fixed visualization issue where HUD entry labels could be too long and take up the entire panel
## [0.4.0-preview.1] - 2020-08-07
### Added
Added new experimental randomization tools
Added support for 2020.1
Added Labeling.RefreshLabeling(), which can be used to update ground truth generators after the list of labels or the renderers is changed
Added support for renderers with MaterialPropertyBlocks assigned to individual materials
### Changed
Changed the way realtime visualizers are rendered to avoid rendering conflicts
Changed default labeler ids to be lower-case to be consistent with the ids in the dataset
Switched to latest versions of com.unity.simulation.core and com.unity.simulation.capture
### Deprecated
### Removed
### Fixed
Fixed 2d bounding boxes being reported for objects that do not match the label config.
Fixed a categorical parameter UI error in which deleting an individual option would remove the option from the UI but serialize it as null instead of removing it
Fixed the "Application Frequency" parameter UI field not initializing to a default value
Fixed the IterateSeed() method where certain combinations of indices and random seeds would produce a random state value of zero, causing Unity.Mathematics.Random to throw an exception
Fixed labeler editor to allow for editing multiple labelers at a time
Fixed labeler editor to ensure that when duplicating prefabs all labeler entries are also duplicated
Fixed colors in semantic segmentation images being darker than those specified in the label config
Fixed objects being incorrectly labeled when they do not match any entries in the label config
Fixed lens distortion in URP and HDRP so that it is now applied to ground truth
## [0.3.0-preview.1] - 2020-08-07
### Added
Added realtime visualization capability to the perception package.
Added visualizers for built-in labelers: Semantic Segmentation, 2D Bounding Boxes, Object Count, and Rendered Object Info.
Added references to example projects in manual.
Added notification when an HDRP project is in Deferred Only mode, which is not supported by the labelers.
### Changed
Updated to com.unity.simulation.capture version 0.0.10-preview.10 and com.unity.simulation.core version 0.0.10-preview.17
Changed minimum Unity Editor version to 2019.4
### Fixed
Fixed compilation warnings with latest com.unity.simulation.core package.
Fixed errors in example script when exiting play mode
## [0.2.0-preview.2] - 2020-07-15
### Fixed
Fixed bug that prevented RGB captures from being written out to disk
Fixed compatibility with com.unity.simulation.capture@0.0.10-preview.8
## [0.2.0-preview.1] - 2020-07-02
### Added
Added CameraLabeler, an extensible base type for all forms of dataset output from a camera.
Added LabelConfig\<T\>, a base class for mapping labels to data used by a labeler. There are two new derived types - ID label config and semantic segmentation label config.
### Changed
Moved the various forms of ground truth from PerceptionCamera into various subclasses of CameraLabeler.
Renamed SimulationManager to DatasetCapture.
Changed Semantic Segmentation to take a SemanticSegmentationLabelConfig, which maps labels to color pixel values.
## [0.1.0] - 2020-06-24
### This is the first release of the _Perception_ package

173
com.unity.perception/Documentation~/Schema/Synthetic_Dataset_Schema.md


y: <float> -- y coordinate of the upper left corner.
width: <float> -- number of pixels in the x direction
height: <float> -- number of pixels in the y direction
}
##### instance segmentation - color image
A color png file that stores instance ids as a color value per pixel. The png files are located in the "filename" location.
<!-- Not yet implemented annotations
##### instance segmentation - polygon
A json object that stores collections of polygons. Each polygon record maps a tuple of (instance, label) to a list of
K pixel coordinates that forms a polygon. This object can be directly stored in annotation.values
instance_segmentation {
instance_id: <int> -- The instance ID of the labeled object
color { -- The pixel color that correlates with the instance ID
r: <int> -- The red value of the pixel between 0 and 255
g: <int> -- The green value of the pixel between 0 and 255
b: <int> -- The blue value of the pixel between 0 and 255
a: <int> -- The alpha value of the pixel between 0 and 255
}
semantic_segmentation_polygon {
label_id: <int> -- Integer identifier of the label
label_name: <str> -- String identifier of the label
instance_id: <str> -- UUID of the instance.
polygon: [<int, int>,...] -- List of points in pixel coordinates of the outer edge. Connecting these points in order should create a polygon that identifies the object.
-->
##### 3D bounding box - json file
A json file that stores 3D bounding box information. Unlike 2D bounding boxes, 3D bounding box coordinates are captured in the **sensor coordinate system**.
<!-- Not yet implemented annotations
##### 3D bounding box
A json file that stores collections of 3D bounding boxes.
All location data is given with respect to the **sensor coordinate system**.
label_id: <int> -- Integer identifier of the label
label_name: <str> -- String identifier of the label
instance_id: <str> -- UUID of the instance.
translation { -- 3d bounding box's center location in meters with respect to the sensor's coordinate system
x: <float> -- The x coordinate
y: <float> -- The y coordinate
z: <float> -- The z coordinate
}
size { -- 3d bounding box size in meters
x: <float> -- The x coordinate
y: <float> -- The y coordinate
z: <float> -- The z coordinate
}
rotation { -- 3d bounding box orientation as quaternion: w, x, y, z.
x: <float> -- The x coordinate
y: <float> -- The y coordinate
z: <float> -- The z coordinate
w: <float> -- The w coordinate
}
velocity { -- [Optional] 3d bounding box velocity in meters per second.
x: <float> -- The x coordinate
y: <float> -- The y coordinate
z: <float> -- The z coordinate
}
acceleration { -- [Optional] 3d bounding box acceleration in meters per second^2.
x: <float> -- The x coordinate
y: <float> -- The y coordinate
z: <float> -- The z coordinate
}
label_id: <int> -- Integer identifier of the label
label_name: <str> -- String identifier of the label
instance_id: <str> -- UUID of the instance.
translation: <float, float, float> -- 3d bounding box's center location in meters as center_x, center_y, center_z with respect to global coordinate system.
size: <float, float, float> -- 3d bounding box size in meters as width, length, height.
rotation: <float, float, float, float> -- 3d bounding box orientation as quaternion: w, x, y, z.
velocity: <float, float, float> -- 3d bounding box velocity in meters per second as v_x, v_y, v_z.
acceleration: <float, float, float> [optional] -- 3d bounding box acceleration in meters per second^2 as a_x, a_y, a_z.
##### Keypoints - json file
#### instances (V2, WIP)
Although we don't have a specific table that accounts for instances, it should be noted that instances should be checked against the following cases:
* Consider cases for object tracking
* Consider cases not used for object tracking, so that instances do not need to be consistent across different captures/annotations.
How to support instance segmentation (maybe we need to use polygon instead of pixel color)
* Stored in values of annotation and metric values
Keypoint data, commonly used for human pose estimation. A keypoint capture is associated with a template that defines the keypoints (see annotation.definition file).
Each keypoint record maps a tuple of (instance, label) to a template, pose, and an array of keypoints. A keypoint entry will exist in this record for each keypoint defined in the template file.
If a given keypoint doesn't exist in the labeled GameObject, that keypoint will have a state value of 0; if it exists but is not visible, it will have a state value of 1;
and if it exists and is visible, it will have a state value of 2.
```
keypoints {
label_id: <int> -- Integer identifier of the label
instance_id: <str> -- UUID of the instance.
template_guid: <str> -- UUID of the keypoint template
pose: <str> -- Pose ground truth information
keypoints [ -- Array of keypoint data, one entry for each keypoint defined in associated template file.
{
index: <int> -- Index of keypoint in template
x: <float> -- X pixel coordinate of keypoint
y: <float> -- Y pixel coordinate of keypoint
state: <int> -- 0: keypoint does not exist, 1 keypoint exists but is not visible, 2 keypoint exists and is visible
}, ...
]
}
```
##### instance segmentation file - grayscale image (V2)
A grayscale PNG file that stores integer values of labeled instances at each pixel.
![image alt text](image_4.png)
-->
### metrics

Typically, the `spec` key describes all label_id and label_name values used by the annotation.
Some special cases, like semantic segmentation, might assign additional values (e.g. pixel value) to record the mapping between label_id/label_name and pixel color in the annotated PNG files.
##### annotation definition header
id: <int> -- Integer identifier of the annotation definition.
name: <str> -- Human readable annotation spec name (e.g. semantic_segmentation, instance_segmentation, etc.)
description: <str> -- [Optional] Description of this annotation specifications.
format: <str> -- The format of the annotation files. (e.g. png, json, etc.)
spec: [<obj>...] -- Format-specific specification for the annotation values (ex. label-value mappings for semantic segmentation images)
}
```
##### semantic segmentation
Annotation spec for the semantic [segmentation labeler](#semantic-segmantation---grayscale-image)
```
annotation_definition.spec {
label_id: <int> -- Integer identifier of the label
label_name: <str> -- String identifier of the label
pixel_value: <int> -- Grayscale pixel value
color_pixel_value: <int, int, int> -- [Optional] Color pixel value
id: <int> -- Integer identifier of the annotation definition.
name: <str> -- Human readable annotation spec name (e.g. semantic_segmentation, instance_segmentation, etc.)
description: <str, optional> -- Description of this annotation specifications.
format: <str> -- The format of the annotation files. (e.g. png, json, etc.)
spec: [<obj>...] -- Format-specific specification for the annotation values (ex. label-value mappings for semantic segmentation images)
```
##### label enumeration spec
This spec is used for annotations like [bounding box 2d](#2d-bounding-box). This might be a subset of all labels used in simulation.
```
# semantic segmentation
label_id: <int> -- Integer identifier of the label
label_name: <str> -- String identifier of the label
label_id: <int> -- Integer identifier of the label
label_name: <str> -- String identifier of the label
pixel_value: <int> -- Grayscale pixel value
color_pixel_value: <int, int, int> [optional] -- Color pixel value
```
##### keypoint template
Keypoint templates are used to define the keypoints and skeletal connections captured by the [keypoint labeler](#keypoints).
```
# label enumeration spec, used for annotations like bounding box 2d. This might be a subset of all labels used in simulation.
template_id: <str> -- The UUID of the template
template_name: <str> -- Human readable name of the template
key_points [ -- Array of joints defined in this template
{
label: <str> -- The label of the joint
index: <int> -- The index of the joint
color { -- [Optional] The color to use for the visualization of the keypoint
r: <float> -- Value from 0 to 1 for the red channel
g: <float> -- Value from 0 to 1 for the green channel
b: <float> -- Value from 0 to 1 for the blue channel
a: <float> -- Value from 0 to 1 for the alpha channel
}
}, ...
]
skeleton [ -- Array of skeletal connections (which joints have connections between one another) defined in this template
{
joint1: <int> -- The first joint of the connection
joint2: <int> -- The second joint of the connection
color { -- [Optional] The color to use for the visualization of the bone
r: <float> -- Value from 0 to 1 for the red channel
g: <float> -- Value from 0 to 1 for the green channel
b: <float> -- Value from 0 to 1 for the blue channel
a: <float> -- Value from 0 to 1 for the alpha channel
}
}, ...
]
label_id: <int> -- Integer identifier of the label
label_name: <str> -- String identifier of the label
}
```

35
com.unity.perception/Documentation~/SetupSteps.md


# Installing the Perception package in your project
# Setup for local development
* Clone the [Perception](https://github.com/Unity-Technologies/com.unity.perception) repository
* Install and use Unity latest [2019.3 Unity editor](https://unity.com/releases/2019-3)
![ReleaseBadge](https://badge-proxy.cds.internal.unity3d.com/5ab9a162-9dd0-4ba1-ba41-cf25378a927a)
## Setting up a Project
Below are two options for getting started with the Perception package. Option 1 is opening the existing test projects in the repository. Option 2 is creating a new Unity project and integrating the Perception package.
This page provides brief instructions on installing the Perception package. Head over to the [Perception Tutorial](Tutorial/TUTORIAL.md) for more detailed instructions and steps for building a sample project.
### Option 1: PerceptionHDRP & PerceptionURP Projects
The repository includes two projects for local development in the `TestProjects` folder, one set up for HDRP and the other for URP. You can open these with the Unity
editor you installed in the setup instructions.
<img src="images/TestProjects.PNG" align="middle"/>
1. Install the latest version of **2020.2.x** Unity Editor from [here](https://unity3d.com/get-unity/download/archive). (The Perception package has not been fully tested on newer Unity versions)
1. Create a new HDRP or URP project, or open an existing project.
1. Open `Window` -> `Package Manager`
1. In the Package Manager window, find and click the ***+*** button in the upper left-hand corner of the window
1. Select ***Add package from git URL...***
1. Enter `com.unity.perception` and click ***Add***
### Option 2: Create a new Project
This option walks through creating a new project and then adding the Perception SDK package to it for development use.
*The following instructions reference the Unity doc's page on [installing a local package](https://docs.unity3d.com/Manual/upm-ui-local.html)*
Note that although the Perception package is compatible with both URP and HDRP, Unity Simulation currently only supports URP projects; therefore, a URP project is recommended.
If you want a specific version of the package, append the version to the end of the "git URL". Ex. `com.unity.perception@0.8.0-preview.1`
#### Create a new project
1. Create a new HDRP project or open an existing project
1. Creating a new HDRP project can be done by creating a new project using the HDRP template
2. Back in the Unity editor, go to Window -> Package Manager
1. Add the High Definition RP package, version 7.1.2 or later from the packages list
2. In the Package Manager window, find and click the ***+*** button in the upper left-hand corner of the window
3. Select the ***add package from disk*** option
4. Navigate to the com.unity.perception folder in your cloned repository and select the package.json file
3. Once you have a project with the Perception SDK installed, you can move forward to the Getting Started walkthrough
To install from a local clone of the repository, see [installing a local package](https://docs.unity3d.com/Manual/upm-ui-local.html) in the Unity manual.
Once complete, you can move on to the getting started steps; click [here](Documentation~/GettingStarted.md) to start project setup.

125
com.unity.perception/Documentation~/images/LabelingConfigurationFinished.PNG

Before  After
Width: 702  |  Height: 249  |  Size: 15 KiB

366
com.unity.perception/Documentation~/images/MainCameraConfig.PNG

Before  After
Width: 707  |  Height: 589  |  Size: 56 KiB

28
com.unity.perception/Editor/Unity.Perception.Editor.asmdef


{
"name": "Unity.Perception.Editor",
"references": [
"Unity.Collections",
"Unity.Entities",
"Unity.Mathematics",
"Unity.Perception.Runtime",
"Unity.RenderPipelines.HighDefinition.Editor",
"Unity.Simulation.Client.Editor",
"UnityEngine.UI"
"Unity.RenderPipelines.HighDefinition.Editor",
"Unity.Mathematics",
"Unity.Entities",
"Unity.Collections",
"Unity.Perception.Runtime",
"PathCreatorEditor",
"PathCreator"
],
"includePlatforms": [
"Editor"

"overrideReferences": true,
"precompiledReferences": [
"Newtonsoft.Json.dll",
"ZipUtility.dll"
],
"overrideReferences": false,
"precompiledReferences": [],
"autoReferenced": true,
"defineConstraints": [],
"versionDefines": [

"define": "HDRP_PRESENT"
},
{
"name": "com.unity.render-pipelines.universal",
"expression": "",
"define": "URP_PRESENT"
}
}

3
com.unity.perception/Editor/GroundTruth/InstanceSegmentationPassEditor.cs


#if HDRP_PRESENT
using UnityEditor.Rendering.HighDefinition;
using UnityEngine.Perception.GroundTruth;

{
protected override void Initialize(SerializedProperty customPass)
{
var targetCameraProperty = customPass.FindPropertyRelative(nameof(InstanceSegmentationPass.targetCamera));
var targetCameraProperty = customPass.FindPropertyRelative(nameof(GroundTruthPass.targetCamera));
AddProperty(targetCameraProperty);
AddProperty(customPass.FindPropertyRelative(nameof(InstanceSegmentationPass.targetTexture)));
base.Initialize(customPass);

2
com.unity.perception/Editor/GroundTruth/LabelingConfigurationEditor.cs.meta


fileFormatVersion: 2
guid: 43cb2a3117353435abe59ca5217974a8
guid: 910dd3186e1c4fad8eb6aca9b9ee0f48
timeCreated: 1585940009

934
com.unity.perception/Editor/GroundTruth/LabelingEditor.cs


using System;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using UnityEditor.UIElements;
using UnityEditorInternal;
using UnityEngine.UIElements;
using Button = UnityEngine.UIElements.Button;
using Toggle = UnityEngine.UIElements.Toggle;
[CustomEditor(typeof(Labeling)), CanEditMultipleObjects]
[CustomEditor(typeof(Labeling))]
VisualElement m_Root;
VisualElement m_ManualLabelingContainer;
VisualElement m_AutoLabelingContainer;
VisualElement m_FromLabelConfigsContainer;
VisualElement m_SuggestedLabelsContainer;
VisualElement m_SuggestedOnNamePanel;
VisualElement m_SuggestedOnPathPanel;
ListView m_CurrentLabelsListView;
ListView m_SuggestedLabelsListViewFromName;
ListView m_SuggestedLabelsListViewFromPath;
ScrollView m_LabelConfigsScrollView;
PopupField<string> m_LabelingSchemesPopup;
Button m_AddButton;
Button m_AddAutoLabelToConfButton;
Toggle m_AutoLabelingToggle;
Label m_CurrentAutoLabel;
Label m_CurrentAutoLabelTitle;
Label m_AddManualLabelsTitle;
Labeling m_Labeling;
string m_UxmlDir = "Packages/com.unity.perception/Editor/GroundTruth/Uxml/";
string m_UxmlPath;
List<string> m_SuggestedLabelsBasedOnName = new List<string>();
List<string> m_SuggestedLabelsBasedOnPath = new List<string>();
public List<string> CommonLabels { get; private set; } = new List<string>();
List<Type> m_LabelConfigTypes;
readonly List<ScriptableObject> m_AllLabelConfigsInProject = new List<ScriptableObject>();
readonly List<AssetLabelingScheme> m_LabelingSchemes = new List<AssetLabelingScheme>();
/// <summary>
/// List of separator characters used for parsing asset names for auto labeling or label suggestion purposes
/// </summary>
public static readonly string[] NameSeparators = {".", "-", "_"};
/// <summary>
/// List of separator characters used for parsing asset paths for auto labeling or label suggestion purposes
/// </summary>
public static readonly string[] PathSeparators = {"/"};
void OnEnable()
{
m_LabelConfigTypes = AddToConfigWindow.FindAllSubTypes(typeof(LabelConfig<>));
var mySerializedObject = new SerializedObject(serializedObject.targetObjects[0]);
m_Labeling = mySerializedObject.targetObject as Labeling;
m_UxmlPath = m_UxmlDir + "Labeling_Main.uxml";
m_Root = AssetDatabase.LoadAssetAtPath<VisualTreeAsset>(m_UxmlPath).CloneTree();
m_CurrentLabelsListView = m_Root.Q<ListView>("current-labels-listview");
m_SuggestedLabelsListViewFromName = m_Root.Q<ListView>("suggested-labels-name-listview");
m_SuggestedLabelsListViewFromPath = m_Root.Q<ListView>("suggested-labels-path-listview");
m_LabelConfigsScrollView = m_Root.Q<ScrollView>("label-configs-scrollview");
m_SuggestedOnNamePanel = m_Root.Q<VisualElement>("suggested-labels-from-name");
m_SuggestedOnPathPanel = m_Root.Q<VisualElement>("suggested-labels-from-path");
m_AddButton = m_Root.Q<Button>("add-label");
m_CurrentAutoLabel = m_Root.Q<Label>("current-auto-label");
m_CurrentAutoLabelTitle = m_Root.Q<Label>("current-auto-label-title");
m_AutoLabelingToggle = m_Root.Q<Toggle>("auto-or-manual-toggle");
m_ManualLabelingContainer = m_Root.Q<VisualElement>("manual-labeling");
m_AutoLabelingContainer = m_Root.Q<VisualElement>("automatic-labeling");
m_FromLabelConfigsContainer = m_Root.Q<VisualElement>("from-label-configs");
m_SuggestedLabelsContainer = m_Root.Q<VisualElement>("suggested-labels");
m_AddAutoLabelToConfButton = m_Root.Q<Button>("add-auto-label-to-config");
m_AddManualLabelsTitle = m_Root.Q<Label>("add-manual-labels-title");
var dropdownParent = m_Root.Q<VisualElement>("drop-down-parent");
m_ItIsPossibleToAddMultipleAutoLabelsToConfig = false;
InitializeLabelingSchemes(dropdownParent);
AssesAutoLabelingStatus();
m_FirstItemLabelsArray = serializedObject.FindProperty(nameof(Labeling.labels));
if (serializedObject.targetObjects.Length > 1)
{
var addedTitle = m_Root.Q<Label>("added-labels-title");
addedTitle.text = "Common Labels of Selected Items";
m_SuggestedOnNamePanel.style.display = DisplayStyle.None;
m_AddAutoLabelToConfButton.text = "Add Automatic Labels of All Selected Assets to Config...";
}
else
{
m_AddAutoLabelToConfButton.text = "Add to Label Config...";
}
m_AddAutoLabelToConfButton.clicked += () =>
{
AddToConfigWindow.ShowWindow(CreateUnionOfAllLabels().ToList());
};
m_AddButton.clicked += () =>
{
var labelsUnion = CreateUnionOfAllLabels();
var newLabel = FindNewLabelValue(labelsUnion);
foreach (var targetObject in targets)
{
if (targetObject is Labeling labeling)
{
var serializedLabelingObject2 = new SerializedObject(labeling);
var serializedLabelArray2 = serializedLabelingObject2.FindProperty(nameof(Labeling.labels));
serializedLabelArray2.InsertArrayElementAtIndex(serializedLabelArray2.arraySize);
serializedLabelArray2.GetArrayElementAtIndex(serializedLabelArray2.arraySize-1).stringValue = newLabel;
serializedLabelingObject2.ApplyModifiedProperties();
serializedLabelingObject2.SetIsDifferentCacheDirty();
serializedObject.SetIsDifferentCacheDirty();
}
}
ChangesHappeningInForeground = true;
RefreshManualLabelingData();
};
m_AutoLabelingToggle.RegisterValueChangedCallback(evt =>
{
AutoLabelToggleChanged();
});
ChangesHappeningInForeground = true;
m_Root.schedule.Execute(CheckForModelChanges).Every(30);
}
int m_PreviousLabelsArraySize = -1;
/// <summary>
/// This boolean is used to signify when changes in the model are triggered directly from the inspector UI by the user.
/// In these cases, the scheduled model checker does not need to update the UI again.
/// </summary>
public bool ChangesHappeningInForeground { get; set; }
SerializedProperty m_FirstItemLabelsArray;
void CheckForModelChanges()
{
if (ChangesHappeningInForeground)
{
ChangesHappeningInForeground = false;
m_PreviousLabelsArraySize = m_FirstItemLabelsArray.arraySize;
return;
}
if (m_FirstItemLabelsArray.arraySize != m_PreviousLabelsArraySize)
{
AssesAutoLabelingStatus();
RefreshManualLabelingData();
m_PreviousLabelsArraySize = m_FirstItemLabelsArray.arraySize;
}
}
bool SerializedObjectHasValidLabelingScheme(SerializedObject serObj)
{
var schemeName = serObj.FindProperty(nameof(Labeling.autoLabelingSchemeType)).stringValue;
return IsValidLabelingSchemeName(schemeName);
}
bool IsValidLabelingSchemeName(string schemeName)
{
return schemeName != string.Empty &&
m_LabelingSchemes.FindAll(scheme => scheme.GetType().Name == schemeName).Count > 0;
}
bool m_ItIsPossibleToAddMultipleAutoLabelsToConfig;
void UpdateUiAspects()
{
m_ManualLabelingContainer.SetEnabled(!m_AutoLabelingToggle.value);
m_AutoLabelingContainer.SetEnabled(m_AutoLabelingToggle.value);
m_AddManualLabelsTitle.style.display = m_AutoLabelingToggle.value ? DisplayStyle.None : DisplayStyle.Flex;
m_FromLabelConfigsContainer.style.display = m_AutoLabelingToggle.value ? DisplayStyle.None : DisplayStyle.Flex;
m_SuggestedLabelsContainer.style.display = m_AutoLabelingToggle.value ? DisplayStyle.None : DisplayStyle.Flex;
m_CurrentLabelsListView.style.minHeight = m_AutoLabelingToggle.value ? 70 : 120;
if (!m_AutoLabelingToggle.value || serializedObject.targetObjects.Length > 1 ||
!SerializedObjectHasValidLabelingScheme(new SerializedObject(serializedObject.targetObjects[0])))
{
m_CurrentAutoLabel.style.display = DisplayStyle.None;
m_AddAutoLabelToConfButton.SetEnabled(false);
}
else
{
m_CurrentAutoLabel.style.display = DisplayStyle.Flex;
m_AddAutoLabelToConfButton.SetEnabled(true);
}
if(m_AutoLabelingToggle.value && serializedObject.targetObjects.Length > 1 && m_ItIsPossibleToAddMultipleAutoLabelsToConfig)
{
m_AddAutoLabelToConfButton.SetEnabled(true);
}
if (serializedObject.targetObjects.Length == 1)
{
m_AutoLabelingToggle.text = "Use Automatic Labeling";
}
else
{
m_CurrentAutoLabelTitle.text = "Select assets individually to inspect their automatic labels.";
m_AutoLabelingToggle.text = "Use Automatic Labeling for All Selected Items";
}
}
void UpdateCurrentAutoLabelValue(SerializedObject serObj)
{
var array = serObj.FindProperty(nameof(Labeling.labels));
if (array.arraySize > 0)
{
m_CurrentAutoLabel.text = array.GetArrayElementAtIndex(0).stringValue;
}
}
bool AreSelectedAssetsCompatibleWithAutoLabelScheme(AssetLabelingScheme scheme)
{
foreach (var asset in serializedObject.targetObjects)
{
string label = scheme.GenerateLabel(asset);
if (label == null)
{
return false;
}
}
return true;
}
void InitializeLabelingSchemes(VisualElement parent)
{
//this function should be called only once during the lifecycle of the editor element
AssetLabelingScheme labelingScheme = new AssetNameLabelingScheme();
if (AreSelectedAssetsCompatibleWithAutoLabelScheme(labelingScheme)) m_LabelingSchemes.Add(labelingScheme);
labelingScheme = new AssetFileNameLabelingScheme();
if (AreSelectedAssetsCompatibleWithAutoLabelScheme(labelingScheme)) m_LabelingSchemes.Add(labelingScheme);
labelingScheme = new CurrentOrParentsFolderNameLabelingScheme();
if (AreSelectedAssetsCompatibleWithAutoLabelScheme(labelingScheme)) m_LabelingSchemes.Add(labelingScheme);
var descriptions = m_LabelingSchemes.Select(scheme => scheme.Description).ToList();
descriptions.Insert(0, "<Select Scheme>");
m_LabelingSchemesPopup = new PopupField<string>(descriptions, 0) {label = "Labeling Scheme"};
m_LabelingSchemesPopup.style.marginLeft = 0;
parent.Add(m_LabelingSchemesPopup);
m_LabelingSchemesPopup.RegisterValueChangedCallback(evt => AssignAutomaticLabelToSelectedAssets());
}
void AutoLabelToggleChanged()
{
UpdateUiAspects();
if (!m_AutoLabelingToggle.value)
{
m_ItIsPossibleToAddMultipleAutoLabelsToConfig = false;
foreach (var targetObj in serializedObject.targetObjects)
{
var serObj = new SerializedObject(targetObj);
serObj.FindProperty(nameof(Labeling.useAutoLabeling)).boolValue = false;
if (SerializedObjectHasValidLabelingScheme(serObj))
{
//The asset already had a labeling scheme before auto labeling was disabled, which means it has auto label(s) attached. These should be cleared now.
serObj.FindProperty(nameof(Labeling.labels)).ClearArray();
}
serObj.FindProperty(nameof(Labeling.autoLabelingSchemeType)).stringValue = string.Empty;
m_LabelingSchemesPopup.index = 0;
serObj.ApplyModifiedProperties();
serObj.SetIsDifferentCacheDirty();
}
}
ChangesHappeningInForeground = true;
RefreshManualLabelingData();
}
void AssignAutomaticLabelToSelectedAssets()
{
//the 0th index of this popup is "<Select Scheme>" and should not do anything
if (m_LabelingSchemesPopup.index == 0)
{
return;
}
m_ItIsPossibleToAddMultipleAutoLabelsToConfig = true;
var labelingScheme = m_LabelingSchemes[m_LabelingSchemesPopup.index - 1];
foreach (var targetObj in serializedObject.targetObjects)
{
var serObj = new SerializedObject(targetObj);
serObj.FindProperty(nameof(Labeling.useAutoLabeling)).boolValue = true; //only set this flag once the user has actually chosen a scheme, otherwise, we will not touch the flag
serObj.FindProperty(nameof(Labeling.autoLabelingSchemeType)).stringValue = labelingScheme.GetType().Name;
var serLabelsArray = serObj.FindProperty(nameof(Labeling.labels));
serLabelsArray.ClearArray();
serLabelsArray.InsertArrayElementAtIndex(0);
var label = labelingScheme.GenerateLabel(targetObj);
serLabelsArray.GetArrayElementAtIndex(0).stringValue = label;
if (targetObj == serializedObject.targetObjects[0] && serializedObject.targetObjects.Length == 1)
{
UpdateCurrentAutoLabelValue(serObj);
}
serObj.ApplyModifiedProperties();
serObj.SetIsDifferentCacheDirty();
}
UpdateUiAspects();
ChangesHappeningInForeground = true;
RefreshManualLabelingData();
}
void AssesAutoLabelingStatus()
{
var enabledOrNot = true;
if (serializedObject.targetObjects.Length == 1)
{
var serObj = new SerializedObject(serializedObject.targetObjects[0]);
var enabled = serObj.FindProperty(nameof(Labeling.useAutoLabeling)).boolValue;
m_AutoLabelingToggle.value = enabled;
var currentLabelingSchemeName = serObj.FindProperty(nameof(Labeling.autoLabelingSchemeType)).stringValue;
if (IsValidLabelingSchemeName(currentLabelingSchemeName))
{
m_LabelingSchemesPopup.index =
m_LabelingSchemes.FindIndex(scheme => scheme.GetType().Name.ToString() == currentLabelingSchemeName) + 1;
}
UpdateCurrentAutoLabelValue(serObj);
}
else
{
string unifiedLabelingScheme = null;
var allAssetsUseSameLabelingScheme = true;
foreach (var targetObj in serializedObject.targetObjects)
{
var serObj = new SerializedObject(targetObj);
var enabled = serObj.FindProperty(nameof(Labeling.useAutoLabeling)).boolValue;
enabledOrNot &= enabled;
var schemeName = serObj.FindProperty(nameof(Labeling.autoLabelingSchemeType)).stringValue;
if (schemeName == string.Empty)
{
//if any of the selected assets does not have a labeling scheme, they can't all have the same valid scheme
allAssetsUseSameLabelingScheme = false;
}
if (allAssetsUseSameLabelingScheme)
{
if (unifiedLabelingScheme == null)
{
unifiedLabelingScheme = schemeName;
}
else if (unifiedLabelingScheme != schemeName)
{
allAssetsUseSameLabelingScheme = false;
}
}
}
m_AutoLabelingToggle.value = enabledOrNot;
if (allAssetsUseSameLabelingScheme)
{
//all selected assets have the same scheme recorded in their serialized objects
m_LabelingSchemesPopup.index =
m_LabelingSchemes.FindIndex(scheme => scheme.GetType().Name.ToString() == unifiedLabelingScheme) + 1;
m_ItIsPossibleToAddMultipleAutoLabelsToConfig = enabledOrNot;
//if all selected assets have the same scheme recorded in their serialized objects, and they all
//have auto labeling enabled, we can now add all auto labels to a config
}
else
{
//the selected assets DO NOT have the same scheme recorded in their serialized objects
m_LabelingSchemesPopup.index = 0;
}
}
UpdateUiAspects();
}
HashSet<string> CreateUnionOfAllLabels()
{
HashSet<String> result = new HashSet<string>();
foreach (var obj in targets)
{
if (obj is Labeling labeling)
{
result.UnionWith(labeling.labels);
}
}
return result;
}
string FindNewLabelValue(HashSet<string> labels)
{
string baseLabel = "New Label";
string label = baseLabel;
int count = 1;
while (labels.Contains(label))
{
label = baseLabel + "_" + count++;
}
return label;
}
public override VisualElement CreateInspectorGUI()
{
serializedObject.Update();
m_Labeling = serializedObject.targetObject as Labeling;
RefreshCommonLabels();
RefreshSuggestedLabelLists();
RefreshLabelConfigsList();
SetupListsAndScrollers();
return m_Root;
}
void RefreshLabelConfigsList()
{
List<string> labelConfigGuids = new List<string>();
foreach (var type in m_LabelConfigTypes)
{
labelConfigGuids.AddRange(AssetDatabase.FindAssets("t:"+type.Name));
}
m_AllLabelConfigsInProject.Clear();
foreach (var configGuid in labelConfigGuids)
{
var asset = AssetDatabase.LoadAssetAtPath<ScriptableObject>(AssetDatabase.GUIDToAssetPath(configGuid));
m_AllLabelConfigsInProject.Add(asset);
}
}
void RemoveAddedLabelsFromSuggestedLists()
{
m_SuggestedLabelsBasedOnName.RemoveAll(s => CommonLabels.Contains(s));
m_SuggestedLabelsBasedOnPath.RemoveAll(s => CommonLabels.Contains(s));
}
void RefreshSuggestedLabelLists()
{
m_SuggestedLabelsBasedOnName.Clear();
m_SuggestedLabelsBasedOnPath.Clear();
//based on name
if (serializedObject.targetObjects.Length == 1)
{
string assetName = serializedObject.targetObject.name;
var pieces = assetName.Split(NameSeparators, StringSplitOptions.RemoveEmptyEntries).ToList();
if (pieces.Count > 1)
{
//means the asset name was actually split
m_SuggestedLabelsBasedOnName.Add(assetName);
}
m_SuggestedLabelsBasedOnName.AddRange(pieces);
}
//based on path
string assetPath = GetAssetOrPrefabPath(m_Labeling.gameObject);
//var prefabObject = PrefabUtility.GetCorrespondingObjectFromSource(m_Labeling.gameObject);
if (assetPath != null)
{
var stringList = assetPath.Split(PathSeparators, StringSplitOptions.RemoveEmptyEntries).ToList();
stringList.Reverse();
m_SuggestedLabelsBasedOnPath.AddRange(stringList);
}
foreach (var targetObject in targets)
{
if (targetObject == target)
continue; //we have already taken care of this one above
const int k_Indent = 7;
ReorderableList m_LabelsList;
assetPath = GetAssetOrPrefabPath(((Labeling)targetObject).gameObject);
//prefabObject = PrefabUtility.GetCorrespondingObjectFromSource(((Labeling)targetObject).gameObject);
if (assetPath != null)
{
var stringList = assetPath.Split(PathSeparators, StringSplitOptions.RemoveEmptyEntries).ToList();
m_SuggestedLabelsBasedOnPath = m_SuggestedLabelsBasedOnPath.Intersect(stringList).ToList();
}
}
RemoveAddedLabelsFromSuggestedLists();
//Debug.Log("list update, source list count is:" + m_SuggestedLabelsBasedOnPath.Count);
}
public void RefreshManualLabelingData()
public void OnEnable()
serializedObject.SetIsDifferentCacheDirty();
serializedObject.Update();
RefreshCommonLabels();
RefreshSuggestedLabelLists();
SetupSuggestedLabelsListViews();
SetupCurrentLabelsListView();
UpdateSuggestedPanelVisibility();
m_LabelsList = new ReorderableList(serializedObject, serializedObject.FindProperty(nameof(global::UnityEngine.Perception.GroundTruth.Labeling.labels)), true, false, true, true);
m_LabelsList.drawElementCallback = DrawElement;
m_LabelsList.onAddCallback += OnAdd;
m_LabelsList.onRemoveCallback += OnRemove;
void SetupListsAndScrollers()
void OnRemove(ReorderableList list)
//Labels that have already been added to the target Labeling component
SetupCurrentLabelsListView();
//Labels suggested by the system, which the user can add
SetupSuggestedLabelsListViews();
//Add labels from Label Configs present in project
SetupLabelConfigsScrollView();
UpdateSuggestedPanelVisibility();
if (list.index != -1)
Labeling.labels.RemoveAt(list.index);
void UpdateSuggestedPanelVisibility()
{
m_SuggestedOnNamePanel.style.display = m_SuggestedLabelsBasedOnName.Count == 0 ? DisplayStyle.None : DisplayStyle.Flex;
m_SuggestedOnPathPanel.style.display = m_SuggestedLabelsBasedOnPath.Count == 0 ? DisplayStyle.None : DisplayStyle.Flex;
if (m_SuggestedLabelsBasedOnPath.Count == 0 && m_SuggestedLabelsBasedOnName.Count == 0)
{
m_SuggestedLabelsContainer.style.display = DisplayStyle.None;
}
}
void RefreshCommonLabels()
{
CommonLabels.Clear();
CommonLabels.AddRange(((Labeling)serializedObject.targetObjects[0]).labels);
foreach (var obj in serializedObject.targetObjects)
{
CommonLabels = CommonLabels.Intersect(((Labeling) obj).labels).ToList();
}
}
void SetupCurrentLabelsListView()
{
m_CurrentLabelsListView.itemsSource = CommonLabels;
VisualElement MakeItem() =>
new AddedLabelEditor(this, m_CurrentLabelsListView);
void BindItem(VisualElement e, int i)
{
if (e is AddedLabelEditor addedLabel)
{
addedLabel.indexInList = i;
addedLabel.labelTextField.value = CommonLabels[i];
}
}
const int itemHeight = 35;
m_CurrentLabelsListView.bindItem = BindItem;
m_CurrentLabelsListView.makeItem = MakeItem;
m_CurrentLabelsListView.itemHeight = itemHeight;
m_CurrentLabelsListView.itemsSource = CommonLabels;
m_CurrentLabelsListView.selectionType = SelectionType.None;
}
void SetupSuggestedLabelsListViews()
{
SetupSuggestedLabelsBasedOnFlatList(m_SuggestedLabelsListViewFromName, m_SuggestedLabelsBasedOnName);
SetupSuggestedLabelsBasedOnFlatList(m_SuggestedLabelsListViewFromPath, m_SuggestedLabelsBasedOnPath);
}
void SetupSuggestedLabelsBasedOnFlatList(ListView labelsListView, List<string> stringList)
{
labelsListView.itemsSource = stringList;
VisualElement MakeItem() => new SuggestedLabelElement(this);
Labeling Labeling => (Labeling)target;
void BindItem(VisualElement e, int i)
{
if (e is SuggestedLabelElement suggestedLabel)
{
suggestedLabel.label.text = stringList[i];
}
}
const int itemHeight = 32;
labelsListView.bindItem = BindItem;
labelsListView.makeItem = MakeItem;
labelsListView.itemHeight = itemHeight;
labelsListView.selectionType = SelectionType.None;
}
void SetupLabelConfigsScrollView()
void OnAdd(ReorderableList list)
m_LabelConfigsScrollView.Clear();
foreach (var config in m_AllLabelConfigsInProject)
{
VisualElement configElement = new LabelConfigElement(this, config);
m_LabelConfigsScrollView.Add(configElement);
}
Labeling.labels.Add("");
/// <summary>
/// Get the path of the given asset in the project, or get the path of the given Scene GameObject's source prefab if any
/// </summary>
/// <param name="obj"></param>
/// <returns></returns>
public static string GetAssetOrPrefabPath(UnityEngine.Object obj)
void DrawElement(Rect rect, int index, bool isactive, bool isfocused)
string assetPath = AssetDatabase.GetAssetPath(obj);
if (assetPath == string.Empty)
using (var change = new EditorGUI.ChangeCheckScope())
//this indicates that gObj is a scene object and not a prefab directly selected from the Project tab
var prefabObject = PrefabUtility.GetCorrespondingObjectFromSource(obj);
if (prefabObject)
{
assetPath = AssetDatabase.GetAssetPath(prefabObject);
}
}
return assetPath;
}
}
class AddedLabelEditor : VisualElement
{
string m_UxmlDir = "Packages/com.unity.perception/Editor/GroundTruth/Uxml/";
public TextField labelTextField;
public int indexInList;
public AddedLabelEditor(LabelingEditor editor, ListView listView)
{
var uxmlPath = m_UxmlDir + "AddedLabelElement.uxml";
AssetDatabase.LoadAssetAtPath<VisualTreeAsset>(uxmlPath).CloneTree(this);
labelTextField = this.Q<TextField>("label-value");
var removeButton = this.Q<Button>("remove-button");
var addToConfigButton = this.Q<Button>("add-to-config-button");
labelTextField.isDelayed = true;
labelTextField.RegisterValueChangedCallback((cEvent) =>
{
//Do not let the user define a duplicate label
if (editor.CommonLabels.Contains(cEvent.newValue) && editor.CommonLabels.IndexOf(cEvent.newValue) != indexInList)
{
//The listview recycles child visual elements and that causes the RegisterValueChangedCallback event to be called when scrolling.
//Therefore, we need to make sure we are not in this code block just because of scrolling, but because the user is actively changing one of the labels.
//The editor.CommonLabels.IndexOf(cEvent.newValue) != indexInList check is for this purpose.
Debug.LogError("A label with the string " + cEvent.newValue + " has already been added to selected objects.");
editor.ChangesHappeningInForeground = true;
editor.RefreshManualLabelingData();
var indent = k_Indent * index;
if (indent >= rect.width)
}
bool shouldRefresh = false;
foreach (var targetObject in editor.targets)
{
if (targetObject is Labeling labeling)
{
var indexToModifyInTargetLabelList =
labeling.labels.IndexOf(editor.CommonLabels[indexInList]);
var serializedLabelingObject2 = new SerializedObject(labeling);
var serializedLabelArray2 = serializedLabelingObject2.FindProperty(nameof(Labeling.labels));
serializedLabelArray2.GetArrayElementAtIndex(indexToModifyInTargetLabelList).stringValue = cEvent.newValue;
shouldRefresh = shouldRefresh || serializedLabelArray2.serializedObject.hasModifiedProperties;
serializedLabelingObject2.ApplyModifiedProperties();
serializedLabelingObject2.SetIsDifferentCacheDirty();
}
}
//The value change event is also fired when the listview recycles its child elements for re-use during scrolling. Check for modified properties first; otherwise we would refresh for no reason and hurt scrolling performance.
if (shouldRefresh)
var contentRect = new Rect(rect.x + indent, rect.y, rect.width - indent, rect.height);
var value = EditorGUI.TextField(contentRect, Labeling.labels[index]);
if (change.changed)
editor.ChangesHappeningInForeground = true;
editor.RefreshManualLabelingData();
}
});
addToConfigButton.clicked += () =>
{
AddToConfigWindow.ShowWindow(labelTextField.value);
};
removeButton.clicked += () =>
{
List<string> commonLabels = new List<string>();
commonLabels.Clear();
var firstTarget = editor.targets[0] as Labeling;
if (firstTarget != null)
{
commonLabels.AddRange(firstTarget.labels);
foreach (var obj in editor.targets)
{
commonLabels = commonLabels.Intersect(((Labeling) obj).labels).ToList();
}
foreach (var targetObject in editor.targets)
{
if (targetObject is Labeling labeling)
{
RemoveLabelFromLabelingSerObj(labeling, commonLabels);
}
}
editor.serializedObject.SetIsDifferentCacheDirty();
editor.RefreshManualLabelingData();
}
};
}
void RemoveLabelFromLabelingSerObj(Labeling labeling, List<string> commonLabels)
{
Dictionary<int, int> commonsIndexToLabelsIndex = new Dictionary<int, int>();
for (int i = 0; i < labeling.labels.Count; i++)
{
string label = labeling.labels[i];
for (int j = 0; j < commonLabels.Count; j++)
{
string label2 = commonLabels[j];
if (string.Equals(label, label2) && !commonsIndexToLabelsIndex.ContainsKey(j))
{
commonsIndexToLabelsIndex.Add(j, i);
}
Labeling.labels[index] = value;
var serializedLabelingObject2 = new SerializedObject(labeling);
var serializedLabelArray2 = serializedLabelingObject2.FindProperty("labels");
serializedLabelArray2.DeleteArrayElementAtIndex(commonsIndexToLabelsIndex[indexInList]);
serializedLabelingObject2.ApplyModifiedProperties();
serializedLabelingObject2.SetIsDifferentCacheDirty();
}
class SuggestedLabelElement : VisualElement
{
string m_UxmlDir = "Packages/com.unity.perception/Editor/GroundTruth/Uxml/";
public Label label;
public SuggestedLabelElement(LabelingEditor editor)
public override void OnInspectorGUI()
var uxmlPath = m_UxmlDir + "SuggestedLabelElement.uxml";
AssetDatabase.LoadAssetAtPath<VisualTreeAsset>(uxmlPath).CloneTree(this);
label = this.Q<Label>("label-value");
var addButton = this.Q<Button>("add-button");
addButton.clicked += () =>
{
foreach (var targetObject in editor.serializedObject.targetObjects)
{
if (targetObject is Labeling labeling)
{
if (labeling.labels.Contains(label.text))
continue; //Do not allow duplicate labels in one asset. Duplicate labels have no use and cause other operations (especially multi-asset editing) to misbehave.
var serializedLabelingObject2 = new SerializedObject(targetObject);
var serializedLabelArray2 = serializedLabelingObject2.FindProperty("labels");
serializedLabelArray2.InsertArrayElementAtIndex(serializedLabelArray2.arraySize);
serializedLabelArray2.GetArrayElementAtIndex(serializedLabelArray2.arraySize-1).stringValue = label.text;
serializedLabelingObject2.ApplyModifiedProperties();
serializedLabelingObject2.SetIsDifferentCacheDirty();
editor.serializedObject.SetIsDifferentCacheDirty();
}
}
editor.ChangesHappeningInForeground = true;
editor.RefreshManualLabelingData();
};
}
}
class LabelConfigElement : VisualElement
{
string m_UxmlDir = "Packages/com.unity.perception/Editor/GroundTruth/Uxml/";
bool m_Collapsed = true;
ListView m_LabelsListView;
VisualElement m_CollapseToggle;
public LabelConfigElement(LabelingEditor editor, ScriptableObject config)
{
var uxmlPath = m_UxmlDir + "ConfigElementForAddingLabelsFrom.uxml";
AssetDatabase.LoadAssetAtPath<VisualTreeAsset>(uxmlPath).CloneTree(this);
m_LabelsListView = this.Q<ListView>("label-config-contents-listview");
var openButton = this.Q<Button>("open-config-button");
var configName = this.Q<Label>("config-name");
configName.text = config.name;
m_CollapseToggle = this.Q<VisualElement>("collapse-toggle");
openButton.clicked += () =>
{
Selection.SetActiveObjectWithContext(config, null);
};
var propertyInfo = config.GetType().GetProperty(IdLabelConfig.publicLabelEntriesFieldName);
if (propertyInfo != null)
{
var objectList = (IEnumerable) propertyInfo.GetValue(config);
var labelEntryList = objectList.Cast<ILabelEntry>().ToList();
var labelList = labelEntryList.Select(entry => entry.label).ToList();
m_LabelsListView.itemsSource = labelList;
VisualElement MakeItem()
{
var element = new SuggestedLabelElement(editor);
element.AddToClassList("label_add_from_config");
return element;
}
void BindItem(VisualElement e, int i)
{
if (e is SuggestedLabelElement suggestedLabel)
{
suggestedLabel.label.text = labelList[i];
}
}
const int itemHeight = 27;
m_LabelsListView.bindItem = BindItem;
m_LabelsListView.makeItem = MakeItem;
m_LabelsListView.itemHeight = itemHeight;
m_LabelsListView.selectionType = SelectionType.None;
}
m_CollapseToggle.RegisterCallback<MouseUpEvent>(evt =>
{
m_Collapsed = !m_Collapsed;
ApplyCollapseState();
});
ApplyCollapseState();
}
void ApplyCollapseState()
{
if (m_Collapsed)
{
m_CollapseToggle.AddToClassList("collapsed-toggle-state");
m_LabelsListView.AddToClassList("collapsed");
}
else
{
m_CollapseToggle.RemoveFromClassList("collapsed-toggle-state");
m_LabelsListView.RemoveFromClassList("collapsed");
}
}
}
/// <summary>
/// A labeling scheme that automatically produces a label for a given asset, e.g. based on the asset's name or path.
/// </summary>
abstract class AssetLabelingScheme
{
/// <summary>
/// The description of how this scheme generates labels. Used in the dropdown menu in the UI.
/// </summary>
public abstract string Description { get; }
/// <summary>
/// Generate a label for the given asset
/// </summary>
/// <param name="asset"></param>
/// <returns></returns>
public abstract string GenerateLabel(UnityEngine.Object asset);
}
/// <summary>
/// Asset labeling scheme that outputs the given asset's name as its automatic label
/// </summary>
class AssetNameLabelingScheme : AssetLabelingScheme
{
///<inheritdoc/>
public override string Description => "Use asset name";
///<inheritdoc/>
public override string GenerateLabel(UnityEngine.Object asset)
{
return asset.name;
}
}
/// <summary>
/// Asset labeling scheme that outputs the given asset's file name, including extension, as its automatic label
/// </summary>
class AssetFileNameLabelingScheme : AssetLabelingScheme
{
///<inheritdoc/>
public override string Description => "Use file name with extension";
///<inheritdoc/>
public override string GenerateLabel(UnityEngine.Object asset)
{
string assetPath = LabelingEditor.GetAssetOrPrefabPath(asset);
var stringList = assetPath.Split(LabelingEditor.PathSeparators, StringSplitOptions.RemoveEmptyEntries)
.ToList();
return stringList.Count > 0 ? stringList.Last() : null;
}
}
/// <summary>
/// Asset labeling scheme that outputs the given asset's folder name as its automatic label
/// </summary>
class CurrentOrParentsFolderNameLabelingScheme : AssetLabelingScheme
{
///<inheritdoc/>
public override string Description => "Use the asset's folder name";
///<inheritdoc/>
public override string GenerateLabel(UnityEngine.Object asset)
{
string assetPath = LabelingEditor.GetAssetOrPrefabPath(asset);
var stringList = assetPath.Split(LabelingEditor.PathSeparators, StringSplitOptions.RemoveEmptyEntries)
.ToList();
return stringList.Count > 1 ? stringList[stringList.Count-2] : null;
m_LabelsList.DoLayoutList();
}
}
}
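A minimal sketch of one more scheme following the abstract AssetLabelingScheme contract above; the class name is hypothetical, and the path handling simply mirrors the built-in folder-name scheme.
class GrandparentFolderNameLabelingScheme : AssetLabelingScheme
{
    ///<inheritdoc/>
    public override string Description => "Use the name of the folder two levels above the asset";
    ///<inheritdoc/>
    public override string GenerateLabel(UnityEngine.Object asset)
    {
        //Reuse the same path helpers the built-in schemes use; returning null marks the scheme as
        //incompatible with the selected asset (see AreSelectedAssetsCompatibleWithAutoLabelScheme above).
        string assetPath = LabelingEditor.GetAssetOrPrefabPath(asset);
        var parts = assetPath.Split(LabelingEditor.PathSeparators, StringSplitOptions.RemoveEmptyEntries).ToList();
        return parts.Count > 2 ? parts[parts.Count - 3] : null;
    }
}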

2
com.unity.perception/Editor/GroundTruth/LabelingEditor.cs.meta


fileFormatVersion: 2
guid: 387b8732b87094321af57795df93aec4
guid: 2e725508a34c40a0938c8d891b371980
timeCreated: 1585933334

2
com.unity.perception/Editor/GroundTruth/SemanticSegmentationPassEditor.cs


{
AddProperty(customPass.FindPropertyRelative(nameof(SemanticSegmentationPass.targetCamera)));
AddProperty(customPass.FindPropertyRelative(nameof(SemanticSegmentationPass.targetTexture)));
AddProperty(customPass.FindPropertyRelative(nameof(SemanticSegmentationPass.semanticSegmentationLabelConfig)));
AddProperty(customPass.FindPropertyRelative(nameof(SemanticSegmentationPass.labelingConfiguration)));
base.Initialize(customPass);
}
}

2
com.unity.perception/LICENSE.md


com.unity.perception copyright © 2020 Unity Technologies ApS
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/

1
com.unity.perception/Runtime/AssemblyInfo.cs


[assembly: InternalsVisibleTo("Unity.Perception.Runtime.Tests")]
[assembly: InternalsVisibleTo("Unity.Perception.Runtime")]
[assembly: InternalsVisibleTo("Unity.Perception.TestProject")]
[assembly: InternalsVisibleTo("Unity.Perception.Performance.Tests")]

16
com.unity.perception/Runtime/Unity.Perception.Runtime.asmdef


"Unity.RenderPipelines.Core.Runtime",
"Unity.RenderPipelines.HighDefinition.Runtime",
"Unity.RenderPipelines.Universal.Runtime",
"Unity.Simulation.Core",
"Unity.Simulation.Core"
"PathCreator"
],
"includePlatforms": [],
"excludePlatforms": [],

"Newtonsoft.Json.dll"
"Newtonsoft.Json.dll",
"QuickGraph.dll"
],
"autoReferenced": true,
"defineConstraints": [],

"name": "com.unity.render-pipelines.universal",
"expression": "",
"define": "URP_PRESENT"
},
{
"name": "com.unity.simulation.capture",
"expression": "0.0.10-preview.16",
"define": "SIMULATION_CAPTURE_0_0_10_PREVIEW_16_OR_NEWER"
},
{
"name": "com.unity.render-pipelines.high-definition",
"expression": "9.0",
"define": "HDRP_9_OR_NEWER"
}
],
"noEngineReferences": false

45
com.unity.perception/Runtime/GroundTruth/GroundTruthRendererFeature.cs


using System;
using UnityEditor;
using UnityEngine;
using UnityEngine.Rendering;
class InstanceSegmentationUrpPass : ScriptableRenderPass
{
InstanceSegmentationCrossPipelinePass m_InstanceSegmentationPass;
public InstanceSegmentationUrpPass(Camera camera, RenderTexture targetTexture)
{
m_InstanceSegmentationPass = new InstanceSegmentationCrossPipelinePass(camera);
ConfigureTarget(targetTexture, targetTexture.depthBuffer);
m_InstanceSegmentationPass.Setup();
}
public override void Execute(ScriptableRenderContext context, ref RenderingData renderingData)
{
var commandBuffer = CommandBufferPool.Get(nameof(InstanceSegmentationUrpPass));
m_InstanceSegmentationPass.Execute(context, commandBuffer, renderingData.cameraData.camera, renderingData.cullResults);
CommandBufferPool.Release(commandBuffer);
}
}
class SemanticSegmentationUrpPass : ScriptableRenderPass
{
SemanticSegmentationCrossPipelinePass m_SemanticSegmentationCrossPipelinePass;
public SemanticSegmentationUrpPass(Camera camera, RenderTexture targetTexture, LabelingConfiguration labelingConfiguration)
{
m_SemanticSegmentationCrossPipelinePass = new SemanticSegmentationCrossPipelinePass(camera, labelingConfiguration);
ConfigureTarget(targetTexture, targetTexture.depthBuffer);
m_SemanticSegmentationCrossPipelinePass.Setup();
}
public override void Execute(ScriptableRenderContext context, ref RenderingData renderingData)
{
var commandBuffer = CommandBufferPool.Get(nameof(SemanticSegmentationUrpPass));
m_SemanticSegmentationCrossPipelinePass.Execute(context, commandBuffer, renderingData.cameraData.camera, renderingData.cullResults);
CommandBufferPool.Release(commandBuffer);
}
}
public class GroundTruthRendererFeature : ScriptableRendererFeature
{
public override void Create() {}

if (!EditorApplication.isPlaying)
return;
#endif
perceptionCamera.MarkGroundTruthRendererFeatureAsPresent();
foreach (var pass in perceptionCamera.passes)
renderer.EnqueuePass(pass);
renderer.EnqueuePass(perceptionCamera.instanceSegmentationUrpPass);
renderer.EnqueuePass(perceptionCamera.semanticSegmentationUrpPass);
}
}
}

23
com.unity.perception/Runtime/GroundTruth/IGroundTruthGenerator.cs


namespace UnityEngine.Perception.GroundTruth
{
/// <summary>
/// Interface for setting up Renderers for ground truth generation via <see cref="LabelManager"/>.
/// Interface for setting up MeshRenderers for ground truth generation via <see cref="GroundTruthLabelSetupSystem"/>.
/// Enables ground truth generation for a <see cref="Labeling"/> component or its associated <see cref="MaterialPropertyBlock"/>. This function is called by <see cref="LabelManager"/> when a <see cref="Labeling"/> component is registered, created, or enabled.
/// Called by <see cref="GroundTruthLabelSetupSystem"/> when first registered or when a Labeling is created at runtime.
/// <param name="mpb">The <see cref="MaterialPropertyBlock"/> for the given <see cref="MeshRenderer"/>. Can be used to set properties for custom rendering.</param>
/// <param name="renderer">The <see cref="Renderer"/> under the given <see cref="LabelManager"/>.</param>
/// <param name="labeling">The <see cref="LabelManager"/> component that was registered, created, or enabled</param>
/// <param name="instanceId">The instanceId assigned to the given <see cref="LabelManager"/> instance.</param>
void SetupMaterialProperties(MaterialPropertyBlock mpb, Renderer renderer, Labeling labeling, uint instanceId);
/// <summary>
/// Disables ground truth generation for a <see cref="Labeling"/> component or its associated <see cref="MaterialPropertyBlock"/>. This function is called by <see cref="LabelManager"/> when a <see cref="Labeling"/> component is disabled.
/// </summary>
/// <param name="mpb">The <see cref="MaterialPropertyBlock"/> for the given <see cref="MeshRenderer"/>. Can be used to set properties for custom rendering.</param>
/// <param name="renderer">The <see cref="Renderer"/> under the given <see cref="LabelManager"/>.</param>
/// <param name="labeling">The <see cref="LabelManager"/> component for which ground-truth generation should stop.</param>
/// <param name="instanceId">The instanceId assigned to the given <see cref="LabelManager"/> instance.</param>
void ClearMaterialProperties(MaterialPropertyBlock mpb, Renderer renderer, Labeling labeling, uint instanceId);
/// <param name="mpb">The MaterialPropertyBlock for the given meshRenderer. Can be used to set properties for custom rendering.</param>
/// <param name="meshRenderer">The MeshRenderer which exists under the given Labeling.</param>
/// <param name="labeling">The Labeling component created</param>
/// <param name="instanceId">The instanceId assigned to the given Labeling instance.</param>
void SetupMaterialProperties(MaterialPropertyBlock mpb, MeshRenderer meshRenderer, Labeling labeling, uint instanceId);
}
}
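A minimal sketch of an implementer of this interface, assuming the Renderer-based SetupMaterialProperties/ClearMaterialProperties signatures shown above (the other side of this diff takes a MeshRenderer and has no clear method); the "_InstanceId" shader property name is illustrative, not a property defined by the package.
class InstanceIdPropertyWriter : IGroundTruthGenerator
{
    static readonly int k_InstanceIdProperty = Shader.PropertyToID("_InstanceId");
    public void SetupMaterialProperties(MaterialPropertyBlock mpb, Renderer renderer, Labeling labeling, uint instanceId)
    {
        //Write the instance id into the per-renderer property block so a replacement shader can read it.
        mpb.SetInt(k_InstanceIdProperty, (int)instanceId);
    }
    public void ClearMaterialProperties(MaterialPropertyBlock mpb, Renderer renderer, Labeling labeling, uint instanceId)
    {
        //Reset the property when ground truth generation stops for this Labeling.
        mpb.SetInt(k_InstanceIdProperty, 0);
    }
}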

64
com.unity.perception/Runtime/GroundTruth/Labeling/Labeling.cs


using System;
using System.Collections.Generic;
using UnityEditor;
using Unity.Entities;
using UnityEngine;
using UnityEngine.Serialization;
namespace UnityEngine.Perception.GroundTruth

/// </summary>
[AddComponentMenu("Perception/Labeling/Labeling")]
static LabelManager labelManager => LabelManager.singleton;
/// The label names to associate with the GameObject. Modifications to this list after the Update() step of the frame the object is created in are
/// not guaranteed to be reflected by labelers.
/// The label names to associate with the GameObject.
/// <summary>
/// Whether this labeling component is currently using an automatic labeling scheme. When this is enabled, the asset can have only one label (the automatic one) and the user cannot add more labels.
/// </summary>
public bool useAutoLabeling;
/// <summary>
/// The specific subtype of AssetLabelingScheme that this component is using, if useAutoLabeling is enabled.
/// </summary>
public string autoLabelingSchemeType = string.Empty;
/// <summary>
/// The unique id of this labeling component instance
/// </summary>
public uint instanceId { get; private set; }
void OnDestroy()
{
labelManager.Unregister(this);
}
void OnEnable()
{
RefreshLabeling();
}
void OnDisable()
{
RefreshLabeling();
}
void Reset()
{
labels.Clear();
useAutoLabeling = false;
autoLabelingSchemeType = string.Empty;
#if UNITY_EDITOR
EditorUtility.SetDirty(gameObject);
#endif
}
/// <summary>
/// Refresh ground truth generation for the labeling of the attached GameObject. This is necessary when the
/// list of labels changes or when renderers or materials change on objects in the hierarchy.
/// </summary>
public void RefreshLabeling()
Entity m_Entity;
void Awake()
labelManager.RefreshLabeling(this);
m_Entity = World.DefaultGameObjectInjectionWorld.EntityManager.CreateEntity();
World.DefaultGameObjectInjectionWorld.EntityManager.AddComponentObject(m_Entity, this);
internal void SetInstanceId(uint id)
void OnDestroy()
instanceId = id;
if (World.DefaultGameObjectInjectionWorld != null)
World.DefaultGameObjectInjectionWorld.EntityManager.DestroyEntity(m_Entity);
}
}
}
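A short usage sketch based on the labels list and RefreshLabeling method shown above; RefreshLabeling exists only on the LabelManager-based side of this diff, and the spawner component here is hypothetical.
using UnityEngine;
using UnityEngine.Perception.GroundTruth;
class LabeledCubeSpawner : MonoBehaviour
{
    void Start()
    {
        var go = GameObject.CreatePrimitive(PrimitiveType.Cube);
        var labeling = go.AddComponent<Labeling>();
        labeling.labels.Add("crate");
        //Per the comment above, changes made after the Update() step of the creation frame are not
        //guaranteed to be seen by labelers, so refresh ground truth explicitly after editing the list.
        labeling.RefreshLabeling();
    }
}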

2
com.unity.perception/Runtime/GroundTruth/Labeling/StartingLabelId.cs


namespace UnityEngine.Perception.GroundTruth
{
/// <summary>
/// Selector for whether label ids should start at zero or one. <seealso cref="IdLabelConfig.startingLabelId"/>.
/// Selector for whether label ids should start at zero or one. <seealso cref="LabelingConfiguration.StartingLabelId"/>.
/// </summary>
public enum StartingLabelId
{

69
com.unity.perception/Runtime/GroundTruth/RenderTextureReader.cs


namespace UnityEngine.Perception.GroundTruth
{
/// <summary>
/// RenderTextureReader reads a RenderTexture from the GPU whenever Capture is called and passes the data back through a provided callback.
/// RenderTextureReader reads a RenderTexture from the GPU each frame and passes the data back through a provided callback.
class RenderTextureReader<T> : IDisposable where T : struct
public class RenderTextureReader<T> : IDisposable where T : struct
Action<int, NativeArray<T>, RenderTexture> m_ImageReadCallback;
int m_NextFrameToCapture;
Camera m_CameraRenderingToSource;
public RenderTextureReader(RenderTexture source)
/// <param name="cameraRenderingToSource">The <see cref="Camera"/> which renders to the given renderTexture. This is used to determine when to read from the texture.</param>
/// <param name="imageReadCallback">The callback to call after reading the texture</param>
public RenderTextureReader(RenderTexture source, Camera cameraRenderingToSource, Action<int, NativeArray<T>, RenderTexture> imageReadCallback)
m_Source = source;
this.m_Source = source;
this.m_ImageReadCallback = imageReadCallback;
this.m_CameraRenderingToSource = cameraRenderingToSource;
m_NextFrameToCapture = Time.frameCount;
if (!GraphicsUtilities.SupportsAsyncReadback())
m_CpuTexture = new Texture2D(m_Source.width, m_Source.height, m_Source.graphicsFormat, TextureCreationFlags.None);
RenderPipelineManager.endFrameRendering += OnEndFrameRendering;
public void Capture(ScriptableRenderContext context, Action<int, NativeArray<T>, RenderTexture> imageReadCallback)
void OnEndFrameRendering(ScriptableRenderContext context, Camera[] cameras)
#if UNITY_EDITOR
if (UnityEditor.EditorApplication.isPaused)
return;
#endif
if (!cameras.Contains(m_CameraRenderingToSource))
return;
if (m_NextFrameToCapture > Time.frameCount)
return;
m_NextFrameToCapture = Time.frameCount + 1;
if (m_CpuTexture == null)
m_CpuTexture = new Texture2D(m_Source.width, m_Source.height, m_Source.graphicsFormat, TextureCreationFlags.None);
m_CpuTexture.ReadPixels(new Rect(
Vector2.zero,
new Vector2(m_Source.width, m_Source.height)),

imageReadCallback(Time.frameCount, data, m_Source);
}
else
{
var commandBuffer = CommandBufferPool.Get("RenderTextureReader");
var frameCount = Time.frameCount;
commandBuffer.RequestAsyncReadback(m_Source, r => OnGpuReadback(r, frameCount, imageReadCallback));
context.ExecuteCommandBuffer(commandBuffer);
context.Submit();
CommandBufferPool.Release(commandBuffer);
m_ImageReadCallback(Time.frameCount, data, m_Source);
return;
var commandBuffer = CommandBufferPool.Get("RenderTextureReader");
var frameCount = Time.frameCount;
commandBuffer.RequestAsyncReadback(m_Source, r => OnGpuReadback(r, frameCount));
context.ExecuteCommandBuffer(commandBuffer);
context.Submit();
CommandBufferPool.Release(commandBuffer);
void OnGpuReadback(AsyncGPUReadbackRequest request, int frameCount,
Action<int, NativeArray<T>, RenderTexture> imageReadCallback)
void OnGpuReadback(AsyncGPUReadbackRequest request, int frameCount)
else if (request.done && imageReadCallback != null)
else if (request.done && m_ImageReadCallback != null)
imageReadCallback(frameCount, request.GetData<T>(), m_Source);
m_ImageReadCallback(frameCount, request.GetData<T>(), m_Source);
}
}

public void Dispose()
{
WaitForAllImages();
RenderPipelineManager.endFrameRendering -= OnEndFrameRendering;
if (m_CpuTexture != null)
{
Object.Destroy(m_CpuTexture);
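A hedged usage sketch against the constructor-based variant shown above (camera plus callback, read back every frame); the other variant in this diff is internal and is driven through Capture() instead. The component and field names are hypothetical.
using UnityEngine;
using UnityEngine.Perception.GroundTruth;
class SegmentationReadback : MonoBehaviour
{
    [SerializeField] RenderTexture m_Source;   //the segmentation render target
    [SerializeField] Camera m_SourceCamera;    //the camera rendering into m_Source
    RenderTextureReader<Color32> m_Reader;
    void OnEnable()
    {
        m_Reader = new RenderTextureReader<Color32>(m_Source, m_SourceCamera,
            (frame, data, texture) => Debug.Log($"Frame {frame}: {data.Length} pixels from {texture.name}"));
    }
    void OnDisable()
    {
        //Dispose unsubscribes from endFrameRendering and releases the CPU fallback texture.
        m_Reader?.Dispose();
    }
}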

25
com.unity.perception/Runtime/GroundTruth/RenderedObjectInfo.cs


using System;
// ReSharper disable NonReadonlyMemberInGetHashCode
/// Describes an instance of an object in an instance segmentation frame.
/// Generated by <see cref="RenderedObjectInfoGenerator"/>.
/// Describes an instance of an object in an instance segmentation frame. Generated by <see cref="RenderedObjectInfoGenerator"/>.
/// </summary>
public struct RenderedObjectInfo : IEquatable<RenderedObjectInfo>
{

public uint instanceId;
public int instanceId;
/// <summary>
/// The labelId of the object resolved by a <see cref="LabelingConfiguration"/>
/// </summary>
public int labelId;
/// <summary>
/// The bounding box of the object in pixel coordinates.
/// </summary>

/// </summary>
public int pixelCount;
/// <summary>
/// The unique RGBA color for the instance.
/// </summary>
public Color32 instanceColor;
return $"{nameof(instanceId)}: {instanceId}, {nameof(boundingBox)}: {boundingBox}, " +
$"{nameof(pixelCount)}: {pixelCount}, {nameof(instanceColor)}: {instanceColor}";
return $"{nameof(instanceId)}: {instanceId}, {nameof(labelId)}: {labelId}, {nameof(boundingBox)}: {boundingBox}, {nameof(pixelCount)}: {pixelCount}";
return instanceId == other.instanceId &&
boundingBox.Equals(other.boundingBox) &&
pixelCount == other.pixelCount;
return instanceId == other.instanceId && labelId == other.labelId && boundingBox.Equals(other.boundingBox) && pixelCount == other.pixelCount;
}
/// <inheritdoc />

{
unchecked
{
// ReSharper disable NonReadonlyMemberInGetHashCode
var hashCode = (int)instanceId;
var hashCode = instanceId;
hashCode = (hashCode * 397) ^ labelId;
hashCode = (hashCode * 397) ^ boundingBox.GetHashCode();
hashCode = (hashCode * 397) ^ pixelCount;
return hashCode;

121
com.unity.perception/Runtime/GroundTruth/RenderedObjectInfoGenerator.cs


/// <summary>
/// A CPU-based pass which computes bounding box and pixel counts per-object from instance segmentation images
/// </summary>
public class RenderedObjectInfoGenerator
public class RenderedObjectInfoGenerator : IGroundTruthGenerator, IDisposable
const int k_StartingObjectCount = 1 << 8;
public uint instanceId;
public int instanceId;
public NativeSlice<Color32> segmentationImageData;
public NativeSlice<uint> segmentationImageData;
public int width;
public int rows;
public int rowStart;

{
for (var row = 0; row < rows; row++)
{
var rowSlice = new NativeSlice<Color32>(segmentationImageData, width * row, width);
var rowSlice = new NativeSlice<uint>(segmentationImageData, width * row, width);
instanceId = 0,
instanceId = -1,
var packed = InstanceIdToColorMapping.GetPackedColorFromColor(rowSlice[i]);
// pixel color black (0,0,0,255) is reserved for no hit, so set it to id 0
var id = packed == 255 ? 0 : packed;
var value = rowSlice[i];
if (id != currentBB.instanceId)
if (value != currentBB.instanceId)
{
if (currentBB.instanceId > 0)
{

currentBB = new Object1DSpan
{
instanceId = id,
instanceId = (int)value,
left = i,
row = row + rowStart
};

}
}
NativeList<int> m_InstanceIdToLabelEntryIndexLookup;
LabelingConfiguration m_LabelingConfiguration;
// ReSharper disable once InvalidXmlDocComment
/// <summary>
/// Create a new CpuRenderedObjectInfoPass with the given LabelingConfiguration.
/// </summary>
/// <param name="labelingConfiguration">The LabelingConfiguration to use to determine labelId. Should match the
/// one used by the <seealso cref="InstanceSegmentationUrpPass"/> generating the input image. See <see cref="Compute"/></param>
public RenderedObjectInfoGenerator(LabelingConfiguration labelingConfiguration)
{
m_LabelingConfiguration = labelingConfiguration;
m_InstanceIdToLabelEntryIndexLookup = new NativeList<int>(k_StartingObjectCount, Allocator.Persistent);
}
/// <inheritdoc/>
public void SetupMaterialProperties(MaterialPropertyBlock mpb, MeshRenderer meshRenderer, Labeling labeling, uint instanceId)
{
if (m_LabelingConfiguration.TryGetMatchingConfigurationEntry(labeling, out var entry, out var index))
{
if (m_InstanceIdToLabelEntryIndexLookup.Length <= instanceId)
{
m_InstanceIdToLabelEntryIndexLookup.Resize((int)instanceId + 1, NativeArrayOptions.ClearMemory);
}
m_InstanceIdToLabelEntryIndexLookup[(int)instanceId] = index;
}
}
// ReSharper disable once InvalidXmlDocComment
/// <summary>

/// <param name="stride">Stride of the image data. Should be equal to the width of the image.</param>
/// <param name="boundingBoxOrigin">Whether bounding boxes should be top-left or bottom-right-based.</param>
/// <param name="renderedObjectInfos">When this method returns, filled with RenderedObjectInfo entries for each object visible in the frame.</param>
/// <param name="perLabelEntryObjectCount">When the method returns, filled with a NativeArray with the count of objects for each entry in <see cref="LabelingConfiguration.LabelEntries"/> in the LabelingConfiguration passed into the constructor.</param>
public void Compute(NativeArray<Color32> instanceSegmentationRawData, int stride, BoundingBoxOrigin boundingBoxOrigin, out NativeArray<RenderedObjectInfo> renderedObjectInfos, Allocator allocator)
public void Compute(NativeArray<uint> instanceSegmentationRawData, int stride, BoundingBoxOrigin boundingBoxOrigin, out NativeArray<RenderedObjectInfo> renderedObjectInfos, out NativeArray<uint> perLabelEntryObjectCount, Allocator allocator)
{
const int jobCount = 24;
var height = instanceSegmentationRawData.Length / stride;

handles[jobIndex] = new ComputeHistogramPerRowJob
{
segmentationImageData = new NativeSlice<Color32>(instanceSegmentationRawData, row * stride, stride * rowsThisJob),
segmentationImageData = new NativeSlice<uint>(instanceSegmentationRawData, row * stride, stride * rowsThisJob),
width = stride,
rowStart = row,
rows = rowsThisJob,

JobHandle.CompleteAll(handles);
}
var boundingBoxMap = new NativeHashMap<uint, RenderedObjectInfo>(100, Allocator.Temp);
perLabelEntryObjectCount = new NativeArray<uint>(m_LabelingConfiguration.LabelEntries.Count, allocator);
var boundingBoxMap = new NativeHashMap<int, RenderedObjectInfo>(100, Allocator.Temp);
using (s_LabelMerge.Auto())
{
foreach (var boundingBoxList in jobBoundingBoxLists)

renderedObjectInfos = new NativeArray<RenderedObjectInfo>(keyValueArrays.Keys.Length, allocator);
for (var i = 0; i < keyValueArrays.Keys.Length; i++)
{
var color = InstanceIdToColorMapping.GetColorFromPackedColor(keyValueArrays.Keys[i]);
if (InstanceIdToColorMapping.TryGetInstanceIdFromColor(color, out var instanceId))
{
var renderedObjectInfo = keyValueArrays.Values[i];
var boundingBox = renderedObjectInfo.boundingBox;
if (boundingBoxOrigin == BoundingBoxOrigin.TopLeft)
{
var y = height - boundingBox.yMax;
boundingBox = new Rect(boundingBox.x, y, boundingBox.width, boundingBox.height);
}
var instanceId = keyValueArrays.Keys[i];
if (m_InstanceIdToLabelEntryIndexLookup.Length <= instanceId)
continue;
renderedObjectInfos[i] = new RenderedObjectInfo
{
instanceId = instanceId,
boundingBox = boundingBox,
pixelCount = renderedObjectInfo.pixelCount,
instanceColor = color
};
}
else
var labelIndex = m_InstanceIdToLabelEntryIndexLookup[instanceId];
var labelId = m_LabelingConfiguration.LabelEntries[labelIndex].id;
perLabelEntryObjectCount[labelIndex]++;
var renderedObjectInfo = keyValueArrays.Values[i];
var boundingBox = renderedObjectInfo.boundingBox;
if (boundingBoxOrigin == BoundingBoxOrigin.TopLeft)
Debug.LogError($"Could not generate instance ID for object, ID exceeded maximum ID");
var y = height - boundingBox.yMax;
boundingBox = new Rect(boundingBox.x, y, boundingBox.width, boundingBox.height);
renderedObjectInfos[i] = new RenderedObjectInfo
{
instanceId = instanceId,
labelId = labelId,
boundingBox = boundingBox,
pixelCount = renderedObjectInfo.pixelCount
};
}
keyValueArrays.Dispose();
}

}
handles.Dispose();
}
/// <summary>
/// Attempts to find the label id for the given instance id using the LabelingConfiguration passed into the constructor.
/// </summary>
/// <param name="instanceId">The instanceId of the object for which the labelId should be found</param>
/// <param name="labelId">The labelId of the object. -1 if not found</param>
/// <returns>True if a labelId is found for the given instanceId.</returns>
public bool TryGetLabelIdFromInstanceId(int instanceId, out int labelId)
{
labelId = -1;
if (m_InstanceIdToLabelEntryIndexLookup.Length <= instanceId)
return false;
labelId = m_InstanceIdToLabelEntryIndexLookup[instanceId];
return true;
}
/// <inheritdoc />
public void Dispose()
{
m_InstanceIdToLabelEntryIndexLookup.Dispose();
}
}
}
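A hedged sketch of calling Compute, using the LabelingConfiguration-based variant shown above (uint pixel data plus per-label counts); the other side of this diff takes Color32 data and no configuration. The wrapper class and parameter names are illustrative.
using Unity.Collections;
using UnityEngine;
using UnityEngine.Perception.GroundTruth;
static class RenderedObjectInfoExample
{
    public static void LogObjectInfo(LabelingConfiguration labelingConfiguration, NativeArray<uint> segmentationPixels, int imageWidth)
    {
        var generator = new RenderedObjectInfoGenerator(labelingConfiguration);
        generator.Compute(segmentationPixels, imageWidth, BoundingBoxOrigin.TopLeft,
            out var objectInfos, out var perLabelCounts, Allocator.Temp);
        foreach (var info in objectInfos)
            Debug.Log($"instance {info.instanceId} (label {info.labelId}): {info.pixelCount} px, bbox {info.boundingBox}");
        objectInfos.Dispose();
        perLabelCounts.Dispose();
        generator.Dispose();
    }
}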

10
com.unity.perception/Runtime/GroundTruth/Resources/InstanceSegmentation.shader


{
Properties
{
[PerObjectData] _SegmentationId("Segmentation ID", vector) = (0,0,0,1)
[PerObjectData] _SegmentationId("Segmentation ID", int) = 0
}
SubShader
{

#pragma vertex vert
#pragma fragment frag
#include "UnityCG.cginc"
#include "Packing.hlsl"
#include "Packages/com.unity.render-pipelines.core/ShaderLibrary/Common.hlsl"
#include "Packages/com.unity.render-pipelines.core/ShaderLibrary/Packing.hlsl"
struct appdata
{

float4 vertex : SV_POSITION;
};
float4 _SegmentationId;
uint _SegmentationId;
v2f vert (appdata v)
{

fixed4 frag (v2f i) : SV_Target
{
return _SegmentationId;
return float4(UnpackUIntToFloat((uint)_SegmentationId, 0, 8), UnpackUIntToFloat(_SegmentationId, 8, 8), UnpackUIntToFloat(_SegmentationId, 16, 8), UnpackUIntToFloat(_SegmentationId, 24, 8));
}
ENDCG
}
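The fragment shader above spreads the 32-bit segmentation id across the four 8-bit color channels of the render target. A small C# illustration of the same packing and its inverse (the helper names are not package API):
using UnityEngine;
static class InstanceIdPacking
{
    //Mirrors the UnpackUIntToFloat calls above: bits 0-7 go to red, 8-15 to green, 16-23 to blue, 24-31 to alpha.
    public static Color32 Pack(uint id) => new Color32(
        (byte)(id & 0xFF),
        (byte)((id >> 8) & 0xFF),
        (byte)((id >> 16) & 0xFF),
        (byte)((id >> 24) & 0xFF));
    public static uint Unpack(Color32 c) =>
        (uint)c.r | ((uint)c.g << 8) | ((uint)c.b << 16) | ((uint)c.a << 24);
}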

18
com.unity.perception/Runtime/GroundTruth/Resources/LabeledObjectHistogram.compute


return objectId;
}
// For each pixel in the segmentation image, set InstanceIdPresenceMask[pixelValue] to 1
[numthreads(8,8,1)]
void PresenseMask (uint3 id : SV_DispatchThreadID)
{

//Attempt at packing presence into single bits. Good for memory, bad for perf due to InterlockedOr.
//InterlockedOr(IdPresenceMask[maskOffset], 1 << bitOffset);
}

uint mask = InstanceIdPresenceMask[id.x];
if (mask > 0)
InterlockedAdd(ClassCounts[InstanceIdToClassId[id.x]], 1);
//Attempt at packing presence into single bits. Good for memory, bad for perf due to InterlockedOr in PresenseMask(...).
//int idStart = id.x * 32;
//for(int i = 0; i < 32 ; i++)
//{

// InterlockedAdd(ClassCounts[InstanceIdToClassId[idStart + i]], 1);
// }
//}
}
}
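A CPU-side restatement of what the two kernels above compute: first mark every instance id that appears in the segmentation image, then add one hit per present instance to its class count. The array names mirror the compute buffers and are illustrative.
static class LabeledObjectHistogramExample
{
    public static uint[] CountObjectsPerClass(uint[] segmentationPixels, int[] instanceIdToClassId, int classCount)
    {
        //Kernel 1 (PresenseMask): set the presence flag for every instance id seen in the image.
        var presenceMask = new byte[instanceIdToClassId.Length];
        foreach (var pixelValue in segmentationPixels)
            presenceMask[pixelValue] = 1;
        //Kernel 2: for every id whose flag is set, increment the count of its class.
        var classCounts = new uint[classCount];
        for (var id = 0; id < presenceMask.Length; id++)
            if (presenceMask[id] > 0)
                classCounts[instanceIdToClassId[id]]++;
        return classCounts;
    }
}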

29
com.unity.perception/Runtime/GroundTruth/Resources/SemanticSegmentation.shader


{
Properties
{
[PerObjectData] LabelingId("Labeling Id", Vector) = (0,0,0,1)
[PerObjectData] LabelingId("Labeling Id", int) = 0
}
HLSLINCLUDE

CGPROGRAM
#pragma vertex semanticSegmentationVertexStage
#pragma fragment semanticSegmentationFragmentStage
#pragma vertex vert
#pragma fragment frag
#include "UnityCG.cginc"
#include "Packages/com.unity.render-pipelines.core/ShaderLibrary/Common.hlsl"
#include "Packages/com.unity.render-pipelines.core/ShaderLibrary/Packing.hlsl"
float4 LabelingId;
uint LabelingId;
struct in_vert
struct appdata
struct vertexToFragment
struct v2f
vertexToFragment semanticSegmentationVertexStage (in_vert vertWorldSpace)
uint _SegmentationId;
v2f vert (appdata v)
vertexToFragment vertScreenSpace;
vertScreenSpace.vertex = UnityObjectToClipPos(vertWorldSpace.vertex);
return vertScreenSpace;
v2f o;
o.vertex = UnityObjectToClipPos(v.vertex);
return o;
fixed4 semanticSegmentationFragmentStage (vertexToFragment vertScreenSpace) : SV_Target
fixed4 frag (v2f i) : SV_Target
return LabelingId;
return float4(UnpackUIntToFloat((uint)LabelingId, 0, 8), UnpackUIntToFloat((uint)LabelingId, 8, 8), 0, 1.0);
}
ENDCG

123
com.unity.perception/Runtime/GroundTruth/SimulationManager.cs


using System;
using System.Collections.Generic;
using System.IO;
using Unity.Collections;
using Unity.Simulation;
using UnityEngine;

/// Global manager for frame scheduling and output capture for simulations.
/// Data capture follows the schema defined in *TODO: Expose schema publicly*
/// </summary>
public static class DatasetCapture
public static class SimulationManager
internal static string OutputDirectory => SimulationState.GetOutputDirectoryNoCreate();
internal static string OutputDirectory => SimulationState.OutputDirectory;
/// The json metadata schema version the DatasetCapture's output conforms to.
/// The json metadata schema version the SimulationManager's output conforms to.
/// </summary>
public static string SchemaVersion => "0.0.1";

/// <param name="egoHandle">The ego container for the sensor. Sensor orientation will be reported in the context of the given ego.</param>
/// <param name="modality">The kind of the sensor (ex. "camera", "lidar")</param>
/// <param name="description">A human-readable description of the sensor (ex. "front-left rgb camera")</param>
/// <param name="firstCaptureFrame">The offset from the current frame on which this sensor should first be scheduled.</param>
/// <param name="captureTriggerMode">The method of triggering captures for this sensor.</param>
/// <param name="simulationDeltaTime">The simulation frame time (seconds) requested by this sensor.</param>
/// <param name="framesBetweenCaptures">The number of frames to simulate and render between the camera's scheduled captures. Setting this to 0 makes the camera capture every frame.</param>
/// <param name="manualSensorAffectSimulationTiming">Have this unscheduled (manual capture) camera affect simulation timings (similar to a scheduled camera) by requesting a specific frame delta time</param>
/// <param name="period">The period, in seconds, on which the sensor should capture. Frames will be scheduled in the simulation such that each sensor is triggered every _period_ seconds.</param>
/// <param name="firstCaptureTime">The time, in seconds, from the start of the sequence on which this sensor should first be scheduled.</param>
public static SensorHandle RegisterSensor(EgoHandle egoHandle, string modality, string description, float firstCaptureFrame, CaptureTriggerMode captureTriggerMode, float simulationDeltaTime, int framesBetweenCaptures, bool manualSensorAffectSimulationTiming = false)
public static SensorHandle RegisterSensor(EgoHandle egoHandle, string modality, string description, float period, float firstCaptureTime)
SimulationState.AddSensor(egoHandle, modality, description, firstCaptureFrame, captureTriggerMode, simulationDeltaTime, framesBetweenCaptures, manualSensorAffectSimulationTiming, sensor);
SimulationState.AddSensor(egoHandle, modality, description, period, firstCaptureTime, sensor);
return sensor;
}

/// Report a metric not associated with any sensor or annotation.
/// </summary>
/// <param name="metricDefinition">The metric definition of the metric being reported</param>
/// <returns>An <see cref="AsyncMetric"/> which should be used to report the metric values, potentially in a later frame</returns>
public static AsyncMetric ReportMetricAsync(MetricDefinition metricDefinition) => SimulationState.CreateAsyncMetric(metricDefinition);
/// <summary>

static SimulationState CreateSimulationData()
{
//TODO: Remove the Guid path when we have proper dataset merging in Unity Simulation and Thea
return new SimulationState($"Dataset{k_DatasetGuid}");
//TODO: Remove the Guid path when we have proper dataset merging in USim/Thea
return new SimulationState(Manager.Instance.GetDirectoryFor($"Dataset{k_DatasetGuid}"));
}
[RuntimeInitializeOnLoadMethod]

}
}
/// Capture trigger modes for sensors.
/// </summary>
public enum CaptureTriggerMode
{
/// <summary>
/// Captures happen automatically based on a start frame and frame delta time.
/// </summary>
Scheduled,
/// <summary>
/// Captures should be triggered manually through calling the manual capture method of the sensor using this trigger mode.
/// </summary>
Manual
}
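//Hedged usage sketch, not part of this file's code: registering a manually triggered sensor against
//the RegisterSensor signature shown above on the DatasetCapture side of this diff (the other side uses
//SimulationManager with a period/firstCaptureTime signature). The helper class and ego parameter are assumed.
static class ManualSensorExample
{
    public static SensorHandle RegisterManualCamera(EgoHandle ego)
    {
        var sensor = DatasetCapture.RegisterSensor(
            ego,
            "camera",                  //modality
            "front rgb camera",        //human-readable description
            0,                         //firstCaptureFrame
            CaptureTriggerMode.Manual, //captures happen only when RequestCapture() is called
            0.0166f,                   //simulationDeltaTime in seconds
            0);                        //framesBetweenCaptures
        //Later, ask for a capture on the next rendered frame:
        sensor.RequestCapture();
        return sensor;
    }
}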
/// <summary>
/// A handle to a sensor managed by the <see cref="DatasetCapture"/>. It can be used to check whether the sensor
/// A handle to a sensor managed by the <see cref="SimulationManager"/>. It can be used to check whether the sensor
/// is expected to capture this frame and report captures, annotations, and metrics regarding the sensor.
/// </summary>
public struct SensorHandle : IDisposable, IEquatable<SensorHandle>

}
/// <summary>
/// Whether the sensor is currently enabled. When disabled, the DatasetCapture will no longer schedule frames for running captures on this sensor.
/// Whether the sensor is currently enabled. When disabled, the SimulationManager will no longer schedule frames for running captures on this sensor.
get => DatasetCapture.SimulationState.IsEnabled(this);
get => SimulationManager.SimulationState.IsEnabled(this);
DatasetCapture.SimulationState.SetEnabled(this, value);
SimulationManager.SimulationState.SetEnabled(this, value);
}
}

if (!annotationDefinition.IsValid)
throw new ArgumentException("The given annotationDefinition is invalid", nameof(annotationDefinition));
return DatasetCapture.SimulationState.ReportAnnotationFile(annotationDefinition, this, filename);
return SimulationManager.SimulationState.ReportAnnotationFile(annotationDefinition, this, filename);
}
/// <summary>

if (!annotationDefinition.IsValid)
throw new ArgumentException("The given annotationDefinition is invalid", nameof(annotationDefinition));
return DatasetCapture.SimulationState.ReportAnnotationValues(annotationDefinition, this, values);
return SimulationManager.SimulationState.ReportAnnotationValues(annotationDefinition, this, values);
}
/// <summary>

if (!annotationDefinition.IsValid)
throw new ArgumentException("The given annotationDefinition is invalid", nameof(annotationDefinition));
return DatasetCapture.SimulationState.ReportAnnotationAsync(annotationDefinition, this);
return SimulationManager.SimulationState.ReportAnnotationAsync(annotationDefinition, this);
}
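A sketch of the async annotation flow, mirroring how PerceptionCamera completes its semantic segmentation annotation further down; the definition and file path are placeholders.
// Illustrative only: request an annotation handle this frame and complete it later.
var asyncAnnotation = sensorHandle.ReportAnnotationAsync(segmentationAnnotationDefinition);
// ... after the PNG has been written into the dataset directory:
asyncAnnotation.ReportFile("SemanticSegmentation/segmentation_42.png");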
/// <summary>

throw new InvalidOperationException("Capture reported in frame when ShouldCaptureThisFrame is false.");
}
DatasetCapture.SimulationState.ReportCapture(this, filename, sensorSpatialData, additionalSensorValues);
SimulationManager.SimulationState.ReportCapture(this, filename, sensorSpatialData, additionalSensorValues);
}
/// <summary>

public bool ShouldCaptureThisFrame => DatasetCapture.SimulationState.ShouldCaptureThisFrame(this);
/// <summary>
/// Requests a capture from this sensor on the next rendered frame. Can only be used with manual capture mode (<see cref="PerceptionCamera.CaptureTriggerMode.Manual"/>).
/// </summary>
public void RequestCapture()
{
DatasetCapture.SimulationState.SetNextCaptureTimeToNowForSensor(this);
}
public bool ShouldCaptureThisFrame => SimulationManager.SimulationState.ShouldCaptureThisFrame(this);
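A sketch of gating per-frame reporting on ShouldCaptureThisFrame, following the ReportCapture pattern PerceptionCamera uses below; the path, GameObject references, and intrinsic value are placeholders, and the trailing additional-sensor-values argument is assumed to accept (key, value) tuples as in the camera code.
// Illustrative only: report a capture only on frames this sensor is scheduled for.
if (sensorHandle.ShouldCaptureThisFrame)
{
    sensorHandle.ReportCapture(
        "RGB/rgb_42.png",                                            // dataset-relative path (placeholder)
        SensorSpatialData.FromGameObjects(egoObject, cameraObject),  // hypothetical GameObjects
        ("camera_intrinsic", intrinsicMatrix));                      // optional extra values (assumption)
}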
/// <summary>
/// Report a metric regarding this sensor in the current frame.

if (!ShouldCaptureThisFrame)
throw new InvalidOperationException($"Sensor-based metrics may only be reported when SensorHandle.ShouldCaptureThisFrame is true");
DatasetCapture.SimulationState.ReportMetric(metricDefinition, values, this, default);
SimulationManager.SimulationState.ReportMetric(metricDefinition, values, this, default);
}
/// <summary>

if (!ShouldCaptureThisFrame)
throw new InvalidOperationException($"Sensor-based metrics may only be reported when SensorHandle.ShouldCaptureThisFrame is true");
DatasetCapture.SimulationState.ReportMetric(metricDefinition, new JRaw(valuesJsonArray), this, default);
SimulationManager.SimulationState.ReportMetric(metricDefinition, new JRaw(valuesJsonArray), this, default);
}
/// <summary>

if (!ShouldCaptureThisFrame)
throw new InvalidOperationException($"Sensor-based metrics may only be reported when SensorHandle.ShouldCaptureThisFrame is true");
return DatasetCapture.SimulationState.CreateAsyncMetric(metricDefinition, this);
return SimulationManager.SimulationState.CreateAsyncMetric(metricDefinition, this);
}
/// <summary>

/// <summary>
/// Returns whether this SensorHandle is valid in the current simulation. Nil SensorHandles are never valid.
/// </summary>
public bool IsValid => DatasetCapture.IsValid(this.Id);
public bool IsValid => SimulationManager.IsValid(this.Id);
/// <summary>
/// Returns true if this SensorHandle was default-instantiated.
/// </summary>

{
if (!DatasetCapture.IsValid(this.Id))
if (!SimulationManager.IsValid(this.Id))
throw new InvalidOperationException("SensorHandle has been disposed or its simulation has ended");
}

}
/// <summary>
/// Report file-based and value-based data for this annotation.
/// </summary>
/// <param name="path">The path to the file containing the annotation data.</param>
/// <param name="values">The annotation data.</param>
/// <typeparam name="T">The type of the data.</typeparam>
/// <exception cref="ArgumentNullException">Thrown if path or values is null</exception>
public void ReportFileAndValues<T>(string path, IEnumerable<T> values)
{
if (path == null)
throw new ArgumentNullException(nameof(path));
if (values == null)
throw new ArgumentNullException(nameof(values));
m_SimulationState.ReportAsyncAnnotationResult(this, path, values);
}
/// <summary>
public void ReportValues<T>(IEnumerable<T> values)
{
if (values == null)
throw new ArgumentNullException(nameof(values));
m_SimulationState.ReportAsyncAnnotationResult(this, values: values);
}
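A sketch of completing an async annotation with file-based and/or value-based data using the methods above; asyncAnnotation, pixelSpecs, and boundingBoxValues are placeholders.
// Illustrative only: an annotation backed by a file plus parsed values...
asyncAnnotation.ReportFileAndValues("SemanticSegmentation/segmentation_42.png", pixelSpecs);
// ...or by values alone.
asyncAnnotation.ReportValues(boundingBoxValues);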
/// <summary>
/// Report a value-based data for this annotation.
/// </summary>
/// <param name="values">The annotation data.</param>
/// <typeparam name="T">The type of the data.</typeparam>
/// <exception cref="ArgumentNullException">Thrown if values is null</exception>
public void ReportValues<T>(NativeSlice<T> values) where T : struct
public void ReportValues<T>(T[] values)
{
if (values == null)
throw new ArgumentNullException(nameof(values));

if (!SensorHandle.ShouldCaptureThisFrame)
throw new InvalidOperationException($"Sensor-based metrics may only be reported when SensorHandle.ShouldCaptureThisFrame is true");
DatasetCapture.SimulationState.ReportMetric(metricDefinition, values, SensorHandle, this);
SimulationManager.SimulationState.ReportMetric(metricDefinition, values, SensorHandle, this);
}
/// <summary>

if (!SensorHandle.ShouldCaptureThisFrame)
throw new InvalidOperationException($"Sensor-based metrics may only be reported when SensorHandle.ShouldCaptureThisFrame is true");
DatasetCapture.SimulationState.ReportMetric(metricDefinition, new JRaw(valuesJsonArray), SensorHandle, this);
SimulationManager.SimulationState.ReportMetric(metricDefinition, new JRaw(valuesJsonArray), SensorHandle, this);
}
/// <summary>

/// <returns>A handle to an AsyncMetric, which can be used to report values for this metric in future frames.</returns>
public AsyncMetric ReportMetricAsync(MetricDefinition metricDefinition) => DatasetCapture.SimulationState.CreateAsyncMetric(metricDefinition, SensorHandle, this);
public AsyncMetric ReportMetricAsync(MetricDefinition metricDefinition) => SimulationManager.SimulationState.CreateAsyncMetric(metricDefinition, SensorHandle, this);
/// <inheritdoc/>
public bool Equals(Annotation other)

}
/// <summary>
/// A metric type, used to define a kind of metric. <see cref="DatasetCapture.RegisterMetricDefinition"/>.
/// A metric type, used to define a kind of metric. <see cref="SimulationManager.RegisterMetricDefinition"/>.
/// </summary>
public struct MetricDefinition : IEquatable<MetricDefinition>
{

}
/// <summary>
/// A metric type, used to define a kind of annotation. <see cref="DatasetCapture.RegisterAnnotationDefinition"/>.
/// A metric type, used to define a kind of annotation. <see cref="SimulationManager.RegisterAnnotationDefinition"/>.
/// </summary>
public struct AnnotationDefinition : IEquatable<AnnotationDefinition>
{

/// The ID of the annotation type. Used in the json metadata to associate annotations with the type.
/// </summary>
public readonly Guid Id;
internal bool IsValid => DatasetCapture.IsValid(Id);
internal bool IsValid => SimulationManager.IsValid(Id);
internal AnnotationDefinition(Guid id)
{

5
com.unity.perception/Runtime/GroundTruth/Ego.cs


public string Description;
EgoHandle m_EgoHandle;
/// <summary>
/// The EgoHandle registered with DatasetCapture at runtime.
/// </summary>
public EgoHandle EgoHandle
{
get

void EnsureEgoInitialized()
{
if (m_EgoHandle == default)
m_EgoHandle = DatasetCapture.RegisterEgo(Description);
m_EgoHandle = SimulationManager.RegisterEgo(Description);
}
}
}

871
com.unity.perception/Runtime/GroundTruth/PerceptionCamera.cs


using System;
using System;
using System.Diagnostics.CodeAnalysis;
using System.IO;
using Unity.Mathematics;
using JetBrains.Annotations;
using Unity.Collections;
using Unity.Collections.LowLevel.Unsafe;
using Unity.Entities;
using Unity.Profiling;
using Unity.Simulation;
using UnityEngine;

using UnityEngine.Serialization;
#if URP_PRESENT
using UnityEngine.Rendering.Universal;
#endif
namespace UnityEngine.Perception.GroundTruth
{

[RequireComponent(typeof(Camera))]
public partial class PerceptionCamera : MonoBehaviour
public class PerceptionCamera : MonoBehaviour
const float k_PanelWidth = 200;
const float k_PanelHeight = 250;
const string k_RgbFilePrefix = "rgb_";
static ProfilerMarker s_WriteFrame = new ProfilerMarker("Write Frame (PerceptionCamera)");
static ProfilerMarker s_EncodeAndSave = new ProfilerMarker("Encode and save (PerceptionCamera)");
static PerceptionCamera s_VisualizedPerceptionCamera;
//TODO: Remove the Guid path when we have proper dataset merging in Unity Simulation and Thea
internal string rgbDirectory { get; } = $"RGB{Guid.NewGuid()}";
internal HUDPanel hudPanel;
internal OverlayPanel overlayPanel;
[SerializeReference]
List<CameraLabeler> m_Labelers = new List<CameraLabeler>();
Dictionary<string, object> m_PersistentSensorData = new Dictionary<string, object>();
bool m_ShowingVisualizations;
bool m_GUIStylesInitialized;
int m_LastFrameCaptured = -1;
int m_LastFrameEndRendering = -1;
Ego m_EgoMarker;
SensorHandle m_SensorHandle;
Vector2 m_ScrollPosition;
#if URP_PRESENT
// only used to confirm that GroundTruthRendererFeature is present in URP
bool m_IsGroundTruthRendererFeaturePresent;
internal List<ScriptableRenderPass> passes = new List<ScriptableRenderPass>();
#endif
const string k_SemanticSegmentationDirectory = "SemanticSegmentation";
//TODO: Remove the Guid path when we have proper dataset merging in USim/Thea
internal static string RgbDirectory { get; } = $"RGB{Guid.NewGuid()}";
static string s_RgbFilePrefix = "rgb_";
const string k_SegmentationFilePrefix = "segmentation_";
/// <summary>
/// The period in seconds that the Camera should render
/// </summary>
public float period = .0166f;
/// <summary>
/// The start time in seconds of the first frame in the simulation.
/// </summary>
public float startTime;
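An illustrative configuration of the two timing fields above; the values and the perceptionCamera reference are assumptions.
// Illustrative only: capture at 30 frames per simulated second, starting immediately.
perceptionCamera.period = 1f / 30f;   // about 0.0333 seconds between captures
perceptionCamera.startTime = 0f;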
/// Caches access to the camera attached to the perception camera
/// Whether semantic segmentation images should be generated
public Camera attachedCamera { get; private set; }
public bool produceSegmentationImages = true;
/// Frame number at which this camera starts capturing.
/// Whether object counts should be computed
public int firstCaptureFrame;
public bool produceObjectCountAnnotations = true;
/// The method of triggering captures for this camera.
/// The ID to use for object count annotations in the resulting dataset
public CaptureTriggerMode captureTriggerMode = CaptureTriggerMode.Scheduled;
[FormerlySerializedAs("m_ObjectCountID")]
public string objectCountId = "51DA3C27-369D-4929-AEA6-D01614635CE2";
/// Have this unscheduled (manual capture) camera affect simulation timings (similar to a scheduled camera) by
/// requesting a specific frame delta time
/// Whether object bounding boxes should be computed
public bool manualSensorAffectSimulationTiming;
public bool produceBoundingBoxAnnotations = true;
/// The simulation frame time (seconds) for this camera. E.g. 0.0166 translates to 60 frames per second.
/// This will be used as Unity's <see cref="Time.captureDeltaTime"/>, causing a fixed number of frames to be
/// generated for each second of elapsed simulation time regardless of the capabilities of the underlying hardware.
/// The ID to use for bounding box annotations in the resulting dataset
public float simulationDeltaTime = 0.0166f;
[FormerlySerializedAs("m_BoundingBoxID")]
public string boundingBoxId = "F9F22E05-443F-4602-A422-EBE4EA9B55CB";
/// <summary>
/// Whether rendered object info metrics should be generated. This metric currently contains label id, instance id, and visible pixels.
/// </summary>
[FormerlySerializedAs("produceVisiblePixelsMetric")]
public bool produceRenderedObjectInfoMetric = true;
/// The number of frames to simulate and render between the camera's scheduled captures.
/// Setting this to 0 makes the camera capture every frame.
/// The ID to use for visible pixels metrics in the resulting dataset
public int framesBetweenCaptures;
[FormerlySerializedAs("visiblePixelsId")]
[FormerlySerializedAs("m_VisiblePixelsID")]
public string renderedObjectInfoId = "5BA92024-B3B7-41A7-9D3F-C03A6A8DDD01";
/// Turns on/off the realtime visualization capability.
/// The corner of the image to use as the origin for bounding boxes.
[SerializeField]
public bool showVisualizations = true;
public BoundingBoxOrigin boundingBoxOrigin = BoundingBoxOrigin.TopLeft;
/// <summary>
/// The LabelingConfiguration to use for segmentation and object count.
/// </summary>
public LabelingConfiguration LabelingConfiguration;
/// The <see cref="CameraLabeler"/> instances which will be run for this PerceptionCamera.
/// Invoked when RenderedObjectInfos are calculated. The first parameter is the Time.frameCount at which the objects were rendered. This may be called many frames after the frame in which the objects were rendered.
public IReadOnlyList<CameraLabeler> labelers => m_Labelers;
public event Action<int, NativeArray<RenderedObjectInfo>> renderedObjectInfosCalculated;
internal event Action<int, NativeArray<uint>> segmentationImageReceived;
internal event Action<NativeSlice<uint>, IReadOnlyList<LabelEntry>, int> classCountsReceived;
[NonSerialized]
internal RenderTexture labelingTexture;
[NonSerialized]
internal RenderTexture segmentationTexture;
RenderTextureReader<short> m_ClassLabelingTextureReader;
RenderTextureReader<uint> m_SegmentationReader;
RenderedObjectInfoGenerator m_RenderedObjectInfoGenerator;
Dictionary<string, object> m_PersistentSensorData = new Dictionary<string, object>();
#if URP_PRESENT
[NonSerialized]
internal InstanceSegmentationUrpPass instanceSegmentationUrpPass;
[NonSerialized]
internal SemanticSegmentationUrpPass semanticSegmentationUrpPass;
#endif
bool m_CapturedLastFrame;
Ego m_EgoMarker;
/// Requests a capture from this camera on the next rendered frame.
/// Can only be used when using <see cref="CaptureTriggerMode.Manual"/> capture mode.
/// The <see cref="SensorHandle"/> associated with this camera. Use this to report additional annotations and metrics at runtime.
public void RequestCapture()
public SensorHandle SensorHandle { get; private set; }
struct AsyncSemanticSegmentationWrite
{
public short[] dataArray;
public int width;
public int height;
public string path;
}
struct AsyncCaptureInfo
{
public int FrameCount;
public AsyncAnnotation SegmentationAsyncAnnotation;
public AsyncMetric ClassCountAsyncMetric;
public AsyncMetric RenderedObjectInfoAsyncMetric;
public AsyncAnnotation BoundingBoxAsyncMetric;
}
List<AsyncCaptureInfo> m_AsyncCaptureInfos = new List<AsyncCaptureInfo>();
[SuppressMessage("ReSharper", "InconsistentNaming")]
[SuppressMessage("ReSharper", "NotAccessedField.Local")]
struct ClassCountValue
if (captureTriggerMode.Equals(CaptureTriggerMode.Manual))
{
SensorHandle.RequestCapture();
}
else
{
Debug.LogError($"{nameof(RequestCapture)} can only be used if the camera is in " +
$"{nameof(CaptureTriggerMode.Manual)} capture mode.");
}
public int label_id;
public string label_name;
public uint count;
/// <summary>
/// The <see cref="SensorHandle"/> associated with this camera.
/// Use this to report additional annotations and metrics at runtime.
/// </summary>
public SensorHandle SensorHandle
[SuppressMessage("ReSharper", "InconsistentNaming")]
[SuppressMessage("ReSharper", "NotAccessedField.Local")]
struct BoundingBoxValue
get
{
EnsureSensorRegistered();
return m_SensorHandle;
}
private set => m_SensorHandle = value;
public int label_id;
public string label_name;
public int instance_id;
public float x;
public float y;
public float width;
public float height;
}
ClassCountValue[] m_ClassCountValues;
BoundingBoxValue[] m_BoundingBoxValues;
RenderedObjectInfoValue[] m_VisiblePixelsValues;
#if HDRP_PRESENT
InstanceSegmentationPass m_SegmentationPass;
SemanticSegmentationPass m_SemanticSegmentationPass;
#endif
MetricDefinition m_ObjectCountMetricDefinition;
AnnotationDefinition m_BoundingBoxAnnotationDefinition;
AnnotationDefinition m_SegmentationAnnotationDefinition;
MetricDefinition m_RenderedObjectInfoMetricDefinition;
static ProfilerMarker s_WriteFrame = new ProfilerMarker("Write Frame (PerceptionCamera)");
static ProfilerMarker s_FlipY = new ProfilerMarker("Flip Y (PerceptionCamera)");
static ProfilerMarker s_EncodeAndSave = new ProfilerMarker("Encode and save (PerceptionCamera)");
static ProfilerMarker s_ClassCountCallback = new ProfilerMarker("OnClassLabelsReceived");
static ProfilerMarker s_RenderedObjectInfosCalculatedEvent = new ProfilerMarker("renderedObjectInfosCalculated event");
static ProfilerMarker s_BoundingBoxCallback = new ProfilerMarker("OnBoundingBoxesReceived");
static ProfilerMarker s_ProduceRenderedObjectInfoMetric = new ProfilerMarker("ProduceRenderedObjectInfoMetric");
[SuppressMessage("ReSharper", "InconsistentNaming")]
struct SemanticSegmentationSpec
{
[UsedImplicitly]
public int label_id;
[UsedImplicitly]
public string label_name;
[UsedImplicitly]
public int pixel_value;
}
[SuppressMessage("ReSharper", "InconsistentNaming")]
struct ObjectCountSpec
{
[UsedImplicitly]
public int label_id;
[UsedImplicitly]
public string label_name;
/// Add a data object which will be added to the dataset with each capture.
/// Overrides existing sensor data associated with the given key.
/// Add a data object which will be added to the dataset with each capture. Overrides existing sensor data associated with the given key.
/// </summary>
/// <param name="key">The key to associate with the data.</param>
/// <param name="data">An object containing the data. Will be serialized into json.</param>

return m_PersistentSensorData.Remove(key);
}
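A sketch of the persistent sensor data API documented above, echoing the camera_intrinsic entry reported later in this file; the key and value are placeholders, and the method is assumed to be callable from user code.
// Illustrative only: attach an extra key/value that is serialized with every capture.
perceptionCamera.SetPersistentSensorData("weather", "overcast");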
/// <summary>
/// Add the given <see cref="CameraLabeler"/> to the PerceptionCamera. It will be set up and executed by this
/// PerceptionCamera each frame it captures data.
/// </summary>
/// <param name="cameraLabeler">The labeler to add to this PerceptionCamera</param>
public void AddLabeler(CameraLabeler cameraLabeler) => m_Labelers.Add(cameraLabeler);
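A sketch of attaching labelers at runtime via AddLabeler; the labeler and label config types are the ones exercised by the editor tests at the end of this diff, and perceptionCamera is a placeholder reference.
// Illustrative only: add 2D bounding box and rendered object info labelers.
var idLabelConfig = ScriptableObject.CreateInstance<IdLabelConfig>();
perceptionCamera.AddLabeler(new BoundingBox2DLabeler(idLabelConfig));
perceptionCamera.AddLabeler(new RenderedObjectInfoLabeler(idLabelConfig));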
/// <summary>
/// Removes the given <see cref="CameraLabeler"/> from the list of labelers under this PerceptionCamera, if it
/// is in the list. The labeler is cleaned up in the process. Labelers removed from a PerceptionCamera should
/// not be used again.
/// </summary>
/// <param name="cameraLabeler"></param>
/// <returns></returns>
public bool RemoveLabeler(CameraLabeler cameraLabeler)
// Start is called before the first frame update
void Awake()
if (m_Labelers.Remove(cameraLabeler))
{
if (cameraLabeler.isInitialized)
cameraLabeler.InternalCleanup();
//CaptureOptions.useAsyncReadbackIfSupported = false;
return true;
}
return false;
}
m_EgoMarker = this.GetComponentInParent<Ego>();
var ego = m_EgoMarker == null ? SimulationManager.RegisterEgo("") : m_EgoMarker.EgoHandle;
SensorHandle = SimulationManager.RegisterSensor(ego, "camera", description, period, startTime);
void Start()
{
// Jobs are not chained to one another in any way, maximizing parallelism
AsyncRequest.maxJobSystemParallelism = 0;
AsyncRequest.maxAsyncRequestFrameAge = 0;
var myCamera = GetComponent<Camera>();
var width = myCamera.pixelWidth;
var height = myCamera.pixelHeight;
Application.runInBackground = true;
if ((produceSegmentationImages || produceObjectCountAnnotations || produceBoundingBoxAnnotations) && LabelingConfiguration == null)
{
Debug.LogError("LabelingConfiguration must be set if producing ground truth data");
produceSegmentationImages = false;
produceObjectCountAnnotations = false;
produceBoundingBoxAnnotations = false;
}
SetupInstanceSegmentation();
attachedCamera = GetComponent<Camera>();
SetupVisualizationCamera();
DatasetCapture.SimulationEnding += OnSimulationEnding;
}
segmentationTexture = new RenderTexture(new RenderTextureDescriptor(width, height, GraphicsFormat.R8G8B8A8_UNorm, 8));
segmentationTexture.name = "Segmentation";
labelingTexture = new RenderTexture(new RenderTextureDescriptor(width, height, GraphicsFormat.R8G8B8A8_UNorm, 8));
labelingTexture.name = "Labeling";
void OnEnable()
{
RenderPipelineManager.beginCameraRendering += OnBeginCameraRendering;
RenderPipelineManager.endFrameRendering += OnEndFrameRendering;
RenderPipelineManager.endCameraRendering += CheckForRendererFeature;
}
#if HDRP_PRESENT
var customPassVolume = this.GetComponent<CustomPassVolume>() ?? gameObject.AddComponent<CustomPassVolume>();
customPassVolume.injectionPoint = CustomPassInjectionPoint.BeforeRendering;
customPassVolume.isGlobal = true;
m_SegmentationPass = new InstanceSegmentationPass()
{
name = "Segmentation Pass",
targetCamera = myCamera,
targetTexture = segmentationTexture
};
m_SegmentationPass.EnsureInit();
m_SemanticSegmentationPass = new SemanticSegmentationPass(myCamera, labelingTexture, LabelingConfiguration)
{
name = "Labeling Pass"
};
// LateUpdate is called once per frame. It is called after coroutines, ensuring it is called properly after
// creation when running tests, since the test runner uses coroutines to run test code.
void LateUpdate()
{
EnsureSensorRegistered();
if (!SensorHandle.IsValid)
return;
SetupPasses(customPassVolume);
#endif
#if URP_PRESENT
instanceSegmentationUrpPass = new InstanceSegmentationUrpPass(myCamera, segmentationTexture);
semanticSegmentationUrpPass = new SemanticSegmentationUrpPass(myCamera, labelingTexture, LabelingConfiguration);
#endif
foreach (var labeler in m_Labelers)
if (produceSegmentationImages)
if (!labeler.enabled)
continue;
var specs = LabelingConfiguration.LabelEntries.Select((l) => new SemanticSegmentationSpec()
{
label_id = l.id,
label_name = l.label,
pixel_value = l.value
}).ToArray();
if (!labeler.isInitialized)
labeler.Init(this);
m_SegmentationAnnotationDefinition = SimulationManager.RegisterAnnotationDefinition("semantic segmentation", specs, "pixel-wise semantic segmentation label", "PNG");
labeler.InternalOnUpdate();
m_ClassLabelingTextureReader = new RenderTextureReader<short>(labelingTexture, myCamera,
(frameCount, data, tex) => OnSemanticSegmentationImageRead(frameCount, data));
// Currently there is an issue in the perception camera that causes the UI layer not to be visualized
// if we are utilizing async readback and we have to flip our captured image.
// We have created a jira issue for this (aisv-779) and have notified the engine team about this.
if (m_ShowingVisualizations)
CaptureOptions.useAsyncReadbackIfSupported = false;
}
if (produceObjectCountAnnotations || produceBoundingBoxAnnotations || produceRenderedObjectInfoMetric)
{
var labelingMetricSpec = LabelingConfiguration.LabelEntries.Select((l) => new ObjectCountSpec()
{
label_id = l.id,
label_name = l.label,
}).ToArray();
void OnGUI()
{
if (!m_ShowingVisualizations) return;
if (produceObjectCountAnnotations)
{
m_ObjectCountMetricDefinition = SimulationManager.RegisterMetricDefinition("object count", labelingMetricSpec, "Counts of objects for each label in the sensor's view", id: new Guid(objectCountId));
}
if (!m_GUIStylesInitialized) SetUpGUIStyles();
if (produceBoundingBoxAnnotations)
{
m_BoundingBoxAnnotationDefinition = SimulationManager.RegisterAnnotationDefinition("bounding box", labelingMetricSpec, "Bounding box for each labeled object visible to the sensor", id: new Guid(boundingBoxId));
}
GUI.depth = 5;
if (produceRenderedObjectInfoMetric)
m_RenderedObjectInfoMetricDefinition = SimulationManager.RegisterMetricDefinition("rendered object info", labelingMetricSpec, "Information about each labeled object visible to the sensor", id: new Guid(renderedObjectInfoId));
var anyLabelerEnabled = false;
m_RenderedObjectInfoGenerator = new RenderedObjectInfoGenerator(LabelingConfiguration);
World.DefaultGameObjectInjectionWorld.GetExistingSystem<GroundTruthLabelSetupSystem>().Activate(m_RenderedObjectInfoGenerator);
// If a labeler has never been initialized then it was off from the
// start, it should not be called to draw on the UI
foreach (var labeler in m_Labelers.Where(labeler => labeler.isInitialized))
{
labeler.Visualize();
anyLabelerEnabled = true;
}
m_SegmentationReader = new RenderTextureReader<uint>(segmentationTexture, myCamera, (frameCount, data, tex) =>
{
if (segmentationImageReceived != null)
segmentationImageReceived(frameCount, data);
if (!anyLabelerEnabled)
{
DisplayNoLabelersMessage();
return;
}
m_RenderedObjectInfoGenerator.Compute(data, tex.width, boundingBoxOrigin, out var renderedObjectInfos, out var classCounts, Allocator.Temp);
GUI.depth = 0;
using (s_RenderedObjectInfosCalculatedEvent.Auto())
renderedObjectInfosCalculated?.Invoke(frameCount, renderedObjectInfos);
hudPanel.OnDrawGUI();
var x = Screen.width - k_PanelWidth - 10;
var height = Math.Min(Screen.height * 0.5f - 20, k_PanelHeight);
GUILayout.BeginArea(new Rect(x, 10, k_PanelWidth, height), GUI.skin.box);
if (produceObjectCountAnnotations)
OnObjectCountsReceived(classCounts, LabelingConfiguration.LabelEntries, frameCount);
m_ScrollPosition = GUILayout.BeginScrollView(m_ScrollPosition);
if (produceBoundingBoxAnnotations)
ProduceBoundingBoxesAnnotation(renderedObjectInfos, LabelingConfiguration.LabelEntries, frameCount);
// If a labeler has never been initialized then it was off from the
// start, it should not be called to draw on the UI
foreach (var labeler in m_Labelers.Where(labeler => labeler.isInitialized))
{
labeler.VisualizeUI();
if (produceRenderedObjectInfoMetric)
ProduceRenderedObjectInfoMetric(renderedObjectInfos, frameCount);
});
// This needs to happen here so that the overlay panel controls
// are placed in the controls panel
overlayPanel.OnDrawGUI(x, 10, k_PanelWidth, height);
GUILayout.EndScrollView();
GUILayout.EndArea();
RenderPipelineManager.beginCameraRendering += OnBeginCameraRendering;
SimulationManager.SimulationEnding += OnSimulationEnding;
void OnValidate()
// ReSharper disable InconsistentNaming
struct RenderedObjectInfoValue
if (m_Labelers == null)
m_Labelers = new List<CameraLabeler>();
[UsedImplicitly]
public int label_id;
[UsedImplicitly]
public int instance_id;
[UsedImplicitly]
public int visible_pixels;
// ReSharper restore InconsistentNaming
void OnDisable()
void ProduceRenderedObjectInfoMetric(NativeArray<RenderedObjectInfo> renderedObjectInfos, int frameCount)
RenderPipelineManager.beginCameraRendering -= OnBeginCameraRendering;
RenderPipelineManager.endFrameRendering -= OnEndFrameRendering;
RenderPipelineManager.endCameraRendering -= CheckForRendererFeature;
using (s_ProduceRenderedObjectInfoMetric.Auto())
{
var findResult = FindAsyncCaptureInfo(frameCount);
if (findResult.index == -1)
return;
var asyncCaptureInfo = findResult.asyncCaptureInfo;
var metric = asyncCaptureInfo.RenderedObjectInfoAsyncMetric;
if (!metric.IsValid)
return;
if (m_VisiblePixelsValues == null || m_VisiblePixelsValues.Length != renderedObjectInfos.Length)
m_VisiblePixelsValues = new RenderedObjectInfoValue[renderedObjectInfos.Length];
for (var i = 0; i < renderedObjectInfos.Length; i++)
{
var objectInfo = renderedObjectInfos[i];
if (!TryGetLabelIdFromInstanceId(objectInfo.instanceId, out var labelId))
continue;
m_VisiblePixelsValues[i] = new RenderedObjectInfoValue
{
label_id = labelId,
instance_id = objectInfo.instanceId,
visible_pixels = objectInfo.pixelCount
};
}
metric.ReportValues(m_VisiblePixelsValues);
}
void OnDestroy()
#if HDRP_PRESENT
void SetupPasses(CustomPassVolume customPassVolume)
DatasetCapture.SimulationEnding -= OnSimulationEnding;
customPassVolume.customPasses.Remove(m_SegmentationPass);
customPassVolume.customPasses.Remove(m_SemanticSegmentationPass);
OnSimulationEnding();
CleanupVisualization();
if (produceSegmentationImages || produceObjectCountAnnotations)
customPassVolume.customPasses.Add(m_SegmentationPass);
if (SensorHandle.IsValid)
SensorHandle.Dispose();
if (produceSegmentationImages)
customPassVolume.customPasses.Add(m_SemanticSegmentationPass);
}
SensorHandle = default;
}
#endif
void EnsureSensorRegistered()
void ProduceBoundingBoxesAnnotation(NativeArray<RenderedObjectInfo> renderedObjectInfos, List<LabelEntry> labelingConfigurations, int frameCount)
if (m_SensorHandle.IsNil)
using (s_BoundingBoxCallback.Auto())
m_EgoMarker = GetComponentInParent<Ego>();
var ego = m_EgoMarker == null ? DatasetCapture.RegisterEgo("") : m_EgoMarker.EgoHandle;
SensorHandle = DatasetCapture.RegisterSensor(
ego, "camera", description, firstCaptureFrame, captureTriggerMode,
simulationDeltaTime, framesBetweenCaptures, manualSensorAffectSimulationTiming);
var findResult = FindAsyncCaptureInfo(frameCount);
if (findResult.index == -1)
return;
var asyncCaptureInfo = findResult.asyncCaptureInfo;
var boundingBoxAsyncAnnotation = asyncCaptureInfo.BoundingBoxAsyncMetric;
if (!boundingBoxAsyncAnnotation.IsValid)
return;
if (m_BoundingBoxValues == null || m_BoundingBoxValues.Length != renderedObjectInfos.Length)
m_BoundingBoxValues = new BoundingBoxValue[renderedObjectInfos.Length];
for (var i = 0; i < renderedObjectInfos.Length; i++)
{
var objectInfo = renderedObjectInfos[i];
if (!TryGetLabelIdFromInstanceId(objectInfo.instanceId, out var labelId))
continue;
m_BoundingBoxValues[i] = new BoundingBoxValue
{
label_id = labelId,
label_name = labelingConfigurations[labelId].label,
instance_id = objectInfo.instanceId,
x = objectInfo.boundingBox.x,
y = objectInfo.boundingBox.y,
width = objectInfo.boundingBox.width,
height = objectInfo.boundingBox.height,
};
}
boundingBoxAsyncAnnotation.ReportValues(m_BoundingBoxValues);
void SetupVisualizationCamera()
/// <summary>
/// Returns the class ID for the given instance ID resolved by <see cref="LabelingConfiguration"/>. Only valid when bounding boxes are being computed.
/// </summary>
/// <param name="instanceId">The instanceId of the object</param>
/// <param name="labelId">When this method returns, contains the labelId associated with the given instanceId, if one exists. -1 otherwise.</param>
/// <returns>True if a valid labelId was found for the given instanceId.</returns>
/// <exception cref="InvalidOperationException">Thrown when <see cref="produceBoundingBoxAnnotations"/> was not true on Start.</exception>
public bool TryGetLabelIdFromInstanceId(int instanceId, out int labelId)
#if !UNITY_EDITOR && !DEVELOPMENT_BUILD
showVisualizations = false;
#else
var visualizationAllowed = s_VisualizedPerceptionCamera == null;
if (m_RenderedObjectInfoGenerator == null)
throw new InvalidOperationException($"{nameof(TryGetLabelIdFromInstanceId)} can only be used when bounding box capture is enabled");
return m_RenderedObjectInfoGenerator.TryGetLabelIdFromInstanceId(instanceId, out labelId);
}
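A sketch of resolving a label id from an instance id with the method above; objectInfo stands in for a RenderedObjectInfo and perceptionCamera for the component.
// Illustrative only: requires bounding box capture to be enabled, per the exception above.
if (perceptionCamera.TryGetLabelIdFromInstanceId(objectInfo.instanceId, out var labelId))
    Debug.Log($"Instance {objectInfo.instanceId} resolved to label id {labelId}");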
if (!visualizationAllowed && showVisualizations)
void OnObjectCountsReceived(NativeSlice<uint> counts, IReadOnlyList<LabelEntry> entries, int frameCount)
{
using (s_ClassCountCallback.Auto())
Debug.LogWarning("Currently only one PerceptionCamera may be visualized at a time. " +
$"Disabling visualization on {gameObject.name}.");
showVisualizations = false;
return;
}
if (!showVisualizations)
return;
classCountsReceived?.Invoke(counts, entries, frameCount);
var findResult = FindAsyncCaptureInfo(frameCount);
if (findResult.index == -1)
return;
var asyncCaptureInfo = findResult.asyncCaptureInfo;
var classCountAsyncMetric = asyncCaptureInfo.ClassCountAsyncMetric;
if (!classCountAsyncMetric.IsValid)
return;
if (m_ClassCountValues == null || m_ClassCountValues.Length != entries.Count)
m_ClassCountValues = new ClassCountValue[entries.Count];
m_ShowingVisualizations = true;
s_VisualizedPerceptionCamera = this;
for (var i = 0; i < entries.Count; i++)
{
m_ClassCountValues[i] = new ClassCountValue()
{
label_id = entries[i].id,
label_name = entries[i].label,
count = counts[i]
};
}
hudPanel = gameObject.AddComponent<HUDPanel>();
overlayPanel = gameObject.AddComponent<OverlayPanel>();
overlayPanel.perceptionCamera = this;
#endif
classCountAsyncMetric.ReportValues(m_ClassCountValues);
}
void CheckForRendererFeature(ScriptableRenderContext context, Camera cam)
(int index, AsyncCaptureInfo asyncCaptureInfo) FindAsyncCaptureInfo(int frameCount)
if (cam == attachedCamera)
for (var i = 0; i < m_AsyncCaptureInfos.Count; i++)
#if URP_PRESENT
if (!m_IsGroundTruthRendererFeaturePresent)
var captureInfo = m_AsyncCaptureInfos[i];
if (captureInfo.FrameCount == frameCount)
Debug.LogError("GroundTruthRendererFeature must be present on the ScriptableRenderer associated " +
"with the camera. The ScriptableRenderer can be accessed through Edit -> Project Settings... " +
"-> Graphics -> Scriptable Render Pipeline Settings -> Renderer List.");
enabled = false;
return (i, captureInfo);
#endif
RenderPipelineManager.endCameraRendering -= CheckForRendererFeature;
}
#if URP_PRESENT
public void AddScriptableRenderPass(ScriptableRenderPass pass)
{
passes.Add(pass);
return (-1, default);
#endif
void SetUpGUIStyles()
// Update is called once per frame
void Update()
GUI.skin.label.fontSize = 12;
GUI.skin.label.font = Resources.Load<Font>("Inter-Light");
GUI.skin.label.padding = new RectOffset(0, 0, 1, 1);
GUI.skin.label.margin = new RectOffset(0, 0, 1, 1);
GUI.skin.label.wordWrap = true;
GUI.skin.label.alignment = TextAnchor.MiddleLeft;
GUI.skin.box.padding = new RectOffset(5, 5, 5, 5);
GUI.skin.toggle.margin = new RectOffset(0, 0, 0, 0);
GUI.skin.horizontalSlider.margin = new RectOffset(0, 0, 0, 0);
m_GUIStylesInitialized = true;
if (!SensorHandle.IsValid)
return;
var cam = GetComponent<Camera>();
cam.enabled = SensorHandle.ShouldCaptureThisFrame;
m_AsyncCaptureInfos.RemoveSwapBack(i =>
!i.SegmentationAsyncAnnotation.IsPending &&
!i.BoundingBoxAsyncMetric.IsPending &&
!i.RenderedObjectInfoAsyncMetric.IsPending &&
!i.ClassCountAsyncMetric.IsPending);
void DisplayNoLabelersMessage()
void ReportAsyncAnnotations()
var x = Screen.width - k_PanelWidth - 10;
var height = Math.Min(Screen.height * 0.5f - 20, 90);
if (produceSegmentationImages || produceObjectCountAnnotations || produceBoundingBoxAnnotations || produceRenderedObjectInfoMetric)
{
var captureInfo = new AsyncCaptureInfo()
{
FrameCount = Time.frameCount
};
if (produceSegmentationImages)
captureInfo.SegmentationAsyncAnnotation = SensorHandle.ReportAnnotationAsync(m_SegmentationAnnotationDefinition);
if (produceObjectCountAnnotations)
captureInfo.ClassCountAsyncMetric = SensorHandle.ReportMetricAsync(m_ObjectCountMetricDefinition);
GUILayout.BeginArea(new Rect(x, 10, k_PanelWidth, height), GUI.skin.box);
if (produceBoundingBoxAnnotations)
captureInfo.BoundingBoxAsyncMetric = SensorHandle.ReportAnnotationAsync(m_BoundingBoxAnnotationDefinition);
GUILayout.Label("Visualization: No labelers are currently active. Enable at least one labeler from the " +
"inspector window of your perception camera to see visualizations.");
if (produceRenderedObjectInfoMetric)
captureInfo.RenderedObjectInfoAsyncMetric = SensorHandle.ReportMetricAsync(m_RenderedObjectInfoMetricDefinition);
// If a labeler has never been initialized then it was off from the
// start, it should not be called to draw on the UI
foreach (var labeler in m_Labelers.Where(labeler => labeler.isInitialized))
{
labeler.VisualizeUI();
GUILayout.Space(4);
m_AsyncCaptureInfos.Add(captureInfo);
GUILayout.EndArea();
}
/// <summary>
/// Convert the Unity 4x4 projection matrix to a 3x3 matrix
/// </summary>
// ReSharper disable once InconsistentNaming
static float3x3 ToProjectionMatrix3x3(Matrix4x4 inMatrix)
{
return new float3x3(
inMatrix[0,0], inMatrix[0,1], inMatrix[0,2],
inMatrix[1,0], inMatrix[1,1], inMatrix[1,2],
inMatrix[2,0],inMatrix[2,1], inMatrix[2,2]);
Profiler.BeginSample("CaptureDataFromLastFrame");
Profiler.BeginSample("CaptureDataFromLastFrame");
// Record the camera's projection matrix
SetPersistentSensorData("camera_intrinsic", ToProjectionMatrix3x3(cam.projectionMatrix));
var captureFilename = $"{Manager.Instance.GetDirectoryFor(rgbDirectory)}/{k_RgbFilePrefix}{Time.frameCount}.png";
var dxRootPath = $"{rgbDirectory}/{k_RgbFilePrefix}{Time.frameCount}.png";
SensorHandle.ReportCapture(dxRootPath, SensorSpatialData.FromGameObjects(
m_EgoMarker == null ? null : m_EgoMarker.gameObject, gameObject),
m_PersistentSensorData.Select(kvp => (kvp.Key, kvp.Value)).ToArray());
var captureFilename = Path.Combine(Manager.Instance.GetDirectoryFor(RgbDirectory), $"{s_RgbFilePrefix}{Time.frameCount}.png");
var dxRootPath = Path.Combine(RgbDirectory, $"{s_RgbFilePrefix}{Time.frameCount}.png");
SensorHandle.ReportCapture(dxRootPath, SensorSpatialData.FromGameObjects(m_EgoMarker == null ? null : m_EgoMarker.gameObject, gameObject), m_PersistentSensorData.Select(kvp => (kvp.Key, kvp.Value)).ToArray());
Func<AsyncRequest<CaptureCamera.CaptureState>, AsyncRequest.Result> colorFunctor;
Func<AsyncRequest<CaptureCamera.CaptureState>, AsyncRequest.Result> colorFunctor = null;
var flipY = ShouldFlipY(cam);
colorFunctor = r =>
{

if (flipY)
FlipImageY(dataColorBuffer, height);
encodedData = ImageConversion.EncodeArrayToPNG(
dataColorBuffer, GraphicsFormat.R8G8B8A8_UNorm, (uint)width, (uint)height);
encodedData = ImageConversion.EncodeArrayToPNG(dataColorBuffer, GraphicsFormat.R8G8B8A8_UNorm, (uint)width, (uint)height);
return !FileProducer.Write(captureFilename, encodedData)
? AsyncRequest.Result.Error
: AsyncRequest.Result.Completed;
return !FileProducer.Write(captureFilename, encodedData) ? AsyncRequest.Result.Error : AsyncRequest.Result.Completed;
#if SIMULATION_CAPTURE_0_0_10_PREVIEW_16_OR_NEWER
CaptureCamera.Capture(cam, colorFunctor, forceFlip: ForceFlip.None);
#else
CaptureCamera.Capture(cam, colorFunctor, flipY: flipY);
#endif
CaptureCamera.Capture(cam, colorFunctor);
void OnSimulationEnding()
// ReSharper disable once ParameterHidesMember
bool ShouldFlipY(Camera camera)
CleanUpInstanceSegmentation();
foreach (var labeler in m_Labelers)
{
if (labeler.isInitialized)
labeler.InternalCleanup();
}
}
#if HDRP_PRESENT
var hdAdditionalCameraData = GetComponent<HDAdditionalCameraData>();
void OnBeginCameraRendering(ScriptableRenderContext scriptableRenderContext, Camera cam)
{
if (!ShouldCallLabelers(cam, m_LastFrameCaptured))
return;
m_LastFrameCaptured = Time.frameCount;
CaptureRgbData(cam);
CallOnLabelers(l => l.InternalOnBeginRendering(scriptableRenderContext));
//Based on logic in HDRenderPipeline.PrepareFinalBlitParameters
return camera.targetTexture != null || hdAdditionalCameraData.flipYMode == HDAdditionalCameraData.FlipYMode.ForceFlipY || camera.cameraType == CameraType.Game;
#elif URP_PRESENT
return (SystemInfo.graphicsDeviceType == GraphicsDeviceType.Direct3D11 || SystemInfo.graphicsDeviceType == GraphicsDeviceType.Metal) &&
(camera.targetTexture != null || camera.cameraType == CameraType.Game);
#else
return false;
#endif
void OnEndFrameRendering(ScriptableRenderContext scriptableRenderContext, Camera[] cameras)
static unsafe void FlipImageY(byte[] dataColorBuffer, int height)
bool anyCamera = false;
foreach (var cam in cameras)
using (s_FlipY.Auto())
if (ShouldCallLabelers(cam, m_LastFrameEndRendering))
var stride = dataColorBuffer.Length / height;
var buffer = new NativeArray<byte>(stride, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
fixed(byte* colorBufferPtr = &dataColorBuffer[0])
anyCamera = true;
break;
var unsafePtr = (byte*)buffer.GetUnsafePtr();
for (var row = 0; row < height / 2; row++)
{
var nearRowStartPtr = colorBufferPtr + stride * row;
var oppositeRowStartPtr = colorBufferPtr + stride * (height - row - 1);
UnsafeUtility.MemCpy(unsafePtr, oppositeRowStartPtr, stride);
UnsafeUtility.MemCpy(oppositeRowStartPtr, nearRowStartPtr, stride);
UnsafeUtility.MemCpy(nearRowStartPtr, unsafePtr, stride);
}
buffer.Dispose();
if (!anyCamera)
return;
m_LastFrameEndRendering = Time.frameCount;
CallOnLabelers(l => l.InternalOnEndRendering(scriptableRenderContext));
CaptureInstanceSegmentation(scriptableRenderContext);
void CallOnLabelers(Action<CameraLabeler> action)
void OnSimulationEnding()
foreach (var labeler in m_Labelers)
{
if (!labeler.enabled)
continue;
m_ClassLabelingTextureReader?.WaitForAllImages();
m_ClassLabelingTextureReader?.Dispose();
m_ClassLabelingTextureReader = null;
if (!labeler.isInitialized)
labeler.Init(this);
m_SegmentationReader?.WaitForAllImages();
m_SegmentationReader?.Dispose();
m_SegmentationReader = null;
action(labeler);
}
RenderPipelineManager.beginCameraRendering -= OnBeginCameraRendering;
bool ShouldCallLabelers(Camera cam, int lastFrameCalledThisCallback)
void OnBeginCameraRendering(ScriptableRenderContext _, Camera cam)
if (cam != attachedCamera)
return false;
if (!SensorHandle.ShouldCaptureThisFrame)
return false;
// There are cases when OnBeginCameraRendering is called multiple times in the same frame.
// Ignore the subsequent calls.
if (lastFrameCalledThisCallback == Time.frameCount)
return false;
return true;
if (cam != GetComponent<Camera>())
return;
#if UNITY_EDITOR
if (UnityEditor.EditorApplication.isPaused)
return;
#endif
ReportAsyncAnnotations();
CaptureRgbData(cam);
void CleanupVisualization()
void OnDisable()
if (s_VisualizedPerceptionCamera == this)
SimulationManager.SimulationEnding -= OnSimulationEnding;
OnSimulationEnding();
m_ClassLabelingTextureReader?.Dispose();
m_ClassLabelingTextureReader = null;
if (segmentationTexture != null)
segmentationTexture.Release();
segmentationTexture = null;
if (labelingTexture != null)
labelingTexture.Release();
if (m_RenderedObjectInfoGenerator != null)
s_VisualizedPerceptionCamera = null;
World.DefaultGameObjectInjectionWorld?.GetExistingSystem<GroundTruthLabelSetupSystem>()?.Deactivate(m_RenderedObjectInfoGenerator);
m_RenderedObjectInfoGenerator?.Dispose();
m_RenderedObjectInfoGenerator = null;
if (SensorHandle.IsValid)
SensorHandle.Dispose();
SensorHandle = default;
labelingTexture = null;
#if URP_PRESENT
internal void MarkGroundTruthRendererFeatureAsPresent()
void OnSemanticSegmentationImageRead(int frameCount, NativeArray<short> data)
// only used to confirm that GroundTruthRendererFeature is present in URP
m_IsGroundTruthRendererFeaturePresent = true;
var findResult = FindAsyncCaptureInfo(frameCount);
var asyncCaptureInfo = findResult.asyncCaptureInfo;
var dxLocalPath = Path.Combine(k_SemanticSegmentationDirectory, k_SegmentationFilePrefix) + frameCount + ".png";
var path = Path.Combine(Manager.Instance.GetDirectoryFor(k_SemanticSegmentationDirectory), k_SegmentationFilePrefix) + frameCount + ".png";
var annotation = asyncCaptureInfo.SegmentationAsyncAnnotation;
if (!annotation.IsValid)
return;
annotation.ReportFile(dxLocalPath);
var asyncRequest = Manager.Instance.CreateRequest<AsyncRequest<AsyncSemanticSegmentationWrite>>();
asyncRequest.data = new AsyncSemanticSegmentationWrite()
{
dataArray = data.ToArray(),
width = labelingTexture.width,
height = labelingTexture.height,
path = path
};
asyncRequest.Start((r) =>
{
Profiler.EndSample();
Profiler.BeginSample("Encode");
var pngBytes = ImageConversion.EncodeArrayToPNG(r.data.dataArray, GraphicsFormat.R8G8B8A8_UNorm, (uint)r.data.width, (uint)r.data.height);
Profiler.EndSample();
Profiler.BeginSample("WritePng");
File.WriteAllBytes(r.data.path, pngBytes);
Manager.Instance.ConsumerFileProduced(r.data.path);
Profiler.EndSample();
return AsyncRequest.Result.Completed;
});
#endif
}
}

210
com.unity.perception/Runtime/GroundTruth/SimulationState.cs


using System;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
using Newtonsoft.Json;
using Unity.Collections;
using Unity.Simulation;
// ReSharper disable NotAccessedField.Local
// ReSharper disable NonReadonlyMemberInGetHashCode
public string OutputDirectory { get; }
HashSet<SensorHandle> m_ActiveSensors = new HashSet<SensorHandle>();
Dictionary<SensorHandle, SensorData> m_Sensors = new Dictionary<SensorHandle, SensorData>();
HashSet<EgoHandle> m_Egos = new HashSet<EgoHandle>();

// Always use the property SequenceTimeMs instead
//Always use the property SequenceTimeMs instead
int m_FrameCountLastUpdatedSequenceTime;
float m_SequenceTimeDoNotUse;
float m_UnscaledSequenceTimeDoNotUse;

CustomSampler m_SerializeMetricsAsyncSampler = CustomSampler.Create("SerializeMetricsAsync");
CustomSampler m_GetOrCreatePendingCaptureForThisFrameSampler = CustomSampler.Create("GetOrCreatePendingCaptureForThisFrame");
float m_LastTimeScale;
readonly string m_OutputDirectoryName;
string m_OutputDirectoryPath;
public const string userBaseDirectoryKey = "userBaseDirectory";
public const string latestOutputDirectoryKey = "latestOutputDirectory";
public const string defaultOutputBaseDirectory = "defaultOutputBaseDirectory";
public string OutputDirectory
{
get
{
if (m_OutputDirectoryPath == null)
m_OutputDirectoryPath = Manager.Instance.GetDirectoryFor(m_OutputDirectoryName);
return m_OutputDirectoryPath;
}
}
const float k_SimulationTimingAccuracy = 0.01f;
const float k_IncludeInFrameThreshold = .01f;
const int k_MaxDeltaTime = 10;
PlayerPrefs.SetString(defaultOutputBaseDirectory, Configuration.Instance.GetStorageBasePath());
m_OutputDirectoryName = outputDirectory;
var basePath = PlayerPrefs.GetString(userBaseDirectoryKey, string.Empty);
if (basePath != string.Empty)
{
if (Directory.Exists(basePath))
{
Configuration.localPersistentDataPath = basePath;
}
else
{
Debug.LogWarning($"Passed in directory to store simulation artifacts: {basePath}, does not exist. Using default directory {Configuration.localPersistentDataPath} instead.");
basePath = Configuration.localPersistentDataPath;
}
}
PlayerPrefs.SetString(latestOutputDirectoryKey, Manager.Instance.GetDirectoryFor("", basePath));
OutputDirectory = outputDirectory;
IsRunning = true;
}

Values = values;
}
// ReSharper disable NotAccessedField.Local
public readonly SensorHandle SensorHandle;
public readonly SensorHandle SensorHandle;
public readonly Guid CaptureId;
public readonly Annotation Annotation;
public readonly Guid SequenceId;

{
public string modality;
public string description;
public float period;
public CaptureTriggerMode captureTriggerMode;
public float renderingDeltaTime;
public int framesBetweenCaptures;
public bool manualSensorAffectSimulationTiming;
public float sequenceTimeOfNextCapture;
public float sequenceTimeOfNextRender;
public float sequenceTimeNextCapture;
public int lastCaptureFrameCount;
public EgoHandle egoHandle;
}

{
unchecked
{
// ReSharper disable NonReadonlyMemberInGetHashCode
var hashCode = (name != null ? StringComparer.InvariantCulture.GetHashCode(name) : 0);
hashCode = (hashCode * 397) ^ (description != null ? StringComparer.InvariantCulture.GetHashCode(description) : 0);
hashCode = (hashCode * 397) ^ (format != null ? StringComparer.InvariantCulture.GetHashCode(format) : 0);

}
}
public string GetOutputDirectoryNoCreate() => Path.Combine(Configuration.Instance.GetStoragePath(), m_OutputDirectoryName);
void EnsureSequenceTimingsUpdated()
{
if (!m_HasStarted)

foreach (var kvp in m_Sensors.ToArray())
{
var sensorData = kvp.Value;
sensorData.sequenceTimeOfNextCapture = GetSequenceTimeOfNextCapture(sensorData);
sensorData.sequenceTimeOfNextRender = 0;
sensorData.sequenceTimeNextCapture = SequenceTimeOfNextCapture(sensorData);
m_Sensors[kvp.Key] = sensorData;
}

m_LastTimeScale = Time.timeScale;
}
public void AddSensor(EgoHandle egoHandle, string modality, string description, float firstCaptureFrame, CaptureTriggerMode captureTriggerMode, float renderingDeltaTime, int framesBetweenCaptures, bool manualSensorAffectSimulationTiming, SensorHandle sensor)
public void AddSensor(EgoHandle egoHandle, string modality, string description, float period, float firstCaptureTime, SensorHandle sensor)
firstCaptureTime = UnscaledSequenceTime + firstCaptureFrame * renderingDeltaTime,
captureTriggerMode = captureTriggerMode,
renderingDeltaTime = renderingDeltaTime,
framesBetweenCaptures = framesBetweenCaptures,
manualSensorAffectSimulationTiming = manualSensorAffectSimulationTiming,
period = period,
firstCaptureTime = firstCaptureTime,
sensorData.sequenceTimeOfNextCapture = GetSequenceTimeOfNextCapture(sensorData);
sensorData.sequenceTimeOfNextRender = UnscaledSequenceTime;
sensorData.sequenceTimeNextCapture = SequenceTimeOfNextCapture(sensorData);
float GetSequenceTimeOfNextCapture(SensorData sensorData)
float SequenceTimeOfNextCapture(SensorData sensorData)
// If the first capture hasn't happened yet, sequenceTimeNextCapture field won't be valid
{
return sensorData.captureTriggerMode == CaptureTriggerMode.Scheduled? sensorData.firstCaptureTime : float.MaxValue;
}
return sensorData.firstCaptureTime;
return sensorData.sequenceTimeOfNextCapture;
return sensorData.period - (UnscaledSequenceTime - sensorData.firstCaptureTime) % sensorData.period;
}
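A short worked example (with assumed numbers) of the period-based return value above.
// Illustration: with period = 0.0166 s, firstCaptureTime = 0, and UnscaledSequenceTime = 0.040 s,
// the expression yields 0.0166 - (0.040 % 0.0166) ≈ 0.0098 s until the next capture boundary.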
public bool Contains(Guid id) => m_Ids.Contains(id);

m_ActiveSensors.Add(sensorHandle);
}
static void CheckDatasetAllowed()
void CheckDatasetAllowed()
{
if (!Application.isPlaying)
{

m_HasStarted = true;
}
EnsureSequenceTimingsUpdated();
#if UNITY_EDITOR
if (UnityEditor.EditorApplication.isPaused)
{
//When the user clicks the 'step' button in the editor, frames will always progress at .02 seconds per step.
//In this case, just run all sensors each frame to allow for debugging
Debug.Log($"Frame step forced all sensors to synchronize, changing frame timings.");
sensorData.sequenceTimeOfNextRender = UnscaledSequenceTime;
sensorData.sequenceTimeOfNextCapture = UnscaledSequenceTime;
}
#endif
if (Mathf.Abs(sensorData.sequenceTimeOfNextRender - UnscaledSequenceTime) < k_SimulationTimingAccuracy)
{
//means this frame fulfills this sensor's simulation time requirements, we can move target to next frame.
sensorData.sequenceTimeOfNextRender += sensorData.renderingDeltaTime;
}
if (!activeSensor.ShouldCaptureThisFrame)
continue;
if (activeSensor.ShouldCaptureThisFrame)
//Just in case we get in a situation where we are so far beyond sequenceTimeNextCapture that incrementing next time by the period still doesn't get us to a time past "now"
do
if (sensorData.captureTriggerMode.Equals(CaptureTriggerMode.Scheduled))
{
sensorData.sequenceTimeOfNextCapture += sensorData.renderingDeltaTime * (sensorData.framesBetweenCaptures + 1);
Debug.Assert(sensorData.sequenceTimeOfNextCapture > UnscaledSequenceTime,
$"Next scheduled capture should be after {UnscaledSequenceTime} but is {sensorData.sequenceTimeOfNextCapture}");
while (sensorData.sequenceTimeOfNextCapture <= UnscaledSequenceTime)
sensorData.sequenceTimeOfNextCapture += sensorData.renderingDeltaTime * (sensorData.framesBetweenCaptures + 1);
}
else if (sensorData.captureTriggerMode.Equals(CaptureTriggerMode.Manual))
{
sensorData.sequenceTimeOfNextCapture = float.MaxValue;
}
sensorData.lastCaptureFrameCount = Time.frameCount;
sensorData.sequenceTimeNextCapture += sensorData.period;
while (sensorData.sequenceTimeNextCapture <= UnscaledSequenceTime);
sensorData.lastCaptureFrameCount = Time.frameCount;
var nextFrameDt = float.PositiveInfinity;
float nextFrameDt = k_MaxDeltaTime;
float thisSensorNextFrameDt = -1;
if (sensorData.captureTriggerMode.Equals(CaptureTriggerMode.Scheduled))
{
thisSensorNextFrameDt = sensorData.sequenceTimeOfNextRender - UnscaledSequenceTime;
Debug.Assert(thisSensorNextFrameDt > 0f, "Sensor was scheduled to capture in the past but got skipped over.");
}
else if (sensorData.captureTriggerMode.Equals(CaptureTriggerMode.Manual) && sensorData.manualSensorAffectSimulationTiming)
{
thisSensorNextFrameDt = sensorData.sequenceTimeOfNextRender - UnscaledSequenceTime;
}
var thisSensorNextFrameDt = sensorData.sequenceTimeNextCapture - UnscaledSequenceTime;
Debug.Assert(thisSensorNextFrameDt > 0f, "Sensor was scheduled to run in the past but got skipped over.");
{
}
}
if (float.IsPositiveInfinity(nextFrameDt))
{
//means no sensor is controlling simulation timing, so we set Time.captureDeltaTime to 0 (default) which means the setting does not do anything
nextFrameDt = 0;
}
WritePendingCaptures();

}
public void SetNextCaptureTimeToNowForSensor(SensorHandle sensorHandle)
{
if (!m_Sensors.ContainsKey(sensorHandle))
return;
var data = m_Sensors[sensorHandle];
data.sequenceTimeOfNextCapture = UnscaledSequenceTime;
m_Sensors[sensorHandle] = data;
}
if (!m_Sensors.ContainsKey(sensorHandle))
return false;
return data.sequenceTimeOfNextCapture - UnscaledSequenceTime < k_SimulationTimingAccuracy;
return (data.sequenceTimeNextCapture - UnscaledSequenceTime) < k_IncludeInFrameThreshold;
}
public void End()

WritePendingCaptures(true, true);
WritePendingCaptures(true, writeCapturesFromThisFrame: true);
if (m_PendingCaptures.Count > 0)
Debug.LogError($"Simulation ended with pending annotations: {string.Join(", ", m_PendingCaptures.Select(c => $"id:{c.SensorHandle.Id} frame:{c.FrameCount}"))}");

return new AsyncAnnotation(ReportAnnotationFile(annotationDefinition, sensorHandle, null), this);
}
public void ReportAsyncAnnotationResult<T>(AsyncAnnotation asyncAnnotation, string filename = null, NativeSlice<T> values = default) where T : struct
{
var jArray = new JArray();
foreach (var value in values)
jArray.Add(new JRaw(DatasetJsonUtility.ToJToken(value)));
ReportAsyncAnnotationResult(asyncAnnotation, filename, jArray);
}
public void ReportAsyncAnnotationResult<T>(AsyncAnnotation asyncAnnotation, string filename = null, IEnumerable<T> values = null)
{
JArray jArray = null;
if (values != null)
{
jArray = new JArray();
foreach (var value in values)
{
if (value != null)
jArray.Add(new JRaw(DatasetJsonUtility.ToJToken(value)));
}
}
ReportAsyncAnnotationResult(asyncAnnotation, filename, jArray);
}
void ReportAsyncAnnotationResult(AsyncAnnotation asyncAnnotation, string filename, JArray jArray)
public void ReportAsyncAnnotationResult<T>(AsyncAnnotation asyncAnnotation, string filename = null, T[] values = null)
var annotationIndex = -1;
annotationIndex = pendingCapture.Annotations.FindIndex(a => a.Item1.Equals(asyncAnnotation.Annotation));
if (annotationIndex != -1)
break;
break;
Debug.Assert(pendingCapture != null && annotationIndex != -1);
Debug.Assert(pendingCapture != null);
var annotationIndex = pendingCapture.Annotations.FindIndex(a => a.Item1.Equals(asyncAnnotation.Annotation));
annotationData.ValuesJson = jArray;
annotationData.ValuesJson = values == null ? null : JArray.FromObject(values);
annotationTuple.Item2 = annotationData;
pendingCapture.Annotations[annotationIndex] = annotationTuple;

18
com.unity.perception/Runtime/GroundTruth/SimulationState_Json.cs


public void WriteReferences()
{
var egoReference = new JObject();
egoReference["version"] = DatasetCapture.SchemaVersion;
egoReference["version"] = SimulationManager.SchemaVersion;
egoReference["egos"] = new JArray(m_Egos.Select(e =>
{
var egoObj = new JObject();

WriteJObjectToFile(egoReference, "egos.json");
var sensorReferenceDoc = new JObject();
sensorReferenceDoc["version"] = DatasetCapture.SchemaVersion;
sensorReferenceDoc["version"] = SimulationManager.SchemaVersion;
sensorReferenceDoc["sensors"] = new JArray(m_Sensors.Select(kvp =>
{
var sensorReference = new JObject();

if (annotationDefinitionsJArray.Count > 0)
{
var annotationDefinitionsJObject = new JObject();
annotationDefinitionsJObject.Add("version", DatasetCapture.SchemaVersion);
annotationDefinitionsJObject.Add("version", SimulationManager.SchemaVersion);
annotationDefinitionsJObject.Add("annotation_definitions", annotationDefinitionsJArray);
WriteJObjectToFile(annotationDefinitionsJObject, "annotation_definitions.json");
}

var metricDefinitionsJObject = new JObject();
metricDefinitionsJObject.Add("version", DatasetCapture.SchemaVersion);
metricDefinitionsJObject.Add("version", SimulationManager.SchemaVersion);
metricDefinitionsJObject.Add("metric_definitions", metricDefinitionsJArray);
WriteJObjectToFile(metricDefinitionsJObject, "metric_definitions.json");
}

capturesJArray.Add(JObjectFromPendingCapture(pendingCapture));
var capturesJObject = new JObject();
capturesJObject.Add("version", DatasetCapture.SchemaVersion);
capturesJObject.Add("version", SimulationManager.SchemaVersion);
capturesJObject.Add("captures", capturesJArray);
simulationState.WriteJObjectToFile(capturesJObject,

PendingCaptures = pendingCapturesToWrite,
SimulationState = this
};
req.Enqueue(r =>
req.Start(r =>
req.Execute(AsyncRequest.ExecutionContext.JobSystem);
}
m_SerializeCapturesSampler.End();

jArray.Add(JObjectFromPendingMetric(pendingMetric));
var metricsJObject = new JObject();
metricsJObject.Add("version", DatasetCapture.SchemaVersion);
metricsJObject.Add("version", SimulationManager.SchemaVersion);
metricsJObject.Add("metrics", jArray);
WriteJObjectToFile(metricsJObject, $"metrics_{metricsFileIndex:000}.json");

MetricFileIndex = m_MetricsFileIndex,
PendingMetrics = pendingMetricsToWrite
};
req.Enqueue(r =>
req.Start(r =>
req.Execute();
}
m_MetricsFileIndex++;

6
com.unity.perception/Tests/Editor/BuildPerceptionPlayer.cs


[Test]
public void BuildPlayerStandaloneWindows64()
{
BuildPlayer(BuildTargetGroup.Standalone, BuildTarget.StandaloneWindows64, m_BuildPath, BuildOptions.None, out _, out m_Summary);
BuildPlayer(BuildTargetGroup.Standalone, BuildTarget.StandaloneWindows64, m_BuildPath, BuildOptions.IncludeTestAssemblies, out _, out m_Summary);
Assert.AreEqual(BuildResult.Succeeded, m_Summary.result, "BuildTarget.StandaloneWindows64 failed to build");
}

{
BuildPlayer(BuildTargetGroup.Standalone, BuildTarget.StandaloneLinux64, m_BuildPath, BuildOptions.None, out _, out m_Summary);
BuildPlayer(BuildTargetGroup.Standalone, BuildTarget.StandaloneLinux64, m_BuildPath, BuildOptions.IncludeTestAssemblies, out _, out m_Summary);
Assert.AreEqual(BuildResult.Succeeded, m_Summary.result, "BuildTarget.StandaloneLinux64 failed to build");
}

public void BuildPlayerOSX()
{
BuildPlayer(BuildTargetGroup.Standalone, BuildTarget.StandaloneOSX, m_BuildPath, BuildOptions.None, out _, out m_Summary);
BuildPlayer(BuildTargetGroup.Standalone, BuildTarget.StandaloneOSX, m_BuildPath, BuildOptions.IncludeTestAssemblies, out _, out m_Summary);
Assert.AreEqual(BuildResult.Succeeded, m_Summary.result, "BuildTarget.StandaloneOSX failed to build");
}

16
com.unity.perception/Tests/Editor/Unity.Perception.Editor.Tests.asmdef


"Unity.Collections",
"Unity.Entities",
"Unity.Simulation.Core",
"Unity.RenderPipelines.HighDefinition.Runtime",
"Unity.RenderPipelines.Universal.Runtime"
"Unity.RenderPipelines.HighDefinition.Runtime"
],
"includePlatforms": [
"Editor"

"overrideReferences": true,
"precompiledReferences": [
"nunit.framework.dll",
"Moq.dll"
"nunit.framework.dll"
],
"autoReferenced": false,
"defineConstraints": [

"name": "com.unity.render-pipelines.high-definition",
"expression": "",
"define": "HDRP_PRESENT"
},
{
"name": "com.unity.render-pipelines.universal",
"expression": "",
"define": "URP_PRESENT"
},
{
"name": "nuget.moq",
"expression": "",
"define": "MOQ_PRESENT"
}
],
"noEngineReferences": false

175
com.unity.perception/Tests/Editor/PerceptionCameraEditorTests.cs


using UnityEngine.Perception.GroundTruth;
using UnityEngine.SceneManagement;
using UnityEngine.TestTools;
#if MOQ_PRESENT
using Moq;
using Moq.Protected;
using UnityEngine.Rendering;
#endif
namespace EditorTests
{

[UnityTest]
public IEnumerator EditorPause_DoesNotLogErrors()
{
ResetScene();
var cameraObject = SetupCamera(p =>
int sceneCount = SceneManager.sceneCount;
for (int i = sceneCount - 1; i >= 0; i--)
var idLabelConfig = ScriptableObject.CreateInstance<IdLabelConfig>();
p.captureRgbImages = true;
p.AddLabeler(new BoundingBox2DLabeler(idLabelConfig));
p.AddLabeler(new RenderedObjectInfoLabeler(idLabelConfig));
});
cameraObject.name = "Camera";
EditorSceneManager.CloseScene(SceneManager.GetSceneAt(i), true);
}
EditorSceneManager.NewScene(NewSceneSetup.EmptyScene);
SetupCamera(ScriptableObject.CreateInstance<LabelingConfiguration>());
yield return new EnterPlayMode();
var expectedFirstFrame = Time.frameCount;
yield return null;

var expectedLastFrame = Time.frameCount;
yield return null;
DatasetCapture.ResetSimulation();
SimulationManager.ResetSimulation();
var capturesPath = Path.Combine(DatasetCapture.OutputDirectory, "captures_000.json");
var capturesPath = Path.Combine(SimulationManager.OutputDirectory, "captures_000.json");
var imagePath = $"{GameObject.Find("Camera").GetComponent<PerceptionCamera>().rgbDirectory}/rgb_{iFrameCount}";
var imagePath = Path.Combine(PerceptionCamera.RgbDirectory, $"rgb_{iFrameCount}").Replace(@"\", @"\\");
StringAssert.Contains(imagePath, capturesJson);
}

static void ResetScene()
{
int sceneCount = SceneManager.sceneCount;
for (int i = sceneCount - 1; i >= 0; i--)
{
EditorSceneManager.CloseScene(SceneManager.GetSceneAt(i), true);
}
EditorSceneManager.NewScene(NewSceneSetup.EmptyScene);
}
#if MOQ_PRESENT
[UnityTest]
public IEnumerator AddLabelerAfterStart_ShouldInitialize()
{
ResetScene();
yield return new EnterPlayMode();
var camera = SetupCamera(null);
var mockLabeler = new Mock<CameraLabeler>();
yield return null;
camera.GetComponent<PerceptionCamera>().AddLabeler(mockLabeler.Object);
yield return null;
mockLabeler.Protected().Verify("Setup", Times.Once());
yield return new ExitPlayMode();
}
[UnityTest]
public IEnumerator Labeler_ShouldRunCallbacksInFirstFrame()
{
ResetScene();
yield return new EnterPlayMode();
var mockLabeler = new Mock<CameraLabeler>();
var camera = SetupCamera(null);
camera.GetComponent<PerceptionCamera>().AddLabeler(mockLabeler.Object);
yield return null;
mockLabeler.Protected().Verify("Setup", Times.Once());
mockLabeler.Protected().Verify("OnUpdate", Times.Once());
mockLabeler.Protected().Verify("OnBeginRendering", Times.Once(), ItExpr.IsAny<ScriptableRenderContext>());
mockLabeler.Protected().Verify("OnEndRendering", Times.Once(), ItExpr.IsAny<ScriptableRenderContext>());
yield return new ExitPlayMode();
}
[UnityTest]
public IEnumerator Labeler_ShouldNotRunCallbacksWhenCameraDisabled()
{
ResetScene();
yield return new EnterPlayMode();
var mockLabeler = new Mock<CameraLabeler>();
var camera = SetupCamera(null);
var perceptionCamera = camera.GetComponent<PerceptionCamera>();
perceptionCamera.AddLabeler(mockLabeler.Object);
yield return null;
perceptionCamera.enabled = false;
yield return null;
mockLabeler.Protected().Verify("Setup", Times.Once());
mockLabeler.Protected().Verify("OnUpdate", Times.Once());
mockLabeler.Protected().Verify("OnBeginRendering", Times.Once(), ItExpr.IsAny<ScriptableRenderContext>());
mockLabeler.Protected().Verify("OnEndRendering", Times.Once(), ItExpr.IsAny<ScriptableRenderContext>());
yield return new ExitPlayMode();
}
[UnityTest]
public IEnumerator AddAndRemoveLabelerInSameFrame_ShouldDoNothing()
{
ResetScene();
yield return new EnterPlayMode();
var mockLabeler = new Mock<CameraLabeler>();
var cameraObject = SetupCamera(null);
var perceptionCamera = cameraObject.GetComponent<PerceptionCamera>();
perceptionCamera.AddLabeler(mockLabeler.Object);
perceptionCamera.RemoveLabeler(mockLabeler.Object);
yield return null;
mockLabeler.Protected().Verify("Setup", Times.Never());
mockLabeler.Protected().Verify("OnUpdate", Times.Never());
mockLabeler.Protected().Verify("OnBeginRendering", Times.Never(), It.IsAny<ScriptableRenderContext>());
mockLabeler.Protected().Verify("OnEndRendering", Times.Never(), It.IsAny<ScriptableRenderContext>());
mockLabeler.Protected().Verify("Cleanup", Times.Never());
yield return new ExitPlayMode();
}
[UnityTest]
public IEnumerator RemoveLabeler_ShouldCallCleanup()
{
ResetScene();
yield return new EnterPlayMode();
var mockLabeler = new Mock<CameraLabeler>();
var cameraObject = SetupCamera(null);
var perceptionCamera = cameraObject.GetComponent<PerceptionCamera>();
perceptionCamera.AddLabeler(mockLabeler.Object);
yield return null;
Assert.IsTrue(perceptionCamera.RemoveLabeler(mockLabeler.Object));
mockLabeler.Protected().Verify("Cleanup", Times.Once());
yield return new ExitPlayMode();
}
[UnityTest]
public IEnumerator RemoveLabeler_OnLabelerNotAdded_ShouldNotCallCleanup()
{
ResetScene();
yield return new EnterPlayMode();
var mockLabeler = new Mock<CameraLabeler>();
var cameraObject = SetupCamera(null);
var perceptionCamera = cameraObject.GetComponent<PerceptionCamera>();
yield return null;
Assert.IsFalse(perceptionCamera.RemoveLabeler(mockLabeler.Object));
mockLabeler.Protected().Verify("Cleanup", Times.Never());
yield return new ExitPlayMode();
}
[UnityTest]
public IEnumerator DestroyPerceptionCameraObject_ShouldCallCleanup()
{
ResetScene();
yield return new EnterPlayMode();
var mockLabeler = new Mock<CameraLabeler>();
var cameraObject = SetupCamera(null);
var perceptionCamera = cameraObject.GetComponent<PerceptionCamera>();
perceptionCamera.AddLabeler(mockLabeler.Object);
yield return null;
UnityEngine.Object.DestroyImmediate(cameraObject);
mockLabeler.Protected().Verify("Cleanup", Times.Once());
yield return new ExitPlayMode();
}
[UnityTest]
public IEnumerator SetupThrows_ShouldDisable()
{
ResetScene();
yield return new EnterPlayMode();
var mockLabeler = new Mock<CameraLabeler>();
mockLabeler.Protected().Setup("Setup").Throws<InvalidOperationException>();
var labeler = mockLabeler.Object;
var camera = SetupCamera(null);
camera.GetComponent<PerceptionCamera>().AddLabeler(labeler);
LogAssert.Expect(LogType.Exception, "InvalidOperationException: Operation is not valid due to the current state of the object.");
yield return null;
mockLabeler.Protected().Verify("Setup", Times.Once());
mockLabeler.Protected().Verify("OnUpdate", Times.Never());
mockLabeler.Protected().Verify("OnBeginRendering", Times.Never(), It.IsAny<ScriptableRenderContext>());
mockLabeler.Protected().Verify("OnEndRendering", Times.Never(), It.IsAny<ScriptableRenderContext>());
Assert.IsFalse(labeler.enabled);
yield return new ExitPlayMode();
}
#endif
static GameObject SetupCamera(Action<PerceptionCamera> initPerceptionCameraCallback)
static void SetupCamera(LabelingConfiguration labelingConfiguration)
{
var cameraObject = new GameObject();
cameraObject.SetActive(false);

#endif
var perceptionCamera = cameraObject.AddComponent<PerceptionCamera>();
initPerceptionCameraCallback?.Invoke(perceptionCamera);
perceptionCamera.LabelingConfiguration = labelingConfiguration;
perceptionCamera.captureRgbImages = true;
perceptionCamera.produceBoundingBoxAnnotations = true;
perceptionCamera.produceObjectCountAnnotations = true;
return cameraObject;
}
}
}

46
com.unity.perception/Tests/Editor/SimulationManagerEditorTests.cs


using System;
using System.Collections;
using System.IO;
using UnityEditor;
using UnityEngine;
using UnityEngine.TestTools;
namespace GroundTruthTests
namespace GroundTruth
[TestFixture]
[Serializable]
public class DatasetCaptureEditorTests
public class SimulationManagerEditorTests
[SerializeField]
string expectedDatasetPath;
Assert.Throws<InvalidOperationException>(() => DatasetCapture.RegisterEgo(""));
Assert.Throws<InvalidOperationException>(() => SimulationManager.RegisterEgo(""));
Assert.Throws<InvalidOperationException>(() => DatasetCapture.RegisterAnnotationDefinition(""));
Assert.Throws<InvalidOperationException>(() => SimulationManager.RegisterAnnotationDefinition(""));
Assert.Throws<InvalidOperationException>(() => DatasetCapture.RegisterMetricDefinition(""));
}
[UnityTest]
public IEnumerator SimpleData_GeneratesFullDataset_OnExitPlaymode()
{
yield return new EnterPlayMode();
DatasetCapture.ResetSimulation();
var ego = DatasetCapture.RegisterEgo("ego");
var sensor = DatasetCapture.RegisterSensor(ego, "camera", "", 0, CaptureTriggerMode.Scheduled, 0.1f, 0);
sensor.ReportCapture("file.txt", new SensorSpatialData());
expectedDatasetPath = DatasetCapture.OutputDirectory;
yield return new ExitPlayMode();
FileAssert.Exists(Path.Combine(expectedDatasetPath, "sensors.json"));
}
[UnityTest]
public IEnumerator StepFunction_OverridesSimulationDeltaTime_AndRunsSensors()
{
yield return new EnterPlayMode();
DatasetCapture.ResetSimulation();
var ego = DatasetCapture.RegisterEgo("ego");
var sensor = DatasetCapture.RegisterSensor(ego, "camera", "", 0, CaptureTriggerMode.Scheduled, 2f, 0);
yield return null;
var timeBeforeStep = Time.time;
EditorApplication.isPaused = true;
EditorApplication.Step();
Assert.True(Time.time - timeBeforeStep < .3f);
Assert.True(sensor.ShouldCaptureThisFrame);
yield return new ExitPlayMode();
Assert.Throws<InvalidOperationException>(() => SimulationManager.RegisterMetricDefinition(""));
}
}
}

2
com.unity.perception/Tests/Runtime/Unity.Perception.Runtime.Tests.asmdef


}
],
"noEngineReferences": false
}
}

11
com.unity.perception/Tests/Runtime/GroundTruthTests/DatasetJsonUtilityTests.cs


using System.IO;
using System.Text;
using Newtonsoft.Json;
using NUnit.Framework;
using Unity.Mathematics;
using UnityEngine;

public void Vector3ToJToken_ReturnsArrayFormat(float x, float y, float z, string jsonExpected)
{
var jsonActual = DatasetJsonUtility.ToJToken(new Vector3(x, y, z));
Assert.AreEqual(TestHelper.NormalizeJson(jsonExpected), TestHelper.NormalizeJson(jsonActual.ToString()));
Assert.AreEqual(jsonExpected, jsonActual.ToString());
}
[Test]

public void QuaternionToJToken_ReturnsArrayFormat(float x, float y, float z, float w, string jsonExpected)
{
var jsonActual = DatasetJsonUtility.ToJToken(new Quaternion(x, y, z, w)).ToString();
Assert.AreEqual(TestHelper.NormalizeJson(jsonExpected), TestHelper.NormalizeJson(jsonActual));
Assert.AreEqual(jsonExpected, jsonActual);
}
[Test]

public void Float3x3ToJToken_ReturnsArrayFormat(float m00, float m01, float m02, float m10, float m11, float m12, float m20, float m21, float m22, string jsonExpected)
{
var jsonActual = DatasetJsonUtility.ToJToken(new float3x3(m00, m01, m02, m10, m11, m12, m20, m21, m22)).ToString();
Assert.AreEqual(TestHelper.NormalizeJson(jsonExpected), TestHelper.NormalizeJson(jsonActual));
Assert.AreEqual(jsonExpected, jsonActual);
}
[TestCase(1, "1")]

public void Primitive_ReturnsValue(object o, string jsonExpected)
{
var jsonActual = DatasetJsonUtility.ToJToken(o).ToString();
Assert.AreEqual(TestHelper.NormalizeJson(jsonExpected), TestHelper.NormalizeJson(jsonActual));
Assert.AreEqual(jsonExpected, jsonActual);
}
}
}

36
com.unity.perception/Tests/Runtime/GroundTruthTests/TestHelper.cs


using System;
using System.Diagnostics;
using System.Text.RegularExpressions;
using Unity.Collections;
using UnityEngine.Experimental.Rendering;
using UnityEngine.Perception.GroundTruth;
namespace GroundTruthTests

return planeObject;
}
public static GameObject CreateLabeledCube(float scale = 10, string label = "label", float x = 0, float y = 0, float z = 0, float roll = 0, float pitch = 0, float yaw = 0)
{
var cube = GameObject.CreatePrimitive(PrimitiveType.Cube);
cube.transform.SetPositionAndRotation(new Vector3(x, y, z), Quaternion.Euler(pitch, yaw, roll));
cube.transform.localScale = new Vector3(scale, scale, scale);
var labeling = cube.AddComponent<Labeling>();
labeling.labels.Add(label);
return cube;
}
public static void ReadRenderTextureRawData<T>(RenderTexture renderTexture, Action<NativeArray<T>> callback) where T : struct
{
RenderTexture.active = renderTexture;
var cpuTexture = new Texture2D(renderTexture.width, renderTexture.height, renderTexture.graphicsFormat, TextureCreationFlags.None);
cpuTexture.ReadPixels(new Rect(
Vector2.zero,
new Vector2(renderTexture.width, renderTexture.height)),
0, 0);
RenderTexture.active = null;
var data = cpuTexture.GetRawTextureData<T>();
callback(data);
}
#if UNITY_EDITOR
public static void LoadAndStartRenderDocCapture(out UnityEditor.EditorWindow gameView)
{

}
#endif
public static string NormalizeJson(string json, bool normalizeFormatting = false)
{
if (normalizeFormatting)
json = Regex.Replace(json, "^\\s*", "", RegexOptions.Multiline);
return json.Replace("\r\n", "\n");
}
}
}

40
com.unity.perception/Tests/Runtime/GroundTruthTests/GroundTruthTestBase.cs


using NUnit.Framework;
using UnityEngine;
using UnityEngine.Perception.GroundTruth;
using UnityEngine.SceneManagement;
using Object = UnityEngine.Object;
namespace GroundTruthTests

List<Object> m_ObjectsToDestroy = new List<Object>();
List<string> m_ScenesToUnload = new List<string>();
List<GameObject> m_ObjectsToDestroy = new List<GameObject>();
[TearDown]
public void TearDown()
{

m_ObjectsToDestroy.Clear();
foreach (var s in m_ScenesToUnload)
SceneManager.UnloadSceneAsync(s);
m_ScenesToUnload.Clear();
DatasetCapture.ResetSimulation();
SimulationManager.ResetSimulation();
if (Directory.Exists(DatasetCapture.OutputDirectory))
Directory.Delete(DatasetCapture.OutputDirectory, true);
if (Directory.Exists(SimulationManager.OutputDirectory))
Directory.Delete(SimulationManager.OutputDirectory, true);
public void AddTestObjectForCleanup(Object @object) => m_ObjectsToDestroy.Add(@object);
public void AddTestObjectForCleanup(GameObject @object) => m_ObjectsToDestroy.Add(@object);
public void AddSceneForCleanup(string sceneName) => m_ScenesToUnload.Add(sceneName);
public void DestroyTestObject(Object @object)
public void DestroyTestObject(GameObject @object)
}
public GameObject SetupCamera(Action<PerceptionCamera> initPerceptionCamera, bool activate = true)
{
var cameraObject = new GameObject();
cameraObject.SetActive(false);
var camera = cameraObject.AddComponent<Camera>();
camera.orthographic = true;
camera.orthographicSize = 1;
var perceptionCamera = cameraObject.AddComponent<PerceptionCamera>();
perceptionCamera.captureRgbImages = false;
initPerceptionCamera?.Invoke(perceptionCamera);
if (activate)
cameraObject.SetActive(true);
AddTestObjectForCleanup(cameraObject);
return cameraObject;
}
}
}

122
com.unity.perception/Tests/Runtime/GroundTruthTests/PerceptionCameraIntegrationTests.cs


using UnityEngine.Perception.GroundTruth;
using UnityEngine.TestTools;
#if MOQ_PRESENT
using Moq;
#endif
namespace GroundTruthTests
{
#if HDRP_PRESENT

//give the screen a chance to resize
yield return null;
var jsonExpected = $@"[
{{
""label_id"": 100,
var jsonExpected = $@" {{
""label_id"": 0,
""label_name"": ""label"",
""instance_id"": 1,
""x"": 0.0,

}}
]";
}}";
SetupCamera(pc =>
SetupCamera(labelingConfiguration, pc =>
pc.AddLabeler(new BoundingBox2DLabeler(labelingConfiguration));
pc.produceBoundingBoxAnnotations = true;
});
var plane = TestHelper.CreateLabeledPlane();

plane.transform.localPosition = new Vector3(0, 0, 10);
var plane2 = TestHelper.CreateLabeledPlane(label: "nonmatching");
AddTestObjectForCleanup(plane2);
//place a smaller plane in front to test non-matching objects
plane2.transform.localScale = new Vector3(.1f, -1f, .1f);
plane2.transform.localPosition = new Vector3(0, 0, 5);
DatasetCapture.ResetSimulation();
SimulationManager.ResetSimulation();
var capturesPath = Path.Combine(DatasetCapture.OutputDirectory, "captures_000.json");
var capturesPath = Path.Combine(SimulationManager.OutputDirectory, "captures_000.json");
StringAssert.Contains(TestHelper.NormalizeJson(jsonExpected, true), TestHelper.NormalizeJson(capturesJson, true));
StringAssert.Contains(jsonExpected, capturesJson);
public IEnumerator EnableSemanticSegmentation_GeneratesCorrectDataset([Values(true, false)] bool enabled)
public IEnumerator EnableSemanticSegmentation_GeneratesCorrectDataset()
SemanticSegmentationLabeler semanticSegmentationLabeler = null;
SetupCamera(pc =>
{
semanticSegmentationLabeler = new SemanticSegmentationLabeler(CreateSemanticSegmentationLabelConfig());
pc.AddLabeler(semanticSegmentationLabeler);
}, enabled);
string expectedImageFilename = $"segmentation_{Time.frameCount}.png";
this.AddTestObjectForCleanup(TestHelper.CreateLabeledPlane());
yield return null;
DatasetCapture.ResetSimulation();
if (enabled)
{
var capturesPath = Path.Combine(DatasetCapture.OutputDirectory, "captures_000.json");
var capturesJson = File.ReadAllText(capturesPath);
var imagePath = $"{semanticSegmentationLabeler.semanticSegmentationDirectory}/{expectedImageFilename}";
StringAssert.Contains(imagePath, capturesJson);
}
else
{
DirectoryAssert.DoesNotExist(DatasetCapture.OutputDirectory);
}
}
[UnityTest]
public IEnumerator Disabled_GeneratesCorrectDataset()
{
SemanticSegmentationLabeler semanticSegmentationLabeler = null;
SetupCamera(pc =>
{
semanticSegmentationLabeler = new SemanticSegmentationLabeler(CreateSemanticSegmentationLabelConfig());
pc.AddLabeler(semanticSegmentationLabeler);
});
var labelingConfiguration = CreateLabelingConfiguration();
SetupCamera(labelingConfiguration, pc => pc.produceSegmentationImages = true);
DatasetCapture.ResetSimulation();
SimulationManager.ResetSimulation();
var capturesPath = Path.Combine(DatasetCapture.OutputDirectory, "captures_000.json");
var capturesPath = Path.Combine(SimulationManager.OutputDirectory, "captures_000.json");
var imagePath = $"{semanticSegmentationLabeler.semanticSegmentationDirectory}/{expectedImageFilename}";
var imagePath = Path.Combine("SemanticSegmentation", expectedImageFilename).Replace(@"\", @"\\");
static IdLabelConfig CreateLabelingConfiguration()
static LabelingConfiguration CreateLabelingConfiguration()
var labelConfig = ScriptableObject.CreateInstance<IdLabelConfig>();
var labelingConfiguration = ScriptableObject.CreateInstance<LabelingConfiguration>();
labelConfig.Init(new List<IdLabelEntry>
labelingConfiguration.LabelEntries = new List<LabelEntry>
new IdLabelEntry
new LabelEntry
id = 100,
label = label
id = 1,
label = label,
value = 500
});
return labelConfig;
};
return labelingConfiguration;
static SemanticSegmentationLabelConfig CreateSemanticSegmentationLabelConfig()
GameObject SetupCamera(LabelingConfiguration labelingConfiguration, Action<PerceptionCamera> initPerceptionCamera)
var label = "label";
var labelingConfiguration = ScriptableObject.CreateInstance<SemanticSegmentationLabelConfig>();
var cameraObject = new GameObject();
cameraObject.SetActive(false);
var camera = cameraObject.AddComponent<Camera>();
camera.orthographic = true;
camera.orthographicSize = 1;
labelingConfiguration.Init(new List<SemanticSegmentationLabelEntry>
{
new SemanticSegmentationLabelEntry()
{
label = label,
color = Color.blue
}
});
return labelingConfiguration;
var perceptionCamera = cameraObject.AddComponent<PerceptionCamera>();
perceptionCamera.produceSegmentationImages = false;
perceptionCamera.produceRenderedObjectInfoMetric = false;
perceptionCamera.produceBoundingBoxAnnotations = false;
perceptionCamera.produceObjectCountAnnotations = false;
perceptionCamera.captureRgbImages = false;
perceptionCamera.LabelingConfiguration = labelingConfiguration;
initPerceptionCamera(perceptionCamera);
cameraObject.SetActive(true);
AddTestObjectForCleanup(cameraObject);
return cameraObject;
}
}
}

520
com.unity.perception/Tests/Runtime/GroundTruthTests/SegmentationGroundTruthTests.cs


using System;
using System.Collections;
using System.Collections.Generic;
using System.IO;
using Unity.Simulation;
using UnityEngine.Rendering;
using UnityEngine.Rendering.HighDefinition;
#elif URP_PRESENT
using UnityEngine.Rendering.Universal;
using UnityEngine.Rendering.HighDefinition;
using UnityEngine.Experimental.Rendering;
using UnityEngine.TestTools;
using Object = UnityEngine.Object;

{
public RenderTexture source;
public Camera cameraSource;
RenderTextureReader<Color32> m_Reader;
RenderTextureReader<uint> m_Reader;
public event Action<int, NativeArray<Color32>> SegmentationImageReceived;
public event Action<int, NativeArray<uint>> SegmentationImageReceived;
m_Reader = new RenderTextureReader<Color32>(source);
RenderPipelineManager.endCameraRendering += (context, camera) =>
m_Reader.Capture(context,
(frameCount, data, renderTexture) => ImageReadCallback(frameCount, data, renderTexture));
m_Reader = new RenderTextureReader<uint>(source, cameraSource, ImageReadCallback);
void ImageReadCallback(int frameCount, NativeArray<Color32> data, RenderTexture renderTexture)
void ImageReadCallback(int frameCount, NativeArray<uint> data, RenderTexture renderTexture)
{
if (SegmentationImageReceived != null)
SegmentationImageReceived(frameCount, data);

}
}
public enum RendererType
{
MeshRenderer,
SkinnedMeshRenderer,
Terrain
}
static readonly Color32 k_SemanticPixelValue = new Color32(10, 20, 30, Byte.MaxValue);
private static readonly Color32 k_InstanceSegmentationPixelValue = new Color32(255,0,0, 255);
private static readonly Color32 k_SkyValue = new Color32(10, 20, 30, 40);
public enum SegmentationKind
{
Instance,
Semantic
}
public IEnumerator SegmentationPassTestsWithEnumeratorPasses(
[Values(RendererType.MeshRenderer, RendererType.SkinnedMeshRenderer, RendererType.Terrain)] RendererType rendererType,
[Values(SegmentationKind.Instance, SegmentationKind.Semantic)] SegmentationKind segmentationKind)
public IEnumerator SegmentationPassTestsWithEnumeratorPasses()
GameObject cameraObject = null;
object expectedPixelValue;
void OnSegmentationImageReceived<T>(int frameCount, NativeArray<T> data, RenderTexture tex) where T : struct
Action<int, NativeArray<uint>> onSegmentationImageReceived = (frameCount, data) =>
if (frameStart == null || frameStart > frameCount) return;
if (frameStart == null || frameStart > frameCount)
return;
CollectionAssert.AreEqual(Enumerable.Repeat(expectedPixelValue, data.Length), data.ToArray());
}
switch (segmentationKind)
{
case SegmentationKind.Instance:
expectedPixelValue = k_InstanceSegmentationPixelValue;
cameraObject = SetupCameraInstanceSegmentation(OnSegmentationImageReceived);
break;
case SegmentationKind.Semantic:
expectedPixelValue = k_SemanticPixelValue;
cameraObject = SetupCameraSemanticSegmentation(a => OnSegmentationImageReceived(a.frameCount, a.data, a.sourceTexture), false);
break;
}
//Put a plane in front of the camera
GameObject planeObject;
if (rendererType == RendererType.Terrain)
{
var terrainData = new TerrainData();
AddTestObjectForCleanup(terrainData);
//look down because terrains cannot be rotated
cameraObject.transform.rotation = Quaternion.LookRotation(Vector3.down, Vector3.forward);
planeObject = Terrain.CreateTerrainGameObject(terrainData);
planeObject.transform.SetPositionAndRotation(new Vector3(-10, -10, -10), Quaternion.identity);
}
else
{
planeObject = GameObject.CreatePrimitive(PrimitiveType.Plane);
if (rendererType == RendererType.SkinnedMeshRenderer)
{
var oldObject = planeObject;
planeObject = new GameObject();
var meshFilter = oldObject.GetComponent<MeshFilter>();
var meshRenderer = oldObject.GetComponent<MeshRenderer>();
var skinnedMeshRenderer = planeObject.AddComponent<SkinnedMeshRenderer>();
skinnedMeshRenderer.sharedMesh = meshFilter.sharedMesh;
skinnedMeshRenderer.material = meshRenderer.material;
Object.DestroyImmediate(oldObject);
}
planeObject.transform.SetPositionAndRotation(new Vector3(0, 0, 10), Quaternion.Euler(90, 0, 0));
planeObject.transform.localScale = new Vector3(10, -1, 10);
}
var labeling = planeObject.AddComponent<Labeling>();
labeling.labels.Add("label");
frameStart = Time.frameCount;
AddTestObjectForCleanup(planeObject);
yield return null;
yield return null;
yield return null;
yield return null;
//destroy the object to force all pending segmented image readbacks to finish and events to be fired.
DestroyTestObject(cameraObject);
DestroyTestObject(planeObject);
Assert.AreEqual(4, timesSegmentationImageReceived);
}
// Lens Distortion is only applicable in URP or HDRP pipelines
// As such, this test will always fail if URP or HDRP are not present (and also not really compile either)
#if HDRP_PRESENT || URP_PRESENT
[UnityTest]
public IEnumerator SemanticSegmentationPass_WithLensDistortion()
{
GameObject cameraObject = null;
PerceptionCamera perceptionCamera;
var fLensDistortionEnabled = false;
var fDone = false;
var frames = 0;
#if false
var dataBBox = new Color32[]
{
Color.blue, Color.blue,
Color.blue, Color.blue
CollectionAssert.AreEqual(Enumerable.Repeat(1, data.Length), data);
#endif
var boundingBoxWithoutLensDistortion = new Rect();
var boundingBoxWithLensDistortion = new Rect();
var cameraObject = SetupCamera(onSegmentationImageReceived);
//
// // Arbitrary wait for 5 frames for shaders to load. Workaround for issue with Shader.WarmupAllShaders()
// for (int i=0 ; i<5 ; ++i)
// yield return new WaitForSeconds(1);
void OnSegmentationImageReceived(int frameCount, NativeArray<Color32> data, RenderTexture tex)
{
frames++;
frameStart = Time.frameCount;
if (frames < 10)
return;
// Calculate the bounding box
if (fLensDistortionEnabled == false)
{
fLensDistortionEnabled = true;
var renderedObjectInfoGenerator = new RenderedObjectInfoGenerator();
renderedObjectInfoGenerator.Compute(data, tex.width, BoundingBoxOrigin.TopLeft, out var boundingBoxes, Allocator.Temp);
boundingBoxWithoutLensDistortion = boundingBoxes[0].boundingBox;
// Add lens distortion
perceptionCamera.OverrideLensDistortionIntensity(0.715f);
frames = 0;
}
else
{
var renderedObjectInfoGenerator = new RenderedObjectInfoGenerator();
renderedObjectInfoGenerator.Compute(data, tex.width, BoundingBoxOrigin.TopLeft, out var boundingBoxes, Allocator.Temp);
boundingBoxWithLensDistortion = boundingBoxes[0].boundingBox;
Assert.AreNotEqual(boundingBoxWithoutLensDistortion, boundingBoxWithLensDistortion);
Assert.Greater(boundingBoxWithLensDistortion.width, boundingBoxWithoutLensDistortion.width);
fDone = true;
}
}
cameraObject = SetupCamera(out perceptionCamera, false);
perceptionCamera.InstanceSegmentationImageReadback += OnSegmentationImageReceived;
cameraObject.SetActive(true);
// Put a plane in front of the camera
//Put a plane in front of the camera
planeObject.transform.localScale = new Vector3(0.1f, -1, 0.1f);
var labeling = planeObject.AddComponent<Labeling>();
labeling.labels.Add("label");
planeObject.transform.localScale = new Vector3(10, -1, 10);
planeObject.AddComponent<Labeling>();
perceptionCamera.OverrideLensDistortionIntensity(0.5f);
while (fDone != true)
{
yield return null;
}
// Destroy the object to force all pending segmented image readbacks to finish and events to be fired.
DestroyTestObject(cameraObject);
DestroyTestObject(planeObject);
}
#endif // ! HDRP_PRESENT || URP_PRESENT
[UnityTest]
public IEnumerator SemanticSegmentationPass_WithLabeledButNotMatchingObject_ProducesBlack()
{
int timesSegmentationImageReceived = 0;
var expectedPixelValue = new Color32(0, 0, 0, 255);
void OnSegmentationImageReceived(NativeArray<Color32> data)
{
timesSegmentationImageReceived++;
CollectionAssert.AreEqual(Enumerable.Repeat(expectedPixelValue, data.Length), data.ToArray());
}
var cameraObject = SetupCameraSemanticSegmentation(a => OnSegmentationImageReceived(a.data), false, k_SkyValue);
AddTestObjectForCleanup(TestHelper.CreateLabeledPlane(label: "non-matching"));
//destroy the object to force all pending segmented image readbacks to finish and events to be fired.
DestroyTestObject(cameraObject);
Assert.AreEqual(1, timesSegmentationImageReceived);
}
[UnityTest]
public IEnumerator SemanticSegmentationPass_WithMatchingButDisabledLabel_ProducesBlack()
{
int timesSegmentationImageReceived = 0;
var expectedPixelValue = new Color32(0, 0, 0, 255);
void OnSegmentationImageReceived(NativeArray<Color32> data)
{
timesSegmentationImageReceived++;
CollectionAssert.AreEqual(Enumerable.Repeat(expectedPixelValue, data.Length), data.ToArray());
}
var cameraObject = SetupCameraSemanticSegmentation(a => OnSegmentationImageReceived(a.data), false, k_SkyValue);
var gameObject = TestHelper.CreateLabeledPlane();
gameObject.GetComponent<Labeling>().enabled = false;
AddTestObjectForCleanup(gameObject);
//destroy the object to force all pending segmented image readbacks to finish and events to be fired.
DestroyTestObject(cameraObject);
Assert.AreEqual(1, timesSegmentationImageReceived);
}
[UnityTest]
public IEnumerator InstanceSegmentationPass_WithMatchingButDisabledLabel_ProducesBlack()
{
int timesSegmentationImageReceived = 0;
var expectedPixelValue = new Color32(0, 0, 0, 255);
void OnSegmentationImageReceived(NativeArray<Color32> data)
{
CollectionAssert.AreEqual(Enumerable.Repeat(expectedPixelValue, data.Length), data);
timesSegmentationImageReceived++;
}
var cameraObject = SetupCameraInstanceSegmentation((frame, data, renderTexture) => OnSegmentationImageReceived(data));
var gameObject = TestHelper.CreateLabeledPlane();
gameObject.GetComponent<Labeling>().enabled = false;
AddTestObjectForCleanup(gameObject);
//destroy the object to force all pending segmented image readbacks to finish and events to be fired.
DestroyTestObject(cameraObject);
Assert.AreEqual(1, timesSegmentationImageReceived);
}
[UnityTest]
public IEnumerator SemanticSegmentationPass_WithEmptyFrame_ProducesSky([Values(false, true)] bool showVisualizations)
{
int timesSegmentationImageReceived = 0;
var expectedPixelValue = k_SkyValue;
void OnSegmentationImageReceived(NativeArray<Color32> data)
{
timesSegmentationImageReceived++;
CollectionAssert.AreEqual(Enumerable.Repeat(expectedPixelValue, data.Length), data.ToArray());
}
var cameraObject = SetupCameraSemanticSegmentation(a => OnSegmentationImageReceived(a.data), showVisualizations, expectedPixelValue);
//TestHelper.LoadAndStartRenderDocCapture(out var gameView);
yield return null;
var segLabeler = (SemanticSegmentationLabeler)cameraObject.GetComponent<PerceptionCamera>().labelers[0];
var request = AsyncGPUReadback.Request(segLabeler.targetTexture, callback: r =>
{
CollectionAssert.AreEqual(Enumerable.Repeat(expectedPixelValue, segLabeler.targetTexture.width * segLabeler.targetTexture.height), r.GetData<Color32>());
});
AsyncGPUReadback.WaitAllRequests();
//RenderDoc.EndCaptureRenderDoc(gameView);
//request.WaitForCompletion();
Assert.IsTrue(request.done);
Assert.IsFalse(request.hasError);
//destroy the object to force all pending segmented image readbacks to finish and events to be fired.
DestroyTestObject(cameraObject);
Assert.AreEqual(1, timesSegmentationImageReceived);
}
[UnityTest]
public IEnumerator SemanticSegmentationPass_WithNoObjects_ProducesSky()
{
int timesSegmentationImageReceived = 0;
var expectedPixelValue = k_SkyValue;
void OnSegmentationImageReceived(NativeArray<Color32> data)
{
timesSegmentationImageReceived++;
CollectionAssert.AreEqual(Enumerable.Repeat(expectedPixelValue, data.Length), data.ToArray());
}
var cameraObject = SetupCameraSemanticSegmentation(
a => OnSegmentationImageReceived(a.data), false, expectedPixelValue);
Assert.AreEqual(1, timesSegmentationImageReceived);
}
[UnityTest]
public IEnumerator SemanticSegmentationPass_WithTextureOverride_RendersToOverride([Values(true, false)] bool showVisualizations)
{
var expectedPixelValue = new Color32(0, 0, 255, 255);
var targetTextureOverride = new RenderTexture(2, 2, 1, RenderTextureFormat.R8);
var cameraObject = SetupCamera(out var perceptionCamera, showVisualizations);
var labelConfig = ScriptableObject.CreateInstance<SemanticSegmentationLabelConfig>();
labelConfig.Init(new List<SemanticSegmentationLabelEntry>()
{
new SemanticSegmentationLabelEntry()
{
label = "label",
color = expectedPixelValue
}
});
var semanticSegmentationLabeler = new SemanticSegmentationLabeler(labelConfig, targetTextureOverride);
perceptionCamera.AddLabeler(semanticSegmentationLabeler);
cameraObject.SetActive(true);
AddTestObjectForCleanup(cameraObject);
AddTestObjectForCleanup(TestHelper.CreateLabeledPlane());
yield return null;
TestHelper.ReadRenderTextureRawData<Color32>(targetTextureOverride, data =>
{
CollectionAssert.AreEqual(Enumerable.Repeat(expectedPixelValue, targetTextureOverride.width * targetTextureOverride.height), data);
});
}
[UnityTest]
public IEnumerator SemanticSegmentationPass_WithMultiMaterial_ProducesCorrectValues([Values(true, false)] bool showVisualizations)
{
int timesSegmentationImageReceived = 0;
var expectedPixelValue = k_SemanticPixelValue;
void OnSegmentationImageReceived(NativeArray<Color32> data)
{
timesSegmentationImageReceived++;
CollectionAssert.AreEqual(Enumerable.Repeat(expectedPixelValue, data.Length), data);
}
var cameraObject = SetupCameraSemanticSegmentation(a => OnSegmentationImageReceived(a.data), false);
var plane = TestHelper.CreateLabeledPlane();
var meshRenderer = plane.GetComponent<MeshRenderer>();
var baseMaterial = meshRenderer.material;
meshRenderer.materials = new[] { baseMaterial, baseMaterial };
MaterialPropertyBlock mpb = new MaterialPropertyBlock();
mpb.SetFloat("float", 1f);
for (int i = 0; i < 2; i++)
{
meshRenderer.SetPropertyBlock(mpb, i);
}
AddTestObjectForCleanup(plane);
yield return null;
//destroy the object to force all pending segmented image readbacks to finish and events to be fired.
DestroyTestObject(cameraObject);
Assert.AreEqual(1, timesSegmentationImageReceived);
}
[UnityTest]
public IEnumerator SemanticSegmentationPass_WithChangingLabeling_ProducesCorrectValues([Values(true, false)] bool showVisualizations)
{
int timesSegmentationImageReceived = 0;
var expectedPixelValue = k_SemanticPixelValue;
void OnSegmentationImageReceived(NativeArray<Color32> data)
{
if (timesSegmentationImageReceived == 1)
{
CollectionAssert.AreEqual(Enumerable.Repeat(expectedPixelValue, data.Length), data);
}
timesSegmentationImageReceived++;
}
var cameraObject = SetupCameraSemanticSegmentation(a => OnSegmentationImageReceived(a.data), false);
var plane = TestHelper.CreateLabeledPlane(label: "non-matching");
AddTestObjectForCleanup(plane);
yield return null;
var labeling = plane.GetComponent<Labeling>();
labeling.labels = new List<string> { "label" };
labeling.RefreshLabeling();
yield return null;
//destroy the object to force all pending segmented image readbacks to finish and events to be fired.
DestroyTestObject(cameraObject);
Assert.AreEqual(2, timesSegmentationImageReceived);
}
DestroyTestObject(planeObject);
[UnityTest]
public IEnumerator InstanceSegmentationPass_WithSeparateDisabledPerceptionCamera_ProducesCorrectValues()
{
int timesSegmentationImageReceived = 0;
void OnSegmentationImageReceived(NativeArray<Color32> data)
{
CollectionAssert.AreEqual(Enumerable.Repeat(k_InstanceSegmentationPixelValue, data.Length), data);
timesSegmentationImageReceived++;
}
var cameraObject = SetupCameraInstanceSegmentation((frame, data, renderTexture) => OnSegmentationImageReceived(data));
var cameraObject2 = SetupCameraInstanceSegmentation(null);
cameraObject2.SetActive(false);
var plane = TestHelper.CreateLabeledPlane();
AddTestObjectForCleanup(plane);
yield return null;
//destroy the object to force all pending segmented image readbacks to finish and events to be fired.
DestroyTestObject(cameraObject);
DestroyTestObject(cameraObject2);
Assert.AreEqual(1, timesSegmentationImageReceived);
Assert.AreEqual(4, timesSegmentationImageReceived);
public IEnumerator SegmentationPassProducesCorrectValuesEachFrame(
[Values(SegmentationKind.Instance, SegmentationKind.Semantic)] SegmentationKind segmentationKind)
public IEnumerator SegmentationPassProducesCorrectValuesEachFrame()
Dictionary<int, object> expectedLabelAtFrame = null;
Dictionary<int, int> expectedLabelAtFrame = null;
void OnSegmentationImageReceived<T>(int frameCount, NativeArray<T> data, RenderTexture tex) where T : struct
Action<int, NativeArray<uint>> onSegmentationImageReceived = (frameCount, data) =>
if (expectedLabelAtFrame == null || !expectedLabelAtFrame.ContainsKey(frameCount)) return;
if (expectedLabelAtFrame == null || !expectedLabelAtFrame.ContainsKey(frameCount))
return;
timesSegmentationImageReceived++;

{
CollectionAssert.AreEqual(Enumerable.Repeat(expectedLabelAtFrame[frameCount], data.Length), data.ToArray());
CollectionAssert.AreEqual(Enumerable.Repeat(expectedLabelAtFrame[frameCount], data.Length), data);
//UnityEditorInternal.RenderDoc.EndCaptureRenderDoc(gameView);
//RenderDoc.EndCaptureRenderDoc(gameView);
}
var cameraObject = segmentationKind == SegmentationKind.Instance ?
SetupCameraInstanceSegmentation(OnSegmentationImageReceived) :
SetupCameraSemanticSegmentation((a) => OnSegmentationImageReceived(a.frameCount, a.data, a.sourceTexture), false);
};
//object expectedPixelValue = segmentationKind == SegmentationKind.Instance ? (object) new Color32(0, 74, 255, 255) : k_SemanticPixelValue;
var expectedPixelValue = segmentationKind == SegmentationKind.Instance ? (object) k_InstanceSegmentationPixelValue : k_SemanticPixelValue;
var cameraObject = SetupCamera(onSegmentationImageReceived);
expectedLabelAtFrame = new Dictionary<int, object>
expectedLabelAtFrame = new Dictionary<int, int>
{Time.frameCount , expectedPixelValue},
{Time.frameCount + 1, expectedPixelValue},
{Time.frameCount + 2, expectedPixelValue}
{Time.frameCount , 1},
{Time.frameCount + 1, 1},
{Time.frameCount + 2, 1}
};
GameObject planeObject;

Assert.AreEqual(3, timesSegmentationImageReceived);
}
GameObject SetupCameraInstanceSegmentation(Action<int, NativeArray<Color32>, RenderTexture> onSegmentationImageReceived)
{
var cameraObject = SetupCamera(out var perceptionCamera, false);
perceptionCamera.InstanceSegmentationImageReadback += onSegmentationImageReceived;
cameraObject.SetActive(true);
return cameraObject;
}
GameObject SetupCameraSemanticSegmentation(Action<SemanticSegmentationLabeler.ImageReadbackEventArgs> onSegmentationImageReceived, bool showVisualizations, Color? backgroundColor = null)
{
var cameraObject = SetupCamera(out var perceptionCamera, showVisualizations);
var labelConfig = ScriptableObject.CreateInstance<SemanticSegmentationLabelConfig>();
labelConfig.Init(new List<SemanticSegmentationLabelEntry>()
{
new SemanticSegmentationLabelEntry()
{
label = "label",
color = k_SemanticPixelValue
}
});
if (backgroundColor != null)
{
labelConfig.skyColor = backgroundColor.Value;
}
var semanticSegmentationLabeler = new SemanticSegmentationLabeler(labelConfig);
semanticSegmentationLabeler.imageReadback += onSegmentationImageReceived;
perceptionCamera.AddLabeler(semanticSegmentationLabeler);
cameraObject.SetActive(true);
return cameraObject;
}
GameObject SetupCamera(out PerceptionCamera perceptionCamera, bool showVisualizations)
GameObject SetupCamera(Action<int, NativeArray<uint>> onSegmentationImageReceived)
{
var cameraObject = new GameObject();
cameraObject.SetActive(false);

perceptionCamera = cameraObject.AddComponent<PerceptionCamera>();
perceptionCamera.captureRgbImages = false;
perceptionCamera.showVisualizations = showVisualizations;
#if HDRP_PRESENT
cameraObject.AddComponent<HDAdditionalCameraData>();
var customPassVolume = cameraObject.AddComponent<CustomPassVolume>();
customPassVolume.isGlobal = true;
var rt = new RenderTexture(128, 128, 1, GraphicsFormat.R8G8B8A8_UNorm);
rt.Create();
var instanceSegmentationPass = new InstanceSegmentationPass();
instanceSegmentationPass.targetCamera = camera;
instanceSegmentationPass.targetTexture = rt;
customPassVolume.customPasses.Add(instanceSegmentationPass);
instanceSegmentationPass.name = nameof(instanceSegmentationPass);
instanceSegmentationPass.EnsureInit();
var reader = cameraObject.AddComponent<ImageReaderBehaviour>();
reader.source = rt;
reader.cameraSource = camera;
reader.SegmentationImageReceived += onSegmentationImageReceived;
#endif
#if URP_PRESENT
var labelingConfiguration = ScriptableObject.CreateInstance<LabelingConfiguration>();
var perceptionCamera = cameraObject.AddComponent<PerceptionCamera>();
perceptionCamera.LabelingConfiguration = labelingConfiguration;
perceptionCamera.captureRgbImages = false;
perceptionCamera.produceBoundingBoxAnnotations = false;
perceptionCamera.produceObjectCountAnnotations = true;
perceptionCamera.segmentationImageReceived += onSegmentationImageReceived;
#endif
cameraObject.SetActive(true);
return cameraObject;
}
}

32
com.unity.perception/package.json


{
"dependencies": {
"com.unity.burst": "1.4.6",
"com.unity.collections": "0.9.0-preview.6",
"com.unity.simulation.capture": "0.0.10-preview.22",
"com.unity.simulation.client": "0.0.10-preview.10",
"com.unity.simulation.core": "0.0.10-preview.22"
},
"description": "Tools for generating large-scale data sets for perception-based machine learning training and validation",
"displayName": "Perception",
"name": "com.unity.perception",
"unity": "2019.4",
"version": "0.8.0-preview.3",
"samples": [
{
"displayName": "Tutorial Files",
"description": "These files accompany the Perception Tutorial, found at https://github.com/Unity-Technologies/com.unity.perception",
"path": "Samples~/Tutorial Files"
},
{
"displayName": "Human Pose Labeling and Randomization",
"description": "These files accompany the Human Pose Labeling and Randomization Tutorial, found at https://github.com/Unity-Technologies/com.unity.perception",
"path": "Samples~/Human Pose Labeling and Randomization"
}
]
"com.unity.entities": "0.8.0-preview.8",
"com.unity.simulation.capture": "0.0.10-preview.6",
"com.unity.simulation.core": "0.0.10-preview.8"
},
"description": "Tools for authoring and executing autonomous vehicle simulations.",
"displayName": "Perception",
"name": "com.unity.perception",
"unity": "2019.3",
"version": "0.1.0-preview.3"
}

2
com.unity.perception/Editor/GroundTruth/ObjectCountPassEditor.cs.meta


fileFormatVersion: 2
guid: 695e410829600ff40bcdd76fa0818f6a
guid: d28b6eb98e988b34b9da6e73fccc09bb
MonoImporter:
externalObjects: {}
serializedVersion: 2

2
com.unity.perception/Runtime/GroundTruth/GroundTruthInfo.cs.meta


fileFormatVersion: 2
guid: 431b0b04c9f64b442b5d704e0cea4ce0
guid: 013782ed5fce31d46b9849bbb3cc3da8
MonoImporter:
externalObjects: {}
serializedVersion: 2

8
com.unity.perception/Runtime/GroundTruth/GroundTruthLabelSetupSystem.cs.meta


fileFormatVersion: 2
guid: 440ceee0f1833054db2f1a666296971d
guid: 67ff3a1956c5bb14487beccf559cdd49
MonoImporter:
externalObjects: {}
serializedVersion: 2

userData:
assetBundleName:
assetBundleVariant:
userData:
assetBundleName:
assetBundleVariant:

8
com.unity.perception/Runtime/GroundTruth/GroundTruthPass.cs.meta


fileFormatVersion: 2
guid: d18e395d4335b5d4ba0e568736f581c4
guid: 0d7678390ae7b4844ad760a9e39dfd7d
MonoImporter:
externalObjects: {}
serializedVersion: 2

userData:
assetBundleName:
assetBundleVariant:
userData:
assetBundleName:
assetBundleVariant:

2
com.unity.perception/Runtime/GroundTruth/ObjectCountPass.cs.meta


fileFormatVersion: 2
guid: 2bac8a8673454861a0532ef0a7ee968f
guid: 0e6829808d75dc849a5c29b6b547c0e1
MonoImporter:
externalObjects: {}
serializedVersion: 2

2
com.unity.perception/Runtime/GroundTruth/SimulationManagementComponentSystem.cs.meta


fileFormatVersion: 2
guid: 8404035e89b3b1b4e87136d8a512b6f5
guid: 8dfdda78cfa74c6991ac2d757d8e7019
MonoImporter:
externalObjects: {}
serializedVersion: 2

2
com.unity.perception/Runtime/GroundTruth/Labeling/LabelingConfiguration.cs.meta


fileFormatVersion: 2
guid: c62092ba10e4e4a80a0ec03e6e92593a
guid: bad10bec3eccd8e49a9d725b2c30f74c
MonoImporter:
externalObjects: {}
serializedVersion: 2

35
com.unity.perception/Runtime/GroundTruth/GroundTruthPass.cs


#if HDRP_PRESENT
using Unity.Entities;
abstract class GroundTruthPass : CustomPass, IGroundTruthGenerator
public abstract class GroundTruthPass : CustomPass, IGroundTruthGenerator
public abstract void SetupMaterialProperties(
MaterialPropertyBlock mpb, Renderer meshRenderer, Labeling labeling, uint instanceId);
public abstract void ClearMaterialProperties(
MaterialPropertyBlock mpb, Renderer meshRenderer, Labeling labeling, uint instanceId);
public abstract void SetupMaterialProperties(MaterialPropertyBlock mpb, MeshRenderer meshRenderer, Labeling labeling, uint instanceId);
protected GroundTruthPass(Camera targetCamera)
{

// If we are forced to activate here we will get zeroes in the first frame.
EnsureActivated();
targetColorBuffer = TargetBuffer.Custom;
targetDepthBuffer = TargetBuffer.Custom;
this.targetColorBuffer = TargetBuffer.Custom;
this.targetDepthBuffer = TargetBuffer.Custom;
//overrides obsolete member in HDRP on 2020.1+. Re-address when removing 2019.4 support or the API is dropped
#if HDRP_9_OR_NEWER
protected override void Execute(CustomPassContext ctx)
protected sealed override void Execute(ScriptableRenderContext renderContext, CommandBuffer cmd, HDCamera hdCamera, CullingResults cullingResult)
ScriptableRenderContext renderContext = ctx.renderContext;
var cmd = ctx.cmd;
var hdCamera = ctx.hdCamera;
var cullingResult = ctx.cullingResults;
#else
protected override void Execute(ScriptableRenderContext renderContext, CommandBuffer cmd, HDCamera hdCamera, CullingResults cullingResult)
{
#endif
// CustomPasses are executed for each camera. We only want to run for the target camera
if (hdCamera.camera != targetCamera)
return;

protected abstract void ExecutePass(
ScriptableRenderContext renderContext, CommandBuffer cmd, HDCamera hdCamera, CullingResults cullingResult);
protected abstract void ExecutePass(ScriptableRenderContext renderContext, CommandBuffer cmd, HDCamera hdCamera, CullingResults cullingResult);
LabelManager.singleton.Activate(this);
var labelSetupSystem = World.DefaultGameObjectInjectionWorld?.GetExistingSystem<GroundTruthLabelSetupSystem>();
labelSetupSystem?.Activate(this);
m_IsActivated = true;
}
}

LabelManager.singleton.Deactivate(this);
var labelSetupSystem = World.DefaultGameObjectInjectionWorld?.GetExistingSystem<GroundTruthLabelSetupSystem>();
labelSetupSystem?.Deactivate(this);
}
}
}

14
com.unity.perception/Runtime/GroundTruth/InstanceSegmentationPass.cs


#if HDRP_PRESENT
using System;
using JetBrains.Annotations;
using UnityEngine.Rendering.HighDefinition;

public Camera targetCamera;
[UsedImplicitly]
public InstanceSegmentationPass() {}
public InstanceSegmentationPass()
{}
//overrides obsolete member in HDRP on 2020.1+. Re-address when removing 2019.4 support or the API is dropped
#if HDRP_9_OR_NEWER
protected override void Execute(CustomPassContext ctx)
{
ScriptableRenderContext renderContext = ctx.renderContext;
var cmd = ctx.cmd;
var hdCamera = ctx.hdCamera;
var cullingResult = ctx.cullingResults;
#else
#endif
CoreUtils.SetRenderTarget(cmd, targetTexture, ClearFlag.All);
m_InstanceSegmentationCrossPipelinePass.Execute(renderContext, cmd, hdCamera.camera, cullingResult);
}

25
com.unity.perception/Runtime/GroundTruth/SemanticSegmentationPass.cs


#if HDRP_PRESENT
using System;
using UnityEngine.Rendering;
using UnityEngine.Rendering.HighDefinition;

public class SemanticSegmentationPass : CustomPass
{
public RenderTexture targetTexture;
public SemanticSegmentationLabelConfig semanticSegmentationLabelConfig;
public LabelingConfiguration labelingConfiguration;
public SemanticSegmentationPass(Camera targetCamera, RenderTexture targetTexture, SemanticSegmentationLabelConfig semanticSegmentationLabelConfig)
public SemanticSegmentationPass(Camera targetCamera, RenderTexture targetTexture, LabelingConfiguration labelingConfiguration)
this.semanticSegmentationLabelConfig = semanticSegmentationLabelConfig;
this.labelingConfiguration = labelingConfiguration;
this.targetCamera = targetCamera;
EnsureInit();
}

if (m_SemanticSegmentationCrossPipelinePass == null)
{
m_SemanticSegmentationCrossPipelinePass = new SemanticSegmentationCrossPipelinePass(targetCamera, semanticSegmentationLabelConfig);
m_SemanticSegmentationCrossPipelinePass = new SemanticSegmentationCrossPipelinePass(targetCamera, labelingConfiguration);
}
public SemanticSegmentationPass()
{
}
protected override void Setup(ScriptableRenderContext renderContext, CommandBuffer cmd)

}
//overrides obsolete member in HDRP on 2020.1+. Re-address when removing 2019.4 support or the API is dropped
#if HDRP_9_OR_NEWER
protected override void Execute(CustomPassContext ctx)
{
ScriptableRenderContext renderContext = ctx.renderContext;
var cmd = ctx.cmd;
var hdCamera = ctx.hdCamera;
var cullingResult = ctx.cullingResults;
#else
#endif
CoreUtils.SetRenderTarget(cmd, targetTexture);
CoreUtils.SetRenderTarget(cmd, targetTexture, ClearFlag.All);
m_SemanticSegmentationCrossPipelinePass.Execute(renderContext, cmd, hdCamera.camera, cullingResult);
}
}

2
com.unity.perception/Runtime/GroundTruth/SemanticSegmentationPass.cs.meta


fileFormatVersion: 2
guid: 84b5f1182e5f64c49954fc264cd2e46f
guid: 6d4bfcc857cac4d4ab70b19c87508947
MonoImporter:
externalObjects: {}
serializedVersion: 2

2
com.unity.perception/Runtime/GroundTruth/GroundTruthCrossPipelinePass.cs.meta


fileFormatVersion: 2
guid: bc2a459f27b0e93409b0ef30b7924d35
guid: a3a8d5a7497d49a2a2c8f0c1c52aae18
MonoImporter:
externalObjects: {}
serializedVersion: 2

2
com.unity.perception/Runtime/GroundTruth/InstanceSegmentationCrossPipelinePass.cs.meta


fileFormatVersion: 2
guid: 799b5ba08ee3e6d479c2aac3a89507f0
guid: b575537c5b92d6949bb233707280a614
MonoImporter:
externalObjects: {}
serializedVersion: 2

2
com.unity.perception/Runtime/GroundTruth/SemanticSegmentationCrossPipelinePass.cs.meta


fileFormatVersion: 2
guid: cce1552fd4183b64f983461e6e07a317
guid: a6740ca70f44ece45901dc26511f9aa0
MonoImporter:
externalObjects: {}
serializedVersion: 2

8
TestProjects/PerceptionHDRP/Assets/LabelingConfiguration.asset.meta


fileFormatVersion: 2
guid: be3971a848968144e8d07d9136a5bf49
NativeFormatImporter:
externalObjects: {}
mainObjectFileID: 0
userData:
assetBundleName:
assetBundleVariant:

26
TestProjects/PerceptionHDRP/Assets/LabelingConfiguration.asset


%YAML 1.1
%TAG !u! tag:unity3d.com,2011:
--- !u!114 &11400000
MonoBehaviour:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 0}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: bad10bec3eccd8e49a9d725b2c30f74c, type: 3}
m_Name: LabelingConfiguration
m_EditorClassIdentifier:
AutoAssignIds: 1
StartingLabelId: 1
LabelEntries:
- id: 1
label: Box
value: 10000
- id: 2
label: Crate
value: 20000
- id: 3
label: Cube
value: 30000

8
TestProjects/PerceptionURP/Assets/ExampleLabelingConfiguration.asset.meta


fileFormatVersion: 2
guid: e74234fe725079e4aa7ecd74797ceb79
NativeFormatImporter:
externalObjects: {}
mainObjectFileID: 0
userData:
assetBundleName:
assetBundleVariant:

8
TestProjects/PerceptionURP/Assets/Settings/UniversalRP-HighQuality.asset.meta


fileFormatVersion: 2
guid: 19ba41d7c0026c3459d37c2fe90c55a0
NativeFormatImporter:
externalObjects: {}
mainObjectFileID: 0
userData:
assetBundleName:
assetBundleVariant:

26
TestProjects/PerceptionURP/Assets/ExampleLabelingConfiguration.asset


%YAML 1.1
%TAG !u! tag:unity3d.com,2011:
--- !u!114 &11400000
MonoBehaviour:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 0}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: bad10bec3eccd8e49a9d725b2c30f74c, type: 3}
m_Name: ExampleLabelingConfiguration
m_EditorClassIdentifier:
AutoAssignIds: 1
StartingLabelId: 1
LabelEntries:
- id: 1
label: Box
value: 10000
- id: 2
label: Cube
value: 20000
- id: 3
label: Crate
value: 30000

48
com.unity.perception/Documentation~/GettingStarted.md


# Getting Started with SynthDet
This page provides step-by-step instructions for creating a new scene that uses the Perception features to produce semantic data and image captures. These steps work whether you use an existing project or create a new one. The goal is to have a working scene by the end of these instructions that provides a dataset, RGB image captures, and semantic segmentation data.
If you have not already done the setup steps for the project, click [here](Documentation~/SetupSteps.md) to start project setup.
## Step 1: Create a new scene and camera
1. Create a new scene (File -> New Scene)
2. Save the scene (File -> Save) and give it a name, e.g. PerceptionScene
3. Select the Main Camera and reset its Position transform to 0
4. In the Hierarchy window select the main camera
1. In the inspector panel of the main camera select Add Component
2. Add a **Perception Camera** component
<img src="images/MainCameraConfig.PNG" align="middle"/>
## Step 2: Create labeled objects
1. In the Hierarchy window right click -> Go to 3D Object -> Select Cube
1. Create 3 cubes
2. Rename the cubes so they have 3 separate names: Cube, Box, Crate
3. Position the cubes within the field of view of the Main Camera; an example image of the completed scene is shown below for reference
<img src="images/CompletedScene.PNG" align="middle"/>
2. For each object created in the scene, add the **Labeling** script from the Inspector panel
1. Click the **+**
2. In the text field, add the name of the object, e.g. Crate
<img src="images/LabeledObject.PNG" align="middle"/>
3. In the Project panel right click -> Perception -> Labeling Configuration
4. Select the **Labeling Configuration** created in the project panel
1. Click the **+**
2. In the label text field, add the same text that the Labeling script contains on the objects created in the scene (i.e. Cube, Box, Crate)
3. Add a numerical value to the value field
1. Make sure the labels all have different values; this example uses increments of 10,000 (10000, 20000, 30000)
<img src="images/LabelingConfigurationFinished.PNG" align="middle"/>
5. Select the Main Camera in the Hierarchy panel
1. In the Perception Camera script, set the Labeling Configuration field to the Labeling Configuration asset created in the previous step (the script sketch below shows an equivalent setup from code)
<img src="images/MainCameraConfig.PNG" align="middle"/>
## Step 3: Checking local files
1. Press Play in the editor and allow the scene to run for about 10 seconds before exiting Play mode
2. The console log will show a Shutdown in Progress message containing the file path of the generated dataset (the sketch at the end of this page shows how to log this path from code)
3. The file path is the application persistent data path + /DefaultCompany/UnityTestFramework/<Hash Key>
1. Example file path on a Windows PC : *C:/Users/<User Name>/AppData/LocalLow/DefaultCompany/UnityTestFramework\2e10ec21-9d97-4cee-b5a2-7e95e299afa4\RGB18f61842-ef8d-4b31-acb5-cb1da36fb7b1*
4. In the output path for the Labeling content you can verify the following data is present:
1. RGB captures
2. Semantic segmentation images
3. Logs
4. JSON Dataset
<img src="images/rgb_2.png" align="middle"/>
<img src="images/segmentation_2.png" align="middle"/>

27
com.unity.perception/Documentation~/GroundTruth-Labeling.md


_Note: This document is a work in progress_
# Labeling
Accurately labeling assets with a predefined taxonomy informs the training and testing of algorithms about which objects in a dataset are important. For example, assets labeled “table” and “chair” provide an algorithm with the information it needs to learn to identify these objects separately within a scene.
You can add a Labeling component to individual GameObjects within a scene, although it is good practice to create a prefab of a GameObject and apply the Labeling component to the prefab.
The Labeling component contains properties that control the number of labels applied to the GameObject. “Classes” has a property named “size”, which identifies how many labels are applied to the GameObject; the default is 0 (no label). Setting “size” to 1 exposes an “Element 0” parameter with an input field that accepts a custom label as text, numbers, or a combination of both.
Multiple labels can be used by setting “size” to 2 or more. These additional Elements (labels) can be used for any purpose in development. For example, in SynthDet labels form a hierarchy where Element 0 is the highest-level label, identifying a GameObject in a very general category. Subsequent elements become more specific about the types and groups the object can be classified into. The last Element is reserved for the specific name (or label) of the asset.
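As a concrete illustration of the Elements described above, here is a minimal sketch that applies two labels to an object from code. `Labeling` is the package component; the `HierarchicalLabelExample` class name and the assumption that its label list is a writable `List<string>` are illustrative.

```csharp
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.Perception.GroundTruth;

// Hypothetical component that applies a two-level label hierarchy to its GameObject.
public class HierarchicalLabelExample : MonoBehaviour
{
    void Start()
    {
        var labeling = gameObject.AddComponent<Labeling>();
        // Element 0 is the most general category; the last element is the specific asset name.
        labeling.labels = new List<string> { "furniture", "chair" };
    }
}
```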
## Labeling Configuration
Semantic segmentation (and other metrics) requires a labeling configuration file.
This file lists all labels currently used in the dataset and the RGB value each is associated with. The file can be used as-is or created by the developer. When a semantic segmentation output is generated, the per-pixel RGB value can be used to identify the object for the algorithm.
Note: the labeling configuration file is not validated and must be managed by the developer.
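Because the per-pixel value in the semantic segmentation output corresponds to a label entry's value, it can be mapped back to a label by scanning the configuration. This is a minimal sketch; `LabelingConfiguration` and `LabelEntry` are types from this package, while the `SegmentationLookup` class name is an assumption.

```csharp
using UnityEngine.Perception.GroundTruth;

// Hypothetical lookup from a semantic segmentation pixel value back to its label string.
public static class SegmentationLookup
{
    public static string LabelForPixelValue(LabelingConfiguration configuration, int pixelValue)
    {
        foreach (var entry in configuration.LabelEntries)
        {
            if (entry.value == pixelValue)
                return entry.label;
        }
        // No entry matched: the pixel belongs to an unlabeled (background) object.
        return null;
    }
}
```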
## Best practices
Generally, algorithm testing and training requires a single label on an asset for proper identification (“chair”, “table”, “door”, “window”, etc.). In Unity SynthDet, a labeling hierarchy is used to identify assets at a higher level and/or more granularly.
Example
An asset representing a box of Rice Krispies cereal is labeled as: food\cereal\kellogs\ricekrispies
“food” - type
“cereal” - subtype
“kellogs” - main descriptor
“ricekrispies” - sub descriptor
If the goal of the algorithm is to identify all objects in a scene that are “food”, that label is available and can be used. Conversely, if the goal is to identify only Rice Krispies cereal within a scene, that label is also available. Depending on the goal of the algorithm, any mix of labels in the hierarchy can be used at the discretion of the developer.
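To make that last point concrete, here is a minimal sketch showing that two different configurations can resolve the same hierarchically labeled object at different levels, using `LabelingConfiguration.TryGetMatchingConfigurationEntry` from this package. The `HierarchyMatchingExample` class and the illustrative entry ids/values are assumptions.

```csharp
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.Perception.GroundTruth;

// Hypothetical example: the same labeled asset is matched at a general and a specific level.
public static class HierarchyMatchingExample
{
    // riceKrispiesBox is assumed to carry the labels: food, cereal, kellogs, ricekrispies.
    public static void Demonstrate(Labeling riceKrispiesBox)
    {
        var foodOnly = ScriptableObject.CreateInstance<LabelingConfiguration>();
        foodOnly.LabelEntries = new List<LabelEntry> { new LabelEntry(1, "food", 1) };

        var specificOnly = ScriptableObject.CreateInstance<LabelingConfiguration>();
        specificOnly.LabelEntries = new List<LabelEntry> { new LabelEntry(1, "ricekrispies", 2) };

        // Both configurations match the same object, each at its own level of the hierarchy.
        foodOnly.TryGetMatchingConfigurationEntry(riceKrispiesBox, out var generalEntry);
        specificOnly.TryGetMatchingConfigurationEntry(riceKrispiesBox, out var specificEntry);
        Debug.Log($"General match: {generalEntry.label}, specific match: {specificEntry.label}");
    }
}
```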

15
com.unity.perception/Documentation~/index.md


# About the Perception SDK
com.unity.perception provides a toolkit for generating large-scale datasets for perception-based machine learning training and validation. It is focused on a handful of camera-based use cases for now and will ultimately expand to other forms of sensors and machine learning tasks.
# Technical details
## Requirements
This version of _Perception_ is compatible with Unity Editor 2019.3 and later.
## Package contents
|Feature|Description|
|---|---|
|Ground Truth|Captures semantic segmentation, bounding boxes, and other forms of ground truth.|
|Labeling|MonoBehaviour which marks an object and its descendants with a set of labels|
|Labeling Configuration|Asset which defines a taxonomy of labels used for ground truth generation|
|Perception Camera|Captures RGB images and ground truth on a Unity Camera|

21
com.unity.perception/Editor/GroundTruth/ObjectCountPassEditor.cs


#if HDRP_PRESENT
using System;
using UnityEditor.Rendering.HighDefinition;
using UnityEngine.Perception.GroundTruth;
namespace UnityEditor.Perception.GroundTruth
{
[CustomPassDrawer(typeof(ObjectCountPass))]
public class ObjectCountPassEditor : BaseCustomPassDrawer
{
protected override void Initialize(SerializedProperty customPass)
{
AddProperty(customPass.FindPropertyRelative(nameof(GroundTruthPass.targetCamera)));
AddProperty(customPass.FindPropertyRelative(nameof(ObjectCountPass.SegmentationTexture)));
AddProperty(customPass.FindPropertyRelative(nameof(ObjectCountPass.LabelingConfiguration)));
base.Initialize(customPass);
}
}
}
#endif

157
com.unity.perception/Editor/GroundTruth/LabelingConfigurationEditor.cs


using System;
using Unity.Mathematics;
using UnityEditorInternal;
using UnityEngine;
using UnityEngine.Perception.GroundTruth;
namespace UnityEditor.Perception.GroundTruth
{
[CustomEditor(typeof(LabelingConfiguration))]
class LabelingConfigurationEditor : Editor
{
ReorderableList m_LabelsList;
const float k_Margin = 5f;
public void OnEnable()
{
m_LabelsList = new ReorderableList(this.serializedObject, this.serializedObject.FindProperty(nameof(LabelingConfiguration.LabelEntries)), true, false, true, true);
m_LabelsList.elementHeight = EditorGUIUtility.singleLineHeight * 3 + k_Margin;
m_LabelsList.drawElementCallback = DrawElement;
m_LabelsList.onAddCallback += OnAdd;
m_LabelsList.onRemoveCallback += OnRemove;
m_LabelsList.onReorderCallbackWithDetails += OnReorder;
}
void OnReorder(ReorderableList list, int oldIndex, int newIndex)
{
if (!autoAssign)
return;
AutoAssignIds();
}
void OnRemove(ReorderableList list)
{
if (list.index != -1)
list.serializedProperty.DeleteArrayElementAtIndex(list.index);
if (autoAssign)
AutoAssignIds();
this.serializedObject.ApplyModifiedProperties();
EditorUtility.SetDirty(target);
}
void OnAdd(ReorderableList list)
{
int maxLabel = Int32.MinValue;
if (list.serializedProperty.arraySize == 0)
maxLabel = -1;
for (int i = 0; i < list.serializedProperty.arraySize; i++)
{
var item = list.serializedProperty.GetArrayElementAtIndex(i);
maxLabel = math.max(maxLabel, item.FindPropertyRelative(nameof(LabelEntry.id)).intValue);
}
var index = list.serializedProperty.arraySize;
list.serializedProperty.InsertArrayElementAtIndex(index);
var element = list.serializedProperty.GetArrayElementAtIndex(index);
var idProperty = element.FindPropertyRelative(nameof(LabelEntry.id));
idProperty.intValue = maxLabel + 1;
var labelProperty = element.FindPropertyRelative(nameof(LabelEntry.label));
labelProperty.stringValue = "";
var valueProperty = element.FindPropertyRelative(nameof(LabelEntry.value));
valueProperty.intValue = 0;
if (autoAssign)
AutoAssignIds();
serializedObject.ApplyModifiedProperties();
EditorUtility.SetDirty(target);
}
void DrawElement(Rect rect, int index, bool isactive, bool isfocused)
{
var element = m_LabelsList.serializedProperty.GetArrayElementAtIndex(index);
var idProperty = element.FindPropertyRelative(nameof(LabelEntry.id));
var labelProperty = element.FindPropertyRelative(nameof(LabelEntry.label));
var valueProperty = element.FindPropertyRelative(nameof(LabelEntry.value));
using (var change = new EditorGUI.ChangeCheckScope())
{
var contentRect = new Rect(rect.position, new Vector2(rect.width, EditorGUIUtility.singleLineHeight));
using (new EditorGUI.DisabledScope(autoAssign))
{
var newLabel = EditorGUI.IntField(contentRect, nameof(LabelEntry.id), idProperty.intValue);
if (change.changed)
{
idProperty.intValue = newLabel;
if (autoAssign)
AutoAssignIds();
}
}
}
using (var change = new EditorGUI.ChangeCheckScope())
{
var contentRect = new Rect(rect.position + new Vector2(0, EditorGUIUtility.singleLineHeight), new Vector2(rect.width, EditorGUIUtility.singleLineHeight));
var newLabel = EditorGUI.TextField(contentRect, nameof(LabelEntry.label), labelProperty.stringValue);
if (change.changed)
{
labelProperty.stringValue = newLabel;
}
}
using (var change = new EditorGUI.ChangeCheckScope())
{
var contentRect = new Rect(rect.position + new Vector2(0, EditorGUIUtility.singleLineHeight * 2), new Vector2(rect.width, EditorGUIUtility.singleLineHeight));
var newValue = EditorGUI.IntField(contentRect, nameof(LabelEntry.value), valueProperty.intValue);
if (change.changed)
valueProperty.intValue = newValue;
}
}
bool autoAssign => serializedObject.FindProperty(nameof(LabelingConfiguration.AutoAssignIds)).boolValue;
public override void OnInspectorGUI()
{
serializedObject.Update();
var autoAssignIdsProperty = serializedObject.FindProperty(nameof(LabelingConfiguration.AutoAssignIds));
using (var change = new EditorGUI.ChangeCheckScope())
{
EditorGUILayout.PropertyField(autoAssignIdsProperty, new GUIContent("Auto Assign IDs"));
if (change.changed && autoAssignIdsProperty.boolValue)
AutoAssignIds();
}
if (autoAssignIdsProperty.boolValue)
{
using (var change = new EditorGUI.ChangeCheckScope())
{
var startingLabelIdProperty = serializedObject.FindProperty(nameof(LabelingConfiguration.StartingLabelId));
EditorGUILayout.PropertyField(startingLabelIdProperty, new GUIContent("Starting Label ID"));
if (change.changed)
AutoAssignIds();
}
}
m_LabelsList.DoLayoutList();
this.serializedObject.ApplyModifiedProperties();
}
void AutoAssignIds()
{
var serializedProperty = serializedObject.FindProperty(nameof(LabelingConfiguration.LabelEntries));
var size = serializedProperty.arraySize;
if (size == 0)
return;
var startingLabelId = (StartingLabelId)serializedObject.FindProperty(nameof(LabelingConfiguration.StartingLabelId)).enumValueIndex;
var nextId = startingLabelId == StartingLabelId.One ? 1 : 0;
for (int i = 0; i < size; i++)
{
serializedProperty.GetArrayElementAtIndex(i).FindPropertyRelative(nameof(LabelEntry.id)).intValue = nextId;
nextId++;
}
}
}
}

107
com.unity.perception/Runtime/GroundTruth/GroundTruthCrossPipelinePass.cs


using System;
using Unity.Entities;
using UnityEngine;
using UnityEngine.Experimental.Rendering;
using UnityEngine.Rendering;
namespace UnityEngine.Perception.GroundTruth
{
abstract class GroundTruthCrossPipelinePass : IGroundTruthGenerator
{
public Camera targetCamera;
bool m_IsActivated;
protected GroundTruthCrossPipelinePass(Camera targetCamera)
{
this.targetCamera = targetCamera;
}
public virtual void Setup()
{
if (targetCamera == null)
throw new InvalidOperationException("targetCamera may not be null");
// If we are forced to activate here we will get zeroes in the first frame.
EnsureActivated();
}
public void Execute(ScriptableRenderContext renderContext, CommandBuffer cmd, Camera camera, CullingResults cullingResult)
{
// CustomPasses are executed for each camera. We only want to run for the target camera
if (camera != targetCamera)
return;
ExecutePass(renderContext, cmd, camera, cullingResult);
}
protected abstract void ExecutePass(ScriptableRenderContext renderContext, CommandBuffer cmd, Camera camera, CullingResults cullingResult);
public void EnsureActivated()
{
if (!m_IsActivated)
{
var labelSetupSystem = World.DefaultGameObjectInjectionWorld?.GetExistingSystem<GroundTruthLabelSetupSystem>();
labelSetupSystem?.Activate(this);
m_IsActivated = true;
}
}
public void Cleanup()
{
var labelSetupSystem = World.DefaultGameObjectInjectionWorld?.GetExistingSystem<GroundTruthLabelSetupSystem>();
labelSetupSystem?.Deactivate(this);
}
protected RendererListDesc CreateRendererListDesc(Camera camera, CullingResults cullingResult, string overrideMaterialPassName, int overrideMaterialPassIndex, Material overrideMaterial, LayerMask layerMask /*, PerObjectData perObjectData*/)
{
var shaderPasses = new[]
{
new ShaderTagId("Forward"), // HD Lit shader
new ShaderTagId("ForwardOnly"), // HD Unlit shader
new ShaderTagId("SRPDefaultUnlit"), // Cross SRP Unlit shader
new ShaderTagId("UniversalForward"), // URP Forward
new ShaderTagId("LightweightForward"), // LWRP Forward
new ShaderTagId(overrideMaterialPassName), // The override material shader
};
var stateBlock = new RenderStateBlock(0)
{
depthState = new DepthState(true, CompareFunction.LessEqual),
};
var result = new RendererListDesc(shaderPasses, cullingResult, camera)
{
rendererConfiguration = PerObjectData.None,
renderQueueRange = new RenderQueueRange { lowerBound = 0, upperBound = 5000 },
sortingCriteria = SortingCriteria.CommonOpaque,
excludeObjectMotionVectors = false,
overrideMaterial = overrideMaterial,
overrideMaterialPassIndex = overrideMaterialPassIndex,
stateBlock = stateBlock,
layerMask = layerMask,
};
return result;
}
public static void DrawRendererList(ScriptableRenderContext renderContext, CommandBuffer cmd, RendererList rendererList)
{
if (!rendererList.isValid)
throw new ArgumentException("Invalid renderer list provided to DrawRendererList");
// This is done here because the DrawRenderers API lives outside command buffers, so we need to call this before doing any DrawRenderers calls or things will be executed out of order
renderContext.ExecuteCommandBuffer(cmd);
cmd.Clear();
if (rendererList.stateBlock == null)
renderContext.DrawRenderers(rendererList.cullingResult, ref rendererList.drawSettings, ref rendererList.filteringSettings);
else
{
var renderStateBlock = rendererList.stateBlock.Value;
renderContext.DrawRenderers(rendererList.cullingResult, ref rendererList.drawSettings, ref rendererList.filteringSettings, ref renderStateBlock);
}
}
public abstract void SetupMaterialProperties(MaterialPropertyBlock mpb, MeshRenderer meshRenderer, Labeling labeling, uint instanceId);
}
}

15
com.unity.perception/Runtime/GroundTruth/GroundTruthInfo.cs


using Unity.Entities;
namespace UnityEngine.Perception.GroundTruth
{
/// <summary>
/// Information regarding a Labeling instance. Generated by <see cref="GroundTruthLabelSetupSystem"/>
/// </summary>
public struct GroundTruthInfo : IComponentData
{
/// <summary>
/// The instanceId assigned to the <see cref="Labeling"/>
/// </summary>
public uint instanceId;
}
}

106
com.unity.perception/Runtime/GroundTruth/GroundTruthLabelSetupSystem.cs


using System.Collections.Generic;
using System.Threading;
using Unity.Entities;
namespace UnityEngine.Perception.GroundTruth
{
struct IdAssignmentParameters : IComponentData
{
public uint idStart;
public uint idStep;
}
/// <summary>
/// System which notifies the registered <see cref="IGroundTruthGenerator"/> about <see cref="Labeling"/> additions.
/// </summary>
public class GroundTruthLabelSetupSystem : ComponentSystem
{
List<IGroundTruthGenerator> m_ActiveGenerators = new List<IGroundTruthGenerator>();
ThreadLocal<MaterialPropertyBlock> m_MaterialPropertyBlocks = new ThreadLocal<MaterialPropertyBlock>();
int m_CurrentObjectIndex = -1;
/// <inheritdoc/>
protected override void OnCreate()
{
//These are here to inform the system runner of the queries we are interested in. Without these calls, OnUpdate() might not be called
GetEntityQuery(ComponentType.Exclude<GroundTruthInfo>(), ComponentType.ReadOnly<Labeling>());
GetEntityQuery(ComponentType.ReadOnly<GroundTruthInfo>(), ComponentType.ReadOnly<Labeling>());
}
/// <inheritdoc/>
protected override void OnUpdate()
{
var entityQuery = Entities.WithAll<IdAssignmentParameters>().ToEntityQuery();
IdAssignmentParameters idAssignmentParameters;
if (entityQuery.CalculateEntityCount() == 1)
idAssignmentParameters = entityQuery.GetSingleton<IdAssignmentParameters>();
else
idAssignmentParameters = new IdAssignmentParameters {idStart = 1, idStep = 1};
var entityCount = Entities.WithAll<Labeling, GroundTruthInfo>().ToEntityQuery().CalculateEntityCount();
if (entityCount == 0)
m_CurrentObjectIndex = -1;
Entities.WithNone<GroundTruthInfo>().ForEach((Entity e, Labeling labeling) =>
{
var objectIndex = (uint)Interlocked.Increment(ref m_CurrentObjectIndex);
var instanceId = idAssignmentParameters.idStart + objectIndex * idAssignmentParameters.idStep;
var gameObject = labeling.gameObject;
if (!m_MaterialPropertyBlocks.IsValueCreated)
m_MaterialPropertyBlocks.Value = new MaterialPropertyBlock();
InitGameObjectRecursive(gameObject, m_MaterialPropertyBlocks.Value, labeling, instanceId);
EntityManager.AddComponentData(e, new GroundTruthInfo
{
instanceId = instanceId
});
});
}
void InitGameObjectRecursive(GameObject gameObject, MaterialPropertyBlock mpb, Labeling labeling, uint instanceId)
{
var meshRenderer = gameObject.GetComponent<MeshRenderer>();
if (meshRenderer != null)
{
meshRenderer.GetPropertyBlock(mpb);
foreach (var pass in m_ActiveGenerators)
pass.SetupMaterialProperties(mpb, meshRenderer, labeling, instanceId);
meshRenderer.SetPropertyBlock(mpb);
}
for (var i = 0; i < gameObject.transform.childCount; i++)
{
var child = gameObject.transform.GetChild(i).gameObject;
if (child.GetComponent<Labeling>() != null)
continue;
InitGameObjectRecursive(child, mpb, labeling, instanceId);
}
}
/// <summary>
/// Activates the given <see cref="IGroundTruthGenerator"/>. <see cref="IGroundTruthGenerator.SetupMaterialProperties"/>
/// will be called for all <see cref="MeshRenderer"/> instances under each object containing a <see cref="Labeling"/> component.
/// </summary>
/// <param name="generator">The generator to register</param>
public void Activate(IGroundTruthGenerator generator)
{
m_ActiveGenerators.Add(generator);
Entities.ForEach((Labeling labeling, ref GroundTruthInfo info) =>
{
var gameObject = labeling.gameObject;
InitGameObjectRecursive(gameObject, m_MaterialPropertyBlocks.Value, labeling, info.instanceId);
});
}
/// <summary>
/// Deactivates the given <see cref="IGroundTruthGenerator"/>. It will no longer receive calls when <see cref="Labeling"/> instances are created.
/// </summary>
/// <param name="generator">The generator to deactivate</param>
/// <returns>True if the <see cref="generator"/> was successfully removed. False if <see cref="generator"/> was not active.</returns>
public bool Deactivate(IGroundTruthGenerator generator)
{
return m_ActiveGenerators.Remove(generator);
}
}
}

74
com.unity.perception/Runtime/GroundTruth/InstanceSegmentationCrossPipelinePass.cs


using System;
using System.Collections.Generic;
using Unity.Profiling;
using UnityEngine;
using UnityEngine.Experimental.Rendering;
using UnityEngine.Rendering;
namespace UnityEngine.Perception.GroundTruth
{
class InstanceSegmentationCrossPipelinePass : GroundTruthCrossPipelinePass
{
static readonly int k_SegmentationIdProperty = Shader.PropertyToID("_SegmentationId");
const string k_SegmentationPassShaderName = "Perception/InstanceSegmentation";
static ProfilerMarker s_ExecuteMarker = new ProfilerMarker("SegmentationPass_Execute");
/// <summary>
/// The LayerMask to apply when rendering objects.
/// </summary>
public LayerMask layerMask = -1;
Shader m_SegmentationShader;
Material m_OverrideMaterial;
int m_NextObjectIndex;
Dictionary<uint, uint> m_Ids;
/// <summary>
/// Create a new <see cref="InstanceSegmentationCrossPipelinePass"/> referencing the given target camera.
/// </summary>
/// <param name="targetCamera">The camera for which instance segmentation should be generated</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="targetCamera"/> is null</exception>
public InstanceSegmentationCrossPipelinePass(Camera targetCamera)
: base(targetCamera)
{
if (targetCamera == null)
throw new ArgumentNullException(nameof(targetCamera));
//Activating in the constructor allows us to get correct labeling in the first frame.
EnsureActivated();
}
public override void Setup()
{
base.Setup();
m_SegmentationShader = Shader.Find(k_SegmentationPassShaderName);
var shaderVariantCollection = new ShaderVariantCollection();
shaderVariantCollection.Add(new ShaderVariantCollection.ShaderVariant(m_SegmentationShader, PassType.ScriptableRenderPipeline));
shaderVariantCollection.WarmUp();
m_OverrideMaterial = new Material(m_SegmentationShader);
}
//Render all objects to our target RenderTexture using `overrideMaterial` to use our shader
protected override void ExecutePass(ScriptableRenderContext renderContext, CommandBuffer cmd, Camera camera, CullingResults cullingResult)
{
using (s_ExecuteMarker.Auto())
{
cmd.ClearRenderTarget(true, true, Color.clear);
var result = CreateRendererListDesc(camera, cullingResult, "FirstPass", 0, m_OverrideMaterial, layerMask);
DrawRendererList(renderContext, cmd, RendererList.Create(result));
}
}
public override void SetupMaterialProperties(MaterialPropertyBlock mpb, MeshRenderer meshRenderer, Labeling labeling, uint instanceId)
{
mpb.SetInt(k_SegmentationIdProperty, (int)instanceId);
#if PERCEPTION_DEBUG
Debug.Log($"Assigning id. Frame {Time.frameCount} id {id}");
#endif
}
}
}

108
com.unity.perception/Runtime/GroundTruth/Labeling/LabelingConfiguration.cs


using System;
using System.Collections.Generic;
using UnityEngine.Serialization;
namespace UnityEngine.Perception.GroundTruth
{
/// <summary>
/// A definition for how a <see cref="Labeling"/> should be resolved to a single label and id for ground truth generation.
/// </summary>
[CreateAssetMenu(fileName = "LabelingConfiguration", menuName = "Perception/Labeling Configuration", order = 1)]
public class LabelingConfiguration : ScriptableObject
{
/// <summary>
/// Whether the inspector will auto-assign ids based on the id of the first element.
/// </summary>
public bool AutoAssignIds = true;
/// <summary>
/// Whether the inspector will start label ids at zero or one when <see cref="AutoAssignIds"/> is enabled.
/// </summary>
public StartingLabelId StartingLabelId = StartingLabelId.One;
/// <summary>
/// A sequence of <see cref="LabelEntry"/> which defines the labels relevant for this configuration and their values.
/// </summary>
[FormerlySerializedAs("LabelingConfigurations")]
[SerializeField]
public List<LabelEntry> LabelEntries = new List<LabelEntry>();
/// <summary>
/// Attempts to find the matching index in <see cref="LabelEntries"/> for the given <see cref="Labeling"/>.
/// </summary>
/// <remarks>
/// The matching index is the first class name in the given Labeling which matches an entry in <see cref="LabelEntries"/>.
/// </remarks>
/// <param name="labeling">The <see cref="Labeling"/> to match </param>
/// <param name="labelEntry">When this method returns, contains the matching <see cref="LabelEntry"/>, or <code>default</code> if no match was found.</param>
/// <returns>Returns true if a match was found. False if not.</returns>
public bool TryGetMatchingConfigurationEntry(Labeling labeling, out LabelEntry labelEntry)
{
return TryGetMatchingConfigurationEntry(labeling, out labelEntry, out int _);
}
/// <summary>
/// Attempts to find the matching index in <see cref="LabelEntries"/> for the given <see cref="Labeling"/>.
/// </summary>
/// <remarks>
/// The matching index is the first class name in the given Labeling which matches an entry in <see cref="LabelEntries"/>.
/// </remarks>
/// <param name="labeling">The <see cref="Labeling"/> to match </param>
/// <param name="labelEntry">When this method returns, contains the matching <see cref="LabelEntry"/>, or <code>default</code> if no match was found.</param>
/// <param name="labelEntryIndex">When this method returns, contains the index of the matching <see cref="LabelEntry"/>, or <code>-1</code> if no match was found.</param>
/// <returns>Returns true if a match was found. False if not.</returns>
public bool TryGetMatchingConfigurationEntry(Labeling labeling, out LabelEntry labelEntry, out int labelEntryIndex)
{
foreach (var labelingClass in labeling.labels)
{
for (var i = 0; i < LabelEntries.Count; i++)
{
var entry = LabelEntries[i];
if (string.Equals(entry.label, labelingClass))
{
labelEntry = entry;
labelEntryIndex = i;
return true;
}
}
}
labelEntryIndex = -1;
labelEntry = default;
return false;
}
}
/// <summary>
/// Structure defining a label configuration for <see cref="LabelingConfiguration"/>.
/// </summary>
[Serializable]
public struct LabelEntry
{
/// <summary>
/// The id associated with the label. Used to associate objects with labels in various forms of ground truth.
/// </summary>
public int id;
/// <summary>
/// The label string
/// </summary>
public string label;
/// <summary>
/// The value to use when generating semantic segmentation images.
/// </summary>
public int value;
/// <summary>
/// Creates a new LabelingConfigurationEntry with the given values.
/// </summary>
/// <param name="id">The id associated with the label. Used to associate objects with labels in various forms of ground truth.</param>
/// <param name="label">The label string.</param>
/// <param name="value">The value to use when generating semantic segmentation images.</param>
public LabelEntry(int id, string label, int value)
{
this.id = id;
this.label = label;
this.value = value;
}
}
}

181
com.unity.perception/Runtime/GroundTruth/ObjectCountPass.cs


#if HDRP_PRESENT
using Unity.Collections.LowLevel.Unsafe;
using System;
using System.Collections.Generic;
using Unity.Collections;
using UnityEngine.Rendering.HighDefinition;
using UnityEngine.Rendering;
namespace UnityEngine.Perception.GroundTruth
{
/// <summary>
/// CustomPass which computes object count for each label in the given LabelingConfiguration in the frame.
/// Requires the texture produced by an InstanceSegmentationPass.
/// </summary>
public class ObjectCountPass : GroundTruthPass
{
const int k_StartingObjectCount = 1 << 8;
public RenderTexture SegmentationTexture;
public LabelingConfiguration LabelingConfiguration;
ComputeShader m_ComputeShader;
ComputeBuffer m_InstanceIdPresenceMask;
ComputeBuffer m_InstanceIdToClassId;
ComputeBuffer m_ClassCounts;
NativeList<int> m_InstanceIdToLabelIndexLookup;
HashSet<Camera> m_CamerasRendered = new HashSet<Camera>();
bool m_IdBuffersNeedUpdating;
bool m_DidComputeLastFrame;
public ObjectCountPass(Camera camera) : base(camera)
{
}
// ReSharper disable once UnusedMember.Global
public ObjectCountPass() : base(null)
{
}
public override void SetupMaterialProperties(MaterialPropertyBlock mpb, MeshRenderer meshRenderer, Labeling labeling, uint instanceId)
{
if (!m_InstanceIdToLabelIndexLookup.IsCreated)
{
m_InstanceIdToLabelIndexLookup = new NativeList<int>(k_StartingObjectCount, Allocator.Persistent);
}
if (LabelingConfiguration.TryGetMatchingConfigurationEntry(labeling, out LabelEntry labelEntry, out var index))
{
if (m_InstanceIdToLabelIndexLookup.Length <= instanceId)
{
m_InstanceIdToLabelIndexLookup.Resize((int)instanceId + 1, NativeArrayOptions.ClearMemory);
}
m_IdBuffersNeedUpdating = true;
m_InstanceIdToLabelIndexLookup[(int)instanceId] = index + 1;
}
}
protected override void Setup(ScriptableRenderContext renderContext, CommandBuffer cmd)
{
base.Setup(renderContext, cmd);
m_ComputeShader = Resources.Load<ComputeShader>("LabeledObjectHistogram");
var objectCount = k_StartingObjectCount;
UpdateIdBufferSizes(objectCount);
m_ClassCounts = new ComputeBuffer(LabelingConfiguration.LabelEntries.Count + 1, UnsafeUtility.SizeOf<uint>(), ComputeBufferType.Structured);
RenderPipelineManager.endCameraRendering += OnEndCameraRendering;
}
void OnEndCameraRendering(ScriptableRenderContext renderContext, Camera camera)
{
}
void UpdateIdBufferSizes(int objectCount)
{
var presenceMaskSizeNeeded = objectCount;
if (m_InstanceIdPresenceMask == null || presenceMaskSizeNeeded > m_InstanceIdPresenceMask.count)
{
m_InstanceIdPresenceMask?.Release();
m_InstanceIdPresenceMask = new ComputeBuffer(presenceMaskSizeNeeded, UnsafeUtility.SizeOf<uint>(), ComputeBufferType.Structured);
}
if (m_InstanceIdToClassId == null || m_InstanceIdToClassId.count < objectCount)
{
m_InstanceIdToClassId?.Release();
m_InstanceIdToClassId = new ComputeBuffer(objectCount, UnsafeUtility.SizeOf<uint>(), ComputeBufferType.Structured);
}
}
protected override void ExecutePass(ScriptableRenderContext renderContext, CommandBuffer cmd, HDCamera hdCamera, CullingResults cullingResult)
{
//If there are no objects to label, skip the pass
if (!m_InstanceIdToLabelIndexLookup.IsCreated || m_InstanceIdToLabelIndexLookup.Length == 0)
{
var counts = new NativeArray<uint>(LabelingConfiguration.LabelEntries.Count + 1, Allocator.Temp);
OnClassCountReadback(Time.frameCount, counts);
counts.Dispose();
return;
}
m_CamerasRendered.Add(hdCamera.camera);
if (m_IdBuffersNeedUpdating)
{
UpdateIdBufferSizes(m_InstanceIdToLabelIndexLookup.Capacity);
m_InstanceIdToClassId.SetData(m_InstanceIdToLabelIndexLookup.AsArray());
}
//The following section kicks off the four kernels in LabeledObjectHistogram.compute
//clear ClassCounts
cmd.SetComputeBufferParam(m_ComputeShader, 1, "ClassCounts", m_ClassCounts);
cmd.DispatchCompute(m_ComputeShader, 1, m_ClassCounts.count, 1, 1);
//clear InstanceIdPresenceMask
cmd.SetComputeBufferParam(m_ComputeShader, 2, "InstanceIdPresenceMask", m_InstanceIdPresenceMask);
cmd.DispatchCompute(m_ComputeShader, 2, m_InstanceIdPresenceMask.count, 1, 1);
//mark which instance ids are present in SegmentationTexture
cmd.SetComputeTextureParam(m_ComputeShader, 0, "SegmentationTexture", SegmentationTexture);
cmd.SetComputeBufferParam(m_ComputeShader, 0, "InstanceIdPresenceMask", m_InstanceIdPresenceMask);
cmd.SetComputeIntParam(m_ComputeShader, "Width", SegmentationTexture.width);
cmd.SetComputeIntParam(m_ComputeShader, "Height", SegmentationTexture.height);
cmd.DispatchCompute(m_ComputeShader, 0, SegmentationTexture.width, SegmentationTexture.height, 1);
//accumulate the presence mask into per-class counts using InstanceIdToClassId
cmd.SetComputeBufferParam(m_ComputeShader, 3, "InstanceIdPresenceMask", m_InstanceIdPresenceMask);
cmd.SetComputeBufferParam(m_ComputeShader, 3, "InstanceIdToClassId", m_InstanceIdToClassId);
cmd.SetComputeBufferParam(m_ComputeShader, 3, "ClassCounts", m_ClassCounts);
cmd.DispatchCompute(m_ComputeShader, 3, m_InstanceIdToLabelIndexLookup.Length, 1, 1);
var requestFrameCount = Time.frameCount;
cmd.RequestAsyncReadback(m_ClassCounts, request => OnClassCountReadback(requestFrameCount, request.GetData<uint>()));
}
protected override void Cleanup()
{
base.Cleanup();
m_InstanceIdPresenceMask?.Dispose();
m_InstanceIdPresenceMask = null;
m_InstanceIdToClassId?.Dispose();
m_InstanceIdToClassId = null;
m_ClassCounts?.Dispose();
m_ClassCounts = null;
WaitForAllRequests();
if (m_InstanceIdToLabelIndexLookup.IsCreated)
{
m_InstanceIdToLabelIndexLookup.Dispose();
m_InstanceIdToLabelIndexLookup = default;
}
}
internal event Action<NativeSlice<uint>, IReadOnlyList<LabelEntry>, int> ClassCountsReceived;
void OnClassCountReadback(int requestFrameCount, NativeArray<uint> counts)
{
#if PERCEPTION_DEBUG
var sb = new System.Text.StringBuilder();
sb.AppendFormat("Histogram data. Frame {0}", requestFrameCount.ToString());
for (int i = 0; i < LabelingConfiguration.LabelEntries.Count; i++)
{
sb.AppendFormat("{0}: {1}", LabelingConfiguration.LabelEntries[i].label,
counts[i + 1].ToString());
sb.AppendLine();
}
Debug.Log(sb);
#endif
ClassCountsReceived?.Invoke(new NativeSlice<uint>(counts, 1), LabelingConfiguration.LabelEntries, requestFrameCount);
}
public void WaitForAllRequests()
{
var commandBuffer = CommandBufferPool.Get("LabelHistorgramCleanup");
commandBuffer.WaitAllAsyncReadbackRequests();
Graphics.ExecuteCommandBuffer(commandBuffer);
CommandBufferPool.Release(commandBuffer);
}
}
}
#endif

64
com.unity.perception/Runtime/GroundTruth/SemanticSegmentationCrossPipelinePass.cs


using System;
using UnityEngine.Experimental.Rendering;
using UnityEngine.Rendering;
namespace UnityEngine.Perception.GroundTruth
{
/// <summary>
/// Custom Pass which renders labeled images where each object labeled with a Labeling component is drawn with the
/// value specified by the given LabelingConfiguration.
/// </summary>
class SemanticSegmentationCrossPipelinePass : GroundTruthCrossPipelinePass
{
const string k_ShaderName = "Perception/SemanticSegmentation";
static readonly int k_LabelingId = Shader.PropertyToID("LabelingId");
LabelingConfiguration m_LabelingConfiguration;
//Serialize the shader so that the shader asset is included in player builds when the SemanticSegmentationPass is used.
//Currently commented out and shaders moved to Resources folder due to serialization crashes when it is enabled.
//See https://fogbugz.unity3d.com/f/cases/1187378/
//[SerializeField]
Shader m_ClassLabelingShader;
Material m_OverrideMaterial;
public SemanticSegmentationCrossPipelinePass(Camera targetCamera, LabelingConfiguration labelingConfiguration) : base(targetCamera)
{
this.m_LabelingConfiguration = labelingConfiguration;
}
public override void Setup()
{
base.Setup();
m_ClassLabelingShader = Shader.Find(k_ShaderName);
var shaderVariantCollection = new ShaderVariantCollection();
shaderVariantCollection.Add(new ShaderVariantCollection.ShaderVariant(m_ClassLabelingShader, PassType.ScriptableRenderPipeline));
shaderVariantCollection.WarmUp();
m_OverrideMaterial = new Material(m_ClassLabelingShader);
}
protected override void ExecutePass(ScriptableRenderContext renderContext, CommandBuffer cmd, Camera camera, CullingResults cullingResult)
{
var renderList = CreateRendererListDesc(camera, cullingResult, "FirstPass", 0, m_OverrideMaterial, -1);
cmd.ClearRenderTarget(true, true, Color.clear);
DrawRendererList(renderContext, cmd, RendererList.Create(renderList));
}
public override void SetupMaterialProperties(MaterialPropertyBlock mpb, MeshRenderer meshRenderer, Labeling labeling, uint instanceId)
{
var entry = new LabelEntry();
foreach (var l in m_LabelingConfiguration.LabelEntries)
{
if (labeling.labels.Contains(l.label))
{
entry = l;
break;
}
}
//Set the labeling ID so that it can be accessed in ClassSemanticSegmentationPass.shader
mpb.SetInt(k_LabelingId, entry.value);
}
}
}

13
com.unity.perception/Runtime/GroundTruth/SimulationManagementComponentSystem.cs


using System;
using Unity.Entities;
namespace UnityEngine.Perception.GroundTruth
{
class SimulationManagementComponentSystem : ComponentSystem
{
protected override void OnUpdate()
{
SimulationManager.SimulationState?.Update();
}
}
}

192
com.unity.perception/Tests/Runtime/GroundTruthTests/BoundingBox2DTests.cs


using System;
using System.Collections;
using System.Collections.Generic;
using NUnit.Framework;
using Unity.Collections;
using Unity.Entities;
using UnityEngine;
using UnityEngine.Perception.GroundTruth;
using UnityEngine.TestTools;
namespace GroundTruthTests
{
[TestFixture]
public class BoundingBox2DTests : GroundTruthTestBase
{
public class ProducesCorrectBoundingBoxesData
{
public uint[] classCountsExpected;
public RenderedObjectInfo[] boundingBoxesExpected;
public uint[] data;
public BoundingBoxOrigin boundingBoxOrigin;
public int stride;
public string name;
public ProducesCorrectBoundingBoxesData(uint[] data, RenderedObjectInfo[] boundingBoxesExpected, uint[] classCountsExpected, int stride, BoundingBoxOrigin boundingBoxOrigin, string name)
{
this.data = data;
this.boundingBoxesExpected = boundingBoxesExpected;
this.classCountsExpected = classCountsExpected;
this.stride = stride;
this.name = name;
this.boundingBoxOrigin = boundingBoxOrigin;
}
public override string ToString()
{
return name;
}
}
public static IEnumerable ProducesCorrectBoundingBoxesTestCases()
{
yield return new ProducesCorrectBoundingBoxesData(
new uint[]
{
1, 1,
1, 1
}, new[]
{
new RenderedObjectInfo()
{
boundingBox = new Rect(0, 0, 2, 2),
instanceId = 1,
labelId = 1,
pixelCount = 4
}
}, new uint[]
{
1,
0
},
2,
BoundingBoxOrigin.BottomLeft,
"SimpleBox");
yield return new ProducesCorrectBoundingBoxesData(
new uint[]
{
1, 0, 2,
1, 0, 0
}, new[]
{
new RenderedObjectInfo()
{
boundingBox = new Rect(0, 0, 1, 2),
instanceId = 1,
labelId = 1,
pixelCount = 2
},
new RenderedObjectInfo()
{
boundingBox = new Rect(2, 0, 1, 1),
instanceId = 2,
labelId = 2,
pixelCount = 1
}
}, new uint[]
{
1,
1
},
3,
BoundingBoxOrigin.BottomLeft,
"WithGaps");
yield return new ProducesCorrectBoundingBoxesData(
new uint[]
{
1, 2, 1,
1, 2, 1
}, new[]
{
new RenderedObjectInfo()
{
boundingBox = new Rect(0, 0, 3, 2),
instanceId = 1,
labelId = 1,
pixelCount = 4
},
new RenderedObjectInfo()
{
boundingBox = new Rect(1, 0, 1, 2),
instanceId = 2,
labelId = 2,
pixelCount = 2
}
}, new uint[]
{
1,
1
},
3,
BoundingBoxOrigin.BottomLeft,
"Interleaved");
yield return new ProducesCorrectBoundingBoxesData(
new uint[]
{
0, 0,
0, 0,
0, 1
}, new[]
{
new RenderedObjectInfo()
{
boundingBox = new Rect(1, 0, 1, 1),
instanceId = 1,
labelId = 1,
pixelCount = 1
},
}, new uint[]
{
1,
0
},
2,
BoundingBoxOrigin.TopLeft,
"TopLeft");
}
[UnityTest]
public IEnumerator ProducesCorrectBoundingBoxes([ValueSource(nameof(ProducesCorrectBoundingBoxesTestCases))] ProducesCorrectBoundingBoxesData producesCorrectBoundingBoxesData)
{
var label = "label";
var label2 = "label2";
var labelingConfiguration = ScriptableObject.CreateInstance<LabelingConfiguration>();
labelingConfiguration.LabelEntries = new List<LabelEntry>
{
new LabelEntry
{
id = 1,
label = label,
value = 500
},
new LabelEntry
{
id = 2,
label = label2,
value = 500
}
};
var renderedObjectInfoGenerator = new RenderedObjectInfoGenerator(labelingConfiguration);
var groundTruthLabelSetupSystem = World.DefaultGameObjectInjectionWorld.GetExistingSystem<GroundTruthLabelSetupSystem>();
groundTruthLabelSetupSystem.Activate(renderedObjectInfoGenerator);
//Put a plane in front of the camera
AddTestObjectForCleanup(TestHelper.CreateLabeledPlane(.1f, label));
AddTestObjectForCleanup(TestHelper.CreateLabeledPlane(.1f, label2));
yield return null;
var dataNativeArray = new NativeArray<uint>(producesCorrectBoundingBoxesData.data, Allocator.Persistent);
renderedObjectInfoGenerator.Compute(dataNativeArray, producesCorrectBoundingBoxesData.stride, producesCorrectBoundingBoxesData.boundingBoxOrigin, out var boundingBoxes, out var classCounts, Allocator.Temp);
CollectionAssert.AreEqual(producesCorrectBoundingBoxesData.boundingBoxesExpected, boundingBoxes.ToArray());
CollectionAssert.AreEqual(producesCorrectBoundingBoxesData.classCountsExpected, classCounts.ToArray());
dataNativeArray.Dispose();
boundingBoxes.Dispose();
classCounts.Dispose();
groundTruthLabelSetupSystem.Deactivate(renderedObjectInfoGenerator);
renderedObjectInfoGenerator.Dispose();
}
}
}

253
com.unity.perception/Tests/Runtime/GroundTruthTests/Main Camera.prefab


%YAML 1.1
%TAG !u! tag:unity3d.com,2011:
--- !u!1 &2608298934752318752
GameObject:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
serializedVersion: 6
m_Component:
- component: {fileID: 2608298934752318763}
- component: {fileID: 2608298934752318756}
- component: {fileID: 2608298934752318757}
- component: {fileID: 2608298934752318758}
- component: {fileID: 2608298934752318759}
m_Layer: 0
m_Name: Main Camera
m_TagString: MainCamera
m_Icon: {fileID: 0}
m_NavMeshLayer: 0
m_StaticEditorFlags: 0
m_IsActive: 1
--- !u!4 &2608298934752318763
Transform:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 2608298934752318752}
m_LocalRotation: {x: 0, y: 0, z: 0, w: 1}
m_LocalPosition: {x: 0, y: 1, z: -10}
m_LocalScale: {x: 1, y: 1, z: 1}
m_Children: []
m_Father: {fileID: 0}
m_RootOrder: 0
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
--- !u!20 &2608298934752318756
Camera:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 2608298934752318752}
m_Enabled: 1
serializedVersion: 2
m_ClearFlags: 1
m_BackGroundColor: {r: 0.19215687, g: 0.3019608, b: 0.4745098, a: 0}
m_projectionMatrixMode: 1
m_GateFitMode: 2
m_FOVAxisMode: 0
m_SensorSize: {x: 36, y: 24}
m_LensShift: {x: 0, y: 0}
m_FocalLength: 50
m_NormalizedViewPortRect:
serializedVersion: 2
x: 0
y: 0
width: 1
height: 1
near clip plane: 0.3
far clip plane: 1000
field of view: 60
orthographic: 0
orthographic size: 5
m_Depth: -1
m_CullingMask:
serializedVersion: 2
m_Bits: 4294967295
m_RenderingPath: -1
m_TargetTexture: {fileID: 0}
m_TargetDisplay: 0
m_TargetEye: 3
m_HDR: 0
m_AllowMSAA: 0
m_AllowDynamicResolution: 0
m_ForceIntoRT: 0
m_OcclusionCulling: 1
m_StereoConvergence: 10
m_StereoSeparation: 0.022
--- !u!81 &2608298934752318757
AudioListener:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 2608298934752318752}
m_Enabled: 1
--- !u!114 &2608298934752318758
MonoBehaviour:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 2608298934752318752}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: 23c1ce4fb46143f46bc5cb5224c934f6, type: 3}
m_Name:
m_EditorClassIdentifier:
m_Version: 6
m_ObsoleteRenderingPath: 0
m_ObsoleteFrameSettings:
overrides: 0
enableShadow: 0
enableContactShadows: 0
enableShadowMask: 0
enableSSR: 0
enableSSAO: 0
enableSubsurfaceScattering: 0
enableTransmission: 0
enableAtmosphericScattering: 0
enableVolumetrics: 0
enableReprojectionForVolumetrics: 0
enableLightLayers: 0
enableExposureControl: 1
diffuseGlobalDimmer: 0
specularGlobalDimmer: 0
shaderLitMode: 0
enableDepthPrepassWithDeferredRendering: 0
enableTransparentPrepass: 0
enableMotionVectors: 0
enableObjectMotionVectors: 0
enableDecals: 0
enableRoughRefraction: 0
enableTransparentPostpass: 0
enableDistortion: 0
enablePostprocess: 0
enableOpaqueObjects: 0
enableTransparentObjects: 0
enableRealtimePlanarReflection: 0
enableMSAA: 0
enableAsyncCompute: 0
runLightListAsync: 0
runSSRAsync: 0
runSSAOAsync: 0
runContactShadowsAsync: 0
runVolumeVoxelizationAsync: 0
lightLoopSettings:
overrides: 0
enableDeferredTileAndCluster: 0
enableComputeLightEvaluation: 0
enableComputeLightVariants: 0
enableComputeMaterialVariants: 0
enableFptlForForwardOpaque: 0
enableBigTilePrepass: 0
isFptlEnabled: 0
clearColorMode: 0
backgroundColorHDR: {r: 0.025, g: 0.07, b: 0.19, a: 0}
clearDepth: 1
volumeLayerMask:
serializedVersion: 2
m_Bits: 1
volumeAnchorOverride: {fileID: 0}
antialiasing: 0
SMAAQuality: 2
dithering: 0
stopNaNs: 0
physicalParameters:
m_Iso: 200
m_ShutterSpeed: 0.005
m_Aperture: 16
m_BladeCount: 5
m_Curvature: {x: 2, y: 11}
m_BarrelClipping: 0.25
m_Anamorphism: 0
flipYMode: 0
fullscreenPassthrough: 0
allowDynamicResolution: 0
customRenderingSettings: 0
invertFaceCulling: 0
probeLayerMask:
serializedVersion: 2
m_Bits: 4294967295
m_RenderingPathCustomFrameSettings:
bitDatas:
data1: 69730941533981
data2: 4539628424926265344
lodBias: 1
lodBiasMode: 0
lodBiasQualityLevel: 0
maximumLODLevel: 0
maximumLODLevelMode: 0
maximumLODLevelQualityLevel: 0
materialQuality: 0
renderingPathCustomFrameSettingsOverrideMask:
mask:
data1: 0
data2: 0
defaultFrameSettings: 0
--- !u!114 &2608298934752318759
MonoBehaviour:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 2608298934752318752}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: 26d6499a6bd256e47b859377446493a1, type: 3}
m_Name:
m_EditorClassIdentifier:
isGlobal: 1
customPasses:
- id: 0
- id: 1
- id: 2
injectionPoint: 0
references:
version: 1
00000000:
type: {class: InstanceSegmentationPass, ns: UnityEngine.SimViz.Sensors, asm: Unity.SimViz.GroundTruth}
data:
name: Custom Pass
enabled: 1
targetColorBuffer: 1
targetDepthBuffer: 1
clearFlags: 0
passFoldout: 0
targetCamera: {fileID: 0}
layerMask:
serializedVersion: 2
m_Bits: 4294967295
targetTexture: {fileID: 8400000, guid: 6519b622da084c14ba19d257dbc156d8, type: 2}
reassignIds: 0
idStart: 1
idStep: 1
00000001:
type: {class: SemanticSegmentationPass, ns: UnityEngine.SimViz.Sensors, asm: Unity.SimViz.GroundTruth}
data:
name: Custom Pass
enabled: 1
targetColorBuffer: 1
targetDepthBuffer: 1
clearFlags: 0
passFoldout: 0
targetCamera: {fileID: 0}
targetTexture: {fileID: 8400000, guid: 7498338473af7ff4fbbfb55598b6d24e, type: 2}
labelingConfiguration: {fileID: 11400000, guid: 16a81d3f01f4f4345b113509e93fdab6,
type: 2}
00000002:
type: {class: LabelHistogramPass, ns: UnityEngine.SimViz.Sensors, asm: Unity.SimViz.GroundTruth}
data:
name: Custom Pass
enabled: 1
targetColorBuffer: 1
targetDepthBuffer: 1
clearFlags: 0
passFoldout: 0
targetCamera: {fileID: 0}
SegmentationTexture: {fileID: 8400000, guid: 6519b622da084c14ba19d257dbc156d8,
type: 2}
LabelingConfiguration: {fileID: 11400000, guid: 16a81d3f01f4f4345b113509e93fdab6,
type: 2}

7
com.unity.perception/Tests/Runtime/GroundTruthTests/Main Camera.prefab.meta


fileFormatVersion: 2
guid: b423c67e7ddf25444b8b047b1e9c7735
PrefabImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

147
com.unity.perception/Tests/Runtime/GroundTruthTests/ObjectCountTests.cs


using System;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using NUnit.Framework;
using Unity.Collections;
#if UNITY_EDITOR
#endif
using UnityEngine;
using UnityEngine.Perception.GroundTruth;
using UnityEngine.TestTools;
using Object = UnityEngine.Object;
#if HDRP_PRESENT
using UnityEngine.Experimental.Rendering;
using UnityEngine.Rendering.HighDefinition;
#endif
namespace GroundTruthTests
{
//Graphics issues with OpenGL Linux Editor. https://jira.unity3d.com/browse/AISV-422
[UnityPlatform(exclude = new[] {RuntimePlatform.LinuxEditor, RuntimePlatform.LinuxPlayer})]
[TestFixture]
class ObjectCountTests : GroundTruthTestBase
{
[UnityTest]
public IEnumerator ProducesCorrectValuesWithChangingObjects()
{
var label = "label";
var labelingConfiguration = ScriptableObject.CreateInstance<LabelingConfiguration>();
labelingConfiguration.LabelEntries = new List<LabelEntry>
{
new LabelEntry
{
id = 1,
label = label,
value = 500
}
};
var receivedResults = new List<(uint[] counts, LabelEntry[] labels, int frameCount)>();
var cameraObject = SetupCamera(labelingConfiguration, (counts, labels, frameCount) =>
{
receivedResults.Add((counts.ToArray(), labels.ToArray(), frameCount));
});
AddTestObjectForCleanup(cameraObject);
//TestHelper.LoadAndStartRenderDocCapture(out EditorWindow gameView);
var startFrameCount = Time.frameCount;
var expectedFramesAndCounts = new Dictionary<int, int>()
{
{Time.frameCount , 0},
{startFrameCount + 1, 1},
{startFrameCount + 2, 1},
{startFrameCount + 3, 2},
{startFrameCount + 4, 1},
{startFrameCount + 5, 1},
};
yield return null;
//Put a plane in front of the camera
var planeObject = TestHelper.CreateLabeledPlane(.1f, label);
yield return null;
Object.DestroyImmediate(planeObject);
planeObject = TestHelper.CreateLabeledPlane(.1f, label);
yield return null;
var planeObject2 = TestHelper.CreateLabeledPlane(.1f, label);
planeObject2.transform.Translate(.5f, 0, 0);
yield return null;
Object.DestroyImmediate(planeObject);
yield return null;
yield return null;
Object.DestroyImmediate(planeObject2);
#if HDRP_PRESENT
//TODO: Remove this when DestroyImmediate properly calls Cleanup on the pass
var labelHistogramPass = (ObjectCountPass)cameraObject.GetComponent<CustomPassVolume>().customPasses.First(p => p is ObjectCountPass);
labelHistogramPass.WaitForAllRequests();
#endif
//destroy the object to force all pending segmented image readbacks to finish and events to be fired.
DestroyTestObject(cameraObject);
//RenderDoc.EndCaptureRenderDoc(gameView);
foreach (var result in receivedResults)
{
Assert.AreEqual(1, result.counts.Length);
Assert.AreEqual(1, result.labels.Length);
Assert.Contains(result.frameCount, expectedFramesAndCounts.Keys, "Received event with unexpected frameCount.");
var expectedCount = expectedFramesAndCounts[result.frameCount];
var errorString = $"Wrong count in frame {result.frameCount - startFrameCount}. {string.Join(", ", receivedResults.Select(r => $"count: {r.counts[0]}"))}";
Assert.AreEqual(expectedCount, result.counts[0], errorString);
expectedFramesAndCounts.Remove(result.frameCount);
}
CollectionAssert.IsEmpty(expectedFramesAndCounts);
}
static GameObject SetupCamera(LabelingConfiguration labelingConfiguration,
Action<NativeSlice<uint>, IReadOnlyList<LabelEntry>, int> onClassCountsReceived)
{
var cameraObject = new GameObject();
cameraObject.SetActive(false);
var camera = cameraObject.AddComponent<Camera>();
camera.orthographic = true;
camera.orthographicSize = 1;
#if HDRP_PRESENT
cameraObject.AddComponent<HDAdditionalCameraData>();
var customPassVolume = cameraObject.AddComponent<CustomPassVolume>();
customPassVolume.isGlobal = true;
var rt = new RenderTexture(128, 128, 1, GraphicsFormat.R8G8B8A8_UNorm);
rt.Create();
var instanceSegmentationPass = new InstanceSegmentationPass()
{
targetCamera = camera,
targetTexture = rt
};
instanceSegmentationPass.name = nameof(instanceSegmentationPass);
instanceSegmentationPass.EnsureInit();
customPassVolume.customPasses.Add(instanceSegmentationPass);
var objectCountPass = new ObjectCountPass(camera);
objectCountPass.SegmentationTexture = rt;
objectCountPass.LabelingConfiguration = labelingConfiguration;
objectCountPass.name = nameof(objectCountPass);
customPassVolume.customPasses.Add(objectCountPass);
objectCountPass.ClassCountsReceived += onClassCountsReceived;
#endif
#if URP_PRESENT
var perceptionCamera = cameraObject.AddComponent<PerceptionCamera>();
perceptionCamera.LabelingConfiguration = labelingConfiguration;
perceptionCamera.captureRgbImages = false;
perceptionCamera.produceBoundingBoxAnnotations = false;
perceptionCamera.produceObjectCountAnnotations = true;
perceptionCamera.classCountsReceived += onClassCountsReceived;
#endif
cameraObject.SetActive(true);
return cameraObject;
}
}
}

Some files were not shown because too many files changed in this diff
