
Merge branch 'master' into categorical-parameter-fixes

Steven Leal, 4 years ago
Current commit
d68a833f
51 changed files, with 4,087 additions and 281 deletions
Changed files (lines changed shown in parentheses):
  1. .yamato/environments.yml (10)
  2. .yamato/upm-ci-performance.yml (6)
  3. README.md (22)
  4. com.unity.perception/CHANGELOG.md (4)
  5. com.unity.perception/Documentation~/DatasetCapture.md (8)
  6. com.unity.perception/Documentation~/PerceptionCamera.md (118)
  7. com.unity.perception/Documentation~/Randomization/Index.md (50)
  8. com.unity.perception/Documentation~/Randomization/Parameters.md (16)
  9. com.unity.perception/Documentation~/Randomization/RandomizerTags.md (6)
  10. com.unity.perception/Documentation~/Randomization/Randomizers.md (20)
  11. com.unity.perception/Documentation~/Randomization/Samplers.md (8)
  12. com.unity.perception/Documentation~/Randomization/Scenarios.md (34)
  13. com.unity.perception/Documentation~/Schema/Synthetic_Dataset_Schema.md (146)
  14. com.unity.perception/Documentation~/Tutorial/Phase1.md (14)
  15. com.unity.perception/Documentation~/Tutorial/Phase2.md (6)
  16. com.unity.perception/Documentation~/Tutorial/Phase3.md (14)
  17. com.unity.perception/Documentation~/Tutorial/TUTORIAL.md (6)
  18. com.unity.perception/Documentation~/images/PerceptionCameraFinished.png (882)
  19. com.unity.perception/Editor/Randomization/Editors/RunInUnitySimulationWindow.cs (22)
  20. com.unity.perception/Runtime/Randomization/Parameters/CategoricalParameter.cs (6)
  21. com.unity.perception/Documentation~/images/keypoint_template_header.png (122)
  22. com.unity.perception/Documentation~/images/keypoint_template_keypoints.png (194)
  23. com.unity.perception/Documentation~/images/keypoint_template_skeleton.png (99)
  24. com.unity.perception/Editor/Randomization/Editors/RandomizerTagEditor.cs (32)
  25. com.unity.perception/Editor/Randomization/Editors/RandomizerTagEditor.cs.meta (3)
  26. com.unity.perception/Runtime/GroundTruth/Labelers/AnimationPoseLabel.cs (61)
  27. com.unity.perception/Runtime/GroundTruth/Labelers/AnimationPoseLabel.cs.meta (3)
  28. com.unity.perception/Runtime/GroundTruth/Labelers/CocoKeypointTemplate.asset (142)
  29. com.unity.perception/Runtime/GroundTruth/Labelers/CocoKeypointTemplate.asset.meta (8)
  30. com.unity.perception/Runtime/GroundTruth/Labelers/JointLabel.cs (34)
  31. com.unity.perception/Runtime/GroundTruth/Labelers/JointLabel.cs.meta (3)
  32. com.unity.perception/Runtime/GroundTruth/Labelers/KeyPointLabeler.cs (437)
  33. com.unity.perception/Runtime/GroundTruth/Labelers/KeyPointLabeler.cs.meta (3)
  34. com.unity.perception/Runtime/GroundTruth/Labelers/KeyPointTemplate.cs (80)
  35. com.unity.perception/Runtime/GroundTruth/Labelers/KeyPointTemplate.cs.meta (3)
  36. com.unity.perception/Runtime/GroundTruth/Labelers/Visualization/VisualizationHelper.cs (109)
  37. com.unity.perception/Runtime/GroundTruth/Labelers/Visualization/VisualizationHelper.cs.meta (3)
  38. com.unity.perception/Runtime/GroundTruth/Resources/AnimationRandomizerController.controller (72)
  39. com.unity.perception/Runtime/GroundTruth/Resources/AnimationRandomizerController.controller.meta (8)
  40. com.unity.perception/Runtime/GroundTruth/Resources/PlayerIdle.anim (1001)
  41. com.unity.perception/Runtime/GroundTruth/Resources/PlayerIdle.anim.meta (8)
  42. com.unity.perception/Runtime/GroundTruth/Resources/joint_circle.png (6)
  43. com.unity.perception/Runtime/GroundTruth/Resources/joint_circle.png.meta (108)
  44. com.unity.perception/Runtime/Randomization/Parameters/ParameterTypes/CategorialParameters/AnimationClipParameter.cs (10)
  45. com.unity.perception/Runtime/Randomization/Parameters/ParameterTypes/CategorialParameters/AnimationClipParameter.cs.meta (3)
  46. com.unity.perception/Runtime/Randomization/Randomizers/RandomizerExamples/Randomizers/AnimationRandomizer.cs (46)
  47. com.unity.perception/Runtime/Randomization/Randomizers/RandomizerExamples/Randomizers/AnimationRandomizer.cs.meta (3)
  48. com.unity.perception/Runtime/Randomization/Randomizers/RandomizerExamples/Tags/AnimationRandomizerTag.cs (48)
  49. com.unity.perception/Runtime/Randomization/Randomizers/RandomizerExamples/Tags/AnimationRandomizerTag.cs.meta (3)
  50. com.unity.perception/Tests/Runtime/GroundTruthTests/KeyPointGroundTruthTests.cs (307)
  51. com.unity.perception/Tests/Runtime/GroundTruthTests/KeyPointGroundTruthTests.cs.meta (11)

.yamato/environments.yml (10 lines changed)


# sticking to 2019.4.6f1 for testing for now because Linux Editor fails to open PerceptionHDRP on 2019.4.8f1
# see https://fogbugz.unity3d.com/default.asp?1273518_d68j5lb6eucglb84
coverage_editors:
- - version: 2019.4.6f1
+ - version: 2019.4.18f1
- - version: 2019.4.6f1
+ - version: 2019.4.18f1
- - version: 2019.4.6f1
- - version: 2020.1.3f1
+ - version: 2019.4.18f1
+ # - version: 2020.1.3f1
- - version: 2019.4.6f1
+ - version: 2019.4.18f1
# - version: 2020.1.15f1
# - version: 2020.2.0a21

.yamato/upm-ci-performance.yml (6 lines changed)


---
- {% for editor in complete_editors %}
+ {% for editor in performance_editors %}
{% for suite in performance_suites %}
{% for project in projects %}
{{project.name}}_linux_{{suite.name}}_{{editor.version}}:

{% endfor %}
{% endfor %}
- {% for editor in complete_editors %}
+ {% for editor in performance_editors %}
{% for suite in performance_suites %}
{% for project in projects %}
{{project.name}}_windows_{{suite.name}}_{{editor.version}}:

model: rtx2080
- image: package-ci/win10:stable
+ image: graphics-foundation/win10-dxr:stable
flavor: b1.large
commands:
- git submodule update --init --recursive

README.md (22 lines changed)


> com.unity.perception is in active development. Its features and API are subject to significant change as development progresses.
- # Perception
+ # Perception Package (Unity Computer Vision)
- The Perception package provides a toolkit for generating large-scale datasets for perception-based machine learning training and validation. It is focused on a handful of camera-based use cases for now and will ultimately expand to other forms of sensors and machine learning tasks.
+ The Perception package provides a toolkit for generating large-scale datasets for computer vision training and validation. It is focused on a handful of camera-based use cases for now and will ultimately expand to other forms of sensors and machine learning tasks.
## Getting Started

**[Perception Tutorial](com.unity.perception/Documentation~/Tutorial/TUTORIAL.md)**
- Detailed instructions covering all the important steps from installing Unity Editor, to creating your first Perception project, building a randomized Scene, and generating large-scale synthetic datasets by leveraging the power of Unity Simulation. No prior Unity experience required.
+ Detailed instructions covering all the important steps from installing Unity Editor, to creating your first computer vision data generation project, building a randomized Scene, and generating large-scale synthetic datasets by leveraging the power of Unity Simulation. No prior Unity experience required.
- In-depth documentation on inidividual components of the package.
+ In-depth documentation on individual components of the package.
- |[LabelConfig](com.unity.perception/Documentation~/GroundTruthLabeling.md#label-config)|An asset that defines a taxonomy of labels for ground truth generation|
+ |[Label Config](com.unity.perception/Documentation~/GroundTruthLabeling.md#label-config)|An asset that defines a taxonomy of labels for ground truth generation|
- |[DatasetCapture](com.unity.perception/Documentation~/DatasetCapture.md)|Ensures sensors are triggered at proper rates and accepts data for the JSON dataset.|
+ |[Dataset Capture](com.unity.perception/Documentation~/DatasetCapture.md)|Ensures sensors are triggered at proper rates and accepts data for the JSON dataset.|
|[Randomization (Experimental)](com.unity.perception/Documentation~/Randomization/Index.md)|The Randomization tool set lets you integrate domain randomization principles into your simulation.|
## Example Projects

### Unity Simulation Smart Camera example
<img src="com.unity.perception/Documentation~/images/smartcamera.png"/>
- The [Unity Simulation Smart Camera Example](https://github.com/Unity-Technologies/Unity-Simulation-Smart-Camera-Outdoor) illustrates how Perception could be used in a smart city or autonomous vehicle simulation. You can generate datasets locally or at scale in [Unity Simulation](https://unity.com/products/unity-simulation).
+ The [Unity Simulation Smart Camera Example](https://github.com/Unity-Technologies/Unity-Simulation-Smart-Camera-Outdoor) illustrates how the Perception package could be used in a smart city or autonomous vehicle simulation. You can generate datasets locally or at scale in [Unity Simulation](https://unity.com/products/unity-simulation).
## Local development
The repository includes two projects for local development in the `TestProjects` folder, one set up for HDRP and the other for URP.

## License
* [License](com.unity.perception/LICENSE.md)
## Support
For general questions or concerns, please contact the Computer Vision team at computer-vision@unity3d.com.
For feedback, bugs, or other issues, please file a GitHub issue and the Computer Vision team will investigate as soon as possible.
- @misc{com.unity.perception2020,
+ @misc{com.unity.perception2021,
title={Unity {P}erception Package},
author={{Unity Technologies}},
howpublished={\url{https://github.com/Unity-Technologies/com.unity.perception}},

com.unity.perception/CHANGELOG.md (4 lines changed)


### Added
+ Added keypoint ground truth labeling
+ Added animation randomization
+ Added ScenarioConstants base class for all scenario constants objects
+ Added ScenarioBase.SerializeToConfigFile()

com.unity.perception/Documentation~/DatasetCapture.md (8 lines changed)


## Sensor scheduling
- While sensors are registered, `DatasetCapture` ensures that frame timing is deterministic and run at the appropriate simulation times to let each sensor run at its own rate.
+ While sensors are registered, `DatasetCapture` ensures that frame timing is deterministic and that frames run at the appropriate simulation times to let each sensor render and capture at its own rate.
- Using [Time.CaptureDeltaTime](https://docs.unity3d.com/ScriptReference/Time-captureDeltaTime.html), it also decouples wall clock time from simulation time, allowing the simulation to run as fast as possible.
+ Using [Time.captureDeltaTime](https://docs.unity3d.com/ScriptReference/Time-captureDeltaTime.html), it also decouples wall clock time from simulation time, allowing the simulation to run as fast as possible.
- You can register custom sensors using `DatasetCapture.RegisterSensor()`. The `period` you pass in at registration time determines how often (in simulation time) frames should be scheduled for the sensor to run. The sensor implementation then checks `ShouldCaptureThisFrame` on the returned `SensorHandle` each frame to determine whether it is time for the sensor to perform a capture. `SensorHandle.ReportCapture` should then be called in each of these frames to report the state of the sensor to populate the dataset.
+ You can register custom sensors using `DatasetCapture.RegisterSensor()`. The `simulationDeltaTime` you pass in at registration time is used as `Time.captureDeltaTime` and determines how often (in simulation time) frames should be simulated for the sensor to run. This and the `framesBetweenCaptures` value determine the exact times at which the sensor should capture the simulated frames. Decoupling the simulation delta time from the capture frequency lets you render frames in between captures; if no in-between frames are desired, set `framesBetweenCaptures` to 0. When it is time to capture, the `ShouldCaptureThisFrame` check of the `SensorHandle` returns true. `SensorHandle.ReportCapture` should then be called in each of these frames to report the state of the sensor to populate the dataset.
+ `Time.captureDeltaTime` is set every frame so that simulation lands precisely on the next time at which any sensor requires simulation, including in multi-sensor setups. For instance, if one sensor has a `simulationDeltaTime` of 2 and another 3, the first five values for `Time.captureDeltaTime` will be 2, 1, 1, 2, and 2, meaning simulation will happen at the timestamps 0, 2, 3, 4, 6, and 8.
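A rough sketch of this flow is shown below. The `RegisterSensor` overload and argument order vary between package versions, so treat the signature here as an assumption rather than the exact API:
```
using UnityEngine;
using UnityEngine.Perception.GroundTruth;

public class CustomSensor : MonoBehaviour
{
    SensorHandle m_Sensor;

    void Start()
    {
        var ego = DatasetCapture.RegisterEgo("Ego");
        m_Sensor = DatasetCapture.RegisterSensor(
            ego,
            "custom",                     // modality
            "An example custom sensor",   // description
            0,                            // firstCaptureFrame
            CaptureTriggerMode.Scheduled,
            0.1f,                         // simulationDeltaTime, used as Time.captureDeltaTime
            0);                           // framesBetweenCaptures, 0 = capture every simulated frame
    }

    void Update()
    {
        // ShouldCaptureThisFrame is true only on frames scheduled for this sensor.
        if (!m_Sensor.ShouldCaptureThisFrame)
            return;
        // Gather this frame's sensor reading here, then report it to the dataset.
        m_Sensor.ReportCapture("readings/frame_data.raw", default);
    }
}
```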
## Custom annotations and metrics
In addition to the common annotations and metrics produced by [PerceptionCamera](PerceptionCamera.md), scripts can produce their own via `DatasetCapture`. You must first register annotation and metric definitions using `DatasetCapture.RegisterAnnotationDefinition()` or `DatasetCapture.RegisterMetricDefinition()`. These return `AnnotationDefinition` and `MetricDefinition` instances which you can then use to report values during runtime.
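A sketch of the registration step, assuming the API surface of this era of the package (argument lists are assumptions and may differ in your version):
```
using UnityEngine.Perception.GroundTruth;

// Register a metric definition once, at startup.
var metricDef = DatasetCapture.RegisterMetricDefinition(
    "target distance",
    "Distance from the camera to the target object",
    System.Guid.NewGuid()); // in practice, use a fixed GUID so runs stay comparable

// Later, report a value for the current frame.
DatasetCapture.ReportMetric(metricDef, new[] { 1.25f });
```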

com.unity.perception/Documentation~/PerceptionCamera.md (118 lines changed)


- # The Perception Camera component
+ # The Perception Camera Component
- ![Perception Camera component](images/PerceptionCameraFinished.png)
- <br/>_Perception Camera component_
+ <p align="center">
+ <img src="images/PerceptionCameraFinished.png" width="600"/>
+ <br><i>The Inspector view of the Perception Camera component</i>
+ </p>
- | Period | The amount of simulation time in seconds between frames for this Camera. For more information on sensor scheduling, see [DatasetCapture](DatasetCapture.md). |
- | Start Time | The simulation time at which to run the first frame. This time offsets the period, which allows multiple Cameras to run at the correct times relative to each other. |
- | Capture Rgb Images | When you enable this property, Unity captures RGB images as PNG files in the dataset each frame. |
+ | Show Visualizations | Display realtime visualizations for labelers that are currently active on this camera. |
+ | Capture RGB Images | When you enable this property, Unity captures RGB images as PNG files in the dataset each frame. |
+ | Capture Trigger Mode | The method of triggering captures for this camera. In `Scheduled` mode, captures happen automatically based on a start frame and frame delta time. In `Manual` mode, captures should be triggered manually through calling the `RequestCapture` method of `PerceptionCamera`. |
+ ### Properties for Scheduled Capture Mode
+ | Property: | Function: |
+ |--|--|
+ | Simulation Delta Time | The simulation frame time (seconds) for this camera. E.g. 0.0166 translates to 60 frames per second. This will be used as Unity's `Time.captureDeltaTime`, causing a fixed number of frames to be generated for each second of elapsed simulation time regardless of the capabilities of the underlying hardware. For more information on sensor scheduling, see [DatasetCapture](DatasetCapture.md). |
+ | First Capture Frame | Frame number at which this camera starts capturing. |
+ | Frames Between Captures | The number of frames to simulate and render between the camera's scheduled captures. Setting this to 0 makes the camera capture every frame. |
+ ### Properties for Manual Capture Mode
+ | Property: | Function: |
+ |--|--|
+ | Affect Simulation Timing | Have this camera affect simulation timings (similar to a scheduled camera) by requesting a specific frame delta time. Enabling this option will let you set the `Simulation Delta Time` property described above. |
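A minimal sketch of triggering a capture in Manual mode. `RequestCapture` is the method named in the table above; the component lookup and key check are ordinary Unity code:
```
using UnityEngine;
using UnityEngine.Perception.GroundTruth;

public class CaptureOnKeyPress : MonoBehaviour
{
    public PerceptionCamera perceptionCamera;

    void Update()
    {
        // In Manual capture mode, captures only happen when explicitly requested.
        if (Input.GetKeyDown(KeyCode.Space))
            perceptionCamera.RequestCapture();
    }
}
```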
- ### SemanticSegmentationLabeler
+ ### Semantic Segmentation Labeler
- ### InstanceSegmentationLabeler
+ ### Instance Segmentation Labeler
- ### BoundingBox2DLabeler
+ ### Bounding Box 2D Labeler
![Example bounding box visualization from SynthDet generated by the `SynthDet_Statistics` Jupyter notebook](images/bounding_boxes.png)
<br/>_Example bounding box visualization from SynthDet generated by the `SynthDet_Statistics` Jupyter notebook_

- The Bounding Box 3D Ground Truth Labeler prouces 3D ground truth bounding boxes for each labeled game object in the scene. Unlike the 2D bounding boxes, 3D bounding boxes are calculated from the labeled meshes in the scene and all objects (independent of their occlusion state) are recorded.
+ The Bounding Box 3D Ground Truth Labeler produces 3D ground truth bounding boxes for each labeled game object in the scene. Unlike the 2D bounding boxes, 3D bounding boxes are calculated from the labeled meshes in the scene and all objects (independent of their occlusion state) are recorded.
- ### ObjectCountLabeler
+ ### Object Count Labeler
```
{ ... }
```
The ObjectCountLabeler records object counts for each label you define in the IdLabelConfig. Unity only records objects that have at least one visible pixel in the Camera frame.
- ### RenderedObjectInfoLabeler
+ ### Rendered Object Info Labeler
```
{
  "label_id": 24,
  ...
}
```
_Example rendered object info for a single object_
The RenderedObjectInfoLabeler records a list of all objects visible in the Camera image, including their instance IDs, resolved label IDs, and visible pixel counts. If Unity cannot resolve objects to a label in the IdLabelConfig, it does not record these objects.
### KeypointLabeler
The keypoint labeler captures keypoints of a labeled GameObject. The typical use of this labeler is capturing human pose
estimation data. The labeler uses a [keypoint template](#keypoint-template) which defines the keypoints to capture for the
model and the skeletal connections between those keypoints. The positions of the keypoints are recorded in pixel coordinates
and saved to the captures JSON file.
```
keypoints {
label_id: <int> -- Integer identifier of the label
instance_id: <str> -- UUID of the instance.
template_guid: <str> -- UUID of the keypoint template
pose: <str> -- Pose ground truth information
keypoints [ -- Array of keypoint data, one entry for each keypoint defined in associated template file.
{
index: <int> -- Index of keypoint in template
x: <float> -- X pixel coordinate of keypoint
y: <float> -- Y pixel coordinate of keypoint
state: <int> -- 0: keypoint does not exist, 2: keypoint exists
}, ...
]
}
```
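An illustrative capture entry under this schema (all identifiers and coordinate values below are made up):
```
{
  "label_id": 1,
  "instance_id": "3fa85f64-5717-4562-b3fc-2c963f66afa6",
  "template_guid": "d5d8d3c5-cd8e-4f65-b231-ab4fb4cb0b4e",
  "pose": "standing",
  "keypoints": [
    { "index": 0, "x": 412.2, "y": 143.9, "state": 2 },
    { "index": 1, "x": 418.7, "y": 180.1, "state": 2 }
  ]
}
```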
#### Keypoint Template
Keypoint templates are used to define the keypoints and skeletal connections captured by the KeypointLabeler. The keypoint
template takes advantage of Unity's humanoid animation rig, and allows the user to automatically associate template keypoints
to animation rig joints. Additionally, the user can choose to ignore the rigged points, or add points not defined in the rig.
A COCO keypoint template is included in the Perception package.
##### Editor
The keypoint template editor allows the user to create/modify a keypoint template. The editor consists of the header information,
the keypoint array, and the skeleton array.
![Header section of the keypoint template](images/keypoint_template_header.png)
<br/>_Header section of the keypoint template_
In the header section, a user can change the name of the template and supply textures that they would like to use for the keypoint
visualization.
![The keypoint section of the keypoint template](images/keypoint_template_keypoints.png)
<br/>_Keypoint section of the keypoint template_
The keypoint section allows the user to create/edit keypoints and associate them with Unity animation rig points. Each keypoint record
has 4 fields: label (the name of the keypoint), Associate to Rig (a boolean value which, if true, automatically maps the keypoint to
the gameobject defined by the rig), Rig Label (only needed if Associate To Rig is true, defines which rig component to associate with
the keypoint), and Color (RGB color value of the keypoint in the visualization).
![Skeleton section of the keypoint template](images/keypoint_template_skeleton.png)
<br/>_Skeleton section of the keypoint template_
The skeleton section allows the user to create connections between joints, defining the skeleton of a labeled object.
##### Format
```
annotation_definition.spec {
template_id: <str> -- The UUID of the template
template_name: <str> -- Human readable name of the template
key_points [ -- Array of joints defined in this template
{
label: <str> -- The label of the joint
index: <int> -- The index of the joint
}, ...
]
skeleton [ -- Array of skeletal connections (which joints have connections between one another) defined in this template
{
joint1: <int> -- The first joint of the connection
joint2: <int> -- The second joint of the connection
}, ...
]
}
```
#### Animation Pose Label
This asset type is used to map timestamps in an animation clip to pose labels.
## Limitations

com.unity.perception/Documentation~/Randomization/Index.md (50 lines changed)


*NOTE: The Perception package's randomization toolset is currently marked as experimental and is subject to change.*
- The randomization toolset simplifies randomizing aspects of generating synthetic data. It facilitates exposing parameters for randomization, offers samplers to pick random values from parameters, and provides scenarios to coordinate a full randomization process. Each of these also allows for custom implementations to fit particular randomization needs.
+ The randomization toolset simplifies randomizing aspects of generating synthetic data. It facilitates exposing parameters for randomization, offers samplers to pick random values from parameters, and provides Scenarios to coordinate a full randomization process. Each of these also allows for custom implementations to fit particular randomization needs.
- Our use of domain randomization draws from Tobin et al. (2017) work training robotic pick and place using purely synthetic data.
+ Our use of domain randomization draws from Tobin et al.'s (2017) work on training robotic pick and place using purely synthetic data.
- 1. Create a scenario
- 2. Define and add randomizers to the scenario
- 3. Customize parameters and samplers in the randomizers
- 4. Generate randomized perception data
+ 1. Create a Scenario
+ 2. Define and add Randomizers to the Scenario
+ 3. Customize Parameters and Samplers in the Randomizers
+ 4. Generate randomized computer vision training data
- Beginning with step 1, add a scenario component to your simulation. This scenario will act as the central hub for all randomization activities that occur when your scene is executed.
+ Beginning with step 1, add a Scenario component to your simulation. This Scenario will act as the central hub for all randomization activities that occur when your scene is executed.
- Next, add a few randomizers to the scenario. The randomizers, in conjunction with the scenario, will perform the actual randomization activities within the simulation.
+ Next, add a few Randomizers to the Scenario. The Randomizers, in conjunction with the Scenario, will perform the actual randomization activities within the simulation.
- After adding the necessary randomizers, configure the random parameters assigned to each randomizer to further customize how the simulation is randomized. The random parameters and samplers exposed in each randomizer's inspector can be manipulated to specify different probabilty distributions to use when generating random values.
+ After adding the necessary Randomizers, configure the random Parameters assigned to each Randomizer to further customize how the simulation is randomized. The random Parameters and Samplers exposed in each Randomizer's inspector can be manipulated to specify different probability distributions to use when generating random values.
- Once the project has been randomized and your scene has been configured with the data capture tools available in the perception package, enter play mode in the editor or execute your scenario through the Unity Simulation Cloud service to generate domain randomized perception data.
+ Once the project has been randomized and your scene has been configured with the data capture tools available in the Perception package, enter play mode in the editor or execute your Scenario through the Unity Simulation cloud service to generate domain randomized perception data.
- Continue reading for more details concerning the primary components driving randomizations in the perception package, including:
+ Continue reading for more details concerning the primary components driving randomizations in the Perception package, including:
1. Scenarios
2. Randomizers
3. Randomizer Tags

## Scenarios
- Within a randomized simulation, the scenario component has three responsibilities:
+ Within a randomized simulation, the Scenario component has three responsibilities:
- 2. Defining a list of randomizers
+ 2. Defining a list of Randomizers
- The fundamental principle of domain randomization is to simulate environments under a variety of randomized conditions. Each **iteration** of a scenario is intended to encapsulate one complete run of a simulated environment under uniquely randomized conditions. Scenarios futher define what conditions determine the end of an iteration and how many iterations to perform.
+ The fundamental principle of domain randomization is to simulate environments under a variety of randomized conditions. Each Iteration of a Scenario is intended to encapsulate one complete run of a simulated environment under uniquely randomized conditions. Scenarios further define what conditions determine the end of an Iteration and how many Iterations to perform.
- To actually randomize a simulation, randomizers can be added to a scenario to vary different simulation properties. At runtime, the scenario will execute each randomizer according to its place within the randomizers list.
+ To actually randomize a simulation, Randomizers can be added to a Scenario to vary different simulation properties. At runtime, the Scenario will execute each Randomizer according to its place within the Randomizer list.
- To read more about scenarios and how to customize them, navigate over to the [scenarios doc](Scenarios.md).
+ To read more about Scenarios and how to customize them, navigate over to the **[Scenarios documentation](Scenarios.md)**.
- Randomizers encapsulate specific randomization activities to perform during the lifecycle of a randomized simulation. For example, randomizers exist for spawning objects, repositioning lights, varying the color of objects, etc. Randomizers expose random parameters to their inspector interface to further customize these variations.
+ Randomizers encapsulate specific randomization activities to perform during the lifecycle of a randomized simulation. For example, Randomizers exist for spawning objects, repositioning lights, varying the color of objects, etc. Randomizers expose random Parameters to their inspector interface to further customize these variations.
- To read more about how to create custom parameter types, navigate over to the **[Randomizers doc](Randomizers.md)**.
+ To read more about how to create custom Parameter types, navigate over to the **[Randomizers documentation](Randomizers.md)**.
- RandomizerTags are the primary mechanism by which randomizers query for a certain subset of GameObjects to randoize within a simulation. For example, a rotation randomizer could query for all GameObjects with a RotationRandomizerTag component to obtain an array of all objects the randomizer should vary for the given simulation iteration.
+ RandomizerTags are the primary mechanism by which Randomizers query for a certain subset of GameObjects to randomize within a simulation. For example, a rotation Randomizer could query for all GameObjects with a RotationRandomizerTag component to obtain an array of all objects the Randomizer should vary for the given simulation Iteration.
- To read more about how to use RandomizerTags, navigate over to the **[RandomizerTags doc](RandomizerTags.md)**.
+ To read more about how to use RandomizerTags, navigate over to the **[RandomizerTags documentation](RandomizerTags.md)**.
- Parameters are classes that utilize samplers to deterministically generate random typed values. Parameters are often exposed within the inspector interface of randomizers to allow users to customize said randomizer's behavior. To accomplish this, parameters combine and transform the float values produced by one or more samplers into various C# types. For example, a Vector3 parameter can be used to map three samplers to the x, y, and z dimensions of a GameObject. Or a material parameter can utilize a sampler to randomly select one material from a list of possible options.
+ Parameters are classes that utilize Samplers to deterministically generate random typed values. Parameters are often exposed within the inspector interface of Randomizers to allow users to customize said Randomizer's behavior. To accomplish this, Parameters combine and transform the float values produced by one or more Samplers into various C# types. For example, a Vector3 Parameter can be used to map three Samplers to the x, y, and z dimensions of a GameObject. Or a material Parameter can utilize a Sampler to randomly select one material from a list of possible options.
- To read more about how to create custom parameter types, navigate over to the **[Parameters doc](Parameters.md)**.
+ To read more about how to create custom Parameter types, navigate over to the **[Parameters documentation](Parameters.md)**.
- Samplers generate bounded random float values by sampling from probability distributions. They are considered bounded since each random sampler generates float values within a range defined by a minumum and maximum value.
+ Samplers generate bounded random float values by sampling from probability distributions. They are considered bounded since each random sampler generates float values within a range defined by a minimum and maximum value.
- Take a look at the **[Samplers doc](Samplers.md)** to learn more about implementing custom probability distributions and samplers that can integrate with the perception package.
+ Take a look at the **[Samplers doc](Samplers.md)** to learn more about implementing custom probability distributions and samplers that can integrate with the Perception package.
- Visit the [Perception Tutorial](../Tutorial/TUTORIAL.md) to get started using the perception package's randomization tools in an example project.
+ Visit the [Perception Tutorial](../Tutorial/TUTORIAL.md) to get started using the Perception package's randomization tools in an example project.

com.unity.perception/Documentation~/Randomization/Parameters.md (16 lines changed)


## Creating and Sampling Parameters
- Parameters are often defined as fields of a randomizer class, but they can also be instanced just like any other C# class:
+ Parameters are often defined as fields of a Randomizer class, but they can also be instanced just like any other C# class:
- // Create a color parameter
+ // Create a color Parameter
var colorParameter = new HsvaColorParameter();
// Generate one color sample

- Note that parameters, like samplers, generate new random values for each call to the Sample() method:
+ Note that Parameters, like Samplers, generate new random values for each call to the Sample() method:
```
var color1 = colorParameter.Sample();
var color2 = colorParameter.Sample();
```

## Defining Custom Parameters
- All parameters derive from the `Parameter` abstract class, but all included perception package parameter types derive from two specialized Parameter base classes:
+ All Parameters derive from the `Parameter` abstract class. Additionally, the Parameter types included in the Perception package derive from two specialized Parameter base classes:
1. `CategoricalParameter`
2. `NumericParameter`

### Categorical Parameters
- Categorical parameters choose a value from a list of options that have no intrinsic ordering. For example, a material paramater randomly chooses from a list of material options, but the list of material options itself can be rearranged into any particular order without affecting the distribution of materials selected.
+ Categorical Parameters choose a value from a list of options that have no intrinsic ordering. For example, a material Parameter randomly chooses from a list of material options, but the list of material options itself can be rearranged into any particular order without affecting the distribution of materials selected.
- If your custom parameter is a categorical in nature, take a look at the [StringParameter]() class included in the perception package as a reference for how to derive the `CategoricalParameter` class.
+ If your custom Parameter is categorical in nature, take a look at the [StringParameter]() class included in the Perception package as a reference for how to derive the `CategoricalParameter` class.
```
using UnityEngine.Perception.Randomization.Parameters.Attributes;
// ... (class body collapsed in the diff view)
}
```
- **Note:** the AddComponentMenu attribute with an empty string prevents parameters from appearing in the Add Component GameObject menu. Randomization parameters should only be created with by a `ParameterConfiguration`
+ **Note:** the AddComponentMenu attribute with an empty string prevents Parameters from appearing in the Add Component GameObject menu. Randomization Parameters should only be created by a `ParameterConfiguration`.
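For a sense of scale, a categorical Parameter can be a one-liner. This sketch assumes the generic `CategoricalParameter<T>` base that the StringParameter mentioned above derives from:
```
using System;
using UnityEngine.Perception.Randomization.Parameters;

// A categorical Parameter that selects one string from the list of options
// entered in the inspector, uniformly or by configured per-option weights.
[Serializable]
public class StringParameter : CategoricalParameter<string> {}
```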
- Numeric parameters use samplers to generate randomized structs. Take a look at the [ColorHsvaParameter]() class included in the perception package for an example on how to implement a numeric parameter.
+ Numeric Parameters use samplers to generate randomized structs. Take a look at the [ColorHsvaParameter]() class included in the Perception package for an example on how to implement a numeric Parameter.
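And a numeric Parameter sketch, modeled loosely on the package's built-in FloatParameter; the `samplers` member and its exact signature are assumptions for illustration, so check the version you have installed:
```
using System;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.Perception.Randomization.Parameters;
using UnityEngine.Perception.Randomization.Samplers;

[Serializable]
public class MyFloatParameter : NumericParameter<float>
{
    // The sampler backing this parameter; editable in the inspector.
    [SerializeReference] public ISampler value = new UniformSampler(0f, 1f);

    // Transform sampler output into the parameter's typed value.
    public override float Sample() => value.Sample();

    // Expose every sampler so the Scenario can reseed them deterministically.
    public override IEnumerable<ISampler> samplers
    {
        get { yield return value; }
    }
}
```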

com.unity.perception/Documentation~/Randomization/RandomizerTags.md (6 lines changed)


# Randomizer Tags
- RandomizerTags are the primary mechanism by which randomizers query for a certain subset of GameObjects to randomize within a simulation.
+ RandomizerTags are the primary mechanism by which Randomizers query for a certain subset of GameObjects to randomize within a simulation.
- More specifically, RandomizerTags are components that can be added to GameObjects to register them with the active scenario's TagManager. This TagManager is aware of all objects with tags in the scene and can be queried to find all GameObjects that contain a specific tag. Below is a simple example of a ColorRandomizer querying for all GameObjects with a ColorRandomizerTag that it will apply a random material base color to:
+ More specifically, RandomizerTags are components that can be added to GameObjects to register them with the active Scenario's TagManager. This TagManager is aware of all objects with tags in the scene and can be queried to find all GameObjects that contain a specific tag. Below is a simple example of a ColorRandomizer querying for all GameObjects with a ColorRandomizerTag that it will apply a random material base color to:
```
[Serializable]
// ... (randomizer body collapsed in the diff view; a sketch follows below)
}
```
- RandomizerTags can also be used to customize how randomizers apply their randomizations to a particular GameObject. Visit [Phase 2 of the Perception Tutorial](../Tutorial/TUTORIAL.md) to explore an in depth example of implementing a LightRandomizer that does exactly this.
+ RandomizerTags can also be used to customize how Randomizers apply their randomizations to a particular GameObject. Visit [Phase 2 of the Perception Tutorial](../Tutorial/TUTORIAL.md) to explore an in-depth example of implementing a LightRandomizer that does exactly this.
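A sketch of what the collapsed ColorRandomizer above might look like, based on the sample shipped with the package; the `_BaseColor` shader property assumes an HDRP/URP lit material, and this is an illustration rather than the verbatim package source:
```
using System;
using UnityEngine;
using UnityEngine.Perception.Randomization.Parameters;
using UnityEngine.Perception.Randomization.Randomizers;

[Serializable]
[AddRandomizerMenu("Perception/Color Randomizer")]
public class ColorRandomizer : Randomizer
{
    static readonly int k_BaseColor = Shader.PropertyToID("_BaseColor");

    public ColorHsvaParameter colorParameter;

    protected override void OnIterationStart()
    {
        // Query the Scenario's TagManager for every GameObject carrying a
        // ColorRandomizerTag, then assign each a freshly sampled base color.
        var taggedObjects = tagManager.Query<ColorRandomizerTag>();
        foreach (var taggedObject in taggedObjects)
        {
            var renderer = taggedObject.GetComponent<MeshRenderer>();
            renderer.material.SetColor(k_BaseColor, colorParameter.Sample());
        }
    }
}
```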

com.unity.perception/Documentation~/Randomization/Randomizers.md (20 lines changed)


# Randomizers
- Randomizers encapsulate specific randomization activities to perform during the execution of a randomized simulation. For example, randomizers exist for spawning objects, repositioning lights, varying the color of objects, etc. Randomizers expose random parameters to their inspector interface to further customize these variations. Users can add a set of randomizers to a scenario in order to define an ordered list randomization activities to perform during the lifecycle of a simulation.
+ Randomizers encapsulate specific randomization activities to perform during the execution of a randomized simulation. For example, Randomizers exist for spawning objects, repositioning lights, varying the color of objects, etc. Randomizers expose random parameters to their inspector interface to further customize these variations. Users can add a set of Randomizers to a Scenario in order to define an ordered list of randomization activities to perform during the lifecycle of a simulation.
- To define an entirely new randomizer, derive the Randomizer class and implement one or more of the methods listed in the section below to randomize GameObjects during the runtime of a simulation.
+ To define an entirely new Randomizer, derive the Randomizer class and implement one or more of the methods listed in the section below to randomize GameObjects during the runtime of a simulation.
- 1. OnCreate() - called when the Randomizer is added or loaded to a scenario
- 2. OnIterationStart() - called at the start of a new scenario iteration
- 3. OnIterationEnd() - called the after a scenario iteration has completed
- 4. OnScenarioComplete() - called the after the entire scenario has completed
+ 1. OnCreate() - called when the Randomizer is added or loaded to a Scenario
+ 2. OnIterationStart() - called at the start of a new Scenario Iteration
+ 3. OnIterationEnd() - called after a Scenario Iteration has completed
+ 4. OnScenarioComplete() - called after the entire Scenario has completed
5. OnStartRunning() - called on the first frame a Randomizer is enabled
6. OnStopRunning() - called on the first frame a disabled Randomizer is updated
7. OnUpdate() - executed every frame for enabled Randomizers

- Below is the code for the sample rotation randomizer included with the perception package:
+ Below is the code for the sample rotation Randomizer included with the Perception package:
```
[Serializable]
// ... (randomizer body collapsed in the diff view; a sketch follows below)
```
There are a few key things to note from this example:
- 1. Make sure to add the [Serializable] tag to all randomizer implementations to ensure that the randomizer can be customized and saved within the Unity Editor.
- 2. The [AddRandomizerMenu] attribute customizes the "Add Randomizer" sub menu path in the scenario inspector for a particular randomizer. In this example, the RotationRandomizer can be added to a scenario by opening the add randomizer menu and clicking `Perception -> Rotation Randomizer`.
- 3. The line `var taggedObjects = tagManager.Query<RotationRandomizerTag>();` uses RandomizerTags in combination with the current Scenario's tagManager to query for all objects with RotationRandomizerTags to obtain the subset of GameObjects within the simulation that need to have their rotations randomzied. To learn more about how RandomizerTags work, visit the [RandomizerTags doc](RandomizerTags.md).
+ 1. Make sure to add the [Serializable] tag to all Randomizer implementations to ensure that the Randomizer can be customized and saved within the Unity Editor.
+ 2. The [AddRandomizerMenu] attribute customizes the "Add Randomizer" sub menu path in the Scenario inspector for a particular Randomizer. In this example, the RotationRandomizer can be added to a Scenario by opening the _**Add Randomizer**_ menu and clicking `Perception -> Rotation Randomizer`.
+ 3. The line `var taggedObjects = tagManager.Query<RotationRandomizerTag>();` uses RandomizerTags in combination with the current Scenario's tagManager to query for all objects with RotationRandomizerTags and obtain the subset of GameObjects within the simulation that need to have their rotations randomized. To learn more about how RandomizerTags work, visit the [RandomizerTags documentation page](RandomizerTags.md).
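For reference, a sketch of the collapsed sample above, reconstructed from the pieces these notes mention ([Serializable], [AddRandomizerMenu], and the tagManager query); treat it as illustrative rather than the verbatim package source:
```
using System;
using UnityEngine;
using UnityEngine.Perception.Randomization.Parameters;
using UnityEngine.Perception.Randomization.Randomizers;

[Serializable]
[AddRandomizerMenu("Perception/Rotation Randomizer")]
public class RotationRandomizer : Randomizer
{
    // Samples a random Euler rotation; sampler ranges can be edited in the inspector.
    public Vector3Parameter rotation = new Vector3Parameter();

    protected override void OnIterationStart()
    {
        // Use RandomizerTags to find every object that opted into rotation
        // randomization, then assign each a newly sampled rotation.
        var taggedObjects = tagManager.Query<RotationRandomizerTag>();
        foreach (var taggedObject in taggedObjects)
            taggedObject.transform.rotation = Quaternion.Euler(rotation.Sample());
    }
}
```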

com.unity.perception/Documentation~/Randomization/Samplers.md (8 lines changed)


# Samplers
- Samplers in the perception package are classes that deterministically generate random float values from bounded probability distributions. Although samplers are often used in conjunction with parameters to generate arrays of typed random values, samplers can be instantiated and used from any ordinary script:
+ Samplers in the perception package are classes that deterministically generate random float values from bounded probability distributions. Although Samplers are often used in conjunction with Parameters to generate arrays of typed random values, Samplers can be instantiated and used from any ordinary script:
```
var sampler = new NormalSampler();
sampler.mean = 3;
var sample = sampler.Sample();
```

## Random Seeding
- Samplers generate random values that are seeded by the active scenario's current random state. Changing the scenario's random seed will result in samplers generating different values. Changing the order of samplers, parameters, or randomizers will also result in different values being sampled during a simulation.
+ Samplers generate random values that are seeded by the active Scenario's current random state. Changing the Scenario's random seed will result in Samplers generating different values. Changing the order of Samplers, Parameters, or Randomizers will also result in different values being sampled during a simulation.
- It is recommended that users do not generate random values using the UnityEngine.Random() class or the System.Random() class within a simulation since both of these classes can potentially generate non-determinisitic or improperly seeded random values. Using only Perception Samplers to generate random values will help ensure that Perception simulations generate consistent results during local execution and on Unity Simulation in the cloud.
+ It is recommended that users do not generate random values using the UnityEngine.Random() class or the System.Random() class within a simulation since both of these classes can potentially generate non-deterministic or improperly seeded random values. Using only Perception Samplers to generate random values will help ensure that Perception simulations generate consistent results during local execution and on Unity Simulation in the cloud.
## Custom Samplers

Samplers have a NativeSamples() method that can schedule a ready-made multi-threaded job intended for generating a large array of samples. Below is an example of how to combine two job handles returned by NativeSamples() to generate two arrays of samples simultaneously:
```
- // Create samplers
+ // Create Samplers
var uniformSampler = new UniformSampler
{
    range = new FloatRange(0, 1),
    // ... (remainder collapsed in the diff view; a fuller sketch follows below)
```
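A fuller sketch of the NativeSamples() pattern described above; the sample count and field values are arbitrary, and the exact `NativeSamples(int, out JobHandle)` signature is an assumption based on this era of the package:
```
using Unity.Jobs;
using UnityEngine.Perception.Randomization.Samplers;

public static class SamplerJobsExample
{
    public static void GenerateSamples()
    {
        var uniformSampler = new UniformSampler { range = new FloatRange(0, 1) };
        var normalSampler = new NormalSampler
        {
            range = new FloatRange(-1, 1),
            mean = 0,
            standardDeviation = 1
        };

        // Schedule both sampling jobs, then combine their handles so the two
        // arrays are generated simultaneously on worker threads.
        var uniformSamples = uniformSampler.NativeSamples(1000, out var uniformHandle);
        var normalSamples = normalSampler.NativeSamples(1000, out var normalHandle);
        JobHandle.CombineDependencies(uniformHandle, normalHandle).Complete();

        // ... consume the two NativeArrays here ...

        uniformSamples.Dispose();
        normalSamples.Dispose();
    }
}
```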

com.unity.perception/Documentation~/Randomization/Scenarios.md (34 lines changed)


Scenarios have three responsibilities:
1. Controlling the execution flow of your simulation
- 2. Defining a list of randomizers
+ 2. Defining a list of Randomizers
- By default, the perception package includes one ready-made scenario, the `FixedLengthScenario` class. This scenario runs each iteration for a fixed number of frames and is compatible with the Run in Unity Simulation window for cloud simulation execution.
+ By default, the Perception package includes one ready-made Scenario, the `FixedLengthScenario` class. This Scenario runs each Iteration for a fixed number of frames and is compatible with the Run in Unity Simulation window for cloud simulation execution.
- Users can utilize Unity's Unity Simulation service to execute a scenario in the cloud through the perception package's Run in Unity Simulation window. To open this window from the Unity editor using the top menu bar, navigate to `Window -> Run in Unity Simulation`.
+ Users can utilize the Unity Simulation service to execute a Scenario in the cloud through the Perception package's Run in Unity Simulation window. To open this window from the Unity Editor's top menu bar, navigate to `Window -> Run in Unity Simulation`.
- 2. **Total Iterations** - The number of scenario iterations to complete during the run
+ 2. **Total Iterations** - The number of Scenario Iterations to complete during the run
- 5. **Scenario** - The scenario to execute
- 6. **Sys-Param** - The system parameters or the hardware configuration of Unity Simulation worker instances to execute the scenario with. Determines per instance specifications such as the number of CPU cores, amount of memory, and presence of a GPU for accelerated execution.
+ 5. **Scenario** - The Scenario to execute
+ 6. **Sys-Param** - The system parameters or the hardware configuration of Unity Simulation worker instances to execute the Scenario with. Determines per instance specifications such as the number of CPU cores, amount of memory, and presence of a GPU for accelerated execution.
- NOTE: To execute a scenario using the Run in Unity Simulation window, the scenario class must implement the UnitySimulationScenario class.
+ NOTE: To execute a Scenario using the Run in Unity Simulation window, the Scenario class must derive from the UnitySimulationScenario class.
- For use cases where the scenario should run for an arbitrary number of frames, implementing a custom scenario may be necessary. Below are the two most common scenario properties a user might want to override to implement custom scenario iteration conditions:
- 1. **isIterationComplete** - determines the conditions that cause the end of a scenario iteration
- 2. **isScenarioComplete** - determines the conditions that cause the end of a scenario
+ For use cases where the Scenario should run for an arbitrary number of frames, implementing a custom Scenario may be necessary. Below are the two most common Scenario properties a user might want to override to implement custom Scenario Iteration conditions (a sketch follows this list):
+ 1. **isIterationComplete** - determines the conditions that cause the end of a Scenario Iteration
+ 2. **isScenarioComplete** - determines the conditions that cause the end of a Scenario
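A sketch of such a custom Scenario, modeled on `FixedLengthScenario`; member names like `currentIterationFrame` and `currentIteration` are assumptions based on this version of the package:
```
using System;
using UnityEngine.Perception.Randomization.Scenarios;

public class MyCustomScenario : UnitySimulationScenario<MyCustomScenario.Constants>
{
    [Serializable]
    public class Constants : UnitySimulationScenarioConstants
    {
        // End each Iteration after this many simulated frames.
        public int framesPerIteration = 10;
    }

    protected override bool isIterationComplete =>
        currentIterationFrame >= constants.framesPerIteration;

    protected override bool isScenarioComplete =>
        currentIteration >= constants.totalIterations;
}
```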
- Scenarios can be serialized to JSON, modified, and reimported at runtime to configure simulation behavior even after a Unity player has been built. Constants and randomizer sampler settings are the two primary sections generated when serializing a scenario. Note that currently, only numerical samplers are serialized. Below is the contents of a JSON configuration file created when serializing the scenario used in Phase 1 of the [Perception Tutorial](../Tutorial/TUTORIAL.md):
+ Scenarios can be serialized to JSON, modified, and reimported at runtime to configure simulation behavior even after a Unity player has been built. Constants and Randomizer Sampler settings are the two primary sections generated when serializing a Scenario. Note that currently, only numerical Samplers are serialized. Below are the contents of a JSON configuration file created when serializing the Scenario used in Phase 1 of the [Perception Tutorial](../Tutorial/TUTORIAL.md):
```
{
  "constants": {
    ... (remainder collapsed in the diff view; see the example at the end of this section)
```

### Constants
- Constants can include properties such as starting iteration value or total iteration count, and you can always add your own custom constants. Below is an example of the constants class used in the `FixedLengthScenario` class:
+ Constants can include properties such as starting Iteration value or total Iteration count, and you can always add your own custom constants. Below is an example of the Constants class used in the `FixedLengthScenario` class:
```
[Serializable]
public class Constants : UnitySimulationScenarioConstants
{
    // FixedLengthScenario adds one field: frames to simulate per Iteration
    public int framesPerIteration = 100; // illustrative default
}
```
There are a few key things to note here:
- 1. The constants class will need to inherit from `UnitySimulationScenarioConstants` to be compatible with the Run in Unity Simulation window. Deriving from `UnitySimulationScenarioConstants` will add a few key properties to the constants class that are needed to coordinate a Unity Simulation run.
- 2. Make sure to include the `[Serializable]` attribute on a constant class. This will ensure that the constants can be manipulated from the Unity inspector.
- 3. A scenario class's `SerializeToJson()` and `DeserializeFromJson()` methods can be overriden to implement custom serialization strategies.
+ 1. The Constants class will need to inherit from `UnitySimulationScenarioConstants` to be compatible with the Run in Unity Simulation window. Deriving from `UnitySimulationScenarioConstants` will add a few key properties to the Constants class that are needed to coordinate a Unity Simulation run.
+ 2. Make sure to include the `[Serializable]` attribute on a Constants class. This will ensure that the Constants can be manipulated from the Unity inspector.
+ 3. A Scenario class's `SerializeToJson()` and `DeserializeFromJson()` methods can be overridden to implement custom serialization strategies.
- Follow the instructions below to generate a scenario configuration file to modify your scenario constants and randomizers in a built player:
- 1. Click the serialize constants button in the scenario's inspector window. This will generate a `scenario_configuration.json` file and place it in the project's Assets/StreamingAssets folder.
+ Follow the instructions below to generate a Scenario configuration file to modify your Scenario Constants and Randomizers in a built player:
+ 1. Click the _**Serialize Constants**_ button in the Scenario's inspector window. This will generate a `scenario_configuration.json` file and place it in the project's Assets/StreamingAssets folder.
2. Build your player. The new player will have a [ProjectName]_Data/StreamingAssets folder. A copy of the `scenario_configuration.json` file previously constructed in the editor will be found in this folder.
3. Change the contents of the `scenario_configuration.json` file. Any running player thereafter will utilize the newly authored values.
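For reference, a serialized `scenario_configuration.json` might look roughly like the following; the exact keys depend on your Scenario's Constants and Randomizers, so treat the names and values here as illustrative:
```
{
  "constants": {
    "framesPerIteration": 100,
    "totalIterations": 1000,
    "instanceCount": 1,
    "instanceIndex": 0,
    "randomSeed": 539662031
  },
  "randomizers": {
    "RotationRandomizer": {
      "rotation": {
        "x": { "uniform": { "min": 0.0, "max": 360.0 } },
        "y": { "uniform": { "min": 0.0, "max": 360.0 } },
        "z": { "uniform": { "min": 0.0, "max": 360.0 } }
      }
    }
  }
}
```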

com.unity.perception/Documentation~/Schema/Synthetic_Dataset_Schema.md (146 lines changed)


#### capture.annotation.values
<!-- Not yet implemented annotations
- ##### instance segmentation - polygon
+ ##### instance segmentation - color image
- A json object that stores collections of polygons. Each polygon record maps a tuple of (instance, label) to a list of
- K pixel coordinates that forms a polygon. This object can be directly stored in annotation.values
+ A color png file that stores instance ids as a color value per pixel. The png files are located in the "filename" location.
- semantic_segmentation_polygon {
- label_id: <int> -- Integer identifier of the label
- label_name: <str> -- String identifier of the label
- instance_id: <str> -- UUID of the instance.
- polygon: [<int, int>,...] -- List of points in pixel coordinates of the outer edge. Connecting these points in order should create a polygon that identifies the object.
+ instance_segmentation {
+ instance_id: <int> -- The instance ID of the labeled object
+ color { -- The pixel color that correlates with the instance ID
+ r: <int> -- The red value of the pixel between 0 and 255
+ g: <int> -- The green value of the pixel between 0 and 255
+ b: <int> -- The blue value of the pixel between 0 and 255
+ a: <int> -- The alpha value of the pixel between 0 and 255
+ }
-->
##### 2D bounding box

```
bounding_box_2d {
label_id: <int> -- Integer identifier of the label
label_name: <str> -- String identifier of the label
instance_id: <str> -- UUID of the instance.
x: <float> -- x coordinate of the upper left corner.
y: <float> -- y coordinate of the upper left corner.
width: <float> -- number of pixels in the x direction
height: <float> -- number of pixels in the y direction
}
```
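An illustrative record under this schema (all values below are made up):
```
{
  "label_id": 27,
  "label_name": "car",
  "instance_id": "3fa85f64-5717-4562-b3fc-2c963f66afa6",
  "x": 30.0,
  "y": 50.0,
  "width": 100.0,
  "height": 80.0
}
```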

<!-- Not yet implemented annotations
- A json file that stored collections of 3D bounding boxes.
- Each bounding box record maps a tuple of (instance, label) to translation, size and rotation that draws a 3D bounding box, as well as velocity and acceleration (optional) of the 3D bounding box.
- All location data is given with respect to the **sensor coordinate system**.
+ 3D bounding box information. Unlike the 2D bounding box, 3D bounding box coordinates are captured in the **sensor coordinate system**.
+ Each bounding box record maps a tuple of (instance, label) to translation, size and rotation that draws a 3D bounding box, as well as velocity and acceleration (optional) of the 3D bounding box.
- label_id: <int> -- Integer identifier of the label
- label_name: <str> -- String identifier of the label
- instance_id: <str> -- UUID of the instance.
- translation: <float, float, float> -- 3d bounding box's center location in meters as center_x, center_y, center_z with respect to global coordinate system.
- size: <float, float, float> -- 3d bounding box size in meters as width, length, height.
- rotation: <float, float, float, float> -- 3d bounding box orientation as quaternion: w, x, y, z.
- velocity: <float, float, float> -- 3d bounding box velocity in meters per second as v_x, v_y, v_z.
- acceleration: <float, float, float> [optional] -- 3d bounding box acceleration in meters per second^2 as a_x, a_y, a_z.
+ label_id: <int> -- Integer identifier of the label
+ label_name: <str> -- String identifier of the label
+ instance_id: <str> -- UUID of the instance.
+ translation { -- 3d bounding box's center location in meters with respect to global coordinate system.
+ x: <float> -- The x coordinate
+ y: <float> -- The y coordinate
+ z: <float> -- The z coordinate
+ }
+ size { -- 3d bounding box size in meters
+ x: <float> -- The x coordinate
+ y: <float> -- The y coordinate
+ z: <float> -- The z coordinate
+ }
+ rotation { -- 3d bounding box orientation as quaternion: w, x, y, z.
+ x: <float> -- The x coordinate
+ y: <float> -- The y coordinate
+ z: <float> -- The z coordinate
+ w: <float> -- The w coordinate
+ }
+ velocity { -- [Optional] 3d bounding box velocity in meters per second.
+ x: <float> -- The x coordinate
+ y: <float> -- The y coordinate
+ z: <float> -- The z coordinate
+ }
+ acceleration { -- [Optional] 3d bounding box acceleration in meters per second^2.
+ x: <float> -- The x coordinate
+ y: <float> -- The y coordinate
+ z: <float> -- The z coordinate
+ }
##### Keypoints
Keypoint data, commonly used for human pose estimation. A keypoint capture is associated with a template that defines the keypoints (see the annotation.definition file).
Each keypoint record maps a tuple of (instance, label) to a template, pose, and an array of keypoints. A keypoint entry will exist in this record for each keypoint defined in the template file.
If a given keypoint doesn't exist in the labeled GameObject, then that keypoint will have a state value of 0; if it does exist, then it will have a state value of 2.
```
keypoints {
label_id: <int> -- Integer identifier of the label
instance_id: <str> -- UUID of the instance.
template_guid: <str> -- UUID of the keypoint template
pose: <str> -- Pose ground truth information
keypoints [ -- Array of keypoint data, one entry for each keypoint defined in associated template file.
{
index: <int> -- Index of keypoint in template
x: <float> -- X pixel coordinate of keypoint
y: <float> -- Y pixel coordinate of keypoint
state: <int> -- 0: keypoint does not exist, 2: keypoint exists
}, ...
]
}
```
<!-- Not yet implemented annotations
#### instances (V2, WIP)

Typically, the `spec` key describes all label_id and label_name values used by the annotation.
Some special cases like semantic segmentation might assign additional values (e.g. pixel value) to record the mapping between label_id/label_name and pixel color in the annotated PNG files.
##### annotation definition header
- id: <int> -- Integer identifier of the annotation definition.
- name: <str> -- Human readable annotation spec name (e.g. sementic_segmentation, instance_segmentation, etc.)
- description: <str, optional> -- Description of this annotation specifications.
- format: <str> -- The format of the annotation files. (e.g. png, json, etc.)
- spec: [<obj>...] -- Format-specific specification for the annotation values (ex. label-value mappings for semantic segmentation images)
+ id: <int> -- Integer identifier of the annotation definition.
+ name: <str> -- Human readable annotation spec name (e.g. semantic_segmentation, instance_segmentation, etc.)
+ description: <str> -- [Optional] Description of this annotation specification.
+ format: <str> -- The format of the annotation files. (e.g. png, json, etc.)
+ spec: [<obj>...] -- Format-specific specification for the annotation values (ex. label-value mappings for semantic segmentation images)
- # semantic segmentation
```
+ ##### semantic segmentation
+ Annotation spec for the [semantic segmentation labeler](#semantic-segmentation---grayscale-image)
```
- label_id: <int> -- Integer identifier of the label
- label_name: <str> -- String identifier of the label
- pixel_value: <int> -- Grayscale pixel value
- color_pixel_value: <int, int, int> [optional] -- Color pixel value
+ label_id: <int> -- Integer identifier of the label
+ label_name: <str> -- String identifier of the label
+ pixel_value: <int> -- Grayscale pixel value
+ color_pixel_value: <int, int, int> -- [Optional] Color pixel value
}
```
##### label enumeration spec
This spec is used for annotations like [bounding box 2d](#2d-bounding-box). This might be a subset of all labels used in simulation.
```
annotation_definition.spec {
label_id: <int> -- Integer identifier of the label
label_name: <str> -- String identifier of the label
- # label enumeration spec, used for annotations like bounding box 2d. This might be a subset of all labels used in simulation.
```
##### keypoint template
Keypoint templates are used to define the keypoints and skeletal connections captured by the [keypoint labeler](#keypoints).
```
template_id: <str> -- The UUID of the template
template_name: <str> -- Human readable name of the template
key_points [ -- Array of joints defined in this template
{
label: <str> -- The label of the joint
index: <int> -- The index of the joint
}, ...
]
skeleton [ -- Array of skeletal connections (which joints have connections between one another) defined in this template
{
joint1: <int> -- The first joint of the connection
joint2: <int> -- The second joint of the connection
}, ...
]
}
```
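For illustration only, a hypothetical template definition with two joints and one skeletal connection might look like this (all values are made up):
```
{
  "label_id": 1,
  "label_name": "character",
  "template_id": "c4a41bd2-0d4e-4d66-9dab-4b78d6701a3c",
  "template_name": "MyTemplate",
  "key_points": [
    { "label": "head", "index": 0 },
    { "label": "neck", "index": 1 }
  ],
  "skeleton": [
    { "joint1": 0, "joint2": 1 }
  ]
}
```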

14
com.unity.perception/Documentation~/Tutorial/Phase1.md


> :information_source: If you would like to install a specific version of the package, you can append the version to the end of the url. For example `com.unity.perception@0.1.0-preview.5`. For this tutorial, **we do not need to add a version**. You can also install the package from a local clone of the Perception repository. More information on installing local packages is available [here](https://docs.unity3d.com/Manual/upm-ui-local.html).
It will take some time for the manager to download and import the package. Once the operation finishes, you will see the newly downloaded Perception package automatically selected in the _**Package Manager**_, as depicted below:
<p align="center">
<img src="Images/package_manager.png" width="600"/>

Each package can come with a set of samples. As seen in the righthand panel, the Perception package includes a sample named _**Tutorial Files**_, which will be required for completing this tutorial. The sample files consist of example foreground and background objects, Randomizers, shaders, and other useful elements to work with during this tutorial. **Foreground** objects are those that the eventual machine learning model will try to detect, and **background** objects will be placed in the background as distractors for the model.
* **:green_circle: Action**: In the _**Package Manager**_ window, from the list of _**Samples**_ for the Perception package, click on the _**Import into Project**_ button for the sample named _**Tutorial Files**_.

* **:green_circle: Action**: Click on `Directional Light` and in the _**Inspector**_ tab, set `Shadow Type` to `No Shadows`.
We will now add the necessary components to the camera in order to equip it for the Perception workflow. To do this, we need to add a `Perception Camera` component to it, and then define which types of ground-truth we wish to generate using this camera.
* **:green_circle: Action**: Select `Main Camera` again and in the _**Inspector**_ tab, click on the _**Add Component**_ button.
* **:green_circle: Action**: Start typing `Perception Camera` in the search bar that appears, until the `Perception Camera` script is found, with a **#** icon to the left:

As seen in the UI for `Perception Camera`, the list of `Camera Labelers` is currently empty. For each type of ground-truth you wish to generate alongside your captured frames (e.g. 2D bounding boxes around objects), you will need to add a corresponding `Camera Labeler` to this list.
To speed-up your workflow, the Perception package comes with five common labelers for object-detection tasks; however, if you are comfortable with code, you can also add your own custom labelers. The labelers that come with the Perception package cover **3D bounding boxes, 2D bounding boxes, object counts, object information (pixel counts and ids), and semantic segmentation images (each object rendered in a unique colour)**. We will use four of these in this tutorial.
* **:green_circle: Action**: Click on the _**+**_ button at the bottom right corner of the empty labeler list and select `BoundingBox2DLabeler`.
* **:green_circle: Action**: Repeat the above step to add `ObjectCountLabeler`, `RenderedObjectInfoLabeler`, `SemanticSegmentationLabeler`.

For this tutorial, we have already prepared the foreground Prefabs for you and added the `Labeling` component to all of them. These Prefabs were based on 3D scans of the actual grocery items. If you are making your own Prefabs, you can easily add a `Labeling` component to them using the _**Add Component**_ button visible in the bottom right corner of the screenshot above.
> :information_source: If you are interested in knowing more about the process of creating Unity compatible 3D models for use with the Perception package, you can visit [this page](https://github.com/Unity-Technologies/SynthDet/blob/master/docs/CreatingAssets.md). Once you have 3D models in `.fbx` format, the Perception package lets you quickly create Prefabs from multiple models. Just select all your models and from the top menu bar select _**Assets -> Perception -> Create Prefabs from Selected Models**_. The newly created Prefabs will be placed in the same folders as their corresponding models.
Even though the sample Prefabs already have a label manually added, to learn more about how to use the Labeling component, we will now use automatic labeling to label all our foreground objects. This will overwrite their manually added labels.

In this folder, you will find a few types of data, depending on your `Perception Camera` settings. These can include:
- Logs
- JSON data
- RGB images (raw camera output) (if the `Save Camera Output to Disk` check mark is enabled on `Perception Camera`)
- Semantic segmentation images (if the `SemanticSegmentationLabeler` is added and active on `Perception Camera`)
The output dataset includes a variety of information about different aspects of the active sensors in the Scene (currently only one), as well as the ground-truth generated by all active labelers. [This page](https://github.com/Unity-Technologies/com.unity.perception/blob/master/com.unity.perception/Documentation%7E/Schema/Synthetic_Dataset_Schema.md) provides a comprehensive explanation on the schema of this dataset. We strongly recommend having a look at the page once you have completed this tutorial.

* **:green_circle: Action**: Follow the instructions laid out in the notebook and run each code block to view its outputs.
This concludes Phase 1 of the Perception Tutorial. In the next phase, you will dive a little bit into randomization code and learn how to build your own custom Randomizer.
**[Continue to Phase 2: Custom Randomizations](Phase2.md)**

6
com.unity.perception/Documentation~/Tutorial/Phase2.md


}
```
The purpose of this piece of code is to obtain a random float Parameter and assign it to the light's `Intensity` field on the start of every Iteration. Let's go through the code above and understand each part. The `FloatParameter` field makes it possible for us to define a randomized float Parameter and modify its properties from the editor UI, similar to how we already modified the properties for the previous Randomizers we used.
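For reference, here is a minimal sketch of what such a Randomizer can look like (a paraphrase of the listing above, not an exact copy; the sampler range shown is arbitrary):
```
using System;
using UnityEngine;
using UnityEngine.Experimental.Perception.Randomization.Parameters;
using UnityEngine.Experimental.Perception.Randomization.Randomizers;
using UnityEngine.Experimental.Perception.Randomization.Samplers;

[Serializable]
[AddRandomizerMenu("Perception/My Light Randomizer")]
public class MyLightRandomizer : Randomizer
{
    // Randomized float Parameter; its sampler and range are editable in the Inspector
    public FloatParameter lightIntensityParameter = new FloatParameter { value = new UniformSampler(0.5f, 3f) };

    protected override void OnIterationStart()
    {
        // Sample a new intensity for every tagged light at the start of each Iteration
        foreach (var tag in tagManager.Query<MyLightRandomizerTag>())
        {
            var light = tag.GetComponent<Light>();
            light.intensity = lightIntensityParameter.Sample();
        }
    }
}
```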
> :information_source: If you look at the _**Console**_ tab of the editor now, you will see an error regarding `MyLightRandomizerTag` not being found. This is to be expected, since we have not yet created this class; the error will go away once we create the class later.

}
```
If you now check the UI snippet for `MyLightRandomizer`, you will notice that `Color Parameter` is added. This Parameter includes four separate randomized values for `Red`, `Green`, `Blue` and `Alpha`. Note that the meaningful range for all of these values is 0-1 (and not 0-255). You can see that the sampling range for red, green, and blue is currently also set to 0-1, which means the Parameter covers a full range of colors. A color with (0,0,0) RGB components essentially emits no light. So, let's increase the minimum a bit to avoid such a scenario.
* **:green_circle: Action**: Increase the minimum value for red, green, and blue components to 0.4 (this is an arbitrary number that typically produces good-looking results).

</p>
* **:green_circle: Action**: Run the simulation for a few frames to observe the lighting color changing on each Iteration of the Scenario.
### <a name="step-2">Step 2: Bundle Data and Logic Inside RandomizerTags</a>

14
com.unity.perception/Documentation~/Tutorial/Phase3.md


* **:green_circle: Action**: Choose `TutorialScene` (which is the Scene we have been working in) as your _**Main Scene**_ and the `SimulationScenario` object as your _**Scenario**_.
Here, you can also specify a name for the run, the number of Iterations the Scenario will execute for, and the number of _**Instances**_ (number of nodes the work will be distributed across) for the run.
* **:green_circle: Action**: Name your run `FirstRun`, set the number of Iterations to `1000`, and Instances to `20`.
Your project will now be built and then uploaded to Unity Simulation. This may take a few minutes to complete, during which the editor may become frozen; this is normal behaviour.
* **:green_circle: Action**: Once the operation is complete, you can find the **Build ID**, **Run Definition ID**, and **Execution ID** of this Unity Simulation run in the _**Console**_ tab:

SynthDet 9ec23417-73cd-becd-9dd6-556183946153 2020-08-12T19:46:20+00:00
```
In case you have more than one cloud project, you will need to "activate" the one corresponding with your Perception Tutorial project. If there is only one project, it is already activated, and you will not need to execute the command below (note: replace `<project-id>` with the id of your desired project).
* **:green_circle: Action**: Activate the relevant project:

You may notice that the IDs seen above for the run named `FirstRun` match those we saw earlier in Unity Editor's _**Console**_. You can see here that the single execution for our recently uploaded build is `In_Progress` and that the execution ID is `yegz4WN`.
Unity Simulation utilizes the ability to run simulation Instances in parallel. If you enter a number larger than 1 for the number of Instances in the _**Run in Unity Simulation**_ window, your run will be parallelized, and multiple simulation Instances will simultaneously execute. You can view the status of all simulation Instances using the `usim summarize run-execution <execution-id>` command. This command will tell you how many Instances have succeeded, failed, have not run yet, or are in progress. Make sure to replace `<execution-id>` with the execution ID seen in your run list. In the above example, this ID would be `yegz4WN`.
* **:green_circle: Action**: Use the `usim summarize run-execution <execution-id>` command to observe the status of your execution nodes:

Follow the rest of the steps inside the notebook to generate a variety of plots and stats. Keep in mind that this notebook is provided just as an example, and you can modify and extend it according to your own needs using the tools provided by the [Dataset Insights framework](https://datasetinsights.readthedocs.io/en/latest/).
This concludes the Perception Tutorial. The next step in this workflow would be to train an object-detection model using a dataset generated on Unity Simulation. It is important to note that the 1000-capture dataset we generated here is probably not sufficiently large for training most models. We chose this number so that the run would complete in a fairly short period of time, allowing us to move on to learning how to analyze the statistics of the dataset. In order to generate data for training, we recommend a dataset of about 400,000 captures.
In the near future, we will expand this tutorial to Phase 4, which will include instructions on how to train a Faster R-CNN object-detection model using a dataset that can be generated by following this tutorial.

6
com.unity.perception/Documentation~/Tutorial/TUTORIAL.md


## [Phase 2: Custom Randomizations](Phase2.md)
In order to get the best out of computer vision models, the training data needs to contain a large degree of variation. This is achieved through randomizing various aspects of your simulation between captured frames. While you will use basic randomizations in Phase 1, Phase 2 of the tutorial will help you learn how to randomize your simulations in more complex ways by guiding you through writing your first customized Randomizer in C# code. Once you complete this phase, you will know how to:
* Create custom Randomizers by extending our provided samples.
* Coordinate the operation of several Randomizers by specifying their order of execution and the objects they affect.
* Have objects specify criteria (e.g. ranges, means, etc.) and logic (e.g. unique behaviors) for their randomizable attributes.
## [Phase 3: Cloud](Phase3.md)

882
com.unity.perception/Documentation~/images/PerceptionCameraFinished.png

Width: 1222  |  Height: 1482  |  Size: 224 KiB

22
com.unity.perception/Editor/Randomization/Editors/RunInUnitySimulationWindow.cs


m_SysParam = definition;
sysParamMenu.text = definition.description;
});
sysParamMenu.text = sysParamDefinitions[0].description;
m_SysParam = sysParamDefinitions[0];

}
catch (Exception e)
{
EditorUtility.ClearProgressBar();
PerceptionEditorAnalytics.ReportRunInUnitySimulationFailed(runGuid, e.Message);
throw;
}

Debug.Log("Created build zip");
}
List<AppParam> GenerateAppParamIds(CancellationToken token, float progressStart, float progressEnd)
{
var appParamIds = new List<AppParam>();
var scenario = (ScenarioBase)m_ScenarioField.value;

constants["totalIterations"] = m_TotalIterationsField.value;
constants["instanceCount"] = m_InstanceCountField.value;
var progressIncrement = (progressEnd - progressStart) / m_InstanceCountField.value;
for (var i = 0; i < m_InstanceCountField.value; i++)
{

name = appParamName,
num_instances = 1
});
EditorUtility.DisplayProgressBar(
"Unity Simulation Run",
$"Uploading app-param-ids for instances: {i + 1}/{m_InstanceCountField.value}",
progressStart + progressIncrement * i);
}
return appParamIds;

{
EditorUtility.DisplayProgressBar("Unity Simulation Run", "Uploading build...", 0.1f);
m_RunButton.SetEnabled(false);
var cancellationTokenSource = new CancellationTokenSource();
var token = cancellationTokenSource.Token;

cancellationTokenSource: cancellationTokenSource);
Debug.Log($"Build upload complete: build id {buildId}");
var appParams = GenerateAppParamIds(token, 0.1f, 0.9f);
EditorUtility.ClearProgressBar();
EditorUtility.DisplayProgressBar("Unity Simulation Run", "Uploading run definition...", 0.9f);
var runDefinitionId = API.UploadRunDefinition(new RunDefinition
{
app_params = appParams.ToArray(),

});
Debug.Log($"Run definition upload complete: run definition id {runDefinitionId}");
EditorUtility.DisplayProgressBar("Unity Simulation Run", "Executing run...", 0.95f);
var run = Run.CreateFromDefinitionId(runDefinitionId);
run.Execute();

EditorUtility.ClearProgressBar();
PerceptionEditorAnalytics.ReportRunInUnitySimulationSucceeded(runGuid, run.executionId);
}

6
com.unity.perception/Runtime/Randomization/Parameters/CategoricalParameter.cs


public sealed override Type sampleType => typeof(T);
/// <summary>
/// Returns the number of stored categories
/// </summary>
/// <returns>The number of stored categories</returns>
public int GetCategoryCount() => m_Categories.Count;
/// <summary>
/// Returns the category stored at the specified index
/// </summary>
/// <param name="index">The index of the category to lookup</param>

122
com.unity.perception/Documentation~/images/keypoint_template_header.png

Width: 320  |  Height: 155  |  Size: 30 KiB

194
com.unity.perception/Documentation~/images/keypoint_template_keypoints.png

Width: 902  |  Height: 484  |  Size: 50 KiB

99
com.unity.perception/Documentation~/images/keypoint_template_skeleton.png

Width: 878  |  Height: 242  |  Size: 22 KiB

32
com.unity.perception/Editor/Randomization/Editors/RandomizerTagEditor.cs


using UnityEditor;
using UnityEditor.UIElements;
using UnityEngine.Experimental.Perception.Randomization.Randomizers;
using UnityEngine.UIElements;
namespace UnityEngine.Experimental.Perception.Randomization.Editor
{
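/// <summary>
/// Default inspector for <see cref="RandomizerTag"/> derived components. Draws every serialized
/// property of the tag except the script reference.
/// </summary>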
[CustomEditor(typeof(RandomizerTag), true)]
public class RandomizerTagEditor : UnityEditor.Editor
{
public override VisualElement CreateInspectorGUI()
{
var rootElement = new VisualElement();
CreatePropertyFields(rootElement);
return rootElement;
}
void CreatePropertyFields(VisualElement rootElement)
{
var iterator = serializedObject.GetIterator();
iterator.NextVisible(true);
do
{
if (iterator.name == "m_Script")
continue;
var propertyField = new PropertyField(iterator.Copy());
propertyField.Bind(serializedObject);
rootElement.Add(propertyField);
} while (iterator.NextVisible(false));
}
}
}

3
com.unity.perception/Editor/Randomization/Editors/RandomizerTagEditor.cs.meta


fileFormatVersion: 2
guid: 364d57cb71da4535b77257c294c850f7
timeCreated: 1611697363

61
com.unity.perception/Runtime/GroundTruth/Labelers/AnimationPoseLabel.cs


using System;
using System.Collections.Generic;
using System.Linq;
namespace UnityEngine.Perception.GroundTruth
{
/// <summary>
/// Record that maps a pose to a timestamp
/// </summary>
[Serializable]
public class PoseTimestampRecord
{
/// <summary>
/// The percentage within the clip that the pose starts, a value from 0 (beginning) to 1 (end)
/// </summary>
[Tooltip("The percentage within the clip that the pose starts, a value from 0 (beginning) to 1 (end)")]
public float startOffsetPercent;
/// <summary>
/// The label to use for any captures inside of this time period
/// </summary>
public string poseLabel;
}
/// <summary>
/// The animation pose label maps time ranges in an animation clip to ground truth pose labels.
/// Each timestamp record is defined by a pose label and a normalized start offset. The timestamp records are
/// order dependent: a record's pose applies from its start offset until the start offset of the next record,
/// and the final record's pose applies through the end of the clip.
/// </summary>
[CreateAssetMenu(fileName = "AnimationPoseTimestamp", menuName = "Perception/Animation Pose Timestamps")]
public class AnimationPoseLabel : ScriptableObject
{
/// <summary>
/// The animation clip used for all of the timestamps
/// </summary>
public AnimationClip animationClip;
/// <summary>
/// The list of timestamps, order dependent
/// </summary>
public List<PoseTimestampRecord> timestamps;
/// <summary>
/// Retrieves the pose for the clip at the current time.
/// </summary>
/// <param name="time">The time in question</param>
/// <returns>The pose for the passed in time</returns>
public string GetPoseAtTime(float time)
{
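// Time is normalized: 0 = clip start, 1 = clip end; out-of-range times have no pose.
// Records are ordered by startOffsetPercent, so the label of the last record that starts
// at or before the requested time applies.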
if (time < 0 || time > 1) return "unset";
int i;
for (i = 1; i < timestamps.Count; i++)
{
if (timestamps[i].startOffsetPercent > time) break;
}
return timestamps[i - 1].poseLabel;
}
}
}

3
com.unity.perception/Runtime/GroundTruth/Labelers/AnimationPoseLabel.cs.meta


fileFormatVersion: 2
guid: 4c69656f5dd14516a3a18e42b3b43a4e
timeCreated: 1611270313

142
com.unity.perception/Runtime/GroundTruth/Labelers/CocoKeypointTemplate.asset


%YAML 1.1
%TAG !u! tag:unity3d.com,2011:
--- !u!114 &11400000
MonoBehaviour:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 0}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: 37a7d6f1a40c45a2981a6291f0d03337, type: 3}
m_Name: CocoKeypointTemplate
m_EditorClassIdentifier:
templateName: Coco
jointTexture: {fileID: 2800000, guid: e381cbaaf29614168bafc8f7ec5dbfe9, type: 3}
skeletonTexture: {fileID: 2800000, guid: e381cbaaf29614168bafc8f7ec5dbfe9, type: 3}
keyPoints:
- label: nose
associateToRig: 0
rigLabel: 0
color: {r: 1, g: 0, b: 0, a: 1}
- label: neck
associateToRig: 1
rigLabel: 9
color: {r: 1, g: 0.3397382, b: 0, a: 1}
- label: right_shoulder
associateToRig: 1
rigLabel: 14
color: {r: 1, g: 0.5694697, b: 0, a: 1}
- label: right_elbow
associateToRig: 1
rigLabel: 16
color: {r: 1, g: 0.8258381, b: 0, a: 1}
- label: right_wrist
associateToRig: 1
rigLabel: 18
color: {r: 0.6454141, g: 1, b: 0, a: 1}
- label: left_shoulder
associateToRig: 1
rigLabel: 13
color: {r: 0.33125544, g: 1, b: 0, a: 1}
- label: left_elbow
associateToRig: 1
rigLabel: 15
color: {r: 0.04907012, g: 1, b: 0, a: 1}
- label: left_wrist
associateToRig: 1
rigLabel: 17
color: {r: 0, g: 1, b: 0.16702724, a: 1}
- label: right_hip
associateToRig: 1
rigLabel: 2
color: {r: 0, g: 1, b: 0.36656523, a: 1}
- label: right_knee
associateToRig: 1
rigLabel: 4
color: {r: 0, g: 1, b: 0.58708096, a: 1}
- label: right_ankle
associateToRig: 1
rigLabel: 6
color: {r: 0, g: 1, b: 0.7695224, a: 1}
- label: left_hip
associateToRig: 1
rigLabel: 1
color: {r: 0, g: 1, b: 1, a: 1}
- label: left_knee
associateToRig: 1
rigLabel: 3
color: {r: 0, g: 0.63836884, b: 1, a: 1}
- label: left_ankle
associateToRig: 1
rigLabel: 5
color: {r: 0, g: 0.29786587, b: 1, a: 1}
- label: right_eye
associateToRig: 1
rigLabel: 22
color: {r: 0.45002556, g: 0, b: 1, a: 1}
- label: left_eye
associateToRig: 1
rigLabel: 21
color: {r: 0.9471822, g: 0, b: 1, a: 1}
- label: right_ear
associateToRig: 0
rigLabel: 22
color: {r: 1, g: 0, b: 0.6039734, a: 1}
- label: left_ear
associateToRig: 0
rigLabel: 21
color: {r: 1, g: 0, b: 0.11927748, a: 1}
skeleton:
- joint1: 0
joint2: 1
color: {r: 0.014684939, g: 0.05894964, b: 0.6226415, a: 1}
- joint1: 1
joint2: 2
color: {r: 0.5283019, g: 0, b: 0.074745394, a: 1}
- joint1: 2
joint2: 3
color: {r: 0.7830189, g: 0.32108742, b: 0.07756319, a: 1}
- joint1: 3
joint2: 4
color: {r: 0.9622642, g: 0.85543716, b: 0, a: 1}
- joint1: 1
joint2: 5
color: {r: 0.7019608, g: 0.20392157, b: 0.11461401, a: 1}
- joint1: 5
joint2: 6
color: {r: 0.3374826, g: 0.9056604, b: 0.26059094, a: 1}
- joint1: 6
joint2: 7
color: {r: 0.04214221, g: 0.4811321, b: 0.03404236, a: 1}
- joint1: 1
joint2: 8
color: {r: 0, g: 0.764151, b: 0.22962166, a: 1}
- joint1: 8
joint2: 9
color: {r: 0, g: 1, b: 0.3301921, a: 1}
- joint1: 9
joint2: 10
color: {r: 0, g: 0.9433962, b: 0.71313965, a: 1}
- joint1: 1
joint2: 11
color: {r: 0, g: 1, b: 1, a: 1}
- joint1: 11
joint2: 12
color: {r: 0, g: 0.38122815, b: 0.9433962, a: 1}
- joint1: 12
joint2: 13
color: {r: 0.20773989, g: 0, b: 0.7169812, a: 1}
- joint1: 0
joint2: 14
color: {r: 1, g: 0, b: 0.88550186, a: 1}
- joint1: 0
joint2: 15
color: {r: 1, g: 0, b: 0.81438303, a: 1}
- joint1: 16
joint2: 14
color: {r: 0.5743165, g: 0, b: 1, a: 1}
- joint1: 17
joint2: 15
color: {r: 0.8962264, g: 0, b: 0.12766689, a: 1}

8
com.unity.perception/Runtime/GroundTruth/Labelers/CocoKeypointTemplate.asset.meta


fileFormatVersion: 2
guid: a29b79d8ce98945a0855b1addec08d86
NativeFormatImporter:
externalObjects: {}
mainObjectFileID: 11400000
userData:
assetBundleName:
assetBundleVariant:

34
com.unity.perception/Runtime/GroundTruth/Labelers/JointLabel.cs


using System;
using System.Collections.Generic;
using UnityEngine;
namespace UnityEngine.Perception.GroundTruth
{
/// <summary>
/// Label to designate a custom joint/keypoint. These are needed to add body
/// parts to a humanoid model that are not contained in its <see cref="Animator"/> <see cref="Avatar"/>
/// </summary>
public class JointLabel : MonoBehaviour
{
/// <summary>
/// Maps this joint to a joint in a <see cref="KeyPointTemplate"/>
/// </summary>
[Serializable]
public class TemplateData
{
/// <summary>
/// The <see cref="KeyPointTemplate"/> that defines this joint.
/// </summary>
public KeyPointTemplate template;
/// <summary>
/// The name of the joint.
/// </summary>
public string label;
};
/// <summary>
/// List of all of the templates that this joint can be mapped to.
/// </summary>
public List<TemplateData> templateInformation;
}
}

3
com.unity.perception/Runtime/GroundTruth/Labelers/JointLabel.cs.meta


fileFormatVersion: 2
guid: 8cf4fa374b134b1680755f8280ae8e7d
timeCreated: 1610577744

437
com.unity.perception/Runtime/GroundTruth/Labelers/KeyPointLabeler.cs


using System;
using System.Collections.Generic;
using System.Linq;
using Unity.Collections;
using Unity.Entities;
namespace UnityEngine.Perception.GroundTruth
{
/// <summary>
/// Produces keypoint annotations for a humanoid model. This labeler supports generic
/// <see cref="KeyPointTemplate"/>. Template values are mapped to rigged
/// <see cref="Animator"/> <seealso cref="Avatar"/>. Custom joints can be
/// created by applying <see cref="JointLabel"/> to empty game objects at a body
/// part's location.
/// </summary>
[Serializable]
public sealed class KeyPointLabeler : CameraLabeler
{
/// <summary>
/// The active keypoint template. Required to annotate keypoint data.
/// </summary>
public KeyPointTemplate activeTemplate;
/// <inheritdoc/>
public override string description
{
get => "Produces keypoint annotations for all visible labeled objects that have a humanoid animation avatar component.";
protected set { }
}
///<inheritdoc/>
protected override bool supportsVisualization => true;
// ReSharper disable MemberCanBePrivate.Global
/// <summary>
/// The GUID to associate with the annotations produced by this labeler.
/// </summary>
public string annotationId = "8b3ef246-daa7-4dd5-a0e8-a943f6e7f8c2";
/// <summary>
/// The <see cref="IdLabelConfig"/> which associates objects with labels.
/// </summary>
public IdLabelConfig idLabelConfig;
// ReSharper restore MemberCanBePrivate.Global
AnnotationDefinition m_AnnotationDefinition;
EntityQuery m_EntityQuery;
Texture2D m_MissingTexture;
/// <summary>
/// Action that gets triggered when a new frame of key points are computed.
/// </summary>
public event Action<List<KeyPointEntry>> KeyPointsComputed;
/// <summary>
/// Creates a new key point labeler. This constructor creates a labeler that
/// is not valid until a <see cref="IdLabelConfig"/> and <see cref="KeyPointTemplate"/>
/// are assigned.
/// </summary>
public KeyPointLabeler() { }
/// <summary>
/// Creates a new key point labeler.
/// </summary>
/// <param name="config">The Id label config for the labeler</param>
/// <param name="template">The active keypoint template</param>
public KeyPointLabeler(IdLabelConfig config, KeyPointTemplate template)
{
this.idLabelConfig = config;
this.activeTemplate = template;
}
/// <summary>
/// Array of animation pose labels which map animation clip times to ground truth pose labels.
/// </summary>
public AnimationPoseLabel[] poseStateConfigs;
/// <inheritdoc/>
protected override void Setup()
{
if (idLabelConfig == null)
throw new InvalidOperationException("KeyPointLabeler's idLabelConfig field must be assigned");
m_AnnotationDefinition = DatasetCapture.RegisterAnnotationDefinition("keypoints", new []{TemplateToJson(activeTemplate)},
"pixel coordinates of keypoints in a model, along with skeletal connectivity data", id: new Guid(annotationId));
m_EntityQuery = World.DefaultGameObjectInjectionWorld.EntityManager.CreateEntityQuery(typeof(Labeling), typeof(GroundTruthInfo));
m_KeyPointEntries = new List<KeyPointEntry>();
// Texture to use in case the template does not contain a texture for the joints or the skeletal connections
m_MissingTexture = new Texture2D(1, 1);
m_KnownStatus = new Dictionary<uint, CachedData>();
}
/// <inheritdoc/>
protected override void OnBeginRendering()
{
var reporter = perceptionCamera.SensorHandle.ReportAnnotationAsync(m_AnnotationDefinition);
var entities = m_EntityQuery.ToEntityArray(Allocator.TempJob);
var entityManager = World.DefaultGameObjectInjectionWorld.EntityManager;
m_KeyPointEntries.Clear();
foreach (var entity in entities)
{
ProcessEntity(entityManager.GetComponentObject<Labeling>(entity));
}
entities.Dispose();
KeyPointsComputed?.Invoke(m_KeyPointEntries);
reporter.ReportValues(m_KeyPointEntries);
}
// ReSharper disable InconsistentNaming
// ReSharper disable NotAccessedField.Global
// ReSharper disable NotAccessedField.Local
/// <summary>
/// Record storing all of the keypoint data of a labeled gameobject.
/// </summary>
[Serializable]
public class KeyPointEntry
{
/// <summary>
/// The label id of the entity
/// </summary>
public int label_id;
/// <summary>
/// The instance id of the entity
/// </summary>
public uint instance_id;
/// <summary>
/// The template that the points are based on
/// </summary>
public string template_guid;
/// <summary>
/// Pose ground truth for the current set of keypoints
/// </summary>
public string pose = "unset";
/// <summary>
/// Array of all of the keypoints
/// </summary>
public KeyPoint[] keypoints;
}
/// <summary>
/// The values of a specific keypoint
/// </summary>
[Serializable]
public class KeyPoint
{
/// <summary>
/// The index of the keypoint in the template file
/// </summary>
public int index;
/// <summary>
/// The keypoint's x-coordinate pixel location
/// </summary>
public float x;
/// <summary>
/// The keypoint's y-coordinate pixel location
/// </summary>
public float y;
/// <summary>
/// The state of the point: 0 = not present, 1 = present via a <see cref="JointLabel"/>, 2 = present via the rig
/// </summary>
public int state;
}
// ReSharper restore InconsistentNaming
// ReSharper restore NotAccessedField.Global
// ReSharper restore NotAccessedField.Local
// Converts a coordinate from world space into pixel space
Vector3 ConvertToScreenSpace(Vector3 worldLocation)
{
var pt = perceptionCamera.attachedCamera.WorldToScreenPoint(worldLocation);
pt.y = Screen.height - pt.y;
return pt;
}
List<KeyPointEntry> m_KeyPointEntries;
struct CachedData
{
public bool status;
public Animator animator;
public KeyPointEntry keyPoints;
public List<(JointLabel, int)> overrides;
}
Dictionary<uint, CachedData> m_KnownStatus;
bool TryToGetTemplateIndexForJoint(KeyPointTemplate template, JointLabel joint, out int index)
{
index = -1;
foreach (var jointTemplate in joint.templateInformation.Where(jointTemplate => jointTemplate.template == template))
{
for (var i = 0; i < template.keyPoints.Length; i++)
{
if (template.keyPoints[i].label == jointTemplate.label)
{
index = i;
return true;
}
}
}
return false;
}
bool DoesTemplateContainJoint(JointLabel jointLabel)
{
foreach (var template in jointLabel.templateInformation)
{
if (template.template == activeTemplate)
{
if (activeTemplate.keyPoints.Any(i => i.label == template.label))
{
return true;
}
}
}
return false;
}
void ProcessEntity(Labeling labeledEntity)
{
// Cache out the data of a labeled game object the first time we see it, this will
// save performance each frame. Also checks to see if a labeled game object can be annotated.
if (!m_KnownStatus.ContainsKey(labeledEntity.instanceId))
{
var cached = new CachedData()
{
status = false,
animator = null,
keyPoints = new KeyPointEntry(),
overrides = new List<(JointLabel, int)>()
};
if (idLabelConfig.TryGetLabelEntryFromInstanceId(labeledEntity.instanceId, out var labelEntry))
{
var entityGameObject = labeledEntity.gameObject;
cached.keyPoints.instance_id = labeledEntity.instanceId;
cached.keyPoints.label_id = labelEntry.id;
cached.keyPoints.template_guid = activeTemplate.templateID.ToString();
cached.keyPoints.keypoints = new KeyPoint[activeTemplate.keyPoints.Length];
for (var i = 0; i < cached.keyPoints.keypoints.Length; i++)
{
cached.keyPoints.keypoints[i] = new KeyPoint { index = i, state = 0 };
}
var animator = entityGameObject.transform.GetComponentInChildren<Animator>();
if (animator != null)
{
cached.animator = animator;
cached.status = true;
}
foreach (var joint in entityGameObject.transform.GetComponentsInChildren<JointLabel>())
{
if (TryToGetTemplateIndexForJoint(activeTemplate, joint, out var idx))
{
cached.overrides.Add((joint, idx));
cached.status = true;
}
}
}
m_KnownStatus[labeledEntity.instanceId] = cached;
}
var cachedData = m_KnownStatus[labeledEntity.instanceId];
if (cachedData.status)
{
var animator = cachedData.animator;
var keyPoints = cachedData.keyPoints.keypoints;
// Go through all of the rig keypoints and get their location
for (var i = 0; i < activeTemplate.keyPoints.Length; i++)
{
var pt = activeTemplate.keyPoints[i];
if (pt.associateToRig)
{
var bone = animator.GetBoneTransform(pt.rigLabel);
if (bone != null)
{
var loc = ConvertToScreenSpace(bone.position);
keyPoints[i].index = i;
keyPoints[i].x = loc.x;
keyPoints[i].y = loc.y;
keyPoints[i].state = 2;
}
}
}
// Go through all of the additional or override points defined by joint labels and get
// their locations
foreach (var (joint, idx) in cachedData.overrides)
{
var loc = ConvertToScreenSpace(joint.transform.position);
keyPoints[idx].index = idx;
keyPoints[idx].x = loc.x;
keyPoints[idx].y = loc.y;
keyPoints[idx].state = 1;
}
cachedData.keyPoints.pose = "unset";
if (cachedData.animator != null)
{
cachedData.keyPoints.pose = GetPose(cachedData.animator);
}
m_KeyPointEntries.Add(cachedData.keyPoints);
}
}
string GetPose(Animator animator)
{
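// Look up the clip currently playing on layer 0 and use the state's normalized time to find its pose label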
var info = animator.GetCurrentAnimatorClipInfo(0);
if (info != null && info.Length > 0)
{
var clip = info[0].clip;
var timeOffset = animator.GetCurrentAnimatorStateInfo(0).normalizedTime;
if (poseStateConfigs != null)
{
foreach (var p in poseStateConfigs)
{
if (p.animationClip == clip)
{
var time = timeOffset;
var label = p.GetPoseAtTime(time);
return label;
}
}
}
}
return "unset";
}
/// <inheritdoc/>
protected override void OnVisualize()
{
var jointTexture = activeTemplate.jointTexture;
if (jointTexture == null) jointTexture = m_MissingTexture;
var skeletonTexture = activeTemplate.skeletonTexture;
if (skeletonTexture == null) skeletonTexture = m_MissingTexture;
foreach (var entry in m_KeyPointEntries)
{
foreach (var bone in activeTemplate.skeleton)
{
var joint1 = entry.keypoints[bone.joint1];
var joint2 = entry.keypoints[bone.joint2];
if (joint1.state != 0 && joint2.state != 0)
{
VisualizationHelper.DrawLine(joint1.x, joint1.y, joint2.x, joint2.y, bone.color, 8, skeletonTexture);
}
}
foreach (var keypoint in entry.keypoints)
{
if (keypoint.state != 0)
VisualizationHelper.DrawPoint(keypoint.x, keypoint.y, activeTemplate.keyPoints[keypoint.index].color, 8, jointTexture);
}
}
}
// ReSharper disable InconsistentNaming
// ReSharper disable NotAccessedField.Local
[Serializable]
struct JointJson
{
public string label;
public int index;
}
[Serializable]
struct SkeletonJson
{
public int joint1;
public int joint2;
}
[Serializable]
struct KeyPointJson
{
public string template_id;
public string template_name;
public JointJson[] key_points;
public SkeletonJson[] skeleton;
}
// ReSharper restore InconsistentNaming
// ReSharper restore NotAccessedField.Local
KeyPointJson TemplateToJson(KeyPointTemplate input)
{
var json = new KeyPointJson();
json.template_id = input.templateID.ToString();
json.template_name = input.templateName;
json.key_points = new JointJson[input.keyPoints.Length];
json.skeleton = new SkeletonJson[input.skeleton.Length];
for (var i = 0; i < input.keyPoints.Length; i++)
{
json.key_points[i] = new JointJson
{
label = input.keyPoints[i].label,
index = i
};
}
for (var i = 0; i < input.skeleton.Length; i++)
{
json.skeleton[i] = new SkeletonJson()
{
joint1 = input.skeleton[i].joint1,
joint2 = input.skeleton[i].joint2
};
}
return json;
}
}
}

3
com.unity.perception/Runtime/GroundTruth/Labelers/KeyPointLabeler.cs.meta


fileFormatVersion: 2
guid: 377d37b913b843b6985fa57c13cb732c
timeCreated: 1610383503

80
com.unity.perception/Runtime/GroundTruth/Labelers/KeyPointTemplate.cs


using System;
namespace UnityEngine.Perception.GroundTruth
{
/// <summary>
/// A definition of a keypoint (joint).
/// </summary>
[Serializable]
public class KeyPointDefinition
{
/// <summary>
/// The name of the keypoint
/// </summary>
public string label;
/// <summary>
/// Does this keypoint map directly to a <see cref="Animator"/> <see cref="Avatar"/> <see cref="HumanBodyBones"/>
/// </summary>
public bool associateToRig = true;
/// <summary>
/// The associated <see cref="HumanBodyBones"/> of the rig
/// </summary>
public HumanBodyBones rigLabel = HumanBodyBones.Head;
/// <summary>
/// The color of the keypoint in the visualization
/// </summary>
public Color color;
}
/// <summary>
/// A skeletal connection between two joints.
/// </summary>
[Serializable]
public class SkeletonDefinition
{
/// <summary>
/// The first joint
/// </summary>
public int joint1;
/// <summary>
/// The second joint
/// </summary>
public int joint2;
/// <summary>
/// The color of the skeleton in the visualization
/// </summary>
public Color color;
}
/// <summary>
/// Template used to define the keypoints of a humanoid asset.
/// </summary>
[CreateAssetMenu(fileName = "KeypointTemplate", menuName = "Perception/Keypoint Template", order = 2)]
public class KeyPointTemplate : ScriptableObject
{
/// <summary>
/// The <see cref="Guid"/> of the template
/// </summary>
public string templateID = Guid.NewGuid().ToString();
/// <summary>
/// The name of the template
/// </summary>
public string templateName;
/// <summary>
/// Texture to use for the visualization of the joint.
/// </summary>
public Texture2D jointTexture;
/// <summary>
/// Texture to use for the visualization of the skeletal connection.
/// </summary>
public Texture2D skeletonTexture;
/// <summary>
/// Array of <see cref="KeyPointDefinition"/> for the template.
/// </summary>
public KeyPointDefinition[] keyPoints;
/// <summary>
/// Array of the <see cref="SkeletonDefinition"/> for the template.
/// </summary>
public SkeletonDefinition[] skeleton;
}
}

3
com.unity.perception/Runtime/GroundTruth/Labelers/KeyPointTemplate.cs.meta


fileFormatVersion: 2
guid: 37a7d6f1a40c45a2981a6291f0d03337
timeCreated: 1610633739

109
com.unity.perception/Runtime/GroundTruth/Labelers/Visualization/VisualizationHelper.cs


namespace UnityEngine.Perception.GroundTruth
{
/// <summary>
/// Helper class that contains common visualization methods useful to ground truth labelers.
/// </summary>
public static class VisualizationHelper
{
static Texture2D s_OnePixel = new Texture2D(1, 1);
/// <summary>
/// Converts a 3D world space coordinate to image pixel space.
/// </summary>
/// <param name="camera">The rendering camera</param>
/// <param name="worldLocation">The 3D world location to convert</param>
/// <returns>The coordinate in pixel space</returns>
public static Vector3 ConvertToScreenSpace(Camera camera, Vector3 worldLocation)
{
var pt = camera.WorldToScreenPoint(worldLocation);
pt.y = Screen.height - pt.y;
return pt;
}
static Rect ToBoxRect(float x, float y, float halfSize = 3.0f)
{
return new Rect(x - halfSize, y - halfSize, halfSize * 2, halfSize * 2);
}
/// <summary>
/// Draw a point (in pixel space) on the screen
/// </summary>
/// <param name="pt">The point location, in pixel space</param>
/// <param name="color">The color of the point</param>
/// <param name="width">The width of the point</param>
/// <param name="texture">The texture to use for the point, defaults to a solid pixel</param>
public static void DrawPoint(Vector3 pt, Color color, float width = 4.0f, Texture texture = null)
{
DrawPoint(pt.x, pt.y, color, width, texture);
}
/// <summary>
/// Draw a point (in pixel space) on the screen
/// </summary>
/// <param name="x">The point's x value, in pixel space</param>
/// <param name="y">The point's y value, in pixel space</param>
/// <param name="color">The color of the point</param>
/// <param name="width">The width of the point</param>
/// <param name="texture">The texture to use for the point, defaults to a solid pixel</param>
public static void DrawPoint(float x, float y, Color color, float width = 4, Texture texture = null)
{
if (texture == null) texture = s_OnePixel;
var oldColor = GUI.color;
GUI.color = color;
GUI.DrawTexture(ToBoxRect(x, y, width * 0.5f), texture);
GUI.color = oldColor;
}
static float Magnitude(float p1X, float p1Y, float p2X, float p2Y)
{
var x = p2X - p1X;
var y = p2Y - p1Y;
return Mathf.Sqrt(x * x + y * y);
}
/// <summary>
/// Draws a texture between two locations with the passed-in width.
/// </summary>
/// <param name="p1">The start point in pixel space</param>
/// <param name="p2">The end point in pixel space</param>
/// <param name="color">The color of the line</param>
/// <param name="width">The width of the line</param>
/// <param name="texture">The texture to use, if null, will draw a solid line of passed in color</param>
public static void DrawLine(Vector2 p1, Vector2 p2, Color color, float width = 3.0f, Texture texture = null)
{
DrawLine(p1.x, p1.y, p2.x, p2.y, color, width, texture);
}
/// <summary>
/// Draws a texture between two locations with the passed-in width.
/// </summary>
/// <param name="p1X">The start point's x coordinate in pixel space</param>
/// <param name="p1Y">The start point's y coordinate in pixel space</param>
/// <param name="p2X">The end point's x coordinate in pixel space</param>
/// <param name="p2Y">The end point's y coordinate in pixel space</param>
/// <param name="color">The color of the line</param>
/// <param name="width">The width of the line</param>
/// <param name="texture">The texture to use, if null, will draw a solid line of passed in color</param>
public static void DrawLine (float p1X, float p1Y, float p2X, float p2Y, Color color, float width = 3.0f, Texture texture = null)
{
if (texture == null) texture = s_OnePixel;
var oldColor = GUI.color;
GUI.color = color;
var matrixBackup = GUI.matrix;
var angle = Mathf.Atan2 (p2Y - p1Y, p2X - p1X) * 180f / Mathf.PI;
var length = Magnitude(p1X, p1Y, p2X, p2Y);
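// Rotate GUI space around the start point so a horizontal rect of the computed length spans p1 -> p2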
GUIUtility.RotateAroundPivot (angle, new Vector2(p1X, p1Y));
var halfWidth = width * 0.5f;
GUI.DrawTexture (new Rect (p1X - halfWidth, p1Y - halfWidth, length, width), texture);
GUI.matrix = matrixBackup;
GUI.color = oldColor;
}
}
}

3
com.unity.perception/Runtime/GroundTruth/Labelers/Visualization/VisualizationHelper.cs.meta


fileFormatVersion: 2
guid: b4ef50bfa62549848a6d12c049397eba
timeCreated: 1611019489

72
com.unity.perception/Runtime/GroundTruth/Resources/AnimationRandomizerController.controller


%YAML 1.1
%TAG !u! tag:unity3d.com,2011:
--- !u!91 &9100000
AnimatorController:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_Name: AnimationRandomizerController
serializedVersion: 5
m_AnimatorParameters: []
m_AnimatorLayers:
- serializedVersion: 5
m_Name: Base Layer
m_StateMachine: {fileID: 1300458365308884037}
m_Mask: {fileID: 0}
m_Motions: []
m_Behaviours: []
m_BlendingMode: 0
m_SyncedLayerIndex: -1
m_DefaultWeight: 0
m_IKPass: 0
m_SyncedLayerAffectsTiming: 0
m_Controller: {fileID: 9100000}
--- !u!1107 &1300458365308884037
AnimatorStateMachine:
serializedVersion: 5
m_ObjectHideFlags: 1
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_Name: Base Layer
m_ChildStates:
- serializedVersion: 1
m_State: {fileID: 3212544235944076811}
m_Position: {x: 390, y: 320, z: 0}
m_ChildStateMachines: []
m_AnyStateTransitions: []
m_EntryTransitions: []
m_StateMachineTransitions: {}
m_StateMachineBehaviours: []
m_AnyStatePosition: {x: 50, y: 20, z: 0}
m_EntryPosition: {x: 50, y: 120, z: 0}
m_ExitPosition: {x: 800, y: 120, z: 0}
m_ParentStateMachinePosition: {x: 800, y: 20, z: 0}
m_DefaultState: {fileID: 3212544235944076811}
--- !u!1102 &3212544235944076811
AnimatorState:
serializedVersion: 5
m_ObjectHideFlags: 1
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_Name: RandomState
m_Speed: 1
m_CycleOffset: 0
m_Transitions: []
m_StateMachineBehaviours: []
m_Position: {x: 50, y: 50, z: 0}
m_IKOnFeet: 0
m_WriteDefaultValues: 1
m_Mirror: 0
m_SpeedParameterActive: 0
m_MirrorParameterActive: 0
m_CycleOffsetParameterActive: 0
m_TimeParameterActive: 0
m_Motion: {fileID: 7400000, guid: 11ff68761e3b74dbd84106361e9a4ec7, type: 2}
m_Tag:
m_SpeedParameter:
m_MirrorParameter:
m_CycleOffsetParameter:
m_TimeParameter:

8
com.unity.perception/Runtime/GroundTruth/Resources/AnimationRandomizerController.controller.meta


fileFormatVersion: 2
guid: 492c555d00e884241a46c09b49877983
NativeFormatImporter:
externalObjects: {}
mainObjectFileID: 9100000
userData:
assetBundleName:
assetBundleVariant:

1001
com.unity.perception/Runtime/GroundTruth/Resources/PlayerIdle.anim
File diff is too large to display.

8
com.unity.perception/Runtime/GroundTruth/Resources/PlayerIdle.anim.meta


fileFormatVersion: 2
guid: 11ff68761e3b74dbd84106361e9a4ec7
NativeFormatImporter:
externalObjects: {}
mainObjectFileID: 0
userData:
assetBundleName:
assetBundleVariant:

6
com.unity.perception/Runtime/GroundTruth/Resources/joint_circle.png

Width: 32  |  Height: 32  |  Size: 847 B

108
com.unity.perception/Runtime/GroundTruth/Resources/joint_circle.png.meta


fileFormatVersion: 2
guid: e381cbaaf29614168bafc8f7ec5dbfe9
TextureImporter:
internalIDToNameTable: []
externalObjects: {}
serializedVersion: 11
mipmaps:
mipMapMode: 0
enableMipMap: 0
sRGBTexture: 1
linearTexture: 0
fadeOut: 0
borderMipMap: 0
mipMapsPreserveCoverage: 0
alphaTestReferenceValue: 0.5
mipMapFadeDistanceStart: 1
mipMapFadeDistanceEnd: 3
bumpmap:
convertToNormalMap: 0
externalNormalMap: 0
heightScale: 0.25
normalMapFilter: 0
isReadable: 0
streamingMipmaps: 0
streamingMipmapsPriority: 0
vTOnly: 0
grayScaleToAlpha: 0
generateCubemap: 6
cubemapConvolution: 0
seamlessCubemap: 0
textureFormat: 1
maxTextureSize: 2048
textureSettings:
serializedVersion: 2
filterMode: -1
aniso: -1
mipBias: -100
wrapU: 1
wrapV: 1
wrapW: -1
nPOTScale: 0
lightmap: 0
compressionQuality: 50
spriteMode: 1
spriteExtrude: 1
spriteMeshType: 1
alignment: 0
spritePivot: {x: 0.5, y: 0.5}
spritePixelsToUnits: 100
spriteBorder: {x: 0, y: 0, z: 0, w: 0}
spriteGenerateFallbackPhysicsShape: 1
alphaUsage: 1
alphaIsTransparency: 1
spriteTessellationDetail: -1
textureType: 8
textureShape: 1
singleChannelComponent: 0
flipbookRows: 1
flipbookColumns: 1
maxTextureSizeSet: 0
compressionQualitySet: 0
textureFormatSet: 0
ignorePngGamma: 0
applyGammaDecoding: 0
platformSettings:
- serializedVersion: 3
buildTarget: DefaultTexturePlatform
maxTextureSize: 2048
resizeAlgorithm: 0
textureFormat: -1
textureCompression: 1
compressionQuality: 50
crunchedCompression: 0
allowsAlphaSplitting: 0
overridden: 0
androidETC2FallbackOverride: 0
forceMaximumCompressionQuality_BC6H_BC7: 0
- serializedVersion: 3
buildTarget: Standalone
maxTextureSize: 2048
resizeAlgorithm: 0
textureFormat: -1
textureCompression: 1
compressionQuality: 50
crunchedCompression: 0
allowsAlphaSplitting: 0
overridden: 0
androidETC2FallbackOverride: 0
forceMaximumCompressionQuality_BC6H_BC7: 0
spriteSheet:
serializedVersion: 2
sprites: []
outline: []
physicsShape: []
bones: []
spriteID: 5e97eb03825dee720800000000000000
internalID: 0
vertices: []
indices:
edges: []
weights: []
secondaryTextures: []
spritePackingTag:
pSDRemoveMatte: 0
pSDShowRemoveMatteOption: 0
userData:
assetBundleName:
assetBundleVariant:

10
com.unity.perception/Runtime/Randomization/Parameters/ParameterTypes/CategorialParameters/AnimationClipParameter.cs


using System;
namespace UnityEngine.Experimental.Perception.Randomization.Parameters
{
/// <summary>
/// A categorical parameter for animation clips
/// </summary>
[Serializable]
public class AnimationClipParameter : CategoricalParameter<AnimationClip> { }
}

3
com.unity.perception/Runtime/Randomization/Parameters/ParameterTypes/CategorialParameters/AnimationClipParameter.cs.meta


fileFormatVersion: 2
guid: 6af6ee532f5e4e4b83353f2f32105665
timeCreated: 1610935882

46
com.unity.perception/Runtime/Randomization/Randomizers/RandomizerExamples/Randomizers/AnimationRandomizer.cs


using System;
using UnityEngine;
using UnityEngine.Experimental.Perception.Randomization.Parameters;
using UnityEngine.Experimental.Perception.Randomization.Randomizers.SampleRandomizers.Tags;
using UnityEngine.Experimental.Perception.Randomization.Samplers;
namespace UnityEngine.Experimental.Perception.Randomization.Randomizers.SampleRandomizers
{
/// <summary>
/// Chooses a random frame of a random clip for a game object
/// </summary>
[Serializable]
[AddRandomizerMenu("Perception/Animation Randomizer")]
public class AnimationRandomizer : Randomizer
{
FloatParameter m_FloatParameter = new FloatParameter{ value = new UniformSampler(0, 1) };
const string k_ClipName = "PlayerIdle";
const string k_StateName = "Base Layer.RandomState";
void RandomizeAnimation(AnimationRandomizerTag tag)
{
var animator = tag.gameObject.GetComponent<Animator>();
animator.applyRootMotion = tag.applyRootMotion;
var overrider = tag.animatorOverrideController;
if (overrider != null && tag.animationClips.GetCategoryCount() > 0)
{
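// Swap the placeholder clip in the override controller for a sampled clip, then play the state from a random normalized time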
overrider[k_ClipName] = tag.animationClips.Sample();
animator.Play(k_StateName, 0, m_FloatParameter.Sample());
}
}
/// <inheritdoc/>
protected override void OnIterationStart()
{
if (m_FloatParameter == null) m_FloatParameter = new FloatParameter{ value = new UniformSampler(0, 1) };
var taggedObjects = tagManager.Query<AnimationRandomizerTag>();
foreach (var taggedObject in taggedObjects)
{
RandomizeAnimation(taggedObject);
}
}
}
}

3
com.unity.perception/Runtime/Randomization/Randomizers/RandomizerExamples/Randomizers/AnimationRandomizer.cs.meta


fileFormatVersion: 2
guid: 8b57910cfd4a4dec90d6aa4a8ef824da
timeCreated: 1610802187

48
com.unity.perception/Runtime/Randomization/Randomizers/RandomizerExamples/Tags/AnimationRandomizerTag.cs


using UnityEngine.Experimental.Perception.Randomization.Parameters;
using UnityEngine.Perception.GroundTruth;
namespace UnityEngine.Experimental.Perception.Randomization.Randomizers.SampleRandomizers.Tags
{
/// <summary>
/// Used in conjunction with a <see cref="AnimationRandomizer"/> to select a random animation frame for
/// the tagged game object
/// </summary>
[RequireComponent(typeof(Animator))]
[AddComponentMenu("Perception/RandomizerTags/Animation Randomizer Tag")]
public class AnimationRandomizerTag : RandomizerTag
{
/// <summary>
/// A list of animation clips from which to choose
/// </summary>
public AnimationClipParameter animationClips;
/// <summary>
/// Apply the root motion to the animator. If true, any translation and/or rotation in the animation
/// is applied to the labeled model, which means the model may move to a new position/rotation.
/// If false, then the model will stay at its current position/rotation.
/// </summary>
public bool applyRootMotion = false;
/// <summary>
/// Gets the animation override controller for an animation randomization. The controller is loaded from
/// resources.
/// </summary>
public AnimatorOverrideController animatorOverrideController
{
get
{
if (m_Controller == null)
{
var animator = gameObject.GetComponent<Animator>();
var runtimeAnimatorController = Resources.Load<RuntimeAnimatorController>("AnimationRandomizerController");
m_Controller = new AnimatorOverrideController(runtimeAnimatorController);
animator.runtimeAnimatorController = m_Controller;
}
return m_Controller;
}
}
AnimatorOverrideController m_Controller;
}
}

3
com.unity.perception/Runtime/Randomization/Randomizers/RandomizerExamples/Tags/AnimationRandomizerTag.cs.meta


fileFormatVersion: 2
guid: f8943e41c7c34facb177a5decc1b2aef
timeCreated: 1610802367

307
com.unity.perception/Tests/Runtime/GroundTruthTests/KeyPointGroundTruthTests.cs


using System;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using NUnit.Framework;
using UnityEngine;
using UnityEngine.Perception.GroundTruth;
using UnityEngine.TestTools;

namespace GroundTruthTests
{
    [TestFixture]
    public class KeyPointGroundTruthTests : GroundTruthTestBase
    {
        static GameObject SetupCamera(IdLabelConfig config, KeyPointTemplate template, Action<List<KeyPointLabeler.KeyPointEntry>> computeListener)
        {
            var cameraObject = new GameObject();
            cameraObject.SetActive(false);
            var camera = cameraObject.AddComponent<Camera>();
            camera.orthographic = false;
            camera.fieldOfView = 60;
            camera.nearClipPlane = 0.3f;
            camera.farClipPlane = 1000;
            camera.transform.position = new Vector3(0, 0, -10);

            var perceptionCamera = cameraObject.AddComponent<PerceptionCamera>();
            perceptionCamera.captureRgbImages = false;
            var keyPointLabeler = new KeyPointLabeler(config, template);
            if (computeListener != null)
                keyPointLabeler.KeyPointsComputed += computeListener;

            perceptionCamera.AddLabeler(keyPointLabeler);
            return cameraObject;
        }

        static KeyPointTemplate CreateTestTemplate(Guid guid, string label)
        {
            // Nine keypoints: the eight corners of a cube plus a "Center" point
            // that the tests deliberately never attach a joint to
            var keyPoints = new[]
            {
                new KeyPointDefinition { label = "FrontLowerLeft", associateToRig = false, color = Color.black },
                new KeyPointDefinition { label = "FrontUpperLeft", associateToRig = false, color = Color.black },
                new KeyPointDefinition { label = "FrontUpperRight", associateToRig = false, color = Color.black },
                new KeyPointDefinition { label = "FrontLowerRight", associateToRig = false, color = Color.black },
                new KeyPointDefinition { label = "BackLowerLeft", associateToRig = false, color = Color.black },
                new KeyPointDefinition { label = "BackUpperLeft", associateToRig = false, color = Color.black },
                new KeyPointDefinition { label = "BackUpperRight", associateToRig = false, color = Color.black },
                new KeyPointDefinition { label = "BackLowerRight", associateToRig = false, color = Color.black },
                new KeyPointDefinition { label = "Center", associateToRig = false, color = Color.black }
            };

            // Twelve skeleton connections: the front face (magenta), the back face (blue),
            // and the four edges joining the two faces (green)
            var skeleton = new[]
            {
                new SkeletonDefinition { joint1 = 0, joint2 = 1, color = Color.magenta },
                new SkeletonDefinition { joint1 = 1, joint2 = 2, color = Color.magenta },
                new SkeletonDefinition { joint1 = 2, joint2 = 3, color = Color.magenta },
                new SkeletonDefinition { joint1 = 3, joint2 = 0, color = Color.magenta },
                new SkeletonDefinition { joint1 = 4, joint2 = 5, color = Color.blue },
                new SkeletonDefinition { joint1 = 5, joint2 = 6, color = Color.blue },
                new SkeletonDefinition { joint1 = 6, joint2 = 7, color = Color.blue },
                new SkeletonDefinition { joint1 = 7, joint2 = 4, color = Color.blue },
                new SkeletonDefinition { joint1 = 0, joint2 = 4, color = Color.green },
                new SkeletonDefinition { joint1 = 1, joint2 = 5, color = Color.green },
                new SkeletonDefinition { joint1 = 2, joint2 = 6, color = Color.green },
                new SkeletonDefinition { joint1 = 3, joint2 = 7, color = Color.green },
            };

            var template = ScriptableObject.CreateInstance<KeyPointTemplate>();
            template.templateID = guid.ToString();
            template.templateName = label;
            template.jointTexture = null;
            template.skeletonTexture = null;
            template.keyPoints = keyPoints;
            template.skeleton = skeleton;

            return template;
        }

        [Test]
        public void KeypointTemplate_CreateTemplateTest()
        {
            var guid = Guid.NewGuid();
            const string label = "TestTemplate";
            var template = CreateTestTemplate(guid, label);

            Assert.AreEqual(template.templateID, guid.ToString());
            Assert.AreEqual(template.templateName, label);
            Assert.IsNull(template.jointTexture);
            Assert.IsNull(template.skeletonTexture);
            Assert.IsNotNull(template.keyPoints);
            Assert.IsNotNull(template.skeleton);
            Assert.AreEqual(template.keyPoints.Length, 9);
            Assert.AreEqual(template.skeleton.Length, 12);

            var k0 = template.keyPoints[0];
            Assert.NotNull(k0);
            Assert.AreEqual(k0.label, "FrontLowerLeft");
            Assert.False(k0.associateToRig);
            Assert.AreEqual(k0.color, Color.black);

            var s0 = template.skeleton[0];
            Assert.NotNull(s0);
            Assert.AreEqual(s0.joint1, 0);
            Assert.AreEqual(s0.joint2, 1);
            Assert.AreEqual(s0.color, Color.magenta);
        }

        static IdLabelConfig SetUpLabelConfig()
        {
            var cfg = ScriptableObject.CreateInstance<IdLabelConfig>();
            cfg.Init(new List<IdLabelEntry>()
            {
                new IdLabelEntry
                {
                    id = 1,
                    label = "label"
                }
            });

            return cfg;
        }

        static void SetupCubeJoint(GameObject cube, KeyPointTemplate template, string label, float x, float y, float z)
        {
            var joint = new GameObject();
            joint.transform.parent = cube.transform;
            joint.transform.localPosition = new Vector3(x, y, z);
            var jointLabel = joint.AddComponent<JointLabel>();
            jointLabel.templateInformation = new List<JointLabel.TemplateData>();
            var templateData = new JointLabel.TemplateData
            {
                template = template,
                label = label
            };
            jointLabel.templateInformation.Add(templateData);
        }

        static void SetupCubeJoints(GameObject cube, KeyPointTemplate template)
        {
            // Place a labeled joint at each of the cube's eight corners; the "Center"
            // keypoint gets no joint, so it should report as not present
            SetupCubeJoint(cube, template, "FrontLowerLeft", -0.5f, -0.5f, -0.5f);
            SetupCubeJoint(cube, template, "FrontUpperLeft", -0.5f, 0.5f, -0.5f);
            SetupCubeJoint(cube, template, "FrontUpperRight", 0.5f, 0.5f, -0.5f);
            SetupCubeJoint(cube, template, "FrontLowerRight", 0.5f, -0.5f, -0.5f);
            SetupCubeJoint(cube, template, "BackLowerLeft", -0.5f, -0.5f, 0.5f);
            SetupCubeJoint(cube, template, "BackUpperLeft", -0.5f, 0.5f, 0.5f);
            SetupCubeJoint(cube, template, "BackUpperRight", 0.5f, 0.5f, 0.5f);
            SetupCubeJoint(cube, template, "BackLowerRight", 0.5f, -0.5f, 0.5f);
        }

        [UnityTest]
        public IEnumerator Keypoint_TestStaticLabeledCube()
        {
            var incoming = new List<List<KeyPointLabeler.KeyPointEntry>>();
            var template = CreateTestTemplate(Guid.NewGuid(), "TestTemplate");
            var cam = SetupCamera(SetUpLabelConfig(), template, data => incoming.Add(data));

            var cube = TestHelper.CreateLabeledCube(scale: 6, z: 8);
            SetupCubeJoints(cube, template);

            cube.SetActive(true);
            cam.SetActive(true);

            AddTestObjectForCleanup(cam);
            AddTestObjectForCleanup(cube);

            yield return null;
            yield return null;

            var testCase = incoming.Last();
            Assert.AreEqual(1, testCase.Count);
            var t = testCase.First();
            Assert.NotNull(t);
            Assert.AreEqual(1, t.instance_id);
            Assert.AreEqual(1, t.label_id);
            Assert.AreEqual(template.templateID, t.template_guid);
            Assert.AreEqual(9, t.keypoints.Length);

            // The cube faces the camera dead-on, so vertically stacked corners share
            // a screen-space x and horizontally adjacent corners share a screen-space y
            Assert.AreEqual(t.keypoints[0].x, t.keypoints[1].x);
            Assert.AreEqual(t.keypoints[2].x, t.keypoints[3].x);
            Assert.AreEqual(t.keypoints[4].x, t.keypoints[5].x);
            Assert.AreEqual(t.keypoints[6].x, t.keypoints[7].x);

            Assert.AreEqual(t.keypoints[0].y, t.keypoints[3].y);
            Assert.AreEqual(t.keypoints[1].y, t.keypoints[2].y);
            Assert.AreEqual(t.keypoints[4].y, t.keypoints[7].y);
            Assert.AreEqual(t.keypoints[5].y, t.keypoints[6].y);

            for (var i = 0; i < 9; i++) Assert.AreEqual(i, t.keypoints[i].index);
            for (var i = 0; i < 8; i++) Assert.AreEqual(1, t.keypoints[i].state);

            // The unmapped "Center" keypoint should report state 0 at the origin
            Assert.Zero(t.keypoints[8].state);
            Assert.Zero(t.keypoints[8].x);
            Assert.Zero(t.keypoints[8].y);
        }
    }
}
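
Reviewer note: the symmetry assertions in Keypoint_TestStaticLabeledCube hold because the cube sits centered on the camera's view axis. The expected screen-space relationships can be sanity-checked with Camera.WorldToScreenPoint; the sketch below is a hypothetical helper (not part of this change) that illustrates the check for one pair of corners, assuming the same camera and cube placement as the test.

using UnityEngine;

// Sanity-check sketch: with the camera on the z-axis looking at a cube centered
// on that axis, corners that differ only in y project to the same screen x.
public static class KeypointProjectionCheck
{
    // Expects a camera configured like the test: FOV 60, positioned at (0, 0, -10)
    public static void Check(Camera camera)
    {
        // Same placement as the test: local corner (+/-0.5) * scale 6 + cube position (0, 0, 8)
        var frontLowerLeft = new Vector3(-3f, -3f, 5f);
        var frontUpperLeft = new Vector3(-3f, 3f, 5f);

        var p0 = camera.WorldToScreenPoint(frontLowerLeft);
        var p1 = camera.WorldToScreenPoint(frontUpperLeft);

        // Corners stacked vertically share a screen-space x
        Debug.Assert(Mathf.Approximately(p0.x, p1.x));
    }
}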

11
com.unity.perception/Tests/Runtime/GroundTruthTests/KeyPointGroundTruthTests.cs.meta


fileFormatVersion: 2
guid: c62092ba10e4e4a80a0ec03e6e92593a
MonoImporter:
  externalObjects: {}
  serializedVersion: 2
  defaultReferences: []
  executionOrder: 0
  icon: {instanceID: 0}
  userData:
  assetBundleName:
  assetBundleVariant: