浏览代码

resolved merge conflict with dev-0.3 branch

/develop-generalizationTraining-TrainerController
Joe Ward 7 年前
当前提交
9163a54a
共有 159 个文件被更改,包括 11675 次插入4573 次删除
  1. 21
      README.md
  2. 9
      docs/Background-Jupyter.md
  3. 17
      docs/Getting-Started-with-Balance-Ball.md
  4. 2
      docs/Learning-Environment-Design-Brains.md
  5. 12
      docs/Learning-Environment-Examples.md
  6. 52
      docs/Limitations-and-Common-Issues.md
  7. 6
      docs/ML-Agents-Overview.md
  8. 3
      docs/Readme.md
  9. 32
      docs/Using-Docker.md
  10. 215
      docs/images/ml-agents-ODD.png
  11. 865
      docs/images/wall.png
  12. 54
      python/Basics.ipynb
  13. 4
      python/tests/test_bc.py
  14. 4
      python/tests/test_ppo.py
  15. 2
      python/tests/test_unityagents.py
  16. 2
      python/tests/test_unitytrainers.py
  17. 69
      python/trainer_config.yaml
  18. 10
      python/unityagents/environment.py
  19. 13
      python/unitytrainers/models.py
  20. 6
      python/unitytrainers/ppo/models.py
  21. 6
      python/unitytrainers/ppo/trainer.py
  22. 29
      unity-environment/Assets/ML-Agents/Examples/3DBall/Scripts/Ball3DDecision.cs
  23. 942
      unity-environment/Assets/ML-Agents/Examples/Banana/BananaImitation.unity
  24. 964
      unity-environment/Assets/ML-Agents/Examples/Banana/BananaRL.unity
  25. 4
      unity-environment/Assets/ML-Agents/Examples/Banana/Prefabs/BANANA.prefab
  26. 36
      unity-environment/Assets/ML-Agents/Examples/Banana/Prefabs/StudentAgent.prefab
  27. 40
      unity-environment/Assets/ML-Agents/Examples/Banana/Prefabs/TeacherAgent.prefab
  28. 4
      unity-environment/Assets/ML-Agents/Examples/Banana/Prefabs/BADBANANA.prefab
  29. 74
      unity-environment/Assets/ML-Agents/Examples/Banana/Scripts/BananaAgent.cs
  30. 22
      unity-environment/Assets/ML-Agents/Examples/Basic/Scripts/BasicDecision.cs
  31. 219
      unity-environment/Assets/ML-Agents/Examples/Crawler/Crawler.unity
  32. 964
      unity-environment/Assets/ML-Agents/Examples/Crawler/TFModels/crawler.bytes
  33. 41
      unity-environment/Assets/ML-Agents/Examples/Hallway/Prefabs/HallwayArea.prefab
  34. 884
      unity-environment/Assets/ML-Agents/Examples/Hallway/Scenes/Hallway.unity
  35. 198
      unity-environment/Assets/ML-Agents/Examples/Hallway/Scripts/HallwayAgent.cs
  36. 927
      unity-environment/Assets/ML-Agents/Examples/Hallway/TFModels/Hallway.bytes
  37. 5
      unity-environment/Assets/ML-Agents/Examples/Hallway/TFModels/Hallway.bytes.meta
  38. 2
      unity-environment/Assets/ML-Agents/Examples/PushBlock/Materials/groundPushblock.mat
  39. 108
      unity-environment/Assets/ML-Agents/Examples/PushBlock/Prefabs/PushBlockArea.prefab
  40. 180
      unity-environment/Assets/ML-Agents/Examples/PushBlock/Scenes/PushBlock.unity
  41. 150
      unity-environment/Assets/ML-Agents/Examples/PushBlock/Scripts/PushAgentBasic.cs
  42. 994
      unity-environment/Assets/ML-Agents/Examples/PushBlock/TFModels/PushBlock.bytes
  43. 2
      unity-environment/Assets/ML-Agents/Examples/Reacher/Materials/Materials/checker1.mat
  44. 4
      unity-environment/Assets/ML-Agents/Examples/Soccer/Prefabs/SoccerBall/Prefabs/SoccerBall.prefab
  45. 67
      unity-environment/Assets/ML-Agents/Examples/Soccer/Scenes/SoccerTwos.unity
  46. 147
      unity-environment/Assets/ML-Agents/Examples/Soccer/Scripts/AgentSoccer.cs
  47. 19
      unity-environment/Assets/ML-Agents/Examples/Template/Scripts/TemplateDecision.cs
  48. 31
      unity-environment/Assets/ML-Agents/Examples/SharedAssets/Scripts/RandomDecision.cs
  49. 2
      unity-environment/Assets/ML-Agents/Examples/SharedAssets/Materials/SuccessGround.mat
  50. 2
      unity-environment/Assets/ML-Agents/Examples/SharedAssets/Materials/Obstacle.mat
  51. 2
      unity-environment/Assets/ML-Agents/Examples/SharedAssets/Materials/Goal.mat
  52. 2
      unity-environment/Assets/ML-Agents/Examples/SharedAssets/Materials/FailGround.mat
  53. 2
      unity-environment/Assets/ML-Agents/Examples/SharedAssets/Materials/Block.mat
  54. 2
      unity-environment/Assets/ML-Agents/Examples/SharedAssets/Materials/Ball.mat
  55. 2
      unity-environment/Assets/ML-Agents/Examples/SharedAssets/Materials/MaterialsGrid/pitMaterial.mat
  56. 2
      unity-environment/Assets/ML-Agents/Examples/SharedAssets/Materials/MaterialsGrid/goalMaterial.mat
  57. 2
      unity-environment/Assets/ML-Agents/Examples/SharedAssets/Materials/MaterialsGrid/agentMaterial.mat
  58. 2
      unity-environment/Assets/ML-Agents/Examples/SharedAssets/Materials/MaterialsBall/Materials/logo2.mat
  59. 2
      unity-environment/Assets/ML-Agents/Examples/SharedAssets/Materials/MaterialsBall/Materials/logo1.mat
  60. 2
      unity-environment/Assets/ML-Agents/Scripts/Brain.cs
  61. 71
      unity-environment/Assets/ML-Agents/Scripts/Decision.cs
  62. 4
      unity-environment/Assets/ML-Agents/Scripts/ExternalCommunicator.cs
  63. 160
      unity-environment/Assets/ML-Agents/Scripts/Monitor.cs
  64. 6
      unity-environment/ProjectSettings/TagManager.asset
  65. 39
      docs/Feature-On-Demand-Decisions.md
  66. 30
      docs/Migrating-v0.3.md
  67. 1001
      unity-environment/Assets/ML-Agents/Examples/Banana/Prefabs/RLAgent.prefab
  68. 1001
      unity-environment/Assets/ML-Agents/Examples/Banana/Prefabs/RLArea.prefab
  69. 8
      unity-environment/Assets/ML-Agents/Examples/Banana/Prefabs/RLArea.prefab.meta
  70. 1001
      unity-environment/Assets/ML-Agents/Examples/Banana/Prefabs/TeachingArea.prefab
  71. 1001
      unity-environment/Assets/ML-Agents/Examples/Banana/TFModels/BananaRL.bytes
  72. 7
      unity-environment/Assets/ML-Agents/Examples/Banana/TFModels/BananaRL.bytes.meta
  73. 9
      unity-environment/Assets/ML-Agents/Examples/WallJump.meta
  74. 80
      unity-environment/Assets/ML-Agents/Examples/SharedAssets/Scripts/RayPerception.cs
  75. 11
      unity-environment/Assets/ML-Agents/Examples/SharedAssets/Scripts/RayPerception.cs.meta
  76. 10
      unity-environment/Assets/ML-Agents/Examples/WallJump/Material.meta
  77. 77
      unity-environment/Assets/ML-Agents/Examples/WallJump/Material/spawnVolumeMaterial.mat
  78. 10
      unity-environment/Assets/ML-Agents/Examples/WallJump/Material/spawnVolumeMaterial.mat.meta
  79. 77
      unity-environment/Assets/ML-Agents/Examples/WallJump/Material/wallMaterial.mat
  80. 10
      unity-environment/Assets/ML-Agents/Examples/WallJump/Material/wallMaterial.mat.meta
  81. 10
      unity-environment/Assets/ML-Agents/Examples/WallJump/Prefabs.meta
  82. 700
      unity-environment/Assets/ML-Agents/Examples/WallJump/Prefabs/WallJumpArea.prefab
  83. 9
      unity-environment/Assets/ML-Agents/Examples/WallJump/Prefabs/WallJumpArea.prefab.meta
  84. 10
      unity-environment/Assets/ML-Agents/Examples/WallJump/Scenes.meta
  85. 1001
      unity-environment/Assets/ML-Agents/Examples/WallJump/Scenes/WallJump.unity
  86. 9
      unity-environment/Assets/ML-Agents/Examples/WallJump/Scenes/WallJump.unity.meta
  87. 10
      unity-environment/Assets/ML-Agents/Examples/WallJump/Scripts.meta
  88. 28
      unity-environment/Assets/ML-Agents/Examples/WallJump/Scripts/WallJumpAcademy.cs
  89. 13
      unity-environment/Assets/ML-Agents/Examples/WallJump/Scripts/WallJumpAcademy.cs.meta
  90. 327
      unity-environment/Assets/ML-Agents/Examples/WallJump/Scripts/WallJumpAgent.cs
  91. 13
      unity-environment/Assets/ML-Agents/Examples/WallJump/Scripts/WallJumpAgent.cs.meta
  92. 10
      unity-environment/Assets/ML-Agents/Examples/WallJump/TFModels.meta
  93. 1001
      unity-environment/Assets/ML-Agents/Examples/WallJump/TFModels/WallJump.bytes
  94. 9
      unity-environment/Assets/ML-Agents/Examples/WallJump/TFModels/WallJump.bytes.meta

21
README.md


to the wider research and game developer communities.
## Features
* Unity Engine flexibility and simplicity
* Flexible single-agent and multi-agent support
* Multiple visual observations (cameras)
* Discrete and continuous action spaces
* Unity environment control from Python
* 10+ sample Unity environments
* Support for multiple environment configurations and training scenarios
* Train memory-enhanced Agents using deep reinforcement learning
* Built-in support for Imitation Learning (coming soon)
* Built-in support for Imitation Learning
* Flexible Agent control with On Demand Decision Making
* Python control interface
* TensorFlow Sharp Agent Embedding _[Experimental]_
* Simplified set-up with Docker _(Experimental)_
For more information on ML-Agents, in addition to installation, and usage
instructions, see our [documentation home](docs).
**For more information, in addition to installation and usage
instructions, see our [documentation home](docs/README.md).**
We have also published a series of blog posts that are relevant for ML-Agents:
- Overviewing reinforcement learning concepts

- [Post]() announcing the winners of our
- [Post](https://blogs.unity3d.com/2018/02/28/introducing-the-winners-of-the-first-ml-agents-challenge/) announcing the winners of our
[first ML-Agents Challenge](https://connect.unity.com/challenges/ml-agents-1)
- [Post](https://blogs.unity3d.com/2018/01/23/designing-safer-cities-through-simulations/)
overviewing how Unity can be leveraged as a simulator to design safer cities.

- [A Game Developer Learns Machine Learning](https://mikecann.co.uk/machine-learning/a-game-developer-learns-machine-learning-intent/)
- [Unity3D Machine Learning – Setting up the environment & TensorFlow for AgentML on Windows 10](https://unity3d.college/2017/10/25/machine-learning-in-unity3d-setting-up-the-environment-tensorflow-for-agentml-on-windows-10/)
- [Explore Unity Technologies ML-Agents Exclusively on Intel Architecture](https://software.intel.com/en-us/articles/explore-unity-technologies-ml-agents-exclusively-on-intel-architecture)
## Community and Feedback

9
docs/Background-Jupyter.md


# Jupyter
# Background: Jupyter
embedded visualizations. We provide one such notebook, `Basics.ipynb`, for
testing the Python API.
embedded visualizations. We provide one such notebook, `python/Basics.ipynb`,
for testing the Python control interface to a Unity build. This notebook is
introduced in the
[Getting Started with the 3D Balance Ball Environment](Getting-Started-with-Balance-Ball.md)
tutorial, but can be used for testing the connection to any Unity build.
For a walkthrough of how to use Jupyter, see
[Running the Jupyter Notebook](http://jupyter-notebook-beginner-guide.readthedocs.io/en/latest/execute.html)

17
docs/Getting-Started-with-Balance-Ball.md


# Getting Started with the 3D Balance Ball Example
# Getting Started with the 3D Balance Ball Environment
This tutorial walks through the end-to-end process of opening an ML-Agents
example environment in Unity, building the Unity executable, training an agent

environments or as ways to test new ML algorithms. After reading this tutorial,
you should be able to explore and build the example environments.
![Balance Ball](images/balance.png)
![3D Balance Ball](images/balance.png)
This walk-through uses the **3D Balance Ball** environment. 3D Balance Ball contains
a number of platforms and balls (which are all copies of each other).

In order to install and set up ML-Agents, the Python dependencies and Unity,
see the [installation instructions](Installation.md).
## Understanding a Unity Environment (Balance Ball)
## Understanding a Unity Environment (3D Balance Ball)
An agent is an autonomous actor that observes and interacts with an
_environment_. In the context of Unity, an environment is a scene containing

The first thing you may notice after opening the 3D Balance Ball scene is that
it contains not one, but several platforms. Each platform in the scene is an
independent agent, but they all share the same brain. Balance Ball does this
independent agent, but they all share the same brain. 3D Balance Ball does this
to speed up training since all twelve agents contribute to training in parallel.
### Academy

## Training the Brain with Reinforcement Learning
Now that we have a Unity executable containing the simulation environment, we
can perform the training.
can perform the training. To first ensure that your environment and the Python
API work as expected, you can use the `python/Basics`
[Jupyter notebook](Background-Jupyter.md).
This notebook contains a simple walkthrough of the functionality of the API.
Within `Basics`, be sure to set `env_name` to the name of the environment file
you built earlier.
### Training with PPO

* Lesson - only interesting when performing
[curriculum training](Training-Curriculum-Learning.md).
This is not used in the 3d Balance Ball environment.
This is not used in the 3D Balance Ball environment.
* Cumulative Reward - The mean cumulative episode reward over all agents.
Should increase during a successful training session.
* Entropy - How random the decisions of the model are. Should slowly decrease

2
docs/Learning-Environment-Design-Brains.md


# Brains
The Brain encapsulates the decision making process. Brain objects must be children of the Academy in the Unity scene hierarchy. Every Agent must be assigned a Brain, but you can use the same Brain with more than one Agent.
The Brain encapsulates the decision making process. Brain objects must be children of the Academy in the Unity scene hierarchy. Every Agent must be assigned a Brain, but you can use the same Brain with more than one Agent. You can also create several Brains, attach each of the Brain to one or more than one Agent.
Use the Brain class directly, rather than a subclass. Brain behavior is determined by the **Brain Type**. ML-Agents defines four Brain Types:

12
docs/Learning-Environment-Examples.md


* Visual Observations: 0
* Reset Parameters: None
## 3DBall
## 3DBall: 3D Balance Ball
![Balance Ball](images/balance.png)
![3D Balance Ball](images/balance.png)
* Set-up: A balance-ball task, where the agent controls the platform.
* Goal: The agent must balance the platform in order to keep the ball on it for as long as possible.

* Set-up: A platforming environment where the agent can jump over a wall.
* Goal: The agent must use the block to scale the wall and reach the goal.
* Agents: The environment contains one agent linked to a single brain.
* Agents: The environment contains one agent linked to two different brains. The brain the agent is linked to changes depending on the height of the wall.
* -0.01 for every step.
* -0.0005 for every step.
* Vector Action space: (Discrete) Size of 6, corresponding to movement in cardinal directions, jumping, and no movement.
* Vector Action space: (Discrete) Size of 74, corresponding to 14 raycasts each detecting 4 possible objects. plus the global position of the agent and whether or not the agent is grounded.
* Reset Parameters: One, corresponding to number of steps in training. Used to adjust size of the wall for Curriculum Learning.
* Reset Parameters: 4, corresponding to the height of the possible walls.
## Reacher

52
docs/Limitations-and-Common-Issues.md


## Unity SDK
### Headless Mode
Currently headless mode is disabled. We hope to address these in a future version of Unity.
If you enable Headless mode, you will not be able to collect visual
observations from your agents.
Currently the speed of the game physics can only be increased to 100x real-time. The Academy also moves in time with FixedUpdate() rather than Update(), so game behavior tied to frame updates may be out of sync.
### macOS Metal Support
When running a Unity Environment on macOS using Metal rendering, the application can crash when the lock-screen is open. The solution is to set rendering to OpenGL. This can be done by navigating: `Edit -> Project Settings -> Player`. Clicking on `Other Settings`. Unchecking `Auto Graphics API for Mac`. Setting `OpenGL Core` to be above `Metal` in the priority list.
Currently the speed of the game physics can only be increased to 100x
real-time. The Academy also moves in time with FixedUpdate() rather than
Update(), so game behavior tied to frame updates may be out of sync.
### Python version
As of version 0.3, we no longer support Python 2.
If you directly import your Unity environment without building it in the editor, you might need to give it additional permissions to execute it.
If you directly import your Unity environment without building it in the
editor, you might need to give it additional permissions to execute it.
If you receive such a permission error on macOS, run:

`chmod -R 755 *.x86_64`
On Windows, you can find instructions [here](https://technet.microsoft.com/en-us/library/cc754344(v=ws.11).aspx).
On Windows, you can find instructions
[here](https://technet.microsoft.com/en-us/library/cc754344(v=ws.11).aspx).
If you are able to launch the environment from `UnityEnvironment` but then receive a timeout error, there may be a number of possible causes.
* _Cause_: There may be no Brains in your environment which are set to `External`. In this case, the environment will not attempt to communicate with python. _Solution_: Set the train you wish to externally control through the Python API to `External` from the Unity Editor, and rebuild the environment.
* _Cause_: On OSX, the firewall may be preventing communication with the environment. _Solution_: Add the built environment binary to the list of exceptions on the firewall by following instructions [here](https://support.apple.com/en-us/HT201642).
### Filename not found
If you receive a file-not-found error while attempting to launch an environment, ensure that the environment files are in the root repository directory. For example, if there is a sub-folder containing the environment files, those files should be removed from the sub-folder and moved to the root.
If you are able to launch the environment from `UnityEnvironment` but
then receive a timeout error, there may be a number of possible causes.
* _Cause_: There may be no Brains in your environment which are set
to `External`. In this case, the environment will not attempt to
communicate with python. _Solution_: Set the Brains(s) you wish to
externally control through the Python API to `External` from the
Unity Editor, and rebuild the environment.
* _Cause_: On OSX, the firewall may be preventing communication with
the environment. _Solution_: Add the built environment binary to the
list of exceptions on the firewall by following instructions
[here](https://support.apple.com/en-us/HT201642).
* _Cause_: An error happened in the Unity Environment preventing
communication. _Solution_: Look into the
[log files](https://docs.unity3d.com/Manual/LogFiles.html)
generated by the Unity Environment to figure what error happened.
If you receive an exception `"Couldn't launch new environment because communication port {} is still in use. "`, you can change the worker number in the python script when calling
If you receive an exception `"Couldn't launch new environment because
communication port {} is still in use. "`, you can change the worker
number in the python script when calling
If you receive a message `Mean reward : nan` when attempting to train a model using PPO, this is due to the episodes of the learning environment not terminating. In order to address this, set `Max Steps` for either the Academy or Agents within the Scene Inspector to a value greater than 0. Alternatively, it is possible to manually set `done` conditions for episodes from within scripts for custom episode-terminating events.
If you receive a message `Mean reward : nan` when attempting to train a
model using PPO, this is due to the episodes of the learning environment
not terminating. In order to address this, set `Max Steps` for either
the Academy or Agents within the Scene Inspector to a value greater
than 0. Alternatively, it is possible to manually set `done` conditions
for episodes from within scripts for custom episode-terminating events.

6
docs/ML-Agents-Overview.md


The
[Getting Started with the 3D Balance Ball Example](Getting-Started-with-Balance-Ball.md)
tutorial covers this training mode with the **Balance Ball** sample environment.
tutorial covers this training mode with the **3D Balance Ball** sample environment.
### Custom Training and Inference

must react to events or games where agents can take actions of variable
duration. Switching between decision taking at every step and
on-demand-decision is one button click away. You can learn more about the
on-demand-decision feature [here](Feature-On-Demand-Decision.md).
on-demand-decision feature [here](Feature-On-Demand-Decisions.md).
* **Memory-enhanced Agents** - In some scenarios, agents must learn to
remember the past in order to take the

To help you use ML-Agents, we've created several in-depth tutorials
for [installing ML-Agents](Installation.md),
[getting started](Getting-Started-with-Balance-Ball.md)
with a sample Balance Ball environment (one of our many
with the 3D Balance Ball environment (one of our many
[sample environments](Learning-Environment-Examples.md)) and
[making your own environment](Learning-Environment-Create-New.md).

3
docs/Readme.md


* [Installation & Set-up](Installation.md)
* [Background: Jupyter Notebooks](Background-Jupyter.md)
* [Docker Set-up (Experimental)](Using-Docker.md)
* [Getting Started with the Balance Ball Environment](Getting-Started-with-Balance-Ball.md)
* [Getting Started with the 3D Balance Ball Environment](Getting-Started-with-Balance-Ball.md)
* [Example Environments](Learning-Environment-Examples.md)
## Creating Learning Environments

## Help
* [ML-Agents Glossary](Glossary.md)
* [Limitations & Common Issues](Limitations-and-Common-Issues.md)
* [Migrating to ML-Agents v0.3](Migrating-v0.3.md)
## C# API and Components
* Academy

32
docs/Using-Docker.md


Run the Docker container by calling the following command at the top-level of the repository:
```
docker run --mount type=bind,source="$(pwd)"/unity-volume,target=/unity-volume \
<image-name>:latest <environment-name> \
--docker-target-name=unity-volume \
--train --run-id=<run-id>
docker run --name <container-name> \
--mount type=bind,source="$(pwd)"/unity-volume,target=/unity-volume \
<image-name>:latest <environment-name> \
--docker-target-name=unity-volume \
--train \
--run-id=<run-id>
- `<container-name>` is used to identify the container (in case you want to interrupt and terminate it). This is optional and Docker will generate a random name if this is not set. _Note that this must be unique for every run of a Docker image._
- `<image-name>` and `<environment-name>`: References the image and environment names, respectively.
- `source`: Reference to the path in your host OS where you will store the Unity executable.
- `target`: Tells Docker to mount the `source` path as a disk with this name.

For the `3DBall` environment, for example this would be:
```
docker run --mount type=bind,source="$(pwd)"/unity-volume,target=/unity-volume \
balance.ball.v0.1:latest 3Dball \
--docker-target-name=unity-volume \
--train --run-id=3dball_first_trial
docker run --name 3DBallContainer.first.trial \
--mount type=bind,source="$(pwd)"/unity-volume,target=/unity-volume \
balance.ball.v0.1:latest 3Dball \
--docker-target-name=unity-volume \
--train \
--run-id=3dball_first_trial
### Stopping Container and Saving State
If you are satisfied with the training progress, you can stop the Docker container while saving state using the following command:
```
docker kill --signal=SIGINT <container-name>
```
`<container-name>` is the name of the container specified in the earlier `docker run` command. If you didn't specify one, you can find the randomly generated identifier by running `docker container ls`.

215
docs/images/ml-agents-ODD.png

之前 之后
宽度: 816  |  高度: 316  |  大小: 48 KiB

865
docs/images/wall.png

之前 之后
宽度: 942  |  高度: 780  |  大小: 42 KiB

54
python/Basics.ipynb


"cell_type": "markdown",
"metadata": {},
"source": [
"# Unity ML Agents\n",
"# Unity ML-Agents\n",
"This notebook contains a walkthrough of the basic functions of the Python API for Unity ML Agents. For instructions on building a Unity environment, see [here](https://github.com/Unity-Technologies/ml-agents/wiki/Getting-Started-with-Balance-Ball)."
"This notebook contains a walkthrough of the basic functions of the Python API for Unity ML-Agents. For instructions on building a Unity environment, see [here](https://github.com/Unity-Technologies/ml-agents/blob/master/docs/Getting-Started-with-Balance-Ball.md)."
]
},
{

"### 1. Load dependencies"
"### 1. Set environment parameters\n",
"\n",
"Be sure to set `env_name` to the name of the Unity environment file you want to launch."
"metadata": {
"collapsed": true
},
"metadata": {},
"import matplotlib.pyplot as plt\n",
"import numpy as np\n",
"\n",
"from unityagents import UnityEnvironment\n",
"\n",
"%matplotlib inline"
"env_name = \"3DBall\" # Name of the Unity environment binary to launch\n",
"train_mode = True # Whether to run the environment in training or inference mode"
]
},
{

"### 2. Set environment parameters\n",
"\n",
"Be sure to set `env_name` to the name of the Unity environment file you want to launch."
"### 2. Load dependencies"
"metadata": {
"collapsed": true
},
"metadata": {},
"env_name = \"3DBall\" # Name of the Unity environment binary to launch\n",
"train_mode = True # Whether to run the environment in training or inference mode"
"import matplotlib.pyplot as plt\n",
"import numpy as np\n",
"\n",
"from unityagents import UnityEnvironment\n",
"\n",
"%matplotlib inline"
]
},
{

{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": true
},
"metadata": {},
"outputs": [],
"source": [
"env = UnityEnvironment(file_name=env_name)\n",

{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": true
},
"metadata": {},
"outputs": [],
"source": [
"# Reset the environment\n",

{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": true
},
"metadata": {},
"outputs": [],
"source": [
"for episode in range(10):\n",

{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": true
},
"metadata": {},
"outputs": [],
"source": [
"env.close()"

"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.6.2"
"version": "3.6.3"
}
},
"nbformat": 4,

4
python/tests/test_bc.py


"brainNames": ["RealFakeBrain"],
"externalBrainNames": ["RealFakeBrain"],
"logPath":"RealFakePath",
"apiNumber":"API-2",
"apiNumber":"API-3",
"brainParameters": [{
"vectorObservationSize": 3,
"numStackedVectorObservations": 2,

"brainNames": ["RealFakeBrain"],
"externalBrainNames": ["RealFakeBrain"],
"logPath":"RealFakePath",
"apiNumber":"API-2",
"apiNumber":"API-3",
"brainParameters": [{
"vectorObservationSize": 3,
"numStackedVectorObservations": 2,

4
python/tests/test_ppo.py


"brainNames": ["RealFakeBrain"],
"externalBrainNames": ["RealFakeBrain"],
"logPath":"RealFakePath",
"apiNumber":"API-2",
"apiNumber":"API-3",
"brainParameters": [{
"vectorObservationSize": 3,
"numStackedVectorObservations": 2,

"brainNames": ["RealFakeBrain"],
"externalBrainNames": ["RealFakeBrain"],
"logPath":"RealFakePath",
"apiNumber":"API-2",
"apiNumber":"API-3",
"brainParameters": [{
"vectorObservationSize": 3,
"numStackedVectorObservations": 2,

2
python/tests/test_unityagents.py


"brainNames": ["RealFakeBrain"],
"externalBrainNames": ["RealFakeBrain"],
"logPath":"RealFakePath",
"apiNumber":"API-2",
"apiNumber":"API-3",
"brainParameters": [{
"vectorObservationSize": 3,
"numStackedVectorObservations": 2,

2
python/tests/test_unitytrainers.py


"brainNames": ["RealFakeBrain"],
"externalBrainNames": ["RealFakeBrain"],
"logPath":"RealFakePath",
"apiNumber":"API-2",
"apiNumber":"API-3",
"brainParameters": [{
"vectorObservationSize": 3,
"numStackedVectorObservations" : 2,

69
python/trainer_config.yaml


summary_freq: 1000
use_recurrent: false
BananaBrain:
normalize: false
batch_size: 1024
beta: 5.0e-3
buffer_size: 10240
max_steps: 2.0e5
batch_size: 1048
buffer_size: 10240
max_steps: 5.0e4
batch_size: 128
buffer_size: 2048
hidden_units: 128
hidden_units: 256
SmallWallBrain:
max_steps: 2.0e5
batch_size: 128
buffer_size: 2048
beta: 5.0e-3
hidden_units: 256
summary_freq: 2000
time_horizon: 128
num_layers: 2
normalize: false
BigWallBrain:
max_steps: 2.0e5
batch_size: 128
buffer_size: 2048
beta: 5.0e-3
hidden_units: 256
summary_freq: 2000
time_horizon: 128
num_layers: 2
normalize: false
StrikerBrain:
max_steps: 1.0e5

batch_size: 32
buffer_size: 320
HallwayBrainDC:
num_layers: 3
hidden_units: 256
beta: 1.0e-2
gamma: 0.99
num_epoch: 3
buffer_size: 512
batch_size: 64
max_steps: 5.0e5
summary_freq: 1000
time_horizon: 64
BrainWallJumpDC:
HallwayBrain:
memory_size: 256
buffer_size: 16
batch_size: 2
max_steps: 5.0e5
summary_freq: 1000
time_horizon: 64
HallwayBrainDCLSTM:
use_recurrent: true
sequence_length: 64
num_layers: 2
hidden_units: 128
beta: 1.0e-2
gamma: 0.99
num_epoch: 3
buffer_size: 16
batch_size: 2
buffer_size: 1024
batch_size: 128
max_steps: 5.0e5
summary_freq: 1000
time_horizon: 64

10
python/unityagents/environment.py


atexit.register(self.close)
self.port = base_port + worker_id
self._buffer_size = 12000
self._python_api = "API-2"
self._version_ = "API-3"
self._loaded = False
self._open_socket = False

.format(str(file_name)))
if "apiNumber" not in p:
self._unity_api = "API-1"
self._unity_version = "API-1"
self._unity_api = p["apiNumber"]
if self._unity_api != self._python_api:
self._unity_version = p["apiNumber"]
if self._unity_version != self._version_:
"of ML-Agents.".format(self._python_api, self._unity_api))
"of ML-Agents.".format(self._version_, self._unity_version))
self._data = {}
self._global_done = None
self._academy_name = p["AcademyName"]

13
python/unitytrainers/models.py


self.selected_actions = c_layers.one_hot_encoding(self.action_holder, self.a_size)
self.all_old_probs = tf.placeholder(shape=[None, self.a_size], dtype=tf.float32, name='old_probabilities')
self.probs = tf.reduce_sum(self.all_probs * self.selected_actions, axis=1)
self.old_probs = tf.reduce_sum(self.all_old_probs * self.selected_actions, axis=1)
# We reshape these tensors to [batch x 1] in order to be of the same rank as continuous control probabilities.
self.probs = tf.expand_dims(tf.reduce_sum(self.all_probs * self.selected_actions, axis=1), 1)
self.old_probs = tf.expand_dims(tf.reduce_sum(self.all_old_probs * self.selected_actions, axis=1), 1)
def create_cc_actor_critic(self, h_size, num_layers):
num_streams = 2

a = tf.exp(-1 * tf.pow(tf.stop_gradient(self.output) - self.mu, 2) / (2 * self.sigma_sq))
b = 1 / tf.sqrt(2 * self.sigma_sq * np.pi)
self.all_probs = tf.multiply(a, b, name="action_probs")
self.probs = tf.reduce_prod(self.all_probs, axis=1)
self.entropy = tf.reduce_sum(0.5 * tf.log(2 * np.pi * np.e * self.sigma_sq))
self.entropy = tf.reduce_mean(0.5 * tf.log(2 * np.pi * np.e * self.sigma_sq))
self.old_probs = tf.reduce_prod(self.all_old_probs, axis=1)
# We keep these tensors the same name, but use new nodes to keep code parallelism with discrete control.
self.probs = tf.identity(self.all_probs)
self.old_probs = tf.identity(self.all_old_probs)

6
python/unitytrainers/ppo/models.py


"""
self.returns_holder = tf.placeholder(shape=[None], dtype=tf.float32, name='discounted_rewards')
self.advantage = tf.placeholder(shape=[None], dtype=tf.float32, name='advantages')
self.advantage = tf.placeholder(shape=[None, 1], dtype=tf.float32, name='advantages')
self.learning_rate = tf.train.polynomial_decay(lr, self.global_step, max_step, 1e-10, power=1.0)
self.old_value = tf.placeholder(shape=[None], dtype=tf.float32, name='old_value_estimates')

v_opt_b = tf.squared_difference(self.returns_holder, clipped_value_estimate)
self.value_loss = tf.reduce_mean(tf.boolean_mask(tf.maximum(v_opt_a, v_opt_b), self.mask))
# Here we calculate PPO policy loss. In continuous control this is done independently for each action gaussian
# and then averaged together. This provides significantly better performance than treating the probability
# as an average of probabilities, or as a joint probability.
self.loss = self.policy_loss + 0.5 * self.value_loss - decay_beta * tf.reduce_mean(
tf.boolean_mask(entropy, self.mask))
self.update_batch = optimizer.minimize(self.loss)

6
python/unitytrainers/ppo/trainer.py


total_v, total_p = 0, 0
advantages = self.training_buffer.update_buffer['advantages'].get_batch()
self.training_buffer.update_buffer['advantages'].set(
(advantages - advantages.mean()) / advantages.std() + 1e-10)
(advantages - advantages.mean()) / (advantages.std() + 1e-10))
for k in range(num_epoch):
self.training_buffer.update_buffer.shuffle()
for l in range(len(self.training_buffer.update_buffer['actions']) // n_sequences):

self.model.returns_holder: np.array(_buffer['discounted_returns'][start:end]).reshape(
[-1]),
self.model.old_value: np.array(_buffer['value_estimates'][start:end]).reshape([-1]),
self.model.advantage: np.array(_buffer['advantages'][start:end]).reshape([-1]),
self.model.advantage: np.array(_buffer['advantages'][start:end]).reshape([-1, 1]),
self.model.all_old_probs: np.array(
_buffer['action_probs'][start:end]).reshape([-1, self.brain.vector_action_space_size])}
if self.is_continuous:

_obs = np.array(_buffer['observations%d' % i][start:end])
(_batch, _seq, _w, _h, _c) = _obs.shape
feed_dict[self.model.visual_in[i]] = _obs.reshape([-1, _w, _h, _c])
# Memories are zeros
# feed_dict[self.model.memory_in] = np.zeros([batch_size, self.m_size])
feed_dict[self.model.memory_in] = np.array(_buffer['memory'][start:end])[:, 0, :]
v_loss, p_loss, _ = self.sess.run(
[self.model.value_loss, self.model.policy_loss,

29
unity-environment/Assets/ML-Agents/Examples/3DBall/Scripts/Ball3DDecision.cs


using System.Collections;
using System.Collections.Generic;
using System.Collections.Generic;
using UnityEngine;
public class Ball3DDecision : MonoBehaviour, Decision

public float[] Decide(List<float> state, List<Texture2D> observation, float reward, bool done, List<float> memory)
public float[] Decide(
List<float> vectorObs,
List<Texture2D> visualObs,
float reward,
bool done,
List<float> memory)
if (gameObject.GetComponent<Brain>().brainParameters.vectorActionSpaceType == SpaceType.continuous)
if (gameObject.GetComponent<Brain>().brainParameters.vectorActionSpaceType
== SpaceType.continuous)
{
List<float> act = new List<float>();

act.Add(state[5] * rotationSpeed);
act.Add(vectorObs[5] * rotationSpeed);
act.Add(-state[7] * rotationSpeed);
act.Add(-vectorObs[7] * rotationSpeed);
else
{
return new float[1]{ 1f };
}
return new float[1] { 1f };
public List<float> MakeMemory(List<float> state, List<Texture2D> observation, float reward, bool done, List<float> memory)
public List<float> MakeMemory(
List<float> vectorObs,
List<Texture2D> visualObs,
float reward,
bool done,
List<float> memory)
{
return new List<float>();
}

942
unity-environment/Assets/ML-Agents/Examples/Banana/BananaImitation.unity
文件差异内容过多而无法显示
查看文件

964
unity-environment/Assets/ML-Agents/Examples/Banana/BananaRL.unity
文件差异内容过多而无法显示
查看文件

4
unity-environment/Assets/ML-Agents/Examples/Banana/Prefabs/BANANA.prefab


- component: {fileID: 65445686293066652}
- component: {fileID: 114475702868436998}
m_Layer: 0
m_Name: BANANA
m_Name: Banana
m_TagString: banana
m_Icon: {fileID: 0}
m_NavMeshLayer: 0

m_MotionVectors: 1
m_LightProbeUsage: 1
m_ReflectionProbeUsage: 1
m_RenderingLayerMask: 4294967295
m_Materials:
- {fileID: 2100000, guid: 29857f71905e1484ba5cf296a5749a5f, type: 2}
m_StaticBatchInfo:

m_Name:
m_EditorClassIdentifier:
respawn: 0
myArea: {fileID: 0}

36
unity-environment/Assets/ML-Agents/Examples/Banana/Prefabs/StudentAgent.prefab


m_MotionVectors: 1
m_LightProbeUsage: 1
m_ReflectionProbeUsage: 1
m_RenderingLayerMask: 4294967295
m_Materials:
- {fileID: 2100000, guid: 866f7a84824d141dbbe50dd1893207d9, type: 2}
m_StaticBatchInfo:

m_MotionVectors: 1
m_LightProbeUsage: 1
m_ReflectionProbeUsage: 1
m_RenderingLayerMask: 4294967295
m_Materials:
- {fileID: 2100000, guid: 262d8cbc02b104990841408098431457, type: 2}
m_StaticBatchInfo:

m_MotionVectors: 1
m_LightProbeUsage: 1
m_ReflectionProbeUsage: 1
m_RenderingLayerMask: 4294967295
m_Materials:
- {fileID: 2100000, guid: 9efa4522ab2a04af4bdbcf95ddef5711, type: 2}
m_StaticBatchInfo:

m_MotionVectors: 1
m_LightProbeUsage: 1
m_ReflectionProbeUsage: 1
m_RenderingLayerMask: 4294967295
m_Materials:
- {fileID: 2100000, guid: 866f7a84824d141dbbe50dd1893207d9, type: 2}
m_StaticBatchInfo:

m_MotionVectors: 1
m_LightProbeUsage: 1
m_ReflectionProbeUsage: 1
m_RenderingLayerMask: 4294967295
m_Materials:
- {fileID: 2100000, guid: 260483cdfc6b14e26823a02f23bd8baa, type: 2}
m_StaticBatchInfo:

m_MotionVectors: 1
m_LightProbeUsage: 1
m_ReflectionProbeUsage: 1
m_RenderingLayerMask: 4294967295
m_Materials:
- {fileID: 2100000, guid: 262d8cbc02b104990841408098431457, type: 2}
m_StaticBatchInfo:

m_MotionVectors: 1
m_LightProbeUsage: 1
m_ReflectionProbeUsage: 1
m_RenderingLayerMask: 4294967295
m_Materials:
- {fileID: 2100000, guid: 88b9ae7af2c1748a0a1f63407587a601, type: 2}
m_StaticBatchInfo:

m_MotionVectors: 1
m_LightProbeUsage: 1
m_ReflectionProbeUsage: 1
m_RenderingLayerMask: 4294967295
m_Materials:
- {fileID: 2100000, guid: 9efa4522ab2a04af4bdbcf95ddef5711, type: 2}
m_StaticBatchInfo:

m_MotionVectors: 1
m_LightProbeUsage: 1
m_ReflectionProbeUsage: 1
m_RenderingLayerMask: 4294967295
m_Materials:
- {fileID: 2100000, guid: 866f7a84824d141dbbe50dd1893207d9, type: 2}
m_StaticBatchInfo:

m_MotionVectors: 1
m_LightProbeUsage: 1
m_ReflectionProbeUsage: 1
m_RenderingLayerMask: 4294967295
m_Materials:
- {fileID: 2100000, guid: 866f7a84824d141dbbe50dd1893207d9, type: 2}
m_StaticBatchInfo:

m_Name:
m_EditorClassIdentifier:
brain: {fileID: 0}
observations: []
maxStep: 0
resetOnDone: 1
state: []
stackedStates: []
reward: 0
done: 0
maxStepReached: 0
value: 0
CumulativeReward: 0
stepCounter: 0
agentStoredAction: []
memory: []
id: 0
agentParameters:
agentCameras: []
maxStep: 0
resetOnDone: 1
onDemandDecision: 0
numberOfActionsBetweenDecisions: 1
xForce: 100
yForce: 300
zForce: 100
moveSpeed: 2
myLazer: {fileID: 1711242980656446}
myLaser: {fileID: 0}
contribute: 1

40
unity-environment/Assets/ML-Agents/Examples/Banana/Prefabs/TeacherAgent.prefab


- component: {fileID: 54684485300203016}
- component: {fileID: 114899212543667072}
m_Layer: 0
m_Name: ExpertAgent
m_Name: TeacherAgent
m_TagString: agent
m_Icon: {fileID: 0}
m_NavMeshLayer: 0

m_MotionVectors: 1
m_LightProbeUsage: 1
m_ReflectionProbeUsage: 1
m_RenderingLayerMask: 4294967295
m_Materials:
- {fileID: 2100000, guid: 866f7a84824d141dbbe50dd1893207d9, type: 2}
m_StaticBatchInfo:

m_MotionVectors: 1
m_LightProbeUsage: 1
m_ReflectionProbeUsage: 1
m_RenderingLayerMask: 4294967295
m_Materials:
- {fileID: 2100000, guid: 866f7a84824d141dbbe50dd1893207d9, type: 2}
m_StaticBatchInfo:

m_MotionVectors: 1
m_LightProbeUsage: 1
m_ReflectionProbeUsage: 1
m_RenderingLayerMask: 4294967295
m_Materials:
- {fileID: 2100000, guid: 866f7a84824d141dbbe50dd1893207d9, type: 2}
m_StaticBatchInfo:

m_MotionVectors: 1
m_LightProbeUsage: 1
m_ReflectionProbeUsage: 1
m_RenderingLayerMask: 4294967295
m_Materials:
- {fileID: 2100000, guid: 260483cdfc6b14e26823a02f23bd8baa, type: 2}
m_StaticBatchInfo:

m_MotionVectors: 1
m_LightProbeUsage: 1
m_ReflectionProbeUsage: 1
m_RenderingLayerMask: 4294967295
m_Materials:
- {fileID: 2100000, guid: 9efa4522ab2a04af4bdbcf95ddef5711, type: 2}
m_StaticBatchInfo:

m_MotionVectors: 1
m_LightProbeUsage: 1
m_ReflectionProbeUsage: 1
m_RenderingLayerMask: 4294967295
m_Materials:
- {fileID: 2100000, guid: 262d8cbc02b104990841408098431457, type: 2}
m_StaticBatchInfo:

m_MotionVectors: 1
m_LightProbeUsage: 1
m_ReflectionProbeUsage: 1
m_RenderingLayerMask: 4294967295
m_Materials:
- {fileID: 2100000, guid: 88b9ae7af2c1748a0a1f63407587a601, type: 2}
m_StaticBatchInfo:

m_MotionVectors: 1
m_LightProbeUsage: 1
m_ReflectionProbeUsage: 1
m_RenderingLayerMask: 4294967295
m_Materials:
- {fileID: 2100000, guid: 9efa4522ab2a04af4bdbcf95ddef5711, type: 2}
m_StaticBatchInfo:

m_MotionVectors: 1
m_LightProbeUsage: 1
m_ReflectionProbeUsage: 1
m_RenderingLayerMask: 4294967295
m_Materials:
- {fileID: 2100000, guid: 866f7a84824d141dbbe50dd1893207d9, type: 2}
m_StaticBatchInfo:

m_MotionVectors: 1
m_LightProbeUsage: 1
m_ReflectionProbeUsage: 1
m_RenderingLayerMask: 4294967295
m_Materials:
- {fileID: 2100000, guid: 866f7a84824d141dbbe50dd1893207d9, type: 2}
m_StaticBatchInfo:

m_MotionVectors: 1
m_LightProbeUsage: 1
m_ReflectionProbeUsage: 1
m_RenderingLayerMask: 4294967295
m_Materials:
- {fileID: 2100000, guid: 262d8cbc02b104990841408098431457, type: 2}
m_StaticBatchInfo:

m_MotionVectors: 1
m_LightProbeUsage: 1
m_ReflectionProbeUsage: 1
m_RenderingLayerMask: 4294967295
m_Materials:
- {fileID: 2100000, guid: 866f7a84824d141dbbe50dd1893207d9, type: 2}
m_StaticBatchInfo:

m_Name:
m_EditorClassIdentifier:
brain: {fileID: 0}
observations: []
maxStep: 0
resetOnDone: 1
state: []
stackedStates: []
reward: 0
done: 0
maxStepReached: 0
value: 0
CumulativeReward: 0
stepCounter: 0
agentStoredAction: []
memory: []
id: 0
agentParameters:
agentCameras: []
maxStep: 0
resetOnDone: 1
onDemandDecision: 0
numberOfActionsBetweenDecisions: 1
xForce: 100
yForce: 300
zForce: 100
moveSpeed: 2
myLazer: {fileID: 1194927375649470}
myLaser: {fileID: 0}
contribute: 0
--- !u!136 &136300867333360650
CapsuleCollider:

4
unity-environment/Assets/ML-Agents/Examples/Banana/Prefabs/BADBANANA.prefab


- component: {fileID: 65156783716742152}
- component: {fileID: 114713238261374398}
m_Layer: 0
m_Name: BAD BANANA
m_Name: BadBanana
m_TagString: badBanana
m_Icon: {fileID: 0}
m_NavMeshLayer: 0

m_MotionVectors: 1
m_LightProbeUsage: 1
m_ReflectionProbeUsage: 1
m_RenderingLayerMask: 4294967295
m_Materials:
- {fileID: 2100000, guid: 2a6df5f4bffbc431bacfbd2c155375cb, type: 2}
m_StaticBatchInfo:

m_Name:
m_EditorClassIdentifier:
respawn: 0
myArea: {fileID: 0}

74
unity-environment/Assets/ML-Agents/Examples/Banana/Scripts/BananaAgent.cs


float frozenTime;
float effectTime;
Rigidbody agentRB;
public float turnSpeed;
public float xForce;
public float yForce;
public float zForce;
// Speed of agent rotation.
public float turnSpeed = 300;
// Speed of agent movement.
public float moveSpeed = 2;
public GameObject myLazer;
public GameObject myLaser;
RayPerception rayPer;
agentRB = GetComponent<Rigidbody>(); // cache the RB
agentRB = GetComponent<Rigidbody>();
rayPer = GetComponent<RayPerception>();
myAcademy = myAcademyObj.GetComponent<BananaAcademy>();
}

float[] rayAngles = { 20f, 90f, 160f, 45f, 135f, 70f, 110f };
string[] detectableObjects = { "banana", "agent", "wall", "badBanana", "frozenAgent" };
RayPerception(rayDistance, rayAngles, detectableObjects);
AddVectorObs(rayPer.Perceive(rayDistance, rayAngles, detectableObjects, 0f, 0f));
Vector3 localVelocity = transform.InverseTransformDirection(agentRB.velocity);
AddVectorObs(localVelocity.x);
AddVectorObs(localVelocity.z);

public void RayPerception(float rayDistance, float[] rayAngles, string[] detectableObjects) {
foreach (float angle in rayAngles)
{
float noise = 0f;
float noisyAngle = angle + Random.Range(-noise, noise);
Vector3 position = transform.TransformDirection(GiveCatersian(rayDistance, noisyAngle));
Debug.DrawRay(transform.position, position, Color.green, 0.0f, true);
RaycastHit hit;
float[] subList = new float[detectableObjects.Length + 2];
if (Physics.SphereCast(transform.position, 1.0f, position, out hit, rayDistance))
{
for (int i = 0; i < detectableObjects.Length; i++)
{
if (hit.collider.gameObject.CompareTag(detectableObjects[i]))
{
subList[i] = 1;
subList[detectableObjects.Length + 1] = hit.distance / rayDistance;
break;
}
}
}
else
{
subList[detectableObjects.Length] = 1f;
}
foreach (float f in subList)
AddVectorObs(f);
}
}
public Vector3 GiveCatersian(float radius, float angle)
{
float x = radius * Mathf.Cos(DegreeToRadian(angle));
float z = radius * Mathf.Sin(DegreeToRadian(angle));
return new Vector3(x, -0.1f, z);
}
public float DegreeToRadian(float degree)
{
return degree * Mathf.PI / 180f;
}
public Color32 ToColor(int HexVal)
{
byte R = (byte)((HexVal >> 16) & 0xFF);

dirToGo *= 0.5f;
agentRB.velocity *= 0.75f;
}
agentRB.AddForce(new Vector3(dirToGo.x * xForce, dirToGo.y * yForce, dirToGo.z * zForce),
ForceMode.Acceleration);
transform.Rotate(rotateDir, Time.deltaTime * turnSpeed);
agentRB.AddForce(dirToGo * moveSpeed, ForceMode.VelocityChange);
transform.Rotate(rotateDir, Time.fixedDeltaTime * turnSpeed);
}
if (agentRB.velocity.sqrMagnitude > 25f) // slow it down

if (shoot)
{
myLazer.transform.localScale = new Vector3(1f, 1f, 1f);
Vector3 position = transform.TransformDirection(GiveCatersian(25f, 90f));
myLaser.transform.localScale = new Vector3(1f, 1f, 1f);
Vector3 position = transform.TransformDirection(RayPerception.PolarToCartesian(25f, 90f));
Debug.DrawRay(transform.position, position, Color.red, 0f, true);
RaycastHit hit;
if (Physics.SphereCast(transform.position, 2f, position, out hit, 25f))

}
else
{
myLazer.transform.localScale = new Vector3(0f, 0f, 0f);
myLaser.transform.localScale = new Vector3(0f, 0f, 0f);
}

shoot = false;
agentRB.velocity = Vector3.zero;
bananas = 0;
myLazer.transform.localScale = new Vector3(0f, 0f, 0f);
myLaser.transform.localScale = new Vector3(0f, 0f, 0f);
transform.position = new Vector3(Random.Range(-myArea.range, myArea.range),
2f, Random.Range(-myArea.range, myArea.range))
+ area.transform.position;

22
unity-environment/Assets/ML-Agents/Examples/Basic/Scripts/BasicDecision.cs


using System.Collections;
using System.Collections.Generic;
using System.Collections.Generic;
public float[] Decide(List<float> state, List<Texture2D> observation, float reward, bool done, List<float> memory)
public float[] Decide(
List<float> vectorObs,
List<Texture2D> visualObs,
float reward,
bool done,
List<float> memory)
return new float[1]{ 1f };
return new float[1] { 1f };
public List<float> MakeMemory(List<float> state, List<Texture2D> observation, float reward, bool done, List<float> memory)
public List<float> MakeMemory(
List<float> vectorObs,
List<Texture2D> visualObs,
float reward,
bool done,
List<float> memory)
}
}

219
unity-environment/Assets/ML-Agents/Examples/Crawler/Crawler.unity


propertyPath: agentParameters.maxStep
value: 5000
objectReference: {fileID: 0}
- target: {fileID: 1066072914129342, guid: 3db4283e33af74336bfedb01d0e011bf, type: 2}
propertyPath: m_IsActive
value: 0
objectReference: {fileID: 0}
--- !u!114 &241482184
MonoBehaviour:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 0}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: 943466ab374444748a364f9d6c3e2fe2, type: 3}
m_Name: (Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)
m_EditorClassIdentifier:
broadcast: 1
brain: {fileID: 393360180}
--- !u!1001 &245747752
Prefab:
m_ObjectHideFlags: 0

propertyPath: agentParameters.maxStep
value: 5000
objectReference: {fileID: 0}
- target: {fileID: 1066072914129342, guid: 3db4283e33af74336bfedb01d0e011bf, type: 2}
propertyPath: m_IsActive
value: 0
objectReference: {fileID: 0}
m_RemovedComponents: []
m_ParentPrefab: {fileID: 100100000, guid: 3db4283e33af74336bfedb01d0e011bf, type: 2}
m_IsPrefabParent: 0

propertyPath: agentParameters.maxStep
value: 5000
objectReference: {fileID: 0}
- target: {fileID: 1066072914129342, guid: 3db4283e33af74336bfedb01d0e011bf, type: 2}
propertyPath: m_IsActive
value: 0
objectReference: {fileID: 0}
m_RemovedComponents: []
m_ParentPrefab: {fileID: 100100000, guid: 3db4283e33af74336bfedb01d0e011bf, type: 2}
m_IsPrefabParent: 0

-
vectorActionSpaceType: 1
vectorObservationSpaceType: 1
brainType: 3
brainType: 0
- {fileID: 1691062983}
- {fileID: 241482184}
- {fileID: 740969743}
- {fileID: 1774724418}
instanceID: 15038
--- !u!114 &740969743
- {fileID: 1878329807}
- {fileID: 1612920415}
- {fileID: 498988004}
- {fileID: 1614632444}
instanceID: 3904
--- !u!114 &498988004
MonoBehaviour:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}

m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: 35813a1be64e144f887d7d5f15b963fa, type: 3}
m_Name: (Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)
m_Name: (Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)
m_EditorClassIdentifier:
brain: {fileID: 393360180}
--- !u!1001 &777494435

propertyPath: agentParameters.maxStep
value: 5000
objectReference: {fileID: 0}
- target: {fileID: 1066072914129342, guid: 3db4283e33af74336bfedb01d0e011bf, type: 2}
propertyPath: m_IsActive
value: 0
objectReference: {fileID: 0}
m_RemovedComponents: []
m_ParentPrefab: {fileID: 100100000, guid: 3db4283e33af74336bfedb01d0e011bf, type: 2}
m_IsPrefabParent: 0

type: 2}
propertyPath: agentParameters.maxStep
value: 5000
objectReference: {fileID: 0}
- target: {fileID: 1066072914129342, guid: 3db4283e33af74336bfedb01d0e011bf, type: 2}
propertyPath: m_IsActive
value: 0
objectReference: {fileID: 0}
m_RemovedComponents: []
m_ParentPrefab: {fileID: 100100000, guid: 3db4283e33af74336bfedb01d0e011bf, type: 2}

propertyPath: agentParameters.maxStep
value: 5000
objectReference: {fileID: 0}
- target: {fileID: 1066072914129342, guid: 3db4283e33af74336bfedb01d0e011bf, type: 2}
propertyPath: m_IsActive
value: 0
objectReference: {fileID: 0}
m_RemovedComponents: []
m_ParentPrefab: {fileID: 100100000, guid: 3db4283e33af74336bfedb01d0e011bf, type: 2}
m_IsPrefabParent: 0

m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 1392866527}
m_LocalRotation: {x: 0.30070576, y: 0, z: 0, w: 0.953717}
m_LocalPosition: {x: 0, y: 15, z: -10}
m_LocalPosition: {x: 0, y: 15, z: -15}
m_LocalScale: {x: 1, y: 1, z: 1}
m_Children: []
m_Father: {fileID: 0}

propertyPath: agentParameters.maxStep
value: 5000
objectReference: {fileID: 0}
- target: {fileID: 1066072914129342, guid: 3db4283e33af74336bfedb01d0e011bf, type: 2}
propertyPath: m_IsActive
value: 0
objectReference: {fileID: 0}
m_RemovedComponents: []
m_ParentPrefab: {fileID: 100100000, guid: 3db4283e33af74336bfedb01d0e011bf, type: 2}
m_IsPrefabParent: 0

propertyPath: agentParameters.maxStep
value: 5000
objectReference: {fileID: 0}
- target: {fileID: 1066072914129342, guid: 3db4283e33af74336bfedb01d0e011bf, type: 2}
propertyPath: m_IsActive
value: 0
objectReference: {fileID: 0}
m_RemovedComponents: []
m_ParentPrefab: {fileID: 100100000, guid: 3db4283e33af74336bfedb01d0e011bf, type: 2}
m_IsPrefabParent: 0

propertyPath: agentParameters.maxStep
value: 5000
objectReference: {fileID: 0}
- target: {fileID: 1066072914129342, guid: 3db4283e33af74336bfedb01d0e011bf, type: 2}
propertyPath: m_IsActive
value: 0
objectReference: {fileID: 0}
--- !u!114 &1612920415
MonoBehaviour:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 0}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: 943466ab374444748a364f9d6c3e2fe2, type: 3}
m_Name: (Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)
m_EditorClassIdentifier:
broadcast: 1
brain: {fileID: 393360180}
--- !u!114 &1614632444
MonoBehaviour:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 0}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: 8b23992c8eb17439887f5e944bf04a40, type: 3}
m_Name: (Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)
m_EditorClassIdentifier:
broadcast: 1
graphModel: {fileID: 4900000, guid: 3569492a1961e4abe87b232f5ccaac90, type: 3}
graphScope:
graphPlaceholders: []
BatchSizePlaceholderName: batch_size
VectorObservationPlacholderName: vector_observation
RecurrentInPlaceholderName: recurrent_in
RecurrentOutPlaceholderName: recurrent_out
VisualObservationPlaceholderName: []
ActionPlaceholderName: action
PreviousActionPlaceholderName: prev_action
brain: {fileID: 393360180}
--- !u!1001 &1679305519
Prefab:
m_ObjectHideFlags: 0

propertyPath: agentParameters.maxStep
value: 5000
objectReference: {fileID: 0}
- target: {fileID: 1066072914129342, guid: 3db4283e33af74336bfedb01d0e011bf, type: 2}
propertyPath: m_IsActive
value: 0
objectReference: {fileID: 0}
--- !u!114 &1691062983
MonoBehaviour:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 0}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: 41e9bda8f3cf1492fa74926a530f6f70, type: 3}
m_Name: (Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)
m_EditorClassIdentifier:
broadcast: 1
continuousPlayerActions:
- key: 101
index: 0
value: -1
- key: 101
index: 1
value: -1
- key: 101
index: 2
value: -1
- key: 101
index: 3
value: -1
- key: 114
index: 3
value: 1
- key: 116
index: 11
value: -1
- key: 100
index: 1
value: 1
- key: 119
index: 7
value: -1
discretePlayerActions: []
defaultAction: 0
brain: {fileID: 393360180}
--- !u!114 &1774724418
MonoBehaviour:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 0}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: 8b23992c8eb17439887f5e944bf04a40, type: 3}
m_Name: (Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)
m_EditorClassIdentifier:
broadcast: 1
graphModel: {fileID: 4900000, guid: 3569492a1961e4abe87b232f5ccaac90, type: 3}
graphScope:
graphPlaceholders: []
BatchSizePlaceholderName: batch_size
StatePlacholderName: state
RecurrentInPlaceholderName: recurrent_in
RecurrentOutPlaceholderName: recurrent_out
ObservationPlaceholderName: []
ActionPlaceholderName: action
PreviousActionPlaceholderName: prev_action
brain: {fileID: 393360180}
--- !u!1001 &1808602249
Prefab:
m_ObjectHideFlags: 0

m_Father: {fileID: 0}
m_RootOrder: 1
m_LocalEulerAnglesHint: {x: 50, y: -30, z: 0}
--- !u!114 &1878329807
MonoBehaviour:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 0}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: 41e9bda8f3cf1492fa74926a530f6f70, type: 3}
m_Name: (Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)
m_EditorClassIdentifier:
broadcast: 1
continuousPlayerActions:
- key: 101
index: 0
value: -1
- key: 101
index: 1
value: -1
- key: 101
index: 2
value: -1
- key: 101
index: 3
value: -1
- key: 114
index: 3
value: 1
- key: 116
index: 11
value: -1
- key: 100
index: 1
value: 1
- key: 119
index: 7
value: -1
discretePlayerActions: []
defaultAction: 0
brain: {fileID: 393360180}
--- !u!1001 &2022513540
Prefab:
m_ObjectHideFlags: 0

propertyPath: agentParameters.maxStep
value: 5000
objectReference: {fileID: 0}
- target: {fileID: 1066072914129342, guid: 3db4283e33af74336bfedb01d0e011bf, type: 2}
propertyPath: m_IsActive
value: 0
objectReference: {fileID: 0}
m_RemovedComponents: []
m_ParentPrefab: {fileID: 100100000, guid: 3db4283e33af74336bfedb01d0e011bf, type: 2}
m_IsPrefabParent: 0

m_Name:
m_EditorClassIdentifier:
maxSteps: 0
isInference: 0
trainingConfiguration:
width: 80
height: 80

width: 1280
height: 720
qualityLevel: 5
timeScale: 1
timeScale: 2
episodeCount: 0
stepsSinceReset: 0

964
unity-environment/Assets/ML-Agents/Examples/Crawler/TFModels/crawler.bytes
文件差异内容过多而无法显示
查看文件

41
unity-environment/Assets/ML-Agents/Examples/Hallway/Prefabs/HallwayArea.prefab


- component: {fileID: 65694772044352326}
- component: {fileID: 23564461918312698}
m_Layer: 0
m_Name: GoalB
m_TagString: goal
m_Name: redGoal
m_TagString: redGoal
m_Icon: {fileID: 0}
m_NavMeshLayer: 0
m_StaticEditorFlags: 0

- component: {fileID: 65575186354124900}
- component: {fileID: 23052381252885230}
m_Layer: 0
m_Name: GoalA
m_TagString: goal
m_Name: orangeGoal
m_TagString: orangeGoal
m_Icon: {fileID: 0}
m_NavMeshLayer: 0
m_StaticEditorFlags: 0

- component: {fileID: 23122298029829586}
- component: {fileID: 54112968250075710}
- component: {fileID: 114286701363010626}
- component: {fileID: 114569343444552314}
m_Layer: 0
m_Name: Agent
m_TagString: agent

m_PrefabInternal: {fileID: 100100000}
m_GameObject: {fileID: 1185286572034058}
m_LocalRotation: {x: -0, y: -0, z: -0, w: 1}
m_LocalPosition: {x: 7, y: 0.5, z: 8.7}
m_LocalPosition: {x: 7, y: 0.5, z: 9}
m_LocalScale: {x: 5, y: 0.32738775, z: 5}
m_Children:
- {fileID: 4152465931272056}

m_PrefabInternal: {fileID: 100100000}
m_GameObject: {fileID: 1390180936769712}
m_LocalRotation: {x: -0, y: -0, z: -0, w: 1}
m_LocalPosition: {x: -7, y: 0.5, z: 8.699999}
m_LocalPosition: {x: -7, y: 0.5, z: 9}
m_LocalScale: {x: 5, y: 0.32739, z: 5}
m_Children:
- {fileID: 4184627531154506}

m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
m_GameObject: {fileID: 1537906118403888}
m_Material: {fileID: 0}
m_Material: {fileID: 13400000, guid: 8c6374adc4d814c2eb5ecdfe810d813b, type: 2}
m_IsTrigger: 0
m_Enabled: 0
serializedVersion: 2

m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
m_GameObject: {fileID: 1699348701669618}
m_Material: {fileID: 0}
m_Material: {fileID: 13400000, guid: 8c6374adc4d814c2eb5ecdfe810d813b, type: 2}
m_IsTrigger: 0
m_Enabled: 1
serializedVersion: 2

m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
m_GameObject: {fileID: 1143687295388296}
m_Material: {fileID: 0}
m_Material: {fileID: 13400000, guid: 8c6374adc4d814c2eb5ecdfe810d813b, type: 2}
m_IsTrigger: 0
m_Enabled: 1
serializedVersion: 2

m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
m_GameObject: {fileID: 1931145547757896}
m_Material: {fileID: 0}
m_Material: {fileID: 13400000, guid: 8c6374adc4d814c2eb5ecdfe810d813b, type: 2}
m_IsTrigger: 0
m_Enabled: 0
serializedVersion: 2

m_Name:
m_EditorClassIdentifier:
brain: {fileID: 0}
stepCounter: 0
maxStep: 0
maxStep: 3000
goalA: {fileID: 1185286572034058}
goalB: {fileID: 1390180936769712}
orangeGoal: {fileID: 1390180936769712}
redGoal: {fileID: 1185286572034058}
violetBlock: {fileID: 1143687295388296}
redBlock: {fileID: 1143687295388296}
--- !u!114 &114569343444552314
MonoBehaviour:
m_ObjectHideFlags: 1
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
m_GameObject: {fileID: 1471560210313468}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: bb172294dbbcc408286b156a2c4b553c, type: 3}
m_Name:
m_EditorClassIdentifier:

884
unity-environment/Assets/ML-Agents/Examples/Hallway/Scenes/Hallway.unity
文件差异内容过多而无法显示
查看文件

198
unity-environment/Assets/ML-Agents/Examples/Hallway/Scripts/HallwayAgent.cs


// Put this script on your blue cube.
using System.Collections;
using System.Collections;
public GameObject ground; // ground game object. we will use the area bounds to spawn the blocks
public GameObject ground;
public GameObject goalA;
public GameObject goalB;
public GameObject orangeBlock; // the orange block we are going to be pushing
public GameObject violetBlock;
Rigidbody shortBlockRB; // cached on initialization
Rigidbody agentRB; // cached on initialization
Material groundMaterial; // cached on Awake()
public GameObject orangeGoal;
public GameObject redGoal;
public GameObject orangeBlock;
public GameObject redBlock;
RayPerception rayPer;
Rigidbody shortBlockRB;
Rigidbody agentRB;
Material groundMaterial;
int selection;
public override void InitializeAgent()

brain = FindObjectOfType<Brain>(); // only one brain in the scene so this should find our brain. BRAAAINS.
agentRB = GetComponent<Rigidbody>(); // cache the agent rigidbody
groundRenderer = ground.GetComponent<Renderer>(); // get the ground renderer so we can change the material when a goal is scored
groundMaterial = groundRenderer.material; // starting material
brain = FindObjectOfType<Brain>();
rayPer = GetComponent<RayPerception>();
agentRB = GetComponent<Rigidbody>();
groundRenderer = ground.GetComponent<Renderer>();
groundMaterial = groundRenderer.material;
public void RayPerception(float rayDistance,
float[] rayAngles, string[] detectableObjects, float height)
{
foreach (float angle in rayAngles)
{
float noise = 0f;
float noisyAngle = angle + Random.Range(-noise, noise);
Vector3 position = transform.TransformDirection(GiveCatersian(rayDistance, noisyAngle));
position.y = height;
Debug.DrawRay(transform.position, position, Color.red, 0.1f, true);
RaycastHit hit;
float[] subList = new float[detectableObjects.Length + 2];
if (Physics.SphereCast(transform.position, 1.0f, position, out hit, rayDistance))
{
for (int i = 0; i < detectableObjects.Length; i++)
{
if (hit.collider.gameObject.CompareTag(detectableObjects[i]))
{
subList[i] = 1;
subList[detectableObjects.Length + 1] = hit.distance / rayDistance;
break;
}
}
}
else
{
subList[detectableObjects.Length] = 1f;
}
foreach (float f in subList)
AddVectorObs(f);
}
}
public Vector3 GiveCatersian(float radius, float angle)
{
float x = radius * Mathf.Cos(DegreeToRadian(angle));
float z = radius * Mathf.Sin(DegreeToRadian(angle));
return new Vector3(x, 1f, z);
}
public float DegreeToRadian(float degree)
{
return degree * Mathf.PI / 180f;
}
float rayDistance = 8.5f;
float[] rayAngles = { 0f, 45f, 90f, 135f, 180f };
string[] detectableObjects = { "goal", "orangeBlock", "redBlock", "wall" };
RayPerception(rayDistance, rayAngles, detectableObjects, 0f);
float rayDistance = 12f;
float[] rayAngles = { 20f, 60f, 90f, 120f, 160f };
string[] detectableObjects = { "orangeGoal", "redGoal", "orangeBlock", "redBlock", "wall" };
AddVectorObs((float)GetStepCount() / (float)agentParameters.maxStep);
AddVectorObs(rayPer.Perceive(rayDistance, rayAngles, detectableObjects, 0f, 0f));
// swap ground material, wait time seconds, then swap back to the regular ground material.
yield return new WaitForSeconds(time); // wait for 2 sec
yield return new WaitForSeconds(time);
public void MoveAgent(float[] act)
{

// If we're using Continuous control you will need to change the Action
if (brain.brainParameters.vectorActionSpaceType == SpaceType.continuous)
{
dirToGo = transform.forward * Mathf.Clamp(act[0], -1f, 1f);

{
int action = Mathf.FloorToInt(act[0]);
if (action == 0)
{
dirToGo = transform.forward * 1f;
}
else if (action == 1)
switch (action)
dirToGo = transform.forward * -1f;
}
else if (action == 2)
{
rotateDir = transform.up * 1f;
}
else if (action == 3)
{
rotateDir = transform.up * -1f;
case 0:
dirToGo = transform.forward * 1f;
break;
case 1:
dirToGo = transform.forward * -1f;
break;
case 2:
rotateDir = transform.up * 1f;
break;
case 3:
rotateDir = transform.up * -1f;
break;
transform.Rotate(rotateDir, Time.deltaTime * 100f);
agentRB.AddForce(dirToGo * academy.agentRunSpeed, ForceMode.VelocityChange); // GO
transform.Rotate(rotateDir, Time.deltaTime * 150f);
agentRB.AddForce(dirToGo * academy.agentRunSpeed, ForceMode.VelocityChange);
AddReward(-0.0003f);
MoveAgent(vectorAction); //perform agent actions
bool fail = false; // did the agent or block get pushed off the edge?
if (!Physics.Raycast(agentRB.position, Vector3.down, 20)) // if the agent has gone over the edge, we done.
{
fail = true; // fell off bro
AddReward(-1f); // BAD AGENT
//transform.position = GetRandomSpawnPos(agentSpawnAreaBounds, agentSpawnArea);
Done(); // if we mark an agent as done it will be reset automatically. AgentReset() will be called.
}
if (fail)
{
StartCoroutine(GoalScoredSwapGroundMaterial(academy.failMaterial, .5f)); // swap ground material to indicate fail
}
AddReward(-1f / agentParameters.maxStep);
MoveAgent(vectorAction);
// detect when we touch the goal
if (col.gameObject.CompareTag("goal")) // touched goal
if (col.gameObject.CompareTag("orangeGoal") || col.gameObject.CompareTag("redGoal"))
if ((selection == 0 && col.gameObject.name == "GoalA") || (selection == 1 && col.gameObject.name == "GoalB"))
if ((selection == 0 && col.gameObject.CompareTag("orangeGoal")) ||
(selection == 1 && col.gameObject.CompareTag("redGoal")))
AddReward(1f); // you get 5 points
StartCoroutine(GoalScoredSwapGroundMaterial(academy.goalScoredMaterial, 2)); // swap ground material for a bit to indicate we scored.
SetReward(1f);
StartCoroutine(GoalScoredSwapGroundMaterial(academy.goalScoredMaterial, 0.5f));
AddReward(-0.1f); // you lose a point
StartCoroutine(GoalScoredSwapGroundMaterial(academy.failMaterial, .5f)); // swap ground material to indicate fail
SetReward(-0.1f);
StartCoroutine(GoalScoredSwapGroundMaterial(academy.failMaterial, 0.5f));
Done(); // if we mark an agent as done it will be reset automatically. AgentReset() will be called.
Done();
// In the editor, if "Reset On Done" is checked then AgentReset() will be called automatically anytime we mark done = true in an agent script.
float agentOffset = -15f;
float blockOffset = 0f;
orangeBlock.transform.position = new Vector3(0f + Random.Range(-3f, 3f), 2f, -15f + Random.Range(-5f, 5f)) + ground.transform.position;
violetBlock.transform.position = new Vector3(0f, -1000f, -15f + Random.Range(-5f, 5f)) + ground.transform.position;
orangeBlock.transform.position =
new Vector3(0f + Random.Range(-3f, 3f), 2f, blockOffset + Random.Range(-5f, 5f))
+ ground.transform.position;
redBlock.transform.position =
new Vector3(0f, -1000f, blockOffset + Random.Range(-5f, 5f))
+ ground.transform.position;
orangeBlock.transform.position = new Vector3(0f, -1000f, -15f + Random.Range(-5f, 5f)) + ground.transform.position;
violetBlock.transform.position = new Vector3(0f, 2f, -15f + Random.Range(-5f, 5f)) + ground.transform.position;
orangeBlock.transform.position =
new Vector3(0f, -1000f, blockOffset + Random.Range(-5f, 5f))
+ ground.transform.position;
redBlock.transform.position =
new Vector3(0f, 2f, blockOffset + Random.Range(-5f, 5f))
+ ground.transform.position;
transform.position = new Vector3(0f+ Random.Range(-3f, 3f), 1f, 0f + Random.Range(-5f, 5f)) + ground.transform.position;
transform.position = new Vector3(0f + Random.Range(-3f, 3f),
1f, agentOffset + Random.Range(-5f, 5f))
+ ground.transform.position;
int goalPos = Random.Range(0, 2);
if (goalPos == 0)
{
orangeGoal.transform.position = new Vector3(7f, 0.5f, 9f) + area.transform.position;
redGoal.transform.position = new Vector3(-7f, 0.5f, 9f) + area.transform.position;
}
else
{
redGoal.transform.position = new Vector3(7f, 0.5f, 9f) + area.transform.position;
orangeGoal.transform.position = new Vector3(-7f, 0.5f, 9f) + area.transform.position;
}

927
unity-environment/Assets/ML-Agents/Examples/Hallway/TFModels/Hallway.bytes
文件差异内容过多而无法显示
查看文件

5
unity-environment/Assets/ML-Agents/Examples/Hallway/TFModels/Hallway.bytes.meta


fileFormatVersion: 2
guid: 8aa65be485d2a408291f32ff56a9074e
guid: 207e5dbbeeca8431d8f57fd1c96280af
timeCreated: 1520904091
licenseType: Free
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

2
unity-environment/Assets/ML-Agents/Examples/PushBlock/Materials/groundPushblock.mat


m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_Name: ground_pushblock
m_Name: groundPushblock
m_Shader: {fileID: 46, guid: 0000000000000000f000000000000000, type: 0}
m_ShaderKeywords:
m_LightmapFlags: 4

108
unity-environment/Assets/ML-Agents/Examples/PushBlock/Prefabs/PushBlockArea.prefab


- component: {fileID: 23572363946664112}
- component: {fileID: 54817351390947638}
- component: {fileID: 114505490781873732}
- component: {fileID: 114421647563711602}
m_Layer: 0
m_Name: Agent
m_TagString: agent

- component: {fileID: 114098955560590180}
m_Layer: 5
m_Name: Reward Function
m_TagString: Untagged
m_Icon: {fileID: 0}
m_NavMeshLayer: 0
m_StaticEditorFlags: 0
m_IsActive: 1
--- !u!1 &1738131294272224
GameObject:
m_ObjectHideFlags: 1
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
serializedVersion: 5
m_Component:
- component: {fileID: 4469888496441150}
- component: {fileID: 33813720350873002}
- component: {fileID: 23919465149654940}
- component: {fileID: 65000028652760624}
m_Layer: 0
m_Name: Cube
m_TagString: Untagged
m_Icon: {fileID: 0}
m_NavMeshLayer: 0

m_LocalRotation: {x: 0, y: 0, z: 0, w: 1}
m_LocalPosition: {x: 0, y: 1, z: -4.43}
m_LocalScale: {x: 1, y: 1, z: 1}
m_Children: []
m_Children:
- {fileID: 4469888496441150}
m_Father: {fileID: 4006990431719496}
m_RootOrder: 2
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}

m_GameObject: {fileID: 1293433783856340}
m_LocalRotation: {x: -0, y: -0, z: -0, w: 1}
m_LocalPosition: {x: 2, y: 1, z: -8}
m_LocalScale: {x: 3, y: 1, z: 3}
m_LocalScale: {x: 2, y: 0.75, z: 2}
m_Children: []
m_Father: {fileID: 4006990431719496}
m_RootOrder: 5

m_Father: {fileID: 4207862233523270}
m_RootOrder: 3
m_LocalEulerAnglesHint: {x: 0, y: 90, z: 0}
--- !u!4 &4469888496441150
Transform:
m_ObjectHideFlags: 1
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
m_GameObject: {fileID: 1738131294272224}
m_LocalRotation: {x: -0, y: -0, z: -0, w: 1}
m_LocalPosition: {x: 0, y: 0, z: 0.5}
m_LocalScale: {x: 0.2, y: 0.2, z: 0.2}
m_Children: []
m_Father: {fileID: 4188187884171146}
m_RootOrder: 0
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
--- !u!4 &4643236529171504
Transform:
m_ObjectHideFlags: 1

m_SortingLayerID: 0
m_SortingLayer: 0
m_SortingOrder: 0
--- !u!23 &23919465149654940
MeshRenderer:
m_ObjectHideFlags: 1
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
m_GameObject: {fileID: 1738131294272224}
m_Enabled: 1
m_CastShadows: 1
m_ReceiveShadows: 1
m_DynamicOccludee: 1
m_MotionVectors: 1
m_LightProbeUsage: 1
m_ReflectionProbeUsage: 1
m_RenderingLayerMask: 4294967295
m_Materials:
- {fileID: 10303, guid: 0000000000000000f000000000000000, type: 0}
m_StaticBatchInfo:
firstSubMesh: 0
subMeshCount: 0
m_StaticBatchRoot: {fileID: 0}
m_ProbeAnchor: {fileID: 0}
m_LightProbeVolumeOverride: {fileID: 0}
m_ScaleInLightmap: 1
m_PreserveUVs: 0
m_IgnoreNormalsForChartDetection: 0
m_ImportantGI: 0
m_StitchLightmapSeams: 0
m_SelectedEditorRenderState: 3
m_MinimumChartSize: 4
m_AutoUVMaxDistance: 0.5
m_AutoUVMaxAngle: 89
m_LightmapParameters: {fileID: 0}
m_SortingLayerID: 0
m_SortingLayer: 0
m_SortingOrder: 0
--- !u!33 &33062522536833656
MeshFilter:
m_ObjectHideFlags: 1

m_PrefabInternal: {fileID: 100100000}
m_GameObject: {fileID: 1942656630263630}
m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0}
--- !u!33 &33813720350873002
MeshFilter:
m_ObjectHideFlags: 1
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
m_GameObject: {fileID: 1738131294272224}
m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0}
--- !u!33 &33817663761775486
MeshFilter:
m_ObjectHideFlags: 1

m_GameObject: {fileID: 1489716781518988}
serializedVersion: 2
m_Mass: 10
m_Drag: 2
m_Drag: 4
m_AngularDrag: 0.05
m_UseGravity: 1
m_IsKinematic: 0

--- !u!65 &65000028652760624
BoxCollider:
m_ObjectHideFlags: 1
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
m_GameObject: {fileID: 1738131294272224}
m_Material: {fileID: 0}
m_IsTrigger: 0
m_Enabled: 1
serializedVersion: 2
m_Size: {x: 1, y: 1, z: 1}
m_Center: {x: 0, y: 0, z: 0}
--- !u!65 &65084077644419200
BoxCollider:
m_ObjectHideFlags: 1

m_IsTrigger: 0
m_Enabled: 1
serializedVersion: 2
m_Size: {x: 1, y: 10, z: 1}
m_Size: {x: 1, y: 20, z: 1}
m_Center: {x: 0, y: 0, z: 0}
--- !u!114 &114098955560590180
MonoBehaviour:

m_Name:
m_EditorClassIdentifier:
agent: {fileID: 0}
--- !u!114 &114421647563711602
MonoBehaviour:
m_ObjectHideFlags: 1
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
m_GameObject: {fileID: 1489716781518988}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: bb172294dbbcc408286b156a2c4b553c, type: 3}
m_Name:
m_EditorClassIdentifier:
--- !u!114 &114458416987814396
MonoBehaviour:
m_ObjectHideFlags: 1

m_Name:
m_EditorClassIdentifier:
brain: {fileID: 0}
stepCounter: 0
agentParameters:
agentCameras: []
maxStep: 5000

m_Extent: {x: 0, y: 0, z: 0}
goal: {fileID: 1441513772919942}
block: {fileID: 1293433783856340}
obstacle: {fileID: 0}
goalDetect: {fileID: 0}
--- !u!114 &114548077184658240
MonoBehaviour:

180
unity-environment/Assets/ML-Agents/Examples/PushBlock/Scenes/PushBlock.unity


m_RemovedComponents: []
m_ParentPrefab: {fileID: 100100000, guid: 03bcc81e249714a22bb411dddcc5d15e, type: 2}
m_IsPrefabParent: 0
--- !u!114 &116521526
MonoBehaviour:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 0}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: 41e9bda8f3cf1492fa74926a530f6f70, type: 3}
m_Name: (Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)
m_EditorClassIdentifier:
broadcast: 1
continuousPlayerActions:
- key: 97
index: 0
value: -1
- key: 100
index: 0
value: 1
- key: 119
index: 1
value: 1
- key: 115
index: 1
value: -1
discretePlayerActions:
- key: 97
value: 1
- key: 100
value: 0
- key: 119
value: 2
- key: 115
value: 3
defaultAction: -1
brain: {fileID: 226584073}
--- !u!1001 &117714360
Prefab:
m_ObjectHideFlags: 0

m_Name:
m_EditorClassIdentifier:
brainParameters:
vectorObservationSize: 18
numStackedVectorObservations: 1
vectorActionSize: 2
vectorObservationSize: 70
numStackedVectorObservations: 3
vectorActionSize: 6
vectorActionSpaceType: 1
-
-
-
-
vectorActionSpaceType: 0
brainType: 3
brainType: 0
- {fileID: 116521526}
- {fileID: 1745549652}
- {fileID: 1725896589}
- {fileID: 1000359790}
instanceID: 320464
- {fileID: 1774808999}
- {fileID: 966937467}
- {fileID: 1078689134}
- {fileID: 626804319}
instanceID: 23980
--- !u!1 &255077123
GameObject:
m_ObjectHideFlags: 0

m_NavMeshLayer: 0
m_StaticEditorFlags: 0
m_IsActive: 1
--- !u!114 &626804319
MonoBehaviour:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 0}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: 8b23992c8eb17439887f5e944bf04a40, type: 3}
m_Name: (Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)
m_EditorClassIdentifier:
broadcast: 1
graphModel: {fileID: 4900000, guid: 2e1836ea047144568b39370292aa8a6e, type: 3}
graphScope:
graphPlaceholders: []
BatchSizePlaceholderName: batch_size
VectorObservationPlacholderName: vector_observation
RecurrentInPlaceholderName: recurrent_in
RecurrentOutPlaceholderName: recurrent_out
VisualObservationPlaceholderName: []
ActionPlaceholderName: action
PreviousActionPlaceholderName: prev_action
brain: {fileID: 226584073}
--- !u!1001 &756777950
Prefab:
m_ObjectHideFlags: 0

propertyPath: m_RootOrder
value: 5
objectReference: {fileID: 0}
- target: {fileID: 114505490781873732, guid: 03bcc81e249714a22bb411dddcc5d15e,
type: 2}
propertyPath: brain
value:
objectReference: {fileID: 226584073}
- target: {fileID: 223130197460698574, guid: 03bcc81e249714a22bb411dddcc5d15e,
type: 2}
propertyPath: m_Camera

propertyPath: m_Name
value: Area
objectReference: {fileID: 0}
- target: {fileID: 114505490781873732, guid: 03bcc81e249714a22bb411dddcc5d15e,
type: 2}
propertyPath: brain
value:
objectReference: {fileID: 226584073}
- target: {fileID: 114505490781873732, guid: 03bcc81e249714a22bb411dddcc5d15e,
type: 2}
propertyPath: obstacle
value:
objectReference: {fileID: 0}
m_RemovedComponents: []
m_ParentPrefab: {fileID: 100100000, guid: 03bcc81e249714a22bb411dddcc5d15e, type: 2}
m_IsPrefabParent: 0

m_RemovedComponents: []
m_ParentPrefab: {fileID: 100100000, guid: 03bcc81e249714a22bb411dddcc5d15e, type: 2}
m_IsPrefabParent: 0
--- !u!114 &1000359790
--- !u!114 &966937467
MonoBehaviour:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}

m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: 8b23992c8eb17439887f5e944bf04a40, type: 3}
m_Name: (Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)
m_Script: {fileID: 11500000, guid: 943466ab374444748a364f9d6c3e2fe2, type: 3}
m_Name: (Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)
graphModel: {fileID: 4900000, guid: 2e1836ea047144568b39370292aa8a6e, type: 3}
graphScope:
graphPlaceholders: []
BatchSizePlaceholderName: batch_size
VectorObservationPlacholderName: vector_observation
RecurrentInPlaceholderName: recurrent_in
RecurrentOutPlaceholderName: recurrent_out
VisualObservationPlaceholderName: []
ActionPlaceholderName: action
PreviousActionPlaceholderName: prev_action
brain: {fileID: 226584073}
brain: {fileID: 0}
--- !u!1 &1009000883
GameObject:
m_ObjectHideFlags: 0

m_RemovedComponents: []
m_ParentPrefab: {fileID: 100100000, guid: 03bcc81e249714a22bb411dddcc5d15e, type: 2}
m_IsPrefabParent: 0
--- !u!114 &1078689134
MonoBehaviour:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 0}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: 35813a1be64e144f887d7d5f15b963fa, type: 3}
m_Name: (Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)
m_EditorClassIdentifier:
brain: {fileID: 226584073}
--- !u!1001 &1082394721
Prefab:
m_ObjectHideFlags: 0

m_Name:
m_EditorClassIdentifier:
maxSteps: 0
isInference: 1
trainingConfiguration:
width: 1280
height: 720

targetFrameRate: 60
resetParameters:
resetParameters: []
episodeCount: 1
stepsSinceReset: 0
agentRunSpeed: 1
agentRunSpeed: 2
agentRotationSpeed: 15
spawnAreaMarginMultiplier: 0.5
goalScoredMaterial: {fileID: 2100000, guid: 80ffa2c5b96364e4b80fbb2e42b0af3d, type: 2}

m_RemovedComponents: []
m_ParentPrefab: {fileID: 100100000, guid: 03bcc81e249714a22bb411dddcc5d15e, type: 2}
m_IsPrefabParent: 0
--- !u!114 &1725896589
MonoBehaviour:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 0}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: 35813a1be64e144f887d7d5f15b963fa, type: 3}
m_Name: (Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)
m_EditorClassIdentifier:
brain: {fileID: 226584073}
--- !u!114 &1745549652
--- !u!114 &1774808999
MonoBehaviour:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}

m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: 943466ab374444748a364f9d6c3e2fe2, type: 3}
m_Name: (Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)
m_Script: {fileID: 11500000, guid: 41e9bda8f3cf1492fa74926a530f6f70, type: 3}
m_Name: (Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)
brain: {fileID: 0}
continuousPlayerActions:
- key: 97
index: 0
value: -1
- key: 100
index: 0
value: 1
- key: 119
index: 1
value: 1
- key: 115
index: 1
value: -1
discretePlayerActions:
- key: 97
value: 3
- key: 100
value: 2
- key: 119
value: 0
- key: 115
value: 1
- key: 113
value: 4
- key: 101
value: 5
defaultAction: -1
brain: {fileID: 226584073}
--- !u!1001 &1783815898
Prefab:
m_ObjectHideFlags: 0

150
unity-environment/Assets/ML-Agents/Examples/PushBlock/Scripts/PushAgentBasic.cs


Rigidbody blockRB; //cached on initialization
Rigidbody agentRB; //cached on initialization
Material groundMaterial; //cached on Awake()
RayPerception rayPer;
/// <summary>
/// We will be changing the ground material based on success/failue

base.InitializeAgent();
goalDetect = block.GetComponent<GoalDetect>();
goalDetect.agent = this;
rayPer = GetComponent<RayPerception>();
// Cache the agent rigidbody
agentRB = GetComponent<Rigidbody>();

public override void CollectObservations()
{
// Block position relative to goal.
Vector3 blockPosRelToGoal = blockRB.position - goal.transform.position;
// Block position relative to agent.
Vector3 blockPosRelToAgent = blockRB.position - agentRB.position;
// Obstacle position relative to agent.
// Agent position relative to ground.
Vector3 agentPos = agentRB.position - area.transform.position;
// Goal position relative to ground.
Vector3 goalPos = goal.transform.position - ground.transform.position;
AddVectorObs(agentPos);
AddVectorObs(goalPos);
AddVectorObs(blockPosRelToGoal);
AddVectorObs(blockPosRelToAgent);
// Add velocity of block and agent to observations.
AddVectorObs(blockRB.velocity);
AddVectorObs(agentRB.velocity);
float rayDistance = 12f;
float[] rayAngles = { 0f, 45f, 90f, 135f, 180f, 110f, 70f };
string[] detectableObjects;
detectableObjects = new string[] { "block", "goal", "wall" };
AddVectorObs(rayPer.Perceive(rayDistance, rayAngles, detectableObjects, 0f, 0f));
AddVectorObs(rayPer.Perceive(rayDistance, rayAngles, detectableObjects, 1.5f, 0f));
public Vector3 GetRandomSpawnPos(float spawnHeight)
public Vector3 GetRandomSpawnPos()
{
bool foundNewSpawnLocation = false;
Vector3 randomSpawnPos = Vector3.zero;

Done();
// Swap ground material for a bit to indicate we scored.
StartCoroutine(GoalScoredSwapGroundMaterial(academy.goalScoredMaterial, 1));
StartCoroutine(GoalScoredSwapGroundMaterial(academy.goalScoredMaterial, 0.5f));
}
/// <summary>

/// </summary>
public void MoveAgent(float[] act)
{
// AGENT ACTIONS
// Here we define the actions our agent can use, such as
// "go left", "go forward", "turn", etc.
// In the brain we define the number of axes we want to use here.
// In this example we need 2 axes to define:
// Right/left movement (act[0])
// Forward/back movement (act[1])
Vector3 dirToGo = Vector3.zero;
Vector3 rotateDir = Vector3.zero;
// Example: Right/Left Movement. It is defined in this line:
// Vector3 directionX = Vector3.right * Mathf.Clamp(act[0], -1f, 1f);
// The neural network is setting the act[0] value.
// If it chooses 1 then the agent will go right.
// If it chooses -1 the agent will go left.
// If it chooses .42 then it will go a little bit right
// If it chooses -.8 then it will go left (well...80% left)
// Energy Conservation Penalties
// Give penalties based on how fast the agent chooses to go.
// The agent should only exert as much energy as necessary.
// This is how animals work as well.
// i.e. You're probably not running in place at all times.
// Larger the value, the less the penalty is.
float energyConservPenaltyModifier = 10000;
// The larger the movement, the greater the penalty given.
AddReward(-Mathf.Abs(act[0]) / energyConservPenaltyModifier);
AddReward(-Mathf.Abs(act[1]) / energyConservPenaltyModifier);
Vector3 directionX = Vector3.zero;
Vector3 directionZ = Vector3.zero;
// Move left or right in world space.
directionX = Vector3.right * Mathf.Clamp(act[0], -1f, 1f);
// Move forward or back in world space.
directionZ = Vector3.forward * Mathf.Clamp(act[1], -1f, 1f);
// Add directions together. This is the direction we want the agent
// to move in.
Vector3 dirToGo = directionX + directionZ;
int action = Mathf.FloorToInt(act[0]);
// Apply movement force!
agentRB.AddForce(dirToGo * academy.agentRunSpeed, ForceMode.VelocityChange);
if (dirToGo != Vector3.zero)
// Goalies and Strikers have slightly different action spaces.
switch (action)
// Rotate the agent appropriately.
agentRB.rotation = Quaternion.Lerp(agentRB.rotation,
Quaternion.LookRotation(dirToGo),
Time.deltaTime * academy.agentRotationSpeed);
case 0:
dirToGo = transform.forward * 1f;
break;
case 1:
dirToGo = transform.forward * -1f;
break;
case 2:
rotateDir = transform.up * 1f;
break;
case 3:
rotateDir = transform.up * -1f;
break;
case 4:
dirToGo = transform.right * -0.75f;
break;
case 5:
dirToGo = transform.right * 0.75f;
break;
transform.Rotate(rotateDir, Time.fixedDeltaTime * 200f);
agentRB.AddForce(dirToGo * academy.agentRunSpeed,
ForceMode.VelocityChange);
}

MoveAgent(vectorAction);
// Penalty given each step to encourage agent to finish task quickly.
AddReward(-.00005f);
// Did the agent or block get pushed off the edge?
bool fail = false;
// If the agent has gone over the edge, end the episode.
if (!Physics.Raycast(agentRB.position, Vector3.down, 3))
{
// Fell off bro
fail = true;
// BAD AGENT
SetReward(-1f);
// If we mark an agent as done it will be reset automatically.
// AgentReset() will be called.
Done();
}
// If the block has gone over the edge, end the episode.
if (!Physics.Raycast(blockRB.position, Vector3.down, 3))
{
// Fell off bro
fail = true;
// BAD AGENT
SetReward(-1f);
// If we mark an agent as done it will be reset automatically.
// AgentReset() will be called.
Done();
}
if (fail)
{
// Swap ground material to indicate failure of the episode.
StartCoroutine(GoalScoredSwapGroundMaterial(academy.failMaterial, 1f));
}
AddReward(-1f / agentParameters.maxStep);
}
/// <summary>

{
// Get a random position for the block.
block.transform.position = GetRandomSpawnPos(1.5f);
block.transform.position = GetRandomSpawnPos();
// Reset block velocity back to zero.
blockRB.velocity = Vector3.zero;

area.transform.Rotate(new Vector3(0f, rotationAngle, 0f));
ResetBlock();
transform.position = GetRandomSpawnPos(1.5f);
transform.position = GetRandomSpawnPos();

994
unity-environment/Assets/ML-Agents/Examples/PushBlock/TFModels/PushBlock.bytes
文件差异内容过多而无法显示
查看文件

2
unity-environment/Assets/ML-Agents/Examples/Reacher/Materials/Materials/checker1.mat


m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_Name: checker 1
m_Name: checker1
m_Shader: {fileID: 46, guid: 0000000000000000f000000000000000, type: 0}
m_ShaderKeywords:
m_LightmapFlags: 4

4
unity-environment/Assets/ML-Agents/Examples/Soccer/Prefabs/SoccerBall/Prefabs/SoccerBall.prefab


- component: {fileID: 114729878072981838}
- component: {fileID: 54501682632205078}
m_Layer: 0
m_Name: Soccer Ball
m_Name: SoccerBall
m_TagString: Untagged
m_Icon: {fileID: 0}
m_NavMeshLayer: 0

m_MotionVectors: 1
m_LightProbeUsage: 1
m_ReflectionProbeUsage: 1
m_RenderingLayerMask: 4294967295
m_Materials:
- {fileID: 2100000, guid: cff0f8a4e9c85a045be6d610d20b87b3, type: 2}
- {fileID: 2100000, guid: fb69c479c3cec624b96dfe20d305c1b9, type: 2}

m_EditorClassIdentifier:
area: {fileID: 0}
lastTouchedBy: {fileID: 0}
wallTag: wall
agentTag: agent
redGoalTag: redGoal
blueGoalTag: blueGoal

67
unity-environment/Assets/ML-Agents/Examples/Soccer/Scenes/SoccerTwos.unity


m_Name:
m_EditorClassIdentifier:
maxSteps: 0
isInference: 1
trainingConfiguration:
width: 800
height: 500

targetFrameRate: 60
resetParameters:
resetParameters: []
episodeCount: 0
stepsSinceReset: 0
brainStriker: {fileID: 1890643319}
brainGoalie: {fileID: 1379650626}
redMaterial: {fileID: 2100000, guid: 776dd8b57653342839c3fb5f46ce664e, type: 2}

m_RemovedComponents: []
m_ParentPrefab: {fileID: 100100000, guid: ab518b4d19ded4d9dbe6fec8239e88b8, type: 2}
m_IsPrefabParent: 0
--- !u!1 &1870405448 stripped
GameObject:
m_PrefabParentObject: {fileID: 1060120010984970, guid: ab518b4d19ded4d9dbe6fec8239e88b8,
type: 2}
m_PrefabInternal: {fileID: 1870405447}
--- !u!1 &1870405449 stripped
GameObject:
m_PrefabParentObject: {fileID: 1241552893761096, guid: ab518b4d19ded4d9dbe6fec8239e88b8,
type: 2}
m_PrefabInternal: {fileID: 1870405447}
--- !u!1 &1870405450 stripped
GameObject:
m_PrefabParentObject: {fileID: 1555791983229206, guid: ab518b4d19ded4d9dbe6fec8239e88b8,
type: 2}
m_PrefabInternal: {fileID: 1870405447}
--- !u!1 &1870405451 stripped
GameObject:
m_PrefabParentObject: {fileID: 1280263969241142, guid: ab518b4d19ded4d9dbe6fec8239e88b8,
type: 2}
m_PrefabInternal: {fileID: 1870405447}
--- !u!114 &1870405452
MonoBehaviour:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 1870405448}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: bb172294dbbcc408286b156a2c4b553c, type: 3}
m_Name:
m_EditorClassIdentifier:
--- !u!114 &1870405453
MonoBehaviour:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 1870405449}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: bb172294dbbcc408286b156a2c4b553c, type: 3}
m_Name:
m_EditorClassIdentifier:
--- !u!114 &1870405454
MonoBehaviour:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 1870405450}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: bb172294dbbcc408286b156a2c4b553c, type: 3}
m_Name:
m_EditorClassIdentifier:
--- !u!114 &1870405455
MonoBehaviour:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 1870405451}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: bb172294dbbcc408286b156a2c4b553c, type: 3}
m_Name:
m_EditorClassIdentifier:
--- !u!1 &1890643317
GameObject:
m_ObjectHideFlags: 0

147
unity-environment/Assets/ML-Agents/Examples/Soccer/Scripts/AgentSoccer.cs


[HideInInspector]
public Rigidbody agentRB;
SoccerAcademy academy;
Renderer renderer;
Renderer agentRenderer;
RayPerception rayPer;
renderer.material = team == Team.red ? academy.redMaterial : academy.blueMaterial;
agentRenderer.material = team == Team.red ? academy.redMaterial : academy.blueMaterial;
}
public void JoinRedTeam(AgentRole role)

renderer.material = academy.redMaterial;
agentRenderer.material = academy.redMaterial;
}
public void JoinBlueTeam(AgentRole role)

renderer.material = academy.blueMaterial;
agentRenderer.material = academy.blueMaterial;
void Awake()
public override void InitializeAgent()
renderer = GetComponent<Renderer>();
base.InitializeAgent();
agentRenderer = GetComponent<Renderer>();
rayPer = GetComponent<RayPerception>();
academy = FindObjectOfType<SoccerAcademy>();
PlayerState playerState = new PlayerState();
playerState.agentRB = GetComponent<Rigidbody>();

playerState.playerIndex = playerIndex;
}
public override void InitializeAgent()
{
base.InitializeAgent();
}
public void RayPerception(float rayDistance,
float[] rayAngles, string[] detectableObjects,
float startHeight, float endHeight)
{
foreach (float angle in rayAngles)
{
float noise = 0f;
float noisyAngle = angle + Random.Range(-noise, noise);
Vector3 position = transform.TransformDirection(
GiveCatersian(rayDistance, noisyAngle));
position.y = endHeight;
Debug.DrawRay(transform.position + new Vector3(0f, startHeight, 0f),
position, Color.red, 0.1f, true);
RaycastHit hit;
float[] subList = new float[detectableObjects.Length + 2];
if (Physics.SphereCast(transform.position +
new Vector3(0f, startHeight, 0f), 1.0f,
position, out hit, rayDistance))
{
for (int i = 0; i < detectableObjects.Length; i++)
{
if (hit.collider.gameObject.CompareTag(detectableObjects[i]))
{
subList[i] = 1;
subList[detectableObjects.Length + 1] = hit.distance / rayDistance;
break;
}
}
}
else
{
subList[detectableObjects.Length] = 1f;
}
foreach (float f in subList)
AddVectorObs(f);
}
}
public Vector3 GiveCatersian(float radius, float angle)
{
float x = radius * Mathf.Cos(DegreeToRadian(angle));
float z = radius * Mathf.Sin(DegreeToRadian(angle));
return new Vector3(x, 1f, z);
}
public float DegreeToRadian(float degree)
{
return degree * Mathf.PI / 180f;
}
public override void CollectObservations()
{
float rayDistance = 20f;

detectableObjects = new string[] { "ball", "blueGoal", "redGoal",
"wall", "blueAgent", "redAgent" };
}
RayPerception(rayDistance, rayAngles, detectableObjects, 0f, 0f);
RayPerception(rayDistance, rayAngles, detectableObjects, 1f, 1f);
AddVectorObs(rayPer.Perceive(rayDistance, rayAngles, detectableObjects, 0f, 0f));
AddVectorObs(rayPer.Perceive(rayDistance, rayAngles, detectableObjects, 1f, 0f));
}
public void MoveAgent(float[] act)

if (agentRole == AgentRole.goalie)
{
kickPower = 0f;
if (action == 0)
switch (action)
dirToGo = transform.forward * 1f;
kickPower = 1f;
case 0:
dirToGo = transform.forward * 1f;
kickPower = 1f;
break;
case 1:
dirToGo = transform.forward * -1f;
break;
case 3:
dirToGo = transform.right * -1f;
break;
case 2:
dirToGo = transform.right * 1f;
break;
else if (action == 1)
{
dirToGo = transform.forward * -1f;
}
else if (action == 3)
{
dirToGo = transform.right * -1f;
}
else if (action == 2)
{
dirToGo = transform.right * 1f;
}
if (action == 0)
switch (action)
dirToGo = transform.forward * 1f;
kickPower = 1f;
}
else if (action == 1)
{
dirToGo = transform.forward * -1f;
}
else if (action == 2)
{
rotateDir = transform.up * 1f;
}
else if (action == 3)
{
rotateDir = transform.up * -1f;
}
else if (action == 4)
{
dirToGo = transform.right * -1f;
}
else if (action == 5)
{
dirToGo = transform.right * 1f;
case 0:
dirToGo = transform.forward * 1f;
kickPower = 1f;
break;
case 1:
dirToGo = transform.forward * -1f;
break;
case 2:
rotateDir = transform.up * 1f;
break;
case 3:
rotateDir = transform.up * -1f;
break;
case 4:
dirToGo = transform.right * -0.75f;
break;
case 5:
dirToGo = transform.right * 0.75f;
break;
}
}
transform.Rotate(rotateDir, Time.deltaTime * 100f);

19
unity-environment/Assets/ML-Agents/Examples/Template/Scripts/TemplateDecision.cs


using System.Collections;
using System.Collections.Generic;
using System.Collections.Generic;
public float[] Decide(List<float> state, List<Texture2D> observation, float reward, bool done, List<float> memory)
public float[] Decide(
List<float> vectorObs,
List<Texture2D> visualObs,
float reward,
bool done,
List<float> memory)
public List<float> MakeMemory(List<float> state, List<Texture2D> observation, float reward, bool done, List<float> memory)
public List<float> MakeMemory(
List<float> vectorObs,
List<Texture2D> visualObs,
float reward,
bool done,
List<float> memory)
}
}

31
unity-environment/Assets/ML-Agents/Examples/SharedAssets/Scripts/RandomDecision.cs


using System.Collections;
using System.Collections.Generic;
using System.Collections.Generic;
using UnityEngine;
public class RandomDecision : MonoBehaviour, Decision

public void Awake()
{
brainParameters =
brainParameters =
public float[] Decide(List<float> state, List<Texture2D> observation,
float reward, bool done, List<float> memory)
public float[] Decide(
List<float> vectorObs,
List<Texture2D> visualObs,
float reward,
bool done,
List<float> memory)
if (actionSpaceType == SpaceType.continuous)
{
List<float> act = new List<float>();

act.Add(2*Random.value-1);
act.Add(2 * Random.value - 1);
else
{
return new float[1] { Random.Range(0,actionSpaceSize) };
}
return new float[1] { Random.Range(0, actionSpaceSize) };
public List<float> MakeMemory(List<float> state,
List<Texture2D> observation, float reward,
bool done, List<float> memory)
public List<float> MakeMemory(
List<float> vectorObs,
List<Texture2D> visualObs,
float reward,
bool done,
List<float> memory)
{
return new List<float>();
}

2
unity-environment/Assets/ML-Agents/Examples/SharedAssets/Materials/SuccessGround.mat


m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_Name: success ground green
m_Name: SuccessGround
m_Shader: {fileID: 46, guid: 0000000000000000f000000000000000, type: 0}
m_ShaderKeywords: _EMISSION
m_LightmapFlags: 1

2
unity-environment/Assets/ML-Agents/Examples/SharedAssets/Materials/Obstacle.mat


m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_Name: obstacle
m_Name: Obstacle
m_Shader: {fileID: 46, guid: 0000000000000000f000000000000000, type: 0}
m_ShaderKeywords: _EMISSION
m_LightmapFlags: 1

2
unity-environment/Assets/ML-Agents/Examples/SharedAssets/Materials/Goal.mat


m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_Name: goal
m_Name: Goal
m_Shader: {fileID: 46, guid: 0000000000000000f000000000000000, type: 0}
m_ShaderKeywords: _EMISSION
m_LightmapFlags: 1

2
unity-environment/Assets/ML-Agents/Examples/SharedAssets/Materials/FailGround.mat


m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_Name: fail ground red
m_Name: FailGround
m_Shader: {fileID: 46, guid: 0000000000000000f000000000000000, type: 0}
m_ShaderKeywords: _EMISSION
m_LightmapFlags: 1

2
unity-environment/Assets/ML-Agents/Examples/SharedAssets/Materials/Block.mat


m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_Name: block
m_Name: Block
m_Shader: {fileID: 46, guid: 0000000000000000f000000000000000, type: 0}
m_ShaderKeywords: _EMISSION
m_LightmapFlags: 1

2
unity-environment/Assets/ML-Agents/Examples/SharedAssets/Materials/Ball.mat


m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_Name: ball
m_Name: Ball
m_Shader: {fileID: 46, guid: 0000000000000000f000000000000000, type: 0}
m_ShaderKeywords: _EMISSION
m_LightmapFlags: 1

2
unity-environment/Assets/ML-Agents/Examples/SharedAssets/Materials/MaterialsGrid/pitMaterial.mat


m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_Name: pit_mat
m_Name: pitMaterial
m_Shader: {fileID: 46, guid: 0000000000000000f000000000000000, type: 0}
m_ShaderKeywords: _EMISSION
m_LightmapFlags: 1

2
unity-environment/Assets/ML-Agents/Examples/SharedAssets/Materials/MaterialsGrid/goalMaterial.mat


m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_Name: goal_mat
m_Name: goalMaterial
m_Shader: {fileID: 46, guid: 0000000000000000f000000000000000, type: 0}
m_ShaderKeywords: _EMISSION
m_LightmapFlags: 1

2
unity-environment/Assets/ML-Agents/Examples/SharedAssets/Materials/MaterialsGrid/agentMaterial.mat


m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_Name: agent_mat
m_Name: agentMaterial
m_Shader: {fileID: 46, guid: 0000000000000000f000000000000000, type: 0}
m_ShaderKeywords: _EMISSION
m_LightmapFlags: 1

2
unity-environment/Assets/ML-Agents/Examples/SharedAssets/Materials/MaterialsBall/Materials/logo2.mat


m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_Name: logo 2
m_Name: logo2
m_Shader: {fileID: 46, guid: 0000000000000000f000000000000000, type: 0}
m_ShaderKeywords:
m_LightmapFlags: 4

2
unity-environment/Assets/ML-Agents/Examples/SharedAssets/Materials/MaterialsBall/Materials/logo1.mat


m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_Name: logo 1
m_Name: logo1
m_Shader: {fileID: 46, guid: 0000000000000000f000000000000000, type: 0}
m_ShaderKeywords: _ALPHAPREMULTIPLY_ON
m_LightmapFlags: 4

2
unity-environment/Assets/ML-Agents/Scripts/Brain.cs


* the state
* <br> If discrete : The number of possible values the state can take*/
[Range(1, 10)]
[Range(1, 50)]
public int numStackedVectorObservations = 1;
public int vectorActionSize = 1;

71
unity-environment/Assets/ML-Agents/Scripts/Decision.cs


using System.Collections;
using System.Collections.Generic;
using System.Collections.Generic;
/// Generic functions for Decision Interface
/// <summary>
/// Interface for implementing the behavior of an Agent that uses a Heuristic
/// Brain. The behavior of an Agent in this case is fully decided using the
/// implementation of these methods and no training or inference takes place.
/// Currently, the Heuristic Brain does not support text observations and actions.
/// </summary>
/// \brief Implement this method to define the logic of decision making
/// for the CoreBrainHeuristic
/** Given the information about the agent, return a vector of actions.
* @param state The state of the agent
* @param observation The cameras the agent uses
* @param reward The reward the agent had at the previous step
* @param done Whether or not the agent is done
* @param memory The memories stored from the previous step with MakeMemory()
* @return The vector of actions the agent will take at the next step
*/
float[] Decide(List<float> state, List<Texture2D> observation, float reward, bool done, List<float> memory);
/// <summary>
/// Defines the decision-making logic of the agent. Given the information
/// about the agent, returns a vector of actions.
/// </summary>
/// <returns>Vector action vector.</returns>
/// <param name="vectorObs">The vector observations of the agent.</param>
/// <param name="visualObs">The cameras the agent uses for visual observations.</param>
/// <param name="reward">The reward the agent received at the previous step.</param>
/// <param name="done">Whether or not the agent is done.</param>
/// <param name="memory">
/// The memories stored from the previous step with
/// <see cref="MakeMemory(List{float}, List{Texture2D}, float, bool, List{float})"/>
/// </param>
float[] Decide(
List<float>
vectorObs,
List<Texture2D> visualObs,
float reward,
bool done,
List<float> memory);
/// \brief Implement this method to define the logic of memory making for
/// the CoreBrainHeuristic
/** Given the information about the agent, return the new memory vector for the agent.
* @param state The state of the agent
* @param observation The cameras the agent uses
* @param reward The reward the agent had at the previous step
* @param done Weather or not the agent is done
* @param memory The memories stored from the previous step with MakeMemory()
* @return The vector of memories the agent will use at the next step
*/
List<float> MakeMemory(List<float> state, List<Texture2D> observation, float reward, bool done, List<float> memory);
}
/// <summary>
/// Defines the logic for creating the memory vector for the Agent.
/// </summary>
/// <returns>The vector of memories the agent will use at the next step.</returns>
/// <param name="vectorObs">The vector observations of the agent.</param>
/// <param name="visualObs">The cameras the agent uses for visual observations.</param>
/// <param name="reward">The reward the agent received at the previous step.</param>
/// <param name="done">Whether or not the agent is done.</param>
/// <param name="memory">
/// The memories stored from the previous call to this method.
/// </param>
List<float> MakeMemory(
List<float> vectorObs,
List<Texture2D> visualObs,
float reward,
bool done,
List<float> memory);
}

4
unity-environment/Assets/ML-Agents/Scripts/ExternalCommunicator.cs


StreamWriter logWriter;
string logPath;
const string api = "API-2";
const string _version_ = "API-3";
/// Placeholder for state information to send.
[System.Serializable]

accParamerters.brainParameters = new List<BrainParameters>();
accParamerters.brainNames = new List<string>();
accParamerters.externalBrainNames = new List<string>();
accParamerters.apiNumber = api;
accParamerters.apiNumber = _version_;
accParamerters.logPath = logPath;
foreach (Brain b in brains)
{

160
unity-environment/Assets/ML-Agents/Scripts/Monitor.cs


using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.UI;
using Newtonsoft.Json;
using System.Collections.Generic;
using UnityEngine;
/** The type of monitor the information must be displayed in.
* <slider> corresponds to a slingle rectangle which width is given
* by a float between -1 and 1. (green is positive, red is negative)
* <hist> corresponds to n vertical sliders.
* <text> is a text field.
* <bar> is a rectangle of fixed length to represent the proportions
* of a list of floats.
*/
using Newtonsoft.Json;
/// <summary>
/// The type of monitor the information must be displayed in.
/// <slider> corresponds to a single rectangle whose width is given
/// by a float between -1 and 1. (green is positive, red is negative)
/// <hist> corresponds to n vertical sliders.
/// <text> is a text field.
/// <bar> is a rectangle of fixed length to represent the proportions
/// of a list of floats.
/// </summary>
public enum MonitorType
{
slider,

}
/** Monitor is used to display information. Use the log function to add
* information to your monitor.
*/
/// <summary>
/// Monitor is used to display information about the Agent within the Unity
/// scene. Use the log function to add information to your monitor.
/// </summary>
/// <summary>
/// Represents how high above the target the monitors will be.
/// </summary>
[HideInInspector]
static public float verticalOffset = 3f;
static bool isInstanciated;
static bool isInstantiated;
static Dictionary<Transform, Dictionary<string, DisplayValue>> displayTransformValues;
static Color[] barColors;
private struct DisplayValue
struct DisplayValue
static Dictionary<Transform, Dictionary<string, DisplayValue>> displayTransformValues;
static private Color[] barColors;
[HideInInspector]
static public float verticalOffset = 3f;
/**< \brief This float represents how high above the target the monitors will be. */
static GUIStyle keyStyle;
static GUIStyle valueStyle;

static bool initialized;
/** Use the Monitor.Log static function to attach information to a transform.
* If displayType is <text>, value can be any object.
* If sidplayType is <slider>, value must be a float.
* If sidplayType is <hist>, value must be a List or Array of floats.
* If sidplayType is <bar>, value must be a list or Array of positive floats.
* Note that <slider> and <hist> caps values between -1 and 1.
* @param key The name of the information you wish to Log.
* @param value The value you want to display.
* @param displayType The type of display.
* @param target The transform you want to attach the information to.
*/
/// <summary>
/// Use the Monitor.Log static function to attach information to a transform.
/// If displayType is <text>, value can be any object.
/// If sidplayType is <slider>, value must be a float.
/// If sidplayType is <hist>, value must be a List or Array of floats.
/// If sidplayType is <bar>, value must be a list or Array of positive floats.
/// Note that <slider> and <hist> caps values between -1 and 1.
/// </summary>
/// <returns>The log.</returns>
/// <param name="key">The name of the information you wish to Log.</param>
/// <param name="value">The value you want to display.</param>
/// <param name="displayType">The type of display.</param>
/// <param name="target">
/// The transform you want to attach the information to.
/// </param>
public static void Log(
string key,
object value,

if (!isInstanciated)
if (!isInstantiated)
InstanciateCanvas();
isInstanciated = true;
InstantiateCanvas();
isInstantiated = true;
}

}
}
/** Remove a value from a monitor
* @param target The transform to which the information is attached
* @param key The key of the information you want to remove
*/
/// <summary>
/// Remove a value from a monitor.
/// </summary>
/// <param name="target">
/// The transform to which the information is attached.
/// </param>
/// <param name="key">The key of the information you want to remove.</param>
public static void RemoveValue(Transform target, string key)
{
if (target == null)

}
/** Remove all information from a monitor
* @param target The transform to which the information is attached
*/
/// <summary>
/// Remove all information from a monitor.
/// </summary>
/// <param name="target">
/// The transform to which the information is attached.
/// </param>
public static void RemoveAllValues(Transform target)
{
if (target == null)

{
displayTransformValues.Remove(target);
}
/** Use SetActive to enable or disable the Monitor via script
* @param active Set the Monitor's status to the value of active
*/
public static void SetActive(bool active){
if (!isInstanciated)
/// <summary>
/// Use SetActive to enable or disable the Monitor via script
/// </summary>
/// <param name="active">Value to set the Monitor's status to.</param>
public static void SetActive(bool active)
{
if (!isInstantiated)
InstanciateCanvas();
isInstanciated = true;
InstantiateCanvas();
isInstantiated = true;
}
if (canvas != null)

}
private static void InstanciateCanvas()
/// Initializes the canvas.
static void InstantiateCanvas()
{
canvas = GameObject.Find("AgentMonitorCanvas");
if (canvas == null)

canvas.AddComponent<Monitor>();
}
displayTransformValues = new Dictionary<Transform, Dictionary< string , DisplayValue>>();
displayTransformValues = new Dictionary<Transform, Dictionary<string, DisplayValue>>();
/// Convert the input object to a float array. Returns a float[0] if the
/// conversion process fails.
float[] ToFloatArray(object input)
{
try

}
catch
{
}
try
{

}
catch
{
/// <summary> <inheritdoc/> </summary>
void OnGUI()
{
if (!initialized)

displayTransformValues.Remove(target);
continue;
}
float widthScaler = (Screen.width / 1000f);
float keyPixelWidth = 100 * widthScaler;
float keyPixelHeight = 20 * widthScaler;

{
continue;
}
Dictionary<string, DisplayValue> displayValues = displayTransformValues[target];

valueStyle.alignment = TextAnchor.MiddleLeft;
GUI.Label(new Rect(
origin.x + paddingwidth + keyPixelWidth,
origin.y - (index + 1) * keyPixelHeight,
keyPixelWidth, keyPixelHeight),
origin.y - (index + 1) * keyPixelHeight,
keyPixelWidth, keyPixelHeight),
JsonConvert.SerializeObject(displayValues[key].value, Formatting.None), valueStyle);
}

if (displayValues[key].value.GetType() == typeof(float))
if (displayValues[key].value is float)
{
sliderValue = (float)displayValues[key].value;
}

}
GUI.Box(new Rect(
origin.x + paddingwidth + keyPixelWidth,
origin.y - (index + 0.9f) * keyPixelHeight,
keyPixelWidth * sliderValue, keyPixelHeight * 0.8f),
origin.y - (index + 0.9f) * keyPixelHeight,
keyPixelWidth * sliderValue, keyPixelHeight * 0.8f),
GUIContent.none, s);
}

}
GUI.Box(new Rect(
origin.x + paddingwidth + keyPixelWidth + (keyPixelWidth * histWidth + paddingwidth / 2) * i,
origin.y - (index + 0.1f) * keyPixelHeight,
keyPixelWidth * histWidth, -keyPixelHeight * value),
origin.y - (index + 0.1f) * keyPixelHeight,
keyPixelWidth * histWidth, -keyPixelHeight * value),
GUIContent.none, s);
}

{
float[] vals = ToFloatArray(displayValues[key].value);
float valsSum = 0f;
float valsCum = 0f;
float valsCum = 0f;
foreach (float f in vals)
{
valsSum += Mathf.Max(f, 0);

float value = Mathf.Max(vals[i], 0) / valsSum;
GUI.Box(new Rect(
origin.x + paddingwidth + keyPixelWidth + keyPixelWidth * valsCum,
origin.y - (index + 0.9f) * keyPixelHeight,
keyPixelWidth * value, keyPixelHeight * 0.8f),
origin.y - (index + 0.9f) * keyPixelHeight,
keyPixelWidth * value, keyPixelHeight * 0.8f),
GUIContent.none, colorStyle[i % colorStyle.Length]);
valsCum += value;

}
}
/// Helper method used to initialize the GUI. Called once.
void Initialize()
{
keyStyle = GUI.skin.label;

barColors = new Color[6]{ Color.magenta, Color.blue, Color.cyan, Color.green, Color.yellow, Color.red };
barColors = new Color[6] { Color.magenta, Color.blue, Color.cyan, Color.green, Color.yellow, Color.red };
colorStyle = new GUIStyle[barColors.Length];
for (int i = 0; i < barColors.Length; i++)
{

6
unity-environment/ProjectSettings/TagManager.asset


- blueGoal
- blueAgent
- redAgent
- redBlock
- goal
- ground
- orangeBlock
- block
- orangeGoal
layers:
- Default
- TransparentFX

39
docs/Feature-On-Demand-Decisions.md


# On Demand Decision Making
## Description
On demand decision making allows agents to request decisions from their
brains only when needed instead of receiving decisions at a fixed
frequency. This is useful when the agents commit to an action for a
variable number of steps or when the agents cannot make decisions
at the same time. This is typically the case for turn-based games, games
where agents must react to events or games where agents can take
actions of variable duration.
## How to use
To enable or disable on demand decision making, use the checkbox called
`On Demand Decisions` in the Agent Inspector.
<p align="center">
<img src="images/ml-agents-ODD.png"
alt="On Demand Decision"
width="500" border="10" />
</p>
* If `On Demand Decisions` is not checked, the Agent will request a new
decision every `Decision Frequency` steps and
perform an action every step. In the example above,
`CollectObservations()` will be called every 5 steps and
`AgentAction()` will be called at every step. This means that the
Agent will reuse the decision the Brain has given it.
* If `On Demand Decisions` is checked, the Agent controls when to receive
decisions, and take actions. To do so, the Agent may leverage one or two methods:
* `RequestDecision()` Signals that the Agent is requesting a decision.
This causes the Agent to collect its observations and ask the Brain for a
decision at the next step of the simulation. Note that when an Agent
requests a decision, it also requests an action.
This is to ensure that all decisions lead to an action during training.
* `RequestAction()` Signals that the Agent is requesting an action. The
action provided to the Agent in this case is the same action that was
provided the last time it requested a decision.

30
docs/Migrating-v0.3.md


# Migrating to ML-Agents v0.3
There are a large number of new features and improvements in ML-Agents v0.3 which change both the training process and Unity API in ways which will cause incompatibilities with environments made using older versions. This page is designed to highlight those changes for users familiar with v0.1 or v0.2 in order to ensure a smooth transition.
## Important
* ML-Agents is no longer compatible with Python 2.
## Python Training
* The training script `ppo.py` and `PPO.ipynb` Python notebook have been replaced with a single `learn.py` script as the launching point for training with ML-Agents. For more information on using `learn.py`, see [here]().
* Hyperparameters for training brains are now stored in the `trainer_config.yaml` file. For more information on using this file, see [here]().
## Unity API
* Modifications to an Agent's rewards must now be done using either `AddReward()` or `SetReward()`.
* Setting an Agent to done now requires the use of the `Done()` method.
* `CollectStates()` has been replaced by `CollectObservations()`, which no longer returns a list of floats.
* To collect observations, call `AddVectorObs()` within `CollectObservations()`. Note that you can call `AddVectorObs()` with floats, integers, lists and arrays of floats, Vector3 and Quaternions.
* `AgentStep()` has been replaced by `AgentAction()`.
* `WaitTime()` has been removed.
* The `Frame Skip` field of the Academy is replaced by the Agent's `Decision Frequency` field, enabling agents to make decisions at different frequencies.
## Semantics
In order to more closely align with the terminology used in the Reinforcement Learning field, and to be more descriptive, we have changed the names of some of the concepts used in ML-Agents. The changes are highlighted in the table below.
| Old - v0.2 and earlier | New - v0.3 and later |
| --- | --- |
| State | Vector Observation |
| Observation | Visual Observation |
| Action | Vector Action |
| N/A | Text Observation |
| N/A | Text Action |

1001
unity-environment/Assets/ML-Agents/Examples/Banana/Prefabs/RLAgent.prefab
文件差异内容过多而无法显示
查看文件

1001
unity-environment/Assets/ML-Agents/Examples/Banana/Prefabs/RLArea.prefab
文件差异内容过多而无法显示
查看文件

8
unity-environment/Assets/ML-Agents/Examples/Banana/Prefabs/RLArea.prefab.meta


fileFormatVersion: 2
guid: 38400a68c4ea54b52998e34ee238d1a7
NativeFormatImporter:
externalObjects: {}
mainObjectFileID: 100100000
userData:
assetBundleName:
assetBundleVariant:

1001
unity-environment/Assets/ML-Agents/Examples/Banana/Prefabs/TeachingArea.prefab
文件差异内容过多而无法显示
查看文件

1001
unity-environment/Assets/ML-Agents/Examples/Banana/TFModels/BananaRL.bytes
文件差异内容过多而无法显示
查看文件

7
unity-environment/Assets/ML-Agents/Examples/Banana/TFModels/BananaRL.bytes.meta


fileFormatVersion: 2
guid: f60ba855bdc5f42689de283a9a572667
TextScriptImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

9
unity-environment/Assets/ML-Agents/Examples/WallJump.meta


fileFormatVersion: 2
guid: e61aed11f93544227801dfd529bf41c6
folderAsset: yes
timeCreated: 1520964896
licenseType: Free
DefaultImporter:
userData:
assetBundleName:
assetBundleVariant:

80
unity-environment/Assets/ML-Agents/Examples/SharedAssets/Scripts/RayPerception.cs


using System.Collections.Generic;
using UnityEngine;
/// <summary>
/// Ray perception component. Attach this to agents to enable "local perception"
/// via the use of ray casts directed outward from the agent.
/// </summary>
public class RayPerception : MonoBehaviour
{
    // Reused across calls so the observation list is not reallocated every step.
    List<float> perceptionBuffer = new List<float>();
    Vector3 endPosition;
    RaycastHit hit;

    /// <summary>
    /// Creates the perception vector to be used as part of an agent's observation.
    /// </summary>
    /// <returns>The partial vector observation corresponding to the set of rays.</returns>
    /// <param name="rayDistance">Radius of the rays.</param>
    /// <param name="rayAngles">Angles of the rays (starting from (1,0) on the unit circle).</param>
    /// <param name="detectableObjects">Tags identifying the object types the agent can see.</param>
    /// <param name="startOffset">Starting height offset of each ray from the agent's center.</param>
    /// <param name="endOffset">Ending height offset of each ray from the agent's center.</param>
    public List<float> Perceive(float rayDistance,
        float[] rayAngles, string[] detectableObjects,
        float startOffset, float endOffset)
    {
        perceptionBuffer.Clear();
        // Each ray contributes one sub-vector: a one-hot entry over the detectable
        // tags, a "hit nothing" flag, and the normalized hit distance.
        foreach (float angle in rayAngles)
        {
            endPosition = transform.TransformDirection(
                PolarToCartesian(rayDistance, angle));
            endPosition.y = endOffset;

            if (Application.isEditor)
            {
                // Visualize the ray while running in the editor.
                Debug.DrawRay(transform.position + new Vector3(0f, startOffset, 0f),
                    endPosition, Color.black, 0.01f, true);
            }

            float[] rayObservation = new float[detectableObjects.Length + 2];
            bool hitSomething = Physics.SphereCast(
                transform.position + new Vector3(0f, startOffset, 0f), 0.5f,
                endPosition, out hit, rayDistance);
            if (hitSomething)
            {
                for (int tagIndex = 0; tagIndex < detectableObjects.Length; tagIndex++)
                {
                    if (hit.collider.gameObject.CompareTag(detectableObjects[tagIndex]))
                    {
                        rayObservation[tagIndex] = 1f;
                        rayObservation[detectableObjects.Length + 1] =
                            hit.distance / rayDistance;
                        break;
                    }
                }
                // NOTE(review): a hit on an object with none of the detectable tags
                // leaves the whole sub-vector at zero (same as the original code).
            }
            else
            {
                // Nothing within range along this ray.
                rayObservation[detectableObjects.Length] = 1f;
            }
            perceptionBuffer.AddRange(rayObservation);
        }
        return perceptionBuffer;
    }

    /// <summary>
    /// Converts a polar coordinate to a cartesian coordinate (y is always 0).
    /// </summary>
    public static Vector3 PolarToCartesian(float radius, float angle)
    {
        float radians = DegreeToRadian(angle);
        return new Vector3(radius * Mathf.Cos(radians), 0f, radius * Mathf.Sin(radians));
    }

    /// <summary>
    /// Converts degrees to radians.
    /// </summary>
    public static float DegreeToRadian(float degree)
    {
        return degree * Mathf.PI / 180f;
    }
}

11
unity-environment/Assets/ML-Agents/Examples/SharedAssets/Scripts/RayPerception.cs.meta


fileFormatVersion: 2
guid: bb172294dbbcc408286b156a2c4b553c
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

10
unity-environment/Assets/ML-Agents/Examples/WallJump/Material.meta


fileFormatVersion: 2
guid: 3002d747534d24598b059f75c43b8d45
folderAsset: yes
timeCreated: 1517448702
licenseType: Free
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

77
unity-environment/Assets/ML-Agents/Examples/WallJump/Material/spawnVolumeMaterial.mat


%YAML 1.1
%TAG !u! tag:unity3d.com,2011:
--- !u!21 &2100000
Material:
serializedVersion: 6
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_Name: spawnVolumeMaterial
m_Shader: {fileID: 46, guid: 0000000000000000f000000000000000, type: 0}
m_ShaderKeywords: _ALPHABLEND_ON
m_LightmapFlags: 4
m_EnableInstancingVariants: 0
m_DoubleSidedGI: 0
m_CustomRenderQueue: 3000
stringTagMap:
RenderType: Transparent
disabledShaderPasses: []
m_SavedProperties:
serializedVersion: 3
m_TexEnvs:
- _BumpMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _DetailAlbedoMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _DetailMask:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _DetailNormalMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _EmissionMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _MainTex:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _MetallicGlossMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _OcclusionMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _ParallaxMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
m_Floats:
- _BumpScale: 1
- _Cutoff: 0.5
- _DetailNormalMapScale: 1
- _DstBlend: 10
- _GlossMapScale: 1
- _Glossiness: 0
- _GlossyReflections: 1
- _Metallic: 0
- _Mode: 2
- _OcclusionStrength: 1
- _Parallax: 0.02
- _SmoothnessTextureChannel: 0
- _SpecularHighlights: 1
- _SrcBlend: 5
- _UVSec: 0
- _ZWrite: 0
m_Colors:
- _Color: {r: 0, g: 0.83448315, b: 1, a: 0.303}
- _EmissionColor: {r: 0, g: 0, b: 0, a: 1}

10
unity-environment/Assets/ML-Agents/Examples/WallJump/Material/spawnVolumeMaterial.mat.meta


fileFormatVersion: 2
guid: ecd59def9213741058b969f699d10e8e
timeCreated: 1506376733
licenseType: Pro
NativeFormatImporter:
externalObjects: {}
mainObjectFileID: 2100000
userData:
assetBundleName:
assetBundleVariant:

77
unity-environment/Assets/ML-Agents/Examples/WallJump/Material/wallMaterial.mat


%YAML 1.1
%TAG !u! tag:unity3d.com,2011:
--- !u!21 &2100000
Material:
serializedVersion: 6
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_Name: wallMaterial
m_Shader: {fileID: 46, guid: 0000000000000000f000000000000000, type: 0}
m_ShaderKeywords: _ALPHABLEND_ON
m_LightmapFlags: 4
m_EnableInstancingVariants: 0
m_DoubleSidedGI: 0
m_CustomRenderQueue: 3000
stringTagMap:
RenderType: Transparent
disabledShaderPasses: []
m_SavedProperties:
serializedVersion: 3
m_TexEnvs:
- _BumpMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _DetailAlbedoMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _DetailMask:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _DetailNormalMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _EmissionMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _MainTex:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _MetallicGlossMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _OcclusionMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _ParallaxMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
m_Floats:
- _BumpScale: 1
- _Cutoff: 0.5
- _DetailNormalMapScale: 1
- _DstBlend: 10
- _GlossMapScale: 1
- _Glossiness: 0
- _GlossyReflections: 1
- _Metallic: 0
- _Mode: 2
- _OcclusionStrength: 1
- _Parallax: 0.02
- _SmoothnessTextureChannel: 0
- _SpecularHighlights: 1
- _SrcBlend: 5
- _UVSec: 0
- _ZWrite: 0
m_Colors:
- _Color: {r: 0.56228375, g: 0.76044035, b: 0.9558824, a: 0.603}
- _EmissionColor: {r: 0, g: 0, b: 0, a: 1}

10
unity-environment/Assets/ML-Agents/Examples/WallJump/Material/wallMaterial.mat.meta


fileFormatVersion: 2
guid: a0c2c8b2ac71342e1bd714d7178198e3
timeCreated: 1506376733
licenseType: Pro
NativeFormatImporter:
externalObjects: {}
mainObjectFileID: 2100000
userData:
assetBundleName:
assetBundleVariant:

10
unity-environment/Assets/ML-Agents/Examples/WallJump/Prefabs.meta


fileFormatVersion: 2
guid: 22e282f4b1d48436b91d6ad8a8903e1c
folderAsset: yes
timeCreated: 1517535133
licenseType: Free
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

700
unity-environment/Assets/ML-Agents/Examples/WallJump/Prefabs/WallJumpArea.prefab


%YAML 1.1
%TAG !u! tag:unity3d.com,2011:
--- !u!1001 &100100000
Prefab:
m_ObjectHideFlags: 1
serializedVersion: 2
m_Modification:
m_TransformParent: {fileID: 0}
m_Modifications: []
m_RemovedComponents: []
m_ParentPrefab: {fileID: 0}
m_RootGameObject: {fileID: 1280098394364104}
m_IsPrefabParent: 1
--- !u!1 &1195095783991828
GameObject:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
serializedVersion: 5
m_Component:
- component: {fileID: 4651390251185036}
- component: {fileID: 33846302425286506}
- component: {fileID: 65193133000831296}
- component: {fileID: 23298506819960420}
- component: {fileID: 54678503543725326}
- component: {fileID: 114925928594762506}
- component: {fileID: 114092229367912210}
m_Layer: 0
m_Name: Agent
m_TagString: agent
m_Icon: {fileID: 0}
m_NavMeshLayer: 0
m_StaticEditorFlags: 0
m_IsActive: 1
--- !u!1 &1243905751985214
GameObject:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
serializedVersion: 5
m_Component:
- component: {fileID: 4245926775170606}
- component: {fileID: 33016986498506672}
- component: {fileID: 65082856895024712}
- component: {fileID: 23546212824591690}
m_Layer: 0
m_Name: Wall
m_TagString: wall
m_Icon: {fileID: 0}
m_NavMeshLayer: 0
m_StaticEditorFlags: 0
m_IsActive: 1
--- !u!1 &1280098394364104
GameObject:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
serializedVersion: 5
m_Component:
- component: {fileID: 4768003208014390}
m_Layer: 0
m_Name: WallJumpArea
m_TagString: Untagged
m_Icon: {fileID: 0}
m_NavMeshLayer: 0
m_StaticEditorFlags: 0
m_IsActive: 1
--- !u!1 &1395477826315484
GameObject:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
serializedVersion: 5
m_Component:
- component: {fileID: 4294902888415044}
- component: {fileID: 33528566080995282}
- component: {fileID: 65551840025329434}
- component: {fileID: 23354960268522594}
- component: {fileID: 54027918861229180}
m_Layer: 0
m_Name: shortBlock
m_TagString: block
m_Icon: {fileID: 0}
m_NavMeshLayer: 0
m_StaticEditorFlags: 0
m_IsActive: 1
--- !u!1 &1535176706844624
GameObject:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
serializedVersion: 5
m_Component:
- component: {fileID: 4064361527677764}
- component: {fileID: 33890127227328306}
- component: {fileID: 65857793473814344}
- component: {fileID: 23318234009360618}
m_Layer: 0
m_Name: SpawnVolume
m_TagString: Untagged
m_Icon: {fileID: 0}
m_NavMeshLayer: 0
m_StaticEditorFlags: 0
m_IsActive: 1
--- !u!1 &1707364840842826
GameObject:
m_ObjectHideFlags: 1
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
serializedVersion: 5
m_Component:
- component: {fileID: 4930287780304116}
- component: {fileID: 33507625006194266}
- component: {fileID: 65060909118748988}
- component: {fileID: 23872068720866504}
m_Layer: 0
m_Name: Cube
m_TagString: Untagged
m_Icon: {fileID: 0}
m_NavMeshLayer: 0
m_StaticEditorFlags: 0
m_IsActive: 1
--- !u!1 &1964440537870194
GameObject:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
serializedVersion: 5
m_Component:
- component: {fileID: 4192801639223200}
- component: {fileID: 33252015425015410}
- component: {fileID: 65412457053290128}
- component: {fileID: 23001074490764582}
m_Layer: 0
m_Name: Ground
m_TagString: walkableSurface
m_Icon: {fileID: 0}
m_NavMeshLayer: 0
m_StaticEditorFlags: 0
m_IsActive: 1
--- !u!1 &1982078136115924
GameObject:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
serializedVersion: 5
m_Component:
- component: {fileID: 4011541840118462}
- component: {fileID: 33618033993823702}
- component: {fileID: 65431820516000586}
- component: {fileID: 23621829541977726}
m_Layer: 0
m_Name: Goal
m_TagString: goal
m_Icon: {fileID: 0}
m_NavMeshLayer: 0
m_StaticEditorFlags: 0
m_IsActive: 1
--- !u!4 &4011541840118462
Transform:
m_ObjectHideFlags: 1
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
m_GameObject: {fileID: 1982078136115924}
m_LocalRotation: {x: -0, y: -0, z: -0, w: 1}
m_LocalPosition: {x: 6.7, y: 0.4, z: 3.3}
m_LocalScale: {x: 4, y: 0.32738775, z: 4}
m_Children: []
m_Father: {fileID: 4768003208014390}
m_RootOrder: 2
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
--- !u!4 &4064361527677764
Transform:
m_ObjectHideFlags: 1
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
m_GameObject: {fileID: 1535176706844624}
m_LocalRotation: {x: -0, y: -0, z: -0, w: 1}
m_LocalPosition: {x: 0, y: 1.05, z: -6.5}
m_LocalScale: {x: 16.2, y: 1, z: 7}
m_Children: []
m_Father: {fileID: 4768003208014390}
m_RootOrder: 5
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
--- !u!4 &4192801639223200
Transform:
m_ObjectHideFlags: 1
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
m_GameObject: {fileID: 1964440537870194}
m_LocalRotation: {x: -0, y: -0, z: -0, w: 1}
m_LocalPosition: {x: 0, y: 0, z: -4}
m_LocalScale: {x: 20, y: 1, z: 20}
m_Children: []
m_Father: {fileID: 4768003208014390}
m_RootOrder: 1
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
--- !u!4 &4245926775170606
Transform:
m_ObjectHideFlags: 1
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
m_GameObject: {fileID: 1243905751985214}
m_LocalRotation: {x: 0, y: 0, z: 0, w: 1}
m_LocalPosition: {x: 0, y: 0, z: -1.53}
m_LocalScale: {x: 20, y: 0, z: 1.5}
m_Children: []
m_Father: {fileID: 4768003208014390}
m_RootOrder: 4
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
--- !u!4 &4294902888415044
Transform:
m_ObjectHideFlags: 1
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
m_GameObject: {fileID: 1395477826315484}
m_LocalRotation: {x: -0, y: -0, z: -0, w: 1}
m_LocalPosition: {x: 1.51, y: 2.05, z: -3.86}
m_LocalScale: {x: 3, y: 2, z: 3}
m_Children: []
m_Father: {fileID: 4768003208014390}
m_RootOrder: 3
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
--- !u!4 &4651390251185036
Transform:
m_ObjectHideFlags: 1
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
m_GameObject: {fileID: 1195095783991828}
m_LocalRotation: {x: -0, y: 0.96758014, z: -0, w: 0.25256422}
m_LocalPosition: {x: -8.2, y: 1, z: -12.08}
m_LocalScale: {x: 1, y: 1, z: 1}
m_Children:
- {fileID: 4930287780304116}
m_Father: {fileID: 4768003208014390}
m_RootOrder: 0
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
--- !u!4 &4768003208014390
Transform:
m_ObjectHideFlags: 1
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
m_GameObject: {fileID: 1280098394364104}
m_LocalRotation: {x: -0, y: -0, z: -0, w: 1}
m_LocalPosition: {x: 0, y: 0, z: 0}
m_LocalScale: {x: 1, y: 1, z: 1}
m_Children:
- {fileID: 4651390251185036}
- {fileID: 4192801639223200}
- {fileID: 4011541840118462}
- {fileID: 4294902888415044}
- {fileID: 4245926775170606}
- {fileID: 4064361527677764}
m_Father: {fileID: 0}
m_RootOrder: 0
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
--- !u!4 &4930287780304116
Transform:
m_ObjectHideFlags: 1
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
m_GameObject: {fileID: 1707364840842826}
m_LocalRotation: {x: 0, y: 0, z: 0, w: 1}
m_LocalPosition: {x: -0, y: 0, z: 0.49}
m_LocalScale: {x: 0.1941064, y: 0.19410636, z: 0.19410636}
m_Children: []
m_Father: {fileID: 4651390251185036}
m_RootOrder: 0
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
--- !u!23 &23001074490764582
MeshRenderer:
m_ObjectHideFlags: 1
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
m_GameObject: {fileID: 1964440537870194}
m_Enabled: 1
m_CastShadows: 1
m_ReceiveShadows: 1
m_MotionVectors: 1
m_LightProbeUsage: 1
m_ReflectionProbeUsage: 1
m_Materials:
- {fileID: 2100000, guid: 2c19bff363d1644b0818652340f120d5, type: 2}
m_StaticBatchInfo:
firstSubMesh: 0
subMeshCount: 0
m_StaticBatchRoot: {fileID: 0}
m_ProbeAnchor: {fileID: 0}
m_LightProbeVolumeOverride: {fileID: 0}
m_ScaleInLightmap: 1
m_PreserveUVs: 1
m_IgnoreNormalsForChartDetection: 0
m_ImportantGI: 0
m_SelectedEditorRenderState: 3
m_MinimumChartSize: 4
m_AutoUVMaxDistance: 0.5
m_AutoUVMaxAngle: 89
m_LightmapParameters: {fileID: 0}
m_SortingLayerID: 0
m_SortingLayer: 0
m_SortingOrder: 0
--- !u!23 &23298506819960420
MeshRenderer:
m_ObjectHideFlags: 1
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
m_GameObject: {fileID: 1195095783991828}
m_Enabled: 1
m_CastShadows: 1
m_ReceiveShadows: 1
m_MotionVectors: 1
m_LightProbeUsage: 1
m_ReflectionProbeUsage: 1
m_Materials:
- {fileID: 2100000, guid: 260483cdfc6b14e26823a02f23bd8baa, type: 2}
m_StaticBatchInfo:
firstSubMesh: 0
subMeshCount: 0
m_StaticBatchRoot: {fileID: 0}
m_ProbeAnchor: {fileID: 0}
m_LightProbeVolumeOverride: {fileID: 0}
m_ScaleInLightmap: 1
m_PreserveUVs: 1
m_IgnoreNormalsForChartDetection: 0
m_ImportantGI: 0
m_SelectedEditorRenderState: 3
m_MinimumChartSize: 4
m_AutoUVMaxDistance: 0.5
m_AutoUVMaxAngle: 89
m_LightmapParameters: {fileID: 0}
m_SortingLayerID: 0
m_SortingLayer: 0
m_SortingOrder: 0
--- !u!23 &23318234009360618
MeshRenderer:
m_ObjectHideFlags: 1
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
m_GameObject: {fileID: 1535176706844624}
m_Enabled: 1
m_CastShadows: 1
m_ReceiveShadows: 1
m_MotionVectors: 1
m_LightProbeUsage: 1
m_ReflectionProbeUsage: 1
m_Materials:
- {fileID: 2100000, guid: ecd59def9213741058b969f699d10e8e, type: 2}
m_StaticBatchInfo:
firstSubMesh: 0
subMeshCount: 0
m_StaticBatchRoot: {fileID: 0}
m_ProbeAnchor: {fileID: 0}
m_LightProbeVolumeOverride: {fileID: 0}
m_ScaleInLightmap: 1
m_PreserveUVs: 1
m_IgnoreNormalsForChartDetection: 0
m_ImportantGI: 0
m_SelectedEditorRenderState: 3
m_MinimumChartSize: 4
m_AutoUVMaxDistance: 0.5
m_AutoUVMaxAngle: 89
m_LightmapParameters: {fileID: 0}
m_SortingLayerID: 0
m_SortingLayer: 0
m_SortingOrder: 0
--- !u!23 &23354960268522594
MeshRenderer:
m_ObjectHideFlags: 1
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
m_GameObject: {fileID: 1395477826315484}
m_Enabled: 1
m_CastShadows: 1
m_ReceiveShadows: 1
m_MotionVectors: 1
m_LightProbeUsage: 1
m_ReflectionProbeUsage: 1
m_Materials:
- {fileID: 2100000, guid: f4abcb290251940948a31b349a6f9995, type: 2}
m_StaticBatchInfo:
firstSubMesh: 0
subMeshCount: 0
m_StaticBatchRoot: {fileID: 0}
m_ProbeAnchor: {fileID: 0}
m_LightProbeVolumeOverride: {fileID: 0}
m_ScaleInLightmap: 1
m_PreserveUVs: 1
m_IgnoreNormalsForChartDetection: 0
m_ImportantGI: 0
m_SelectedEditorRenderState: 3
m_MinimumChartSize: 4
m_AutoUVMaxDistance: 0.5
m_AutoUVMaxAngle: 89
m_LightmapParameters: {fileID: 0}
m_SortingLayerID: 0
m_SortingLayer: 0
m_SortingOrder: 0
--- !u!23 &23546212824591690
MeshRenderer:
m_ObjectHideFlags: 1
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
m_GameObject: {fileID: 1243905751985214}
m_Enabled: 1
m_CastShadows: 1
m_ReceiveShadows: 1
m_MotionVectors: 1
m_LightProbeUsage: 1
m_ReflectionProbeUsage: 1
m_Materials:
- {fileID: 2100000, guid: a0c2c8b2ac71342e1bd714d7178198e3, type: 2}
m_StaticBatchInfo:
firstSubMesh: 0
subMeshCount: 0
m_StaticBatchRoot: {fileID: 0}
m_ProbeAnchor: {fileID: 0}
m_LightProbeVolumeOverride: {fileID: 0}
m_ScaleInLightmap: 1
m_PreserveUVs: 1
m_IgnoreNormalsForChartDetection: 0
m_ImportantGI: 0
m_SelectedEditorRenderState: 3
m_MinimumChartSize: 4
m_AutoUVMaxDistance: 0.5
m_AutoUVMaxAngle: 89
m_LightmapParameters: {fileID: 0}
m_SortingLayerID: 0
m_SortingLayer: 0
m_SortingOrder: 0
--- !u!23 &23621829541977726
MeshRenderer:
m_ObjectHideFlags: 1
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
m_GameObject: {fileID: 1982078136115924}
m_Enabled: 1
m_CastShadows: 1
m_ReceiveShadows: 1
m_MotionVectors: 1
m_LightProbeUsage: 1
m_ReflectionProbeUsage: 1
m_Materials:
- {fileID: 2100000, guid: e810187ce86f44ba1a373ca07a86ea81, type: 2}
m_StaticBatchInfo:
firstSubMesh: 0
subMeshCount: 0
m_StaticBatchRoot: {fileID: 0}
m_ProbeAnchor: {fileID: 0}
m_LightProbeVolumeOverride: {fileID: 0}
m_ScaleInLightmap: 1
m_PreserveUVs: 1
m_IgnoreNormalsForChartDetection: 0
m_ImportantGI: 0
m_SelectedEditorRenderState: 3
m_MinimumChartSize: 4
m_AutoUVMaxDistance: 0.5
m_AutoUVMaxAngle: 89
m_LightmapParameters: {fileID: 0}
m_SortingLayerID: 0
m_SortingLayer: 0
m_SortingOrder: 0
--- !u!23 &23872068720866504
MeshRenderer:
m_ObjectHideFlags: 1
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
m_GameObject: {fileID: 1707364840842826}
m_Enabled: 1
m_CastShadows: 1
m_ReceiveShadows: 1
m_MotionVectors: 1
m_LightProbeUsage: 1
m_ReflectionProbeUsage: 1
m_Materials:
- {fileID: 10303, guid: 0000000000000000f000000000000000, type: 0}
m_StaticBatchInfo:
firstSubMesh: 0
subMeshCount: 0
m_StaticBatchRoot: {fileID: 0}
m_ProbeAnchor: {fileID: 0}
m_LightProbeVolumeOverride: {fileID: 0}
m_ScaleInLightmap: 1
m_PreserveUVs: 1
m_IgnoreNormalsForChartDetection: 0
m_ImportantGI: 0
m_SelectedEditorRenderState: 3
m_MinimumChartSize: 4
m_AutoUVMaxDistance: 0.5
m_AutoUVMaxAngle: 89
m_LightmapParameters: {fileID: 0}
m_SortingLayerID: 0
m_SortingLayer: 0
m_SortingOrder: 0
--- !u!33 &33016986498506672
MeshFilter:
m_ObjectHideFlags: 1
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
m_GameObject: {fileID: 1243905751985214}
m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0}
--- !u!33 &33252015425015410
MeshFilter:
m_ObjectHideFlags: 1
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
m_GameObject: {fileID: 1964440537870194}
m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0}
--- !u!33 &33507625006194266
MeshFilter:
m_ObjectHideFlags: 1
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
m_GameObject: {fileID: 1707364840842826}
m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0}
--- !u!33 &33528566080995282
MeshFilter:
m_ObjectHideFlags: 1
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
m_GameObject: {fileID: 1395477826315484}
m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0}
--- !u!33 &33618033993823702
MeshFilter:
m_ObjectHideFlags: 1
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
m_GameObject: {fileID: 1982078136115924}
m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0}
--- !u!33 &33846302425286506
MeshFilter:
m_ObjectHideFlags: 1
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
m_GameObject: {fileID: 1195095783991828}
m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0}
--- !u!33 &33890127227328306
MeshFilter:
m_ObjectHideFlags: 1
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
m_GameObject: {fileID: 1535176706844624}
m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0}
--- !u!54 &54027918861229180
Rigidbody:
m_ObjectHideFlags: 1
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
m_GameObject: {fileID: 1395477826315484}
serializedVersion: 2
m_Mass: 10
m_Drag: 1
m_AngularDrag: 0.05
m_UseGravity: 1
m_IsKinematic: 0
m_Interpolate: 0
m_Constraints: 116
m_CollisionDetection: 0
--- !u!54 &54678503543725326
Rigidbody:
m_ObjectHideFlags: 1
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
m_GameObject: {fileID: 1195095783991828}
serializedVersion: 2
m_Mass: 25
m_Drag: 2
m_AngularDrag: 0.05
m_UseGravity: 1
m_IsKinematic: 0
m_Interpolate: 0
m_Constraints: 80
m_CollisionDetection: 0
--- !u!65 &65060909118748988
BoxCollider:
m_ObjectHideFlags: 1
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
m_GameObject: {fileID: 1707364840842826}
m_Material: {fileID: 0}
m_IsTrigger: 0
m_Enabled: 0
serializedVersion: 2
m_Size: {x: 1, y: 1, z: 1}
m_Center: {x: 0, y: 0, z: 0}
--- !u!65 &65082856895024712
BoxCollider:
m_ObjectHideFlags: 1
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
m_GameObject: {fileID: 1243905751985214}
m_Material: {fileID: 13400000, guid: 2053f160e462a428ab794446c043b144, type: 2}
m_IsTrigger: 0
m_Enabled: 1
serializedVersion: 2
m_Size: {x: 1, y: 1, z: 1}
m_Center: {x: 0, y: 0, z: 0}
--- !u!65 &65193133000831296
BoxCollider:
m_ObjectHideFlags: 1
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
m_GameObject: {fileID: 1195095783991828}
m_Material: {fileID: 0}
m_IsTrigger: 0
m_Enabled: 1
serializedVersion: 2
m_Size: {x: 1, y: 1, z: 1}
m_Center: {x: 0, y: 0, z: 0}
--- !u!65 &65412457053290128
BoxCollider:
m_ObjectHideFlags: 1
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
m_GameObject: {fileID: 1964440537870194}
m_Material: {fileID: 0}
m_IsTrigger: 0
m_Enabled: 1
serializedVersion: 2
m_Size: {x: 1, y: 1, z: 1}
m_Center: {x: 0, y: 0, z: 0}
--- !u!65 &65431820516000586
BoxCollider:
m_ObjectHideFlags: 1
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
m_GameObject: {fileID: 1982078136115924}
m_Material: {fileID: 0}
m_IsTrigger: 1
m_Enabled: 1
serializedVersion: 2
m_Size: {x: 1, y: 50, z: 1}
m_Center: {x: 0, y: 0, z: 0}
--- !u!65 &65551840025329434
BoxCollider:
m_ObjectHideFlags: 1
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
m_GameObject: {fileID: 1395477826315484}
m_Material: {fileID: 13400000, guid: 2053f160e462a428ab794446c043b144, type: 2}
m_IsTrigger: 0
m_Enabled: 1
serializedVersion: 2
m_Size: {x: 1, y: 1, z: 1}
m_Center: {x: 0, y: 0, z: 0}
--- !u!65 &65857793473814344
BoxCollider:
m_ObjectHideFlags: 1
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
m_GameObject: {fileID: 1535176706844624}
m_Material: {fileID: 0}
m_IsTrigger: 0
m_Enabled: 1
serializedVersion: 2
m_Size: {x: 1, y: 1, z: 1}
m_Center: {x: 0, y: 0, z: 0}
--- !u!114 &114092229367912210
MonoBehaviour:
m_ObjectHideFlags: 1
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
m_GameObject: {fileID: 1195095783991828}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: bb172294dbbcc408286b156a2c4b553c, type: 3}
m_Name:
m_EditorClassIdentifier:
--- !u!114 &114925928594762506
MonoBehaviour:
m_ObjectHideFlags: 1
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
m_GameObject: {fileID: 1195095783991828}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: 676fca959b8ee45539773905ca71afa1, type: 3}
m_Name:
m_EditorClassIdentifier:
brain: {fileID: 0}
agentParameters:
agentCameras: []
maxStep: 2000
resetOnDone: 1
onDemandDecision: 0
numberOfActionsBetweenDecisions: 5
noWallBrain: {fileID: 0}
smallWallBrain: {fileID: 0}
bigWallBrain: {fileID: 0}
ground: {fileID: 1964440537870194}
spawnArea: {fileID: 1535176706844624}
goal: {fileID: 1982078136115924}
shortBlock: {fileID: 1395477826315484}
wall: {fileID: 1243905751985214}
jumpingTime: 0
jumpTime: 0.2
fallingForce: 111
hitGroundColliders:
- {fileID: 0}
- {fileID: 0}
- {fileID: 0}

9
unity-environment/Assets/ML-Agents/Examples/WallJump/Prefabs/WallJumpArea.prefab.meta


fileFormatVersion: 2
guid: 54e3af627216447f790531de496099f0
timeCreated: 1520541093
licenseType: Free
NativeFormatImporter:
mainObjectFileID: 100100000
userData:
assetBundleName:
assetBundleVariant:

10
unity-environment/Assets/ML-Agents/Examples/WallJump/Scenes.meta


fileFormatVersion: 2
guid: 1d53e87fe54dd4178b88cc1a23b11731
folderAsset: yes
timeCreated: 1517446674
licenseType: Free
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

1001
unity-environment/Assets/ML-Agents/Examples/WallJump/Scenes/WallJump.unity
文件差异内容过多而无法显示
查看文件

9
unity-environment/Assets/ML-Agents/Examples/WallJump/Scenes/WallJump.unity.meta


fileFormatVersion: 2
guid: 56024e8d040d344709949bc88128944d
timeCreated: 1506808980
licenseType: Pro
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

10
unity-environment/Assets/ML-Agents/Examples/WallJump/Scripts.meta


fileFormatVersion: 2
guid: 344123e9bd87b48fdbebb4202a771d96
folderAsset: yes
timeCreated: 1517445791
licenseType: Free
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

28
unity-environment/Assets/ML-Agents/Examples/WallJump/Scripts/WallJumpAcademy.cs


using System.Collections;
using System.Collections.Generic;
using UnityEngine;
/// <summary>
/// Academy for the WallJump example scene. Exposes the movement and jump
/// tuning values shared by the agents and applies a global gravity
/// multiplier when the academy initializes.
/// </summary>
public class WallJumpAcademy : Academy
{
    [Header("Specific to WallJump")]
    public float agentRunSpeed;
    public float agentJumpHeight;

    // Material the ground switches to for a few seconds after a goal is scored.
    public Material goalScoredMaterial;
    // Material the ground switches to for a few seconds after a failure.
    public Material failMaterial;

    // Use a value around 3 to make the jumps feel less floaty.
    [HideInInspector]
    public float gravityMultiplier = 2.5f;
    [HideInInspector]
    public float agentJumpVelocity = 777;
    [HideInInspector]
    public float agentJumpVelocityMaxChange = 10;

    /// <summary>
    /// Scales the project-wide gravity vector once at academy startup.
    /// </summary>
    public override void InitializeAcademy()
    {
        Physics.gravity = Physics.gravity * gravityMultiplier;
    }
}

13
unity-environment/Assets/ML-Agents/Examples/WallJump/Scripts/WallJumpAcademy.cs.meta


fileFormatVersion: 2
guid: 50b93afe82bc647b581a706891913e7f
timeCreated: 1517447911
licenseType: Free
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

327
unity-environment/Assets/ML-Agents/Examples/WallJump/Scripts/WallJumpAgent.cs


//Put this script on your blue cube.
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using System.Linq;
/// <summary>
/// Agent for the WallJump example. Put this script on the blue cube.
/// The agent must reach the goal, optionally jumping over a wall whose
/// height (and the brain used) depends on a per-episode configuration.
/// </summary>
public class WallJumpAgent : Agent
{
    // Selects the wall height / brain for the current episode.
    // Set to Random.Range(0, 5) on init/reset, consumed once in
    // FixedUpdate (via ConfigureAgent) and then flagged as -1.
    int configuration;
    // Brain to use when no wall is present
    public Brain noWallBrain;
    // Brain to use when a jumpable wall is present
    public Brain smallWallBrain;
    // Brain to use when a wall requiring a block to jump over is present
    public Brain bigWallBrain;

    public GameObject ground;
    // Area within whose collider bounds the block respawns (see GetRandomSpawnPos).
    public GameObject spawnArea;
    Bounds spawnAreaBounds;
    public GameObject goal;
    public GameObject shortBlock;
    public GameObject wall;
    Rigidbody shortBlockRB;
    Rigidbody agentRB;
    // Original ground material, restored after the temporary success/fail swap.
    Material groundMaterial;
    Renderer groundRenderer;
    WallJumpAcademy academy;
    RayPerception rayPer;

    // Remaining time (seconds) of the current jump's upward guidance.
    public float jumpingTime;
    // NOTE(review): this field is serialized (prefab sets 0.2) but Jump()
    // hard-codes 0.2f instead of reading it — confirm which is intended.
    public float jumpTime;
    // This is a downward force applied when falling to make jumps look
    // less floaty
    public float fallingForce;
    // Use to check the colliding objects; reused buffer for the
    // non-allocating overlap query in DoGroundCheck.
    public Collider[] hitGroundColliders = new Collider[3];
    Vector3 jumpTargetPos;
    Vector3 jumpStartingPos;
    // Tags the ray perception reports on (see CollectObservations).
    string[] detectableObjects;

    /// <summary>
    /// Caches component references, picks the first episode's wall
    /// configuration, and hides the spawn-area marker object.
    /// </summary>
    public override void InitializeAgent()
    {
        academy = FindObjectOfType<WallJumpAcademy>();
        rayPer = GetComponent<RayPerception>();
        configuration = Random.Range(0, 5);
        detectableObjects = new string[] { "wall", "goal", "block" };
        agentRB = GetComponent<Rigidbody>();
        shortBlockRB = shortBlock.GetComponent<Rigidbody>();
        spawnAreaBounds = spawnArea.GetComponent<Collider>().bounds;
        groundRenderer = ground.GetComponent<Renderer>();
        groundMaterial = groundRenderer.material;
        spawnArea.SetActive(false);
    }

    // Begin the jump sequence: 0.2 s of upward guidance (see MoveAgent)
    // starting from the agent's current position.
    public void Jump()
    {
        jumpingTime = 0.2f;
        jumpStartingPos = agentRB.position;
    }

    /// <summary>
    /// Does the ground check.
    /// </summary>
    /// <returns><c>true</c>, if the agent is on the ground,
    /// <c>false</c> otherwise.</returns>
    /// <param name="boxWidth">The width of the box used to perform
    /// the ground check. </param>
    public bool DoGroundCheck(float boxWidth)
    {
        // Fresh buffer so stale hits from a previous call can't linger
        // past the number of colliders found this frame.
        hitGroundColliders = new Collider[3];
        // Overlap box centered slightly below the agent's feet.
        Physics.OverlapBoxNonAlloc(
            gameObject.transform.position + new Vector3(0, -0.05f, 0),
            new Vector3(boxWidth / 2f, 0.5f, boxWidth / 2f),
            hitGroundColliders,
            gameObject.transform.rotation);
        bool grounded = false;
        foreach (Collider col in hitGroundColliders)
        {
            // Walkable surfaces, the block, and the wall itself all count
            // as ground (the agent may stand on the block or the wall).
            if (col != null && col.transform != this.transform &&
                (col.CompareTag("walkableSurface") ||
                 col.CompareTag("block") ||
                 col.CompareTag("wall")))
            {
                grounded = true; //then we're grounded
                break;
            }
        }
        return grounded;
    }

    /// <summary>
    /// Moves a rigidbody towards a position smoothly.
    /// </summary>
    /// <param name="targetPos">Target position.</param>
    /// <param name="rb">The rigidbody to be moved.</param>
    /// <param name="targetVel">The velocity to target during the
    /// motion.</param>
    /// <param name="maxVel">The maximum velocity change applied per call
    /// (passed as maxDistanceDelta to Vector3.MoveTowards).</param>
    void MoveTowards(
        Vector3 targetPos, Rigidbody rb, float targetVel, float maxVel)
    {
        Vector3 moveToPos = targetPos - rb.worldCenterOfMass;
        Vector3 velocityTarget = moveToPos * targetVel * Time.fixedDeltaTime;
        // Guard against NaN (e.g. degenerate positions) before writing velocity.
        if (float.IsNaN(velocityTarget.x) == false)
        {
            rb.velocity = Vector3.MoveTowards(
                rb.velocity, velocityTarget, maxVel);
        }
    }

    /// <summary>
    /// Observations: two ray-perception sweeps (at offsets 0 and 2.5 —
    /// presumably ground level and above the small wall; confirm against
    /// RayPerception.Perceive), the agent's position relative to the
    /// ground (scaled by 1/20), and a small-box grounded flag.
    /// </summary>
    public override void CollectObservations()
    {
        float rayDistance = 20f;
        float[] rayAngles = { 0f, 45f, 90f, 135f, 180f, 110f, 70f };
        AddVectorObs(rayPer.Perceive(
            rayDistance, rayAngles, detectableObjects, 0f, 0f));
        AddVectorObs(rayPer.Perceive(
            rayDistance, rayAngles, detectableObjects, 2.5f, 2.5f));
        Vector3 agentPos = agentRB.position - ground.transform.position;
        AddVectorObs(agentPos / 20f);
        AddVectorObs(DoGroundCheck(0.4f) ? 1 : 0);
    }

    /// <summary>
    /// Gets a random spawn position in the spawningArea.
    /// </summary>
    /// <returns>The random spawn position.</returns>
    public Vector3 GetRandomSpawnPos()
    {
        Vector3 randomSpawnPos = Vector3.zero;
        float randomPosX = Random.Range(-spawnAreaBounds.extents.x,
            spawnAreaBounds.extents.x);
        float randomPosZ = Random.Range(-spawnAreaBounds.extents.z,
            spawnAreaBounds.extents.z);
        // 0.45 lifts the spawn point slightly above the area's pivot.
        randomSpawnPos = spawnArea.transform.position +
            new Vector3(randomPosX, 0.45f, randomPosZ);
        return randomSpawnPos;
    }

    /// <summary>
    /// Changes the color of the ground for a moment.
    /// </summary>
    /// <returns>The Enumerator to be used in a Coroutine</returns>
    /// <param name="mat">The material to be swapped.</param>
    /// <param name="time">The time the material will remain.</param>
    IEnumerator GoalScoredSwapGroundMaterial(Material mat, float time)
    {
        groundRenderer.material = mat;
        yield return new WaitForSeconds(time); // revert after `time` seconds
        groundRenderer.material = groundMaterial;
    }

    /// <summary>
    /// Applies one discrete action: forward/back, turn left/right,
    /// strafe left/right, or jump. Adds a small per-step existential
    /// penalty, then applies run forces, jump guidance while jumpingTime
    /// is positive, and an extra downward force while airborne.
    /// </summary>
    /// <param name="act">Discrete action vector; act[0] in [0..6].</param>
    public void MoveAgent(float[] act)
    {
        // Existential penalty to encourage reaching the goal quickly.
        AddReward(-0.0005f);
        bool smallGrounded = DoGroundCheck(0.4f);
        bool largeGrounded = DoGroundCheck(1.0f);
        Vector3 dirToGo = Vector3.zero;
        Vector3 rotateDir = Vector3.zero;
        int action = Mathf.FloorToInt(act[0]);
        // Movement is halved while airborne (largeGrounded false).
        switch (action)
        {
            case 0:
                dirToGo = transform.forward * 1f * (largeGrounded ? 1f : 0.5f);
                break;
            case 1:
                dirToGo = transform.forward * -1f * (largeGrounded ? 1f : 0.5f);
                break;
            case 2:
                rotateDir = transform.up * -1f;
                break;
            case 3:
                rotateDir = transform.up * 1f;
                break;
            case 4:
                dirToGo = transform.right * -0.6f * (largeGrounded ? 1f : 0.5f);
                break;
            case 5:
                dirToGo = transform.right * 0.6f * (largeGrounded ? 1f : 0.5f);
                break;
            case 6:
                // Jump only when grounded (tight check) and not mid-jump.
                if ((jumpingTime <= 0f) && smallGrounded)
                {
                    Jump();
                }
                break;
        }
        transform.Rotate(rotateDir, Time.fixedDeltaTime * 300f);
        agentRB.AddForce(dirToGo * academy.agentRunSpeed,
            ForceMode.VelocityChange);
        if (jumpingTime > 0f)
        {
            // Steer toward a point agentJumpHeight above where the jump
            // started, nudged by the current movement direction.
            jumpTargetPos =
                new Vector3(agentRB.position.x,
                    jumpStartingPos.y + academy.agentJumpHeight,
                    agentRB.position.z) + dirToGo;
            MoveTowards(jumpTargetPos, agentRB, academy.agentJumpVelocity,
                academy.agentJumpVelocityMaxChange);
        }
        if (!(jumpingTime > 0f) && !largeGrounded)
        {
            // Extra downward force while falling, to reduce floatiness.
            agentRB.AddForce(
                Vector3.down * fallingForce, ForceMode.Acceleration);
        }
        jumpingTime -= Time.fixedDeltaTime;
    }

    /// <summary>
    /// Per-decision step: move, then fail the episode (reward -1, block
    /// reset, ground flashes failMaterial) if either the agent or the
    /// block has nothing beneath it within 20 units (fell off the platform).
    /// </summary>
    public override void AgentAction(float[] vectorAction, string textAction)
    {
        MoveAgent(vectorAction);
        if ((!Physics.Raycast(agentRB.position, Vector3.down, 20))
            || (!Physics.Raycast(shortBlockRB.position, Vector3.down, 20)))
        {
            Done();
            SetReward(-1f);
            ResetBlock(shortBlockRB);
            StartCoroutine(
                GoalScoredSwapGroundMaterial(academy.failMaterial, .5f));
        }
    }

    // Detect when the agent hits the goal: reward +1 and end the episode,
    // but only while grounded (prevents scoring mid-air).
    void OnTriggerEnter(Collider col)
    {
        if (col.gameObject.CompareTag("goal") && DoGroundCheck(0.4f))
        {
            SetReward(1f);
            Done();
            StartCoroutine(
                GoalScoredSwapGroundMaterial(academy.goalScoredMaterial, 2));
        }
    }

    // Reset the orange block: random position inside the spawn area,
    // with all motion zeroed.
    void ResetBlock(Rigidbody blockRB)
    {
        blockRB.transform.position = GetRandomSpawnPos();
        blockRB.velocity = Vector3.zero;
        blockRB.angularVelocity = Vector3.zero;
    }

    /// <summary>
    /// Episode reset: respawn the block, place the agent at a random x
    /// along the start edge, pick a new wall configuration, and zero
    /// the agent's velocity.
    /// </summary>
    public override void AgentReset()
    {
        ResetBlock(shortBlockRB);
        transform.localPosition = new Vector3(
            18 * (Random.value - 0.5f), 1, -12);
        configuration = Random.Range(0, 5);
        agentRB.velocity = default(Vector3);
    }

    // Applies the pending configuration exactly once after a reset
    // (-1 marks it as already applied).
    private void FixedUpdate()
    {
        if (configuration != -1)
        {
            ConfigureAgent(configuration);
            configuration = -1;
        }
    }

    /// <summary>
    /// Configures the agent. Given an integer config, the wall will have
    /// different height and a different brain will be assigned to the agent.
    /// </summary>
    /// <param name="config">Config.
    /// If 0 : No wall and noWallBrain.
    /// If 1: Small wall and smallWallBrain.
    /// Other : Tall wall and BigWallBrain. </param>
    void ConfigureAgent(int config)
    {
        if (config == 0)
        {
            wall.transform.localScale = new Vector3(
                wall.transform.localScale.x,
                academy.resetParameters["no_wall_height"],
                wall.transform.localScale.z);
            GiveBrain(noWallBrain);
        }
        else if (config == 1)
        {
            wall.transform.localScale = new Vector3(
                wall.transform.localScale.x,
                academy.resetParameters["small_wall_height"],
                wall.transform.localScale.z);
            GiveBrain(smallWallBrain);
        }
        else
        {
            // Tall wall: height sampled uniformly between the academy's
            // big_wall_min_height and big_wall_max_height reset parameters.
            float height =
                academy.resetParameters["big_wall_min_height"] +
                Random.value * (academy.resetParameters["big_wall_max_height"] -
                academy.resetParameters["big_wall_min_height"]);
            wall.transform.localScale = new Vector3(
                wall.transform.localScale.x,
                height,
                wall.transform.localScale.z);
            GiveBrain(bigWallBrain);
        }
    }
}

13
unity-environment/Assets/ML-Agents/Examples/WallJump/Scripts/WallJumpAgent.cs.meta


fileFormatVersion: 2
guid: 676fca959b8ee45539773905ca71afa1
timeCreated: 1517445814
licenseType: Free
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

10
unity-environment/Assets/ML-Agents/Examples/WallJump/TFModels.meta


fileFormatVersion: 2
guid: da68dfde501d241c788cadc9805b214a
folderAsset: yes
timeCreated: 1517539094
licenseType: Free
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

1001
unity-environment/Assets/ML-Agents/Examples/WallJump/TFModels/WallJump.bytes
文件差异内容过多而无法显示
查看文件

9
unity-environment/Assets/ML-Agents/Examples/WallJump/TFModels/WallJump.bytes.meta


fileFormatVersion: 2
guid: fae11f80dd25b4bc4918ce5223fd8e5b
timeCreated: 1520732146
licenseType: Free
TextScriptImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

部分文件因为文件数量过多而无法显示

正在加载...
取消
保存