
Merge branch 'master' into develop-var-len-obs-feature

/bullet-hell-barracuda-test-1.3.1
vincentpierre 3 years ago
Current commit
e1b94b8b
103 changed files with 1,873 additions and 878 deletions
(number of changed lines in parentheses)
  1. .github/workflows/pytest.yml (3)
  2. .yamato/com.unity.ml-agents-performance.yml (1)
  3. .yamato/com.unity.ml-agents-test.yml (4)
  4. .yamato/compressed-sensor-test.yml (4)
  5. .yamato/gym-interface-test.yml (4)
  6. .yamato/python-ll-api-test.yml (4)
  7. .yamato/test_versions.metafile (15)
  8. Project/Assets/ML-Agents/Editor/Tests/StandaloneBuildTest.cs (2)
  9. Project/Assets/ML-Agents/Examples/3DBall/Prefabs/3DBall.prefab (6)
  10. Project/Assets/ML-Agents/Examples/3DBall/Prefabs/3DBallHardNew.prefab (24)
  11. Project/Assets/ML-Agents/Examples/3DBall/Prefabs/Visual3DBall.prefab (6)
  12. Project/Assets/ML-Agents/Examples/3DBall/Scripts/Ball3DHardAgent.cs (20)
  13. Project/Assets/ML-Agents/Examples/Basic/Prefabs/Basic.prefab (6)
  14. Project/Assets/ML-Agents/Examples/Bouncer/Prefabs/Environment.prefab (21)
  15. Project/Assets/ML-Agents/Examples/Crawler/Prefabs/CrawlerBase.prefab (6)
  16. Project/Assets/ML-Agents/Examples/FoodCollector/Prefabs/FoodCollectorArea.prefab (10)
  17. Project/Assets/ML-Agents/Examples/FoodCollector/Prefabs/GridFoodCollectorArea.prefab (10)
  18. Project/Assets/ML-Agents/Examples/FoodCollector/Prefabs/VisualFoodCollectorArea.prefab (8)
  19. Project/Assets/ML-Agents/Examples/GridWorld/Prefabs/Area.prefab (23)
  20. Project/Assets/ML-Agents/Examples/GridWorld/Scenes/GridWorld.unity (8)
  21. Project/Assets/ML-Agents/Examples/Hallway/Prefabs/SymbolFinderArea.prefab (6)
  22. Project/Assets/ML-Agents/Examples/Hallway/Prefabs/VisualSymbolFinderArea.prefab (43)
  23. Project/Assets/ML-Agents/Examples/Match3/Prefabs/Match3Heuristic.prefab (6)
  24. Project/Assets/ML-Agents/Examples/Match3/Prefabs/Match3VectorObs.prefab (6)
  25. Project/Assets/ML-Agents/Examples/Match3/Prefabs/Match3VisualObs.prefab (6)
  26. Project/Assets/ML-Agents/Examples/PushBlock/Prefabs/PushBlockArea.prefab (22)
  27. Project/Assets/ML-Agents/Examples/PushBlock/Prefabs/PushBlockVisualArea.prefab (8)
  28. Project/Assets/ML-Agents/Examples/Pyramids/Prefabs/AreaPB.prefab (22)
  29. Project/Assets/ML-Agents/Examples/Pyramids/Prefabs/VisualAreaPyramids.prefab (43)
  30. Project/Assets/ML-Agents/Examples/Reacher/Prefabs/Agent.prefab (22)
  31. Project/Assets/ML-Agents/Examples/SharedAssets/Scripts/ModelOverrider.cs (29)
  32. Project/Assets/ML-Agents/Examples/Soccer/Prefabs/SoccerFieldTwos.prefab (28)
  33. Project/Assets/ML-Agents/Examples/Soccer/Prefabs/StrikersVsGoalieField.prefab (21)
  34. Project/Assets/ML-Agents/Examples/Tennis/Prefabs/TennisArea.prefab (18)
  35. Project/Assets/ML-Agents/Examples/Walker/Prefabs/Ragdoll/WalkerRagdollBase.prefab (12)
  36. Project/Assets/ML-Agents/Examples/Walker/Prefabs/Ragdoll/WalkerRagdollDySingleSpeedVariant.prefab (5)
  37. Project/Assets/ML-Agents/Examples/WallJump/Prefabs/WallJumpArea.prefab (7)
  38. Project/Assets/ML-Agents/Examples/Worm/Prefabs/WormBasePrefab.prefab (6)
  39. com.unity.ml-agents/CHANGELOG.md (27)
  40. com.unity.ml-agents/Runtime/Academy.cs (49)
  41. com.unity.ml-agents/Runtime/Communicator/ICommunicator.cs (5)
  42. com.unity.ml-agents/Runtime/Communicator/RpcCommunicator.cs (147)
  43. com.unity.ml-agents/Runtime/Inference/ApplierImpl.cs (129)
  44. com.unity.ml-agents/Runtime/Inference/ModelRunner.cs (22)
  45. com.unity.ml-agents/Runtime/Inference/Utils/Multinomial.cs (17)
  46. com.unity.ml-agents/Runtime/Policies/BarracudaPolicy.cs (12)
  47. com.unity.ml-agents/Runtime/Policies/BehaviorParameters.cs (2)
  48. com.unity.ml-agents/Runtime/SideChannels/SideChannel.cs (14)
  49. com.unity.ml-agents/Runtime/Timer.cs (4)
  50. com.unity.ml-agents/Tests/Editor/Communicator/RpcCommunicatorTests.cs (19)
  51. com.unity.ml-agents/Tests/Editor/DiscreteActionOutputApplierTest.cs (198)
  52. com.unity.ml-agents/Tests/Editor/ModelRunnerTest.cs (13)
  53. docs/Installation.md (2)
  54. docs/Learning-Environment-Design-Agents.md (13)
  55. docs/Training-Configuration-File.md (2)
  56. ml-agents/mlagents/trainers/buffer.py (392)
  57. ml-agents/mlagents/trainers/cli_utils.py (3)
  58. ml-agents/mlagents/trainers/demo_loader.py (16)
  59. ml-agents/mlagents/trainers/ghost/trainer.py (40)
  60. ml-agents/mlagents/trainers/learn.py (41)
  61. ml-agents/mlagents/trainers/ppo/optimizer_torch.py (22)
  62. ml-agents/mlagents/trainers/ppo/trainer.py (33)
  63. ml-agents/mlagents/trainers/sac/optimizer_torch.py (24)
  64. ml-agents/mlagents/trainers/sac/trainer.py (7)
  65. ml-agents/mlagents/trainers/settings.py (18)
  66. ml-agents/mlagents/trainers/stats.py (7)
  67. ml-agents/mlagents/trainers/tests/__init__.py (6)
  68. ml-agents/mlagents/trainers/tests/mock_brain.py (9)
  69. ml-agents/mlagents/trainers/tests/test_buffer.py (60)
  70. ml-agents/mlagents/trainers/tests/test_demo_loader.py (9)
  71. ml-agents/mlagents/trainers/tests/test_trajectory.py (29)
  72. ml-agents/mlagents/trainers/tests/torch/test_ghost.py (32)
  73. ml-agents/mlagents/trainers/tests/torch/test_policy.py (15)
  74. ml-agents/mlagents/trainers/tests/torch/test_ppo.py (68)
  75. ml-agents/mlagents/trainers/tests/torch/test_reward_providers/test_curiosity.py (3)
  76. ml-agents/mlagents/trainers/tests/torch/test_reward_providers/utils.py (18)
  77. ml-agents/mlagents/trainers/tests/torch/test_sac.py (13)
  78. ml-agents/mlagents/trainers/torch/action_log_probs.py (15)
  79. ml-agents/mlagents/trainers/torch/agent_action.py (14)
  80. ml-agents/mlagents/trainers/torch/components/bc/module.py (2)
  81. ml-agents/mlagents/trainers/torch/components/reward_providers/curiosity_reward_provider.py (16)
  82. ml-agents/mlagents/trainers/torch/components/reward_providers/extrinsic_reward_provider.py (4)
  83. ml-agents/mlagents/trainers/torch/components/reward_providers/gail_reward_provider.py (12)
  84. ml-agents/mlagents/trainers/trajectory.py (43)
  85. ml-agents/setup.py (8)
  86. ml-agents/tests/yamato/training_int_tests.py (29)
  87. Project/Assets/ML-Agents/Examples/3DBall/TFModels/3DBallHard.onnx (439)
  88. Project/Assets/ML-Agents/Examples/3DBall/TFModels/3DBallHard.onnx.meta (15)
  89. docs/Training-Plugins.md (58)
  90. ml-agents-plugin-examples/README.md (3)
  91. ml-agents-plugin-examples/mlagents_plugin_examples/__init__.py (0)
  92. ml-agents-plugin-examples/mlagents_plugin_examples/example_stats_writer.py (27)
  93. ml-agents-plugin-examples/mlagents_plugin_examples/tests/__init__.py (0)
  94. ml-agents-plugin-examples/mlagents_plugin_examples/tests/test_stats_writer_plugin.py (13)
  95. ml-agents-plugin-examples/setup.py (17)
  96. ml-agents/mlagents/plugins/__init__.py (1)

3
.github/workflows/pytest.yml


jobs:
  pytest:
    runs-on: ubuntu-latest
    env:
      TEST_ENFORCE_BUFFER_KEY_TYPES: 1
    strategy:
      matrix:
        python-version: [3.6.x, 3.7.x, 3.8.x]

          python -m pip install --progress-bar=off -e ./ml-agents
          python -m pip install --progress-bar=off -r test_requirements.txt
          python -m pip install --progress-bar=off -e ./gym-unity
          python -m pip install --progress-bar=off -e ./ml-agents-plugin-examples
      - name: Save python dependencies
        run: |
          pip freeze > pip_versions-${{ matrix.python-version }}.txt

1
.yamato/com.unity.ml-agents-performance.yml


test_editors:
- version: 2019.4
- version: 2020.1
- version: 2020.2
---
{% for editor in test_editors %}

4
.yamato/com.unity.ml-agents-test.yml


enableCodeCoverage: !!bool true
testProject: DevProject
enableNoDefaultPackages: !!bool true
- version: 2020.1
enableCodeCoverage: !!bool true
testProject: DevProject
enableNoDefaultPackages: !!bool true
- version: 2020.2
enableCodeCoverage: !!bool true
testProject: DevProject

4
.yamato/compressed-sensor-test.yml


- .yamato/standalone-build-test.yml#test_linux_standalone_{{ editor.version }}
triggers:
cancel_old_ci: true
{% if editor.extra_test == "sensor" %}
expression: |
(pull_request.target eq "master" OR
pull_request.target match "release.+") AND

pull_request.changes.any match "Project/**" OR
pull_request.changes.any match "ml-agents/**" OR
pull_request.changes.any match "ml-agents/tests/yamato/**" OR
{% endif %}
{% endfor %}

4
.yamato/gym-interface-test.yml


- .yamato/standalone-build-test.yml#test_linux_standalone_{{ editor.version }}
triggers:
cancel_old_ci: true
{% if editor.extra_test == "gym" %}
expression: |
(pull_request.target eq "master" OR
pull_request.target match "release.+") AND

pull_request.changes.any match "ml-agents/**" OR
pull_request.changes.any match "ml-agents/tests/yamato/**" OR
{% endif %}
{% endfor %}

4
.yamato/python-ll-api-test.yml


- .yamato/standalone-build-test.yml#test_linux_standalone_{{ editor.version }}
triggers:
cancel_old_ci: true
{% if editor.extra_test == "llapi" %}
expression: |
(pull_request.target eq "master" OR
pull_request.target match "release.+") AND

pull_request.changes.any match "ml-agents/**" OR
pull_request.changes.any match "ml-agents/tests/yamato/**" OR
{% endif %}
{% endfor %}

15
.yamato/test_versions.metafile


# List of editor versions for standalone-build-test and its dependencies.
# csharp_backcompat_version is used in training-int-tests to determine the
# older package version to run the backwards compat tests against.
# We always run training-int-tests for all versions of the editor
# For each "other" test, we only run it against a single version of the
# editor to reduce the number of yamato jobs
csharp_backcompat_version: 1.0.0
extra_test: llapi
csharp_backcompat_version: 1.0.0
- version: 2020.1
csharp_backcompat_version: 1.0.0
extra_test: gym
# 2020.2 moved the AssetImporters namespace
# but we didn't handle this until 1.2.0
csharp_backcompat_version: 1.2.0
extra_test: sensor

2
Project/Assets/ML-Agents/Editor/Tests/StandaloneBuildTest.cs


scenes,
outputPath,
buildTarget,
BuildOptions.None
BuildOptions.Development
);
var isOk = buildResult.summary.result == BuildResult.Succeeded;
var error = "";

6
Project/Assets/ML-Agents/Examples/3DBall/Prefabs/3DBall.prefab


m_BrainParameters:
VectorObservationSize: 8
NumStackedVectorObservations: 1
m_ActionSpec:
m_NumContinuousActions: 2
BranchSizes:
hasUpgradedBrainParametersWithActionSpec: 1
m_InferenceDevice: 0
m_InferenceDevice: 2
m_BehaviorType: 0
m_BehaviorName: 3DBall
TeamId: 0

24
Project/Assets/ML-Agents/Examples/3DBall/Prefabs/3DBallHardNew.prefab


m_Name:
m_EditorClassIdentifier:
m_BrainParameters:
vectorObservationSize: 5
numStackedVectorObservations: 9
vectorActionSize: 02000000
vectorActionDescriptions: []
vectorActionSpaceType: 1
m_Model: {fileID: 11400000, guid: 27d49984757ed46b181090a532ef48e5, type: 3}
m_InferenceDevice: 0
VectorObservationSize: 0
NumStackedVectorObservations: 1
m_ActionSpec:
m_NumContinuousActions: 2
BranchSizes:
VectorActionSize: 02000000
VectorActionDescriptions: []
VectorActionSpaceType: 1
hasUpgradedBrainParametersWithActionSpec: 1
m_Model: {fileID: 11400000, guid: d179c44c147aa4ffbbb725f009eca3b8, type: 3}
m_InferenceDevice: 2
m_UseChildActuators: 1
m_ObservableAttributeHandling: 1
--- !u!114 &114466000339026140
MonoBehaviour:
m_ObjectHideFlags: 0

agentParameters:
maxStep: 0
hasUpgradedFromAgentParameters: 1
maxStep: 5000
MaxStep: 5000
ball: {fileID: 1142513601053358}
--- !u!114 &8193279139064749781
MonoBehaviour:

m_EditorClassIdentifier:
DecisionPeriod: 5
TakeActionsBetweenDecisions: 1
offsetStep: 0
--- !u!114 &7923264721978289873
MonoBehaviour:
m_ObjectHideFlags: 0

m_Script: {fileID: 11500000, guid: 3a6da8f78a394c6ab027688eab81e04d, type: 3}
m_Name:
m_EditorClassIdentifier:
debugCommandLineOverride:
--- !u!1 &1978072206102878
GameObject:
m_ObjectHideFlags: 0

6
Project/Assets/ML-Agents/Examples/3DBall/Prefabs/Visual3DBall.prefab


m_BrainParameters:
VectorObservationSize: 0
NumStackedVectorObservations: 1
m_ActionSpec:
m_NumContinuousActions: 2
BranchSizes:
hasUpgradedBrainParametersWithActionSpec: 1
m_InferenceDevice: 0
m_InferenceDevice: 2
m_BehaviorType: 0
m_BehaviorName: Visual3DBall
TeamId: 0

20
Project/Assets/ML-Agents/Examples/3DBall/Scripts/Ball3DHardAgent.cs


using Unity.MLAgents;
using Unity.MLAgents.Actuators;
using Unity.MLAgents.Sensors;
using Unity.MLAgents.Sensors.Reflection;
public class Ball3DHardAgent : Agent
{

SetResetParameters();
}
public override void CollectObservations(VectorSensor sensor)
[Observable(numStackedObservations: 9)]
Vector2 Rotation
sensor.AddObservation(gameObject.transform.rotation.z);
sensor.AddObservation(gameObject.transform.rotation.x);
sensor.AddObservation((ball.transform.position - gameObject.transform.position));
get
{
return new Vector2(gameObject.transform.rotation.z, gameObject.transform.rotation.x);
}
}
[Observable(numStackedObservations: 9)]
Vector3 PositionDelta
{
get
{
return ball.transform.position - gameObject.transform.position;
}
}
public override void OnActionReceived(ActionBuffers actionBuffers)

6
Project/Assets/ML-Agents/Examples/Basic/Prefabs/Basic.prefab


m_BrainParameters:
VectorObservationSize: 0
NumStackedVectorObservations: 1
m_ActionSpec:
m_NumContinuousActions: 0
BranchSizes:
hasUpgradedBrainParametersWithActionSpec: 1
m_InferenceDevice: 0
m_InferenceDevice: 2
m_BehaviorType: 0
m_BehaviorName: Basic
TeamId: 0

21
Project/Assets/ML-Agents/Examples/Bouncer/Prefabs/Environment.prefab


m_Name:
m_EditorClassIdentifier:
m_BrainParameters:
vectorObservationSize: 6
numStackedVectorObservations: 3
vectorActionSize: 03000000
vectorActionDescriptions: []
vectorActionSpaceType: 1
VectorObservationSize: 6
NumStackedVectorObservations: 3
m_ActionSpec:
m_NumContinuousActions: 3
BranchSizes:
VectorActionSize: 03000000
VectorActionDescriptions: []
VectorActionSpaceType: 1
hasUpgradedBrainParametersWithActionSpec: 1
m_InferenceDevice: 0
m_InferenceDevice: 2
m_UseChildActuators: 1
m_ObservableAttributeHandling: 0
--- !u!114 &114878620968301562
MonoBehaviour:
m_ObjectHideFlags: 0

agentParameters:
maxStep: 0
hasUpgradedFromAgentParameters: 1
maxStep: 0
MaxStep: 0
target: {fileID: 1160631129428284}
bodyObject: {fileID: 1680588139522898}
strength: 500

m_Script: {fileID: 11500000, guid: 3a6da8f78a394c6ab027688eab81e04d, type: 3}
m_Name:
m_EditorClassIdentifier:
debugCommandLineOverride:
--- !u!1 &1680588139522898
GameObject:
m_ObjectHideFlags: 0

6
Project/Assets/ML-Agents/Examples/Crawler/Prefabs/CrawlerBase.prefab


m_BrainParameters:
VectorObservationSize: 32
NumStackedVectorObservations: 1
m_ActionSpec:
m_NumContinuousActions: 20
BranchSizes:
hasUpgradedBrainParametersWithActionSpec: 1
m_InferenceDevice: 0
m_InferenceDevice: 2
m_BehaviorType: 0
m_BehaviorName:
TeamId: 0

10
Project/Assets/ML-Agents/Examples/FoodCollector/Prefabs/FoodCollectorArea.prefab


VectorActionSpaceType: 1
hasUpgradedBrainParametersWithActionSpec: 1
m_Model: {fileID: 11400000, guid: 3210b528a2bc44a86bd6bd1d571070f8, type: 3}
m_InferenceDevice: 0
m_InferenceDevice: 2
m_BehaviorType: 0
m_BehaviorName: FoodCollector
TeamId: 0

VectorActionSpaceType: 1
hasUpgradedBrainParametersWithActionSpec: 1
m_Model: {fileID: 11400000, guid: 3210b528a2bc44a86bd6bd1d571070f8, type: 3}
m_InferenceDevice: 0
m_InferenceDevice: 2
m_BehaviorType: 0
m_BehaviorName: FoodCollector
TeamId: 0

VectorActionSpaceType: 1
hasUpgradedBrainParametersWithActionSpec: 1
m_Model: {fileID: 11400000, guid: 3210b528a2bc44a86bd6bd1d571070f8, type: 3}
m_InferenceDevice: 0
m_InferenceDevice: 2
m_BehaviorType: 0
m_BehaviorName: FoodCollector
TeamId: 0

VectorActionSpaceType: 1
hasUpgradedBrainParametersWithActionSpec: 1
m_Model: {fileID: 11400000, guid: 3210b528a2bc44a86bd6bd1d571070f8, type: 3}
m_InferenceDevice: 0
m_InferenceDevice: 2
m_BehaviorType: 0
m_BehaviorName: FoodCollector
TeamId: 0

VectorActionSpaceType: 1
hasUpgradedBrainParametersWithActionSpec: 1
m_Model: {fileID: 11400000, guid: 3210b528a2bc44a86bd6bd1d571070f8, type: 3}
m_InferenceDevice: 0
m_InferenceDevice: 2
m_BehaviorType: 0
m_BehaviorName: FoodCollector
TeamId: 0

10
Project/Assets/ML-Agents/Examples/FoodCollector/Prefabs/GridFoodCollectorArea.prefab


VectorActionSpaceType: 0
hasUpgradedBrainParametersWithActionSpec: 1
m_Model: {fileID: 11400000, guid: 75910f45f20be49b18e2b95879a217b2, type: 3}
m_InferenceDevice: 0
m_InferenceDevice: 2
m_BehaviorType: 0
m_BehaviorName: GridFoodCollector
TeamId: 0

VectorActionSpaceType: 0
hasUpgradedBrainParametersWithActionSpec: 1
m_Model: {fileID: 11400000, guid: 75910f45f20be49b18e2b95879a217b2, type: 3}
m_InferenceDevice: 0
m_InferenceDevice: 2
m_BehaviorType: 0
m_BehaviorName: GridFoodCollector
TeamId: 0

VectorActionSpaceType: 0
hasUpgradedBrainParametersWithActionSpec: 1
m_Model: {fileID: 11400000, guid: 75910f45f20be49b18e2b95879a217b2, type: 3}
m_InferenceDevice: 0
m_InferenceDevice: 2
m_BehaviorType: 0
m_BehaviorName: GridFoodCollector
TeamId: 0

VectorActionSpaceType: 0
hasUpgradedBrainParametersWithActionSpec: 1
m_Model: {fileID: 11400000, guid: 75910f45f20be49b18e2b95879a217b2, type: 3}
m_InferenceDevice: 0
m_InferenceDevice: 2
m_BehaviorType: 0
m_BehaviorName: GridFoodCollector
TeamId: 0

VectorActionSpaceType: 0
hasUpgradedBrainParametersWithActionSpec: 1
m_Model: {fileID: 11400000, guid: 75910f45f20be49b18e2b95879a217b2, type: 3}
m_InferenceDevice: 0
m_InferenceDevice: 2
m_BehaviorType: 0
m_BehaviorName: GridFoodCollector
TeamId: 0

8
Project/Assets/ML-Agents/Examples/FoodCollector/Prefabs/VisualFoodCollectorArea.prefab


VectorActionSpaceType: 0
hasUpgradedBrainParametersWithActionSpec: 1
m_Model: {fileID: 11400000, guid: ec4b31b5d66ca4e51ae3ac41945facb2, type: 3}
m_InferenceDevice: 0
m_InferenceDevice: 2
m_BehaviorType: 0
m_BehaviorName: VisualFoodCollector
TeamId: 0

VectorActionSpaceType: 0
hasUpgradedBrainParametersWithActionSpec: 1
m_Model: {fileID: 11400000, guid: ec4b31b5d66ca4e51ae3ac41945facb2, type: 3}
m_InferenceDevice: 0
m_InferenceDevice: 2
m_BehaviorType: 0
m_BehaviorName: VisualFoodCollector
TeamId: 0

VectorActionSpaceType: 0
hasUpgradedBrainParametersWithActionSpec: 1
m_Model: {fileID: 11400000, guid: ec4b31b5d66ca4e51ae3ac41945facb2, type: 3}
m_InferenceDevice: 0
m_InferenceDevice: 2
m_BehaviorType: 0
m_BehaviorName: VisualFoodCollector
TeamId: 0

VectorActionSpaceType: 0
hasUpgradedBrainParametersWithActionSpec: 1
m_Model: {fileID: 11400000, guid: ec4b31b5d66ca4e51ae3ac41945facb2, type: 3}
m_InferenceDevice: 0
m_InferenceDevice: 2
m_BehaviorType: 0
m_BehaviorName: VisualFoodCollector
TeamId: 0

23
Project/Assets/ML-Agents/Examples/GridWorld/Prefabs/Area.prefab


m_Name:
m_EditorClassIdentifier:
m_BrainParameters:
vectorObservationSize: 0
numStackedVectorObservations: 1
vectorActionSize: 05000000
vectorActionDescriptions: []
vectorActionSpaceType: 0
VectorObservationSize: 0
NumStackedVectorObservations: 1
m_ActionSpec:
m_NumContinuousActions: 0
BranchSizes: 05000000
VectorActionSize: 05000000
VectorActionDescriptions: []
VectorActionSpaceType: 0
hasUpgradedBrainParametersWithActionSpec: 1
m_InferenceDevice: 0
m_InferenceDevice: 2
m_UseChildActuators: 1
m_ObservableAttributeHandling: 0
--- !u!114 &114650561397225712
MonoBehaviour:
m_ObjectHideFlags: 0

agentParameters:
maxStep: 0
hasUpgradedFromAgentParameters: 1
maxStep: 100
MaxStep: 100
area: {fileID: 114704252266302846}
timeBetweenDecisionsAtInference: 0.15
renderCamera: {fileID: 0}

m_Width: 84
m_Height: 64
m_Grayscale: 0
m_ObservationStacks: 1
m_Compression: 1
--- !u!114 &7980686505185502968
MonoBehaviour:

m_Script: {fileID: 11500000, guid: 3a6da8f78a394c6ab027688eab81e04d, type: 3}
m_Name:
m_EditorClassIdentifier:
debugCommandLineOverride:
--- !u!1 &1625008366184734
GameObject:
m_ObjectHideFlags: 0

trueAgent: {fileID: 1488387672112076}
goalPref: {fileID: 1508142483324970, guid: 1ec4e4e96e7514d45b7ebc3ba5a9a481, type: 3}
pitPref: {fileID: 1811317785436014, guid: d13ee2db77b3a4dcc8664d2fe2a0f219, type: 3}
numberOfObstacles: 1
--- !u!1 &1656910849934022
GameObject:
m_ObjectHideFlags: 0

8
Project/Assets/ML-Agents/Examples/GridWorld/Scenes/GridWorld.unity


m_ReflectionIntensity: 1
m_CustomReflection: {fileID: 0}
m_Sun: {fileID: 0}
m_IndirectSpecularColor: {r: 0.44971228, g: 0.49977815, b: 0.57563734, a: 1}
m_IndirectSpecularColor: {r: 0.4497121, g: 0.49977785, b: 0.57563704, a: 1}
m_UseRadianceAmbientProbe: 0
--- !u!157 &3
LightmapSettings:

m_BrainParameters:
VectorObservationSize: 0
NumStackedVectorObservations: 1
m_ActionSpec:
m_NumContinuousActions: 0
BranchSizes: 05000000
hasUpgradedBrainParametersWithActionSpec: 1
m_InferenceDevice: 0
m_InferenceDevice: 2
m_BehaviorType: 0
m_BehaviorName: GridWorld
TeamId: 0

6
Project/Assets/ML-Agents/Examples/Hallway/Prefabs/SymbolFinderArea.prefab


m_BrainParameters:
VectorObservationSize: 1
NumStackedVectorObservations: 3
m_ActionSpec:
m_NumContinuousActions: 0
BranchSizes: 05000000
hasUpgradedBrainParametersWithActionSpec: 1
m_InferenceDevice: 0
m_InferenceDevice: 2
m_BehaviorType: 0
m_BehaviorName: Hallway
TeamId: 0

43
Project/Assets/ML-Agents/Examples/Hallway/Prefabs/VisualSymbolFinderArea.prefab


m_Name:
m_EditorClassIdentifier:
m_BrainParameters:
vectorObservationSize: 0
numStackedVectorObservations: 1
vectorActionSize: 05000000
vectorActionDescriptions: []
vectorActionSpaceType: 0
VectorObservationSize: 0
NumStackedVectorObservations: 1
m_ActionSpec:
m_NumContinuousActions: 0
BranchSizes: 05000000
VectorActionSize: 05000000
VectorActionDescriptions: []
VectorActionSpaceType: 0
hasUpgradedBrainParametersWithActionSpec: 1
m_InferenceDevice: 0
m_InferenceDevice: 2
m_TeamID: 0
m_useChildSensors: 1
TeamId: 0
m_UseChildSensors: 1
m_UseChildActuators: 1
m_ObservableAttributeHandling: 0
--- !u!114 &114451776683649118
MonoBehaviour:
m_ObjectHideFlags: 0

m_Script: {fileID: 11500000, guid: b446afae240924105b36d07e8d17a608, type: 3}
m_Name:
m_EditorClassIdentifier:
maxStep: 3000
agentParameters:
maxStep: 0
hasUpgradedFromAgentParameters: 1
MaxStep: 3000
ground: {fileID: 1625056884785366}
area: {fileID: 1689874756253538}
symbolOGoal: {fileID: 1800868804754718}

m_Script: {fileID: 11500000, guid: 282f342c2ab144bf38be65d4d0c4e07d, type: 3}
m_Name:
m_EditorClassIdentifier:
camera: {fileID: 20961984019151212}
sensorName: CameraSensor
width: 84
height: 84
grayscale: 0
compression: 1
m_Camera: {fileID: 20961984019151212}
m_SensorName: CameraSensor
m_Width: 84
m_Height: 84
m_Grayscale: 0
m_ObservationStacks: 1
m_Compression: 1
--- !u!114 &640264344416331590
MonoBehaviour:
m_ObjectHideFlags: 0

m_Name:
m_EditorClassIdentifier:
DecisionPeriod: 6
RepeatAction: 1
offsetStep: 0
TakeActionsBetweenDecisions: 1
--- !u!1 &1377584197416466
GameObject:
m_ObjectHideFlags: 0

6
Project/Assets/ML-Agents/Examples/Match3/Prefabs/Match3Heuristic.prefab


VectorActionSpaceType: 0
hasUpgradedBrainParametersWithActionSpec: 1
m_Model: {fileID: 11400000, guid: c34da50737a3c4a50918002b20b2b927, type: 3}
m_InferenceDevice: 0
m_InferenceDevice: 2
m_BehaviorType: 0
m_BehaviorName: Match3SmartHeuristic
TeamId: 0

Columns: 8
NumCellTypes: 6
NumSpecialTypes: 2
RandomSeed: -1
RandomSeed: -1
--- !u!114 &3508723250470608014
MonoBehaviour:
m_ObjectHideFlags: 0

m_Name:
m_EditorClassIdentifier:
ActuatorName: Match3 Actuator
RandomSeed: -1
HeuristicQuality: 0
--- !u!1 &3508723250774301855
GameObject:
m_ObjectHideFlags: 0

6
Project/Assets/ML-Agents/Examples/Match3/Prefabs/Match3VectorObs.prefab


VectorActionSpaceType: 0
hasUpgradedBrainParametersWithActionSpec: 1
m_Model: {fileID: 11400000, guid: 9e89b8e81974148d3b7213530d00589d, type: 3}
m_InferenceDevice: 0
m_InferenceDevice: 2
m_BehaviorType: 0
m_BehaviorName: Match3VectorObs
TeamId: 0

Columns: 8
NumCellTypes: 6
NumSpecialTypes: 2
RandomSeed: -1
RandomSeed: -1
--- !u!114 &2118285884327540680
MonoBehaviour:
m_ObjectHideFlags: 0

m_Name:
m_EditorClassIdentifier:
ActuatorName: Match3 Actuator
RandomSeed: -1
HeuristicQuality: 0

6
Project/Assets/ML-Agents/Examples/Match3/Prefabs/Match3VisualObs.prefab


VectorActionSpaceType: 0
hasUpgradedBrainParametersWithActionSpec: 1
m_Model: {fileID: 11400000, guid: 48d14da88fea74d0693c691c6e3f2e34, type: 3}
m_InferenceDevice: 0
m_InferenceDevice: 2
m_BehaviorType: 0
m_BehaviorName: Match3VisualObs
TeamId: 0

Columns: 8
NumCellTypes: 6
NumSpecialTypes: 2
RandomSeed: -1
RandomSeed: -1
--- !u!114 &3019509692332007783
MonoBehaviour:
m_ObjectHideFlags: 0

m_Name:
m_EditorClassIdentifier:
ActuatorName: Match3 Actuator
RandomSeed: -1
HeuristicQuality: 0

22
Project/Assets/ML-Agents/Examples/PushBlock/Prefabs/PushBlockArea.prefab


m_Name:
m_EditorClassIdentifier:
m_BrainParameters:
vectorObservationSize: 0
numStackedVectorObservations: 2
vectorActionSize: 07000000
vectorActionDescriptions: []
vectorActionSpaceType: 0
VectorObservationSize: 0
NumStackedVectorObservations: 2
m_ActionSpec:
m_NumContinuousActions: 0
BranchSizes: 07000000
VectorActionSize: 07000000
VectorActionDescriptions: []
VectorActionSpaceType: 0
hasUpgradedBrainParametersWithActionSpec: 1
m_InferenceDevice: 0
m_InferenceDevice: 2
m_UseChildActuators: 1
m_ObservableAttributeHandling: 0
--- !u!114 &114505490781873732
MonoBehaviour:
m_ObjectHideFlags: 0

agentParameters:
maxStep: 0
hasUpgradedFromAgentParameters: 1
maxStep: 5000
MaxStep: 5000
ground: {fileID: 1500989011945850}
area: {fileID: 1125452240183160}
areaBounds:

m_EditorClassIdentifier:
DecisionPeriod: 5
TakeActionsBetweenDecisions: 1
offsetStep: 0
--- !u!114 &4081319787948195948
MonoBehaviour:
m_ObjectHideFlags: 0

m_Script: {fileID: 11500000, guid: 3a6da8f78a394c6ab027688eab81e04d, type: 3}
m_Name:
m_EditorClassIdentifier:
debugCommandLineOverride:
--- !u!1 &1500989011945850
GameObject:
m_ObjectHideFlags: 0

8
Project/Assets/ML-Agents/Examples/PushBlock/Prefabs/PushBlockVisualArea.prefab


m_BrainParameters:
VectorObservationSize: 0
NumStackedVectorObservations: 1
m_ActionSpec:
m_NumContinuousActions: 0
BranchSizes: 07000000
hasUpgradedBrainParametersWithActionSpec: 1
m_InferenceDevice: 0
m_InferenceDevice: 2
m_UseChildActuators: 1
m_ObservableAttributeHandling: 0
--- !u!114 &114812843792483960
MonoBehaviour:

m_Width: 84
m_Height: 84
m_Grayscale: 0
m_ObservationStacks: 1
m_Compression: 1
--- !u!114 &9049837659352187721
MonoBehaviour:

22
Project/Assets/ML-Agents/Examples/Pyramids/Prefabs/AreaPB.prefab


m_Name:
m_EditorClassIdentifier:
m_BrainParameters:
vectorObservationSize: 4
numStackedVectorObservations: 1
vectorActionSize: 05000000
vectorActionDescriptions: []
vectorActionSpaceType: 0
VectorObservationSize: 4
NumStackedVectorObservations: 1
m_ActionSpec:
m_NumContinuousActions: 0
BranchSizes: 05000000
VectorActionSize: 05000000
VectorActionDescriptions: []
VectorActionSpaceType: 0
hasUpgradedBrainParametersWithActionSpec: 1
m_InferenceDevice: 0
m_InferenceDevice: 2
m_UseChildActuators: 1
m_ObservableAttributeHandling: 0
--- !u!114 &114937736047215868
MonoBehaviour:
m_ObjectHideFlags: 0

agentParameters:
maxStep: 0
hasUpgradedFromAgentParameters: 1
maxStep: 5000
MaxStep: 5000
area: {fileID: 1464170487903594}
areaSwitch: {fileID: 1432086782037750}
useVectorObs: 1

m_EditorClassIdentifier:
DecisionPeriod: 5
TakeActionsBetweenDecisions: 1
offsetStep: 0
--- !u!114 &5712624269609438939
MonoBehaviour:
m_ObjectHideFlags: 0

m_Script: {fileID: 11500000, guid: 3a6da8f78a394c6ab027688eab81e04d, type: 3}
m_Name:
m_EditorClassIdentifier:
debugCommandLineOverride:
--- !u!1 &1148882946833254
GameObject:
m_ObjectHideFlags: 0

43
Project/Assets/ML-Agents/Examples/Pyramids/Prefabs/VisualAreaPyramids.prefab


m_Name:
m_EditorClassIdentifier:
m_BrainParameters:
vectorObservationSize: 0
numStackedVectorObservations: 1
vectorActionSize: 05000000
vectorActionDescriptions: []
vectorActionSpaceType: 0
VectorObservationSize: 0
NumStackedVectorObservations: 1
m_ActionSpec:
m_NumContinuousActions: 0
BranchSizes: 05000000
VectorActionSize: 05000000
VectorActionDescriptions: []
VectorActionSpaceType: 0
hasUpgradedBrainParametersWithActionSpec: 1
m_InferenceDevice: 0
m_InferenceDevice: 2
m_TeamID: 0
m_useChildSensors: 1
TeamId: 0
m_UseChildSensors: 1
m_UseChildActuators: 1
m_ObservableAttributeHandling: 0
--- !u!114 &114741503533626942
MonoBehaviour:
m_ObjectHideFlags: 0

m_Script: {fileID: 11500000, guid: b8db44472779248d3be46895c4d562d5, type: 3}
m_Name:
m_EditorClassIdentifier:
maxStep: 5000
agentParameters:
maxStep: 0
hasUpgradedFromAgentParameters: 1
MaxStep: 5000
area: {fileID: 1055559745433172}
areaSwitch: {fileID: 1212218760704844}
useVectorObs: 0

m_Script: {fileID: 11500000, guid: 282f342c2ab144bf38be65d4d0c4e07d, type: 3}
m_Name:
m_EditorClassIdentifier:
camera: {fileID: 20712684238256298}
sensorName: CameraSensor
width: 84
height: 84
grayscale: 0
compression: 1
m_Camera: {fileID: 20712684238256298}
m_SensorName: CameraSensor
m_Width: 84
m_Height: 84
m_Grayscale: 0
m_ObservationStacks: 1
m_Compression: 1
--- !u!114 &9216598927300453297
MonoBehaviour:
m_ObjectHideFlags: 0

m_Name:
m_EditorClassIdentifier:
DecisionPeriod: 5
RepeatAction: 1
offsetStep: 0
TakeActionsBetweenDecisions: 1
--- !u!1 &1747856067778386
GameObject:
m_ObjectHideFlags: 0

22
Project/Assets/ML-Agents/Examples/Reacher/Prefabs/Agent.prefab


m_Name:
m_EditorClassIdentifier:
m_BrainParameters:
vectorObservationSize: 33
numStackedVectorObservations: 1
vectorActionSize: 04000000
vectorActionDescriptions: []
vectorActionSpaceType: 1
VectorObservationSize: 33
NumStackedVectorObservations: 1
m_ActionSpec:
m_NumContinuousActions: 4
BranchSizes:
VectorActionSize: 04000000
VectorActionDescriptions: []
VectorActionSpaceType: 1
hasUpgradedBrainParametersWithActionSpec: 1
m_InferenceDevice: 0
m_InferenceDevice: 2
m_UseChildActuators: 1
m_ObservableAttributeHandling: 0
--- !u!114 &114955921823023820
MonoBehaviour:
m_ObjectHideFlags: 0

agentParameters:
maxStep: 0
hasUpgradedFromAgentParameters: 1
maxStep: 4000
MaxStep: 4000
pendulumA: {fileID: 1644872085946016}
pendulumB: {fileID: 1053261483945176}
hand: {fileID: 1654288206095398}

m_EditorClassIdentifier:
DecisionPeriod: 4
TakeActionsBetweenDecisions: 1
offsetStep: 0
--- !u!114 &7840105453417110232
MonoBehaviour:
m_ObjectHideFlags: 0

m_Script: {fileID: 11500000, guid: 3a6da8f78a394c6ab027688eab81e04d, type: 3}
m_Name:
m_EditorClassIdentifier:
debugCommandLineOverride:
--- !u!1 &1644872085946016
GameObject:
m_ObjectHideFlags: 0

29
Project/Assets/ML-Agents/Examples/SharedAssets/Scripts/ModelOverrider.cs


const string k_CommandLineModelOverrideDirectoryFlag = "--mlagents-override-model-directory";
const string k_CommandLineModelOverrideExtensionFlag = "--mlagents-override-model-extension";
const string k_CommandLineQuitAfterEpisodesFlag = "--mlagents-quit-after-episodes";
const string k_CommandLineQuitAfterSeconds = "--mlagents-quit-after-seconds";
const string k_CommandLineQuitOnLoadFailure = "--mlagents-quit-on-load-failure";
// The attached Agent

// Max episodes to run. Only used if > 0
// Will default to 1 if override models are specified, otherwise 0.
int m_MaxEpisodes;
// Deadline - exit if the time exceeds this
DateTime m_Deadline = DateTime.MaxValue;
int m_NumSteps;
int m_PreviousNumSteps;

void GetAssetPathFromCommandLine()
{
var maxEpisodes = 0;
var timeoutSeconds = 0;
string[] commandLineArgsOverride = null;
if (!string.IsNullOrEmpty(debugCommandLineOverride) && Application.isEditor)
{

{
Int32.TryParse(args[i + 1], out maxEpisodes);
}
else if (args[i] == k_CommandLineQuitAfterSeconds && i < args.Length - 1)
{
Int32.TryParse(args[i + 1], out timeoutSeconds);
}
else if (args[i] == k_CommandLineQuitOnLoadFailure)
{
m_QuitOnLoadFailure = true;

m_MaxEpisodes = maxEpisodes > 0 ? maxEpisodes : 1;
Debug.Log($"setting m_MaxEpisodes to {maxEpisodes}");
}
if (timeoutSeconds > 0)
{
m_Deadline = DateTime.Now + TimeSpan.FromSeconds(timeoutSeconds);
Debug.Log($"setting deadline to {timeoutSeconds} from now.");
}
}
void OnEnable()

EditorApplication.isPlaying = false;
#endif
}
else if (DateTime.Now >= m_Deadline)
{
Debug.Log(
$"Deadline exceeded. " +
$"{TotalCompletedEpisodes}/{m_MaxEpisodes} episodes and " +
$"{TotalNumSteps}/{m_MaxEpisodes * m_Agent.MaxStep} steps completed. Exiting.");
Application.Quit(0);
#if UNITY_EDITOR
EditorApplication.isPlaying = false;
#endif
}
m_NumSteps++;
}

28
Project/Assets/ML-Agents/Examples/Soccer/Prefabs/SoccerFieldTwos.prefab


m_BrainParameters:
VectorObservationSize: 0
NumStackedVectorObservations: 1
m_ActionSpec:
m_NumContinuousActions: 0
BranchSizes: 030000000300000003000000
hasUpgradedBrainParametersWithActionSpec: 1
m_InferenceDevice: 0
m_InferenceDevice: 2
m_UseChildActuators: 1
m_ObservableAttributeHandling: 0
--- !u!114 &114492261207303438
MonoBehaviour:

m_BrainParameters:
VectorObservationSize: 0
NumStackedVectorObservations: 1
m_ActionSpec:
m_NumContinuousActions: 0
BranchSizes: 030000000300000003000000
hasUpgradedBrainParametersWithActionSpec: 1
m_InferenceDevice: 0
m_InferenceDevice: 2
m_UseChildActuators: 1
m_ObservableAttributeHandling: 0
--- !u!114 &114850431417842684
MonoBehaviour:

m_BrainParameters:
VectorObservationSize: 0
NumStackedVectorObservations: 1
m_ActionSpec:
m_NumContinuousActions: 0
BranchSizes: 030000000300000003000000
hasUpgradedBrainParametersWithActionSpec: 1
m_InferenceDevice: 0
m_InferenceDevice: 2
m_UseChildActuators: 1
m_ObservableAttributeHandling: 0
--- !u!114 &5320024511406682322
MonoBehaviour:

m_BrainParameters:
VectorObservationSize: 0
NumStackedVectorObservations: 1
m_ActionSpec:
m_NumContinuousActions: 0
BranchSizes: 030000000300000003000000
hasUpgradedBrainParametersWithActionSpec: 1
m_InferenceDevice: 0
m_InferenceDevice: 2
m_UseChildActuators: 1
m_ObservableAttributeHandling: 0
--- !u!114 &5379409612883756837
MonoBehaviour:

21
Project/Assets/ML-Agents/Examples/Soccer/Prefabs/StrikersVsGoalieField.prefab


m_BrainParameters:
VectorObservationSize: 0
NumStackedVectorObservations: 1
m_ActionSpec:
m_NumContinuousActions: 0
BranchSizes: 030000000300000003000000
hasUpgradedBrainParametersWithActionSpec: 1
m_InferenceDevice: 0
m_InferenceDevice: 2
m_UseChildActuators: 1
m_ObservableAttributeHandling: 0
--- !u!114 &114492261207303438
MonoBehaviour:

m_BrainParameters:
VectorObservationSize: 0
NumStackedVectorObservations: 1
m_ActionSpec:
m_NumContinuousActions: 0
BranchSizes: 030000000300000003000000
hasUpgradedBrainParametersWithActionSpec: 1
m_InferenceDevice: 0
m_InferenceDevice: 2
m_UseChildActuators: 1
m_ObservableAttributeHandling: 0
--- !u!114 &114850431417842684
MonoBehaviour:

m_BrainParameters:
VectorObservationSize: 0
NumStackedVectorObservations: 1
m_ActionSpec:
m_NumContinuousActions: 0
BranchSizes: 030000000300000003000000
hasUpgradedBrainParametersWithActionSpec: 1
m_InferenceDevice: 0
m_InferenceDevice: 2
m_UseChildActuators: 1
m_ObservableAttributeHandling: 0
--- !u!114 &5379409612883756837
MonoBehaviour:

18
Project/Assets/ML-Agents/Examples/Tennis/Prefabs/TennisArea.prefab


m_BrainParameters:
VectorObservationSize: 9
NumStackedVectorObservations: 3
m_ActionSpec:
m_NumContinuousActions: 3
BranchSizes:
hasUpgradedBrainParametersWithActionSpec: 1
m_InferenceDevice: 0
m_InferenceDevice: 2
m_UseChildActuators: 1
m_ObservableAttributeHandling: 0
--- !u!114 &114915946461826994
MonoBehaviour:
m_ObjectHideFlags: 0

m_Script: {fileID: 11500000, guid: 3a6da8f78a394c6ab027688eab81e04d, type: 3}
m_Name:
m_EditorClassIdentifier:
debugCommandLineOverride:
--- !u!1 &1194790474478638
GameObject:
m_ObjectHideFlags: 0

m_BrainParameters:
VectorObservationSize: 9
NumStackedVectorObservations: 3
m_ActionSpec:
m_NumContinuousActions: 3
BranchSizes:
hasUpgradedBrainParametersWithActionSpec: 1
m_InferenceDevice: 0
m_InferenceDevice: 2
m_UseChildActuators: 1
m_ObservableAttributeHandling: 0
--- !u!114 &114800310164848628
MonoBehaviour:
m_ObjectHideFlags: 0

m_Script: {fileID: 11500000, guid: 3a6da8f78a394c6ab027688eab81e04d, type: 3}
m_Name:
m_EditorClassIdentifier:
debugCommandLineOverride:
--- !u!1 &1969551055586186
GameObject:
m_ObjectHideFlags: 0

12
Project/Assets/ML-Agents/Examples/Walker/Prefabs/Ragdoll/WalkerRagdollBase.prefab


m_BrainParameters:
VectorObservationSize: 243
NumStackedVectorObservations: 1
m_ActionSpec:
m_NumContinuousActions: 39
BranchSizes:
hasUpgradedBrainParametersWithActionSpec: 1
m_InferenceDevice: 0
m_InferenceDevice: 2
m_UseChildActuators: 1
m_ObservableAttributeHandling: 0
--- !u!114 &7408209125961349353
MonoBehaviour:

maxStep: 0
hasUpgradedFromAgentParameters: 1
MaxStep: 5000
targetWalkingSpeed: 10
m_TargetWalkingSpeed: 10
walkDirectionMethod: 0
worldDirToWalk: {x: 1, y: 0, z: 0}
worldPosToWalkTo: {x: 0, y: 0, z: 0}
target: {fileID: 0}
hips: {fileID: 895268871264836332}
chest: {fileID: 7933235354845945071}

5
Project/Assets/ML-Agents/Examples/Walker/Prefabs/Ragdoll/WalkerRagdollDySingleSpeedVariant.prefab


value:
objectReference: {fileID: 11400000, guid: 47e7c480450ec4dcd9e4a04124e14ed4,
type: 3}
- target: {fileID: 895268871377934297, guid: 765582efd9dda46ed98564603316353f,
type: 3}
propertyPath: m_InferenceDevice
value: 2
objectReference: {fileID: 0}
- target: {fileID: 895268871377934298, guid: 765582efd9dda46ed98564603316353f,
type: 3}
propertyPath: m_LocalPosition.x

7
Project/Assets/ML-Agents/Examples/WallJump/Prefabs/WallJumpArea.prefab


m_BrainParameters:
VectorObservationSize: 4
NumStackedVectorObservations: 6
m_ActionSpec:
m_NumContinuousActions: 0
BranchSizes: 03000000030000000300000002000000
hasUpgradedBrainParametersWithActionSpec: 1
m_InferenceDevice: 0
m_InferenceDevice: 2
m_UseChildActuators: 1
m_ObservableAttributeHandling: 0
--- !u!114 &114925928594762506
MonoBehaviour:

6
Project/Assets/ML-Agents/Examples/Worm/Prefabs/WormBasePrefab.prefab


m_BrainParameters:
VectorObservationSize: 64
NumStackedVectorObservations: 1
m_ActionSpec:
m_NumContinuousActions: 9
BranchSizes:
hasUpgradedBrainParametersWithActionSpec: 1
m_InferenceDevice: 0
m_InferenceDevice: 2
m_BehaviorType: 0
m_BehaviorName: WormDynamic
TeamId: 0

27
com.unity.ml-agents/CHANGELOG.md


and this project adheres to
[Semantic Versioning](http://semver.org/spec/v2.0.0.html).
## [Unreleased]
### Major Changes
#### com.unity.ml-agents (C#)
#### ml-agents / ml-agents-envs / gym-unity (Python)
## [Unreleased]
### Minor Changes
#### com.unity.ml-agents / com.unity.ml-agents.extensions (C#)
#### ml-agents / ml-agents-envs / gym-unity (Python)
### Bug Fixes
#### com.unity.ml-agents (C#)
#### ml-agents / ml-agents-envs / gym-unity (Python)
## [1.8.0-preview] - 2021-02-17
- A plugin system for `mlagents-learn` has been added. You can now define custom
`StatsWriter` implementations and register them to be called during training.
More types of plugins will be added in the future. (#4788)
### Minor Changes
#### com.unity.ml-agents / com.unity.ml-agents.extensions (C#)

- The Barracuda dependency was upgraded to 1.3.0. (#4898)
- Added `ActuatorComponent.CreateActuators`, and deprecated `ActuatorComponent.CreateActuator`. The
default implementation will wrap `ActuatorComponent.CreateActuator` in an array and return that. (#4899)
- `InferenceDevice.Burst` was added, indicating that Agent's model will be run using Barracuda's Burst backend.
This is the default for new Agents, but existing ones that use `InferenceDevice.CPU` should update to
`InferenceDevice.Burst`. (#4925)
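  As an illustration of that migration, here is a minimal sketch, assuming the public `InferenceDevice` property that `BehaviorParameters` exposes for the serialized `m_InferenceDevice` field seen in the prefab diffs below (the component lookup is illustrative):

```csharp
using Unity.MLAgents.Policies;
using UnityEngine;

public class BurstMigrationSketch : MonoBehaviour
{
    void Awake()
    {
        // Hypothetical helper: move a legacy agent off the old CPU backend
        // onto the Burst backend introduced in #4925.
        var behaviorParameters = GetComponent<BehaviorParameters>();
        if (behaviorParameters.InferenceDevice == InferenceDevice.CPU)
        {
            behaviorParameters.InferenceDevice = InferenceDevice.Burst;
        }
    }
}
```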
#### ml-agents / ml-agents-envs / gym-unity (Python)
- Tensorboard now logs the Environment Reward as both a scalar and a histogram. (#4878)

- The `mlagents_envs` API has changed: `BehaviorSpec` now has an `observation_specs` property containing a list of `ObservationSpec`. For more information on `ObservationSpec` see [here](https://github.com/Unity-Technologies/ml-agents/blob/master/docs/Python-API.md#behaviorspec). (#4763, #4825)
### Bug Fixes
#### com.unity.ml-agents (C#)

- Removed unnecessary memory allocations in `SideChannelManager.GetSideChannelMessage()` (#4886)
- Removed several memory allocations that happened during inference. On a test scene, this
reduced the amount of memory allocated by approximately 25%. (#4887)
- Removed several memory allocations that happened during inference with discrete actions. (#4922)
- Properly catch permission errors when writing timer files. (#4921)
- Unexpected exceptions during training initialization and shutdown are now logged. If you see
"noisy" logs, please let us know! (#4930, #4935)
#### ml-agents / ml-agents-envs / gym-unity (Python)
- Fixed a bug that would cause an exception when `RunOptions` was deserialized via `pickle`. (#4842)

while waiting for a connection, and raises a better error message if it crashes. (#4880)
- Passing a `-logfile` option in the `--env-args` option to `mlagents-learn` is
no longer overwritten. (#4880)
- The `load_weights` function was being called unnecessarily often in the Ghost Trainer leading to training slowdowns. (#4934)
## [1.7.2-preview] - 2020-12-22

49
com.unity.ml-agents/Runtime/Academy.cs


{
// We try to exchange the first message with Python. If this fails, it means
// no Python Process is ready to train the environment. In this case, the
//environment must use Inference.
// environment must use Inference.
bool initSuccessful = false;
var communicatorInitParams = new CommunicatorInitParameters
{
unityCommunicationVersion = k_ApiVersion,
unityPackageVersion = k_PackageVersion,
name = "AcademySingleton",
CSharpCapabilities = new UnityRLCapabilities()
};
var unityRlInitParameters = Communicator.Initialize(
new CommunicatorInitParameters
{
unityCommunicationVersion = k_ApiVersion,
unityPackageVersion = k_PackageVersion,
name = "AcademySingleton",
CSharpCapabilities = new UnityRLCapabilities()
});
UnityEngine.Random.InitState(unityRlInitParameters.seed);
// We might have inference-only Agents, so set the seed for them too.
m_InferenceSeed = unityRlInitParameters.seed;
TrainerCapabilities = unityRlInitParameters.TrainerCapabilities;
TrainerCapabilities.WarnOnPythonMissingBaseRLCapabilities();
initSuccessful = Communicator.Initialize(
communicatorInitParams,
out var unityRlInitParameters
);
if (initSuccessful)
{
UnityEngine.Random.InitState(unityRlInitParameters.seed);
// We might have inference-only Agents, so set the seed for them too.
m_InferenceSeed = unityRlInitParameters.seed;
TrainerCapabilities = unityRlInitParameters.TrainerCapabilities;
TrainerCapabilities.WarnOnPythonMissingBaseRLCapabilities();
}
else
{
Debug.Log($"Couldn't connect to trainer on port {port} using API version {k_ApiVersion}. Will perform inference instead.");
Communicator = null;
}
catch
catch (Exception ex)
Debug.Log($"" +
$"Couldn't connect to trainer on port {port} using API version {k_ApiVersion}. " +
"Will perform inference instead."
);
Debug.Log($"Unexpected exception when trying to initialize communication: {ex}\nWill perform inference instead.");
if (Communicator != null)
{
Communicator.QuitCommandReceived += OnQuitCommandReceived;

5
com.unity.ml-agents/Runtime/Communicator/ICommunicator.cs


/// Sends the academy parameters through the Communicator.
/// Is used by the academy to send the AcademyParameters to the communicator.
/// </summary>
/// <returns>The External Initialization Parameters received.</returns>
/// <returns>Whether the connection was successful.</returns>
UnityRLInitParameters Initialize(CommunicatorInitParameters initParameters);
/// <param name="initParametersOut">The External Initialization Parameters received</param>
bool Initialize(CommunicatorInitParameters initParameters, out UnityRLInitParameters initParametersOut);
/// <summary>
/// Registers a new Brain to the Communicator.
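A minimal sketch of the new call pattern for package-internal callers, mirroring the Academy.cs hunk above (`ICommunicator` and `CommunicatorInitParameters` are internal types; the helper name is hypothetical):

```csharp
// Initialize now reports success through its return value instead of
// throwing when no Python trainer is listening.
static bool TryConnect(ICommunicator communicator, CommunicatorInitParameters initParams)
{
    if (communicator.Initialize(initParams, out var unityRlInitParameters))
    {
        // Seed inference-only Agents the same way Academy.cs does.
        UnityEngine.Random.InitState(unityRlInitParameters.seed);
        return true;
    }
    // No trainer connected; the caller should fall back to inference.
    return false;
}
```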

147
com.unity.ml-agents/Runtime/Communicator/RpcCommunicator.cs


internal static bool CheckCommunicationVersionsAreCompatible(
string unityCommunicationVersion,
string pythonApiVersion,
string pythonLibraryVersion)
string pythonApiVersion
)
{
var unityVersion = new Version(unityCommunicationVersion);
var pythonVersion = new Version(pythonApiVersion);

/// Sends the initialization parameters through the Communicator.
/// Is used by the academy to send initialization parameters to the communicator.
/// </summary>
/// <returns>The External Initialization Parameters received.</returns>
/// <returns>Whether the connection was successful.</returns>
public UnityRLInitParameters Initialize(CommunicatorInitParameters initParameters)
/// <param name="initParametersOut">The External Initialization Parameters received.</param>
public bool Initialize(CommunicatorInitParameters initParameters, out UnityRLInitParameters initParametersOut)
{
var academyParameters = new UnityRLInitializationOutputProto
{

{
RlInitializationOutput = academyParameters
},
out input);
var pythonPackageVersion = initializationInput.RlInitializationInput.PackageVersion;
var pythonCommunicationVersion = initializationInput.RlInitializationInput.CommunicationVersion;
var unityCommunicationVersion = initParameters.unityCommunicationVersion;
TrainingAnalytics.SetTrainerInformation(pythonPackageVersion, pythonCommunicationVersion);
out input
);
}
catch (Exception ex)
{
if (ex is RpcException rpcException)
{
var communicationIsCompatible = CheckCommunicationVersionsAreCompatible(unityCommunicationVersion,
pythonCommunicationVersion,
pythonPackageVersion);
// Initialization succeeded part-way. The most likely cause is a mismatch between the communicator
// API strings, so log an explicit warning if that's the case.
if (initializationInput != null && input == null)
{
if (!communicationIsCompatible)
switch (rpcException.Status.StatusCode)
Debug.LogWarningFormat(
"Communication protocol between python ({0}) and Unity ({1}) have different " +
"versions which make them incompatible. Python library version: {2}.",
pythonCommunicationVersion, initParameters.unityCommunicationVersion,
pythonPackageVersion
);
case StatusCode.Unavailable:
// This is the common case where there's no trainer to connect to.
break;
case StatusCode.DeadlineExceeded:
// We don't currently set a deadline for connection, but likely will in the future.
break;
default:
Debug.Log($"Unexpected gRPC exception when trying to initialize communication: {rpcException}");
break;
else
{
Debug.LogWarningFormat(
"Unknown communication error between Python. Python communication protocol: {0}, " +
"Python library version: {1}.",
pythonCommunicationVersion,
pythonPackageVersion
);
}
throw new UnityAgentsException("ICommunicator.Initialize() failed.");
else
{
Debug.Log($"Unexpected exception when trying to initialize communication: {ex}");
}
initParametersOut = new UnityRLInitParameters();
return false;
catch
var pythonPackageVersion = initializationInput.RlInitializationInput.PackageVersion;
var pythonCommunicationVersion = initializationInput.RlInitializationInput.CommunicationVersion;
TrainingAnalytics.SetTrainerInformation(pythonPackageVersion, pythonCommunicationVersion);
var communicationIsCompatible = CheckCommunicationVersionsAreCompatible(
initParameters.unityCommunicationVersion,
pythonCommunicationVersion
);
// Initialization succeeded part-way. The most likely cause is a mismatch between the communicator
// API strings, so log an explicit warning if that's the case.
if (initializationInput != null && input == null)
var exceptionMessage = "The Communicator was unable to connect. Please make sure the External " +
"process is ready to accept communication with Unity.";
// Check for common error condition and add details to the exception message.
var httpProxy = Environment.GetEnvironmentVariable("HTTP_PROXY");
var httpsProxy = Environment.GetEnvironmentVariable("HTTPS_PROXY");
if (httpProxy != null || httpsProxy != null)
if (!communicationIsCompatible)
{
Debug.LogWarningFormat(
"Communication protocol between python ({0}) and Unity ({1}) have different " +
"versions which make them incompatible. Python library version: {2}.",
pythonCommunicationVersion, initParameters.unityCommunicationVersion,
pythonPackageVersion
);
}
else
exceptionMessage += " Try removing HTTP_PROXY and HTTPS_PROXY from the" +
"environment variables and try again.";
Debug.LogWarningFormat(
"Unknown communication error between Python. Python communication protocol: {0}, " +
"Python library version: {1}.",
pythonCommunicationVersion,
pythonPackageVersion
);
throw new UnityAgentsException(exceptionMessage);
initParametersOut = new UnityRLInitParameters();
return false;
return initializationInput.RlInitializationInput.ToUnityRLInitParameters();
initParametersOut = initializationInput.RlInitializationInput.ToUnityRLInitParameters();
return true;
}
/// <summary>

SendCommandEvent(rlInput.Command);
}
UnityInputProto Initialize(UnityOutputProto unityOutput,
out UnityInputProto unityInput)
UnityInputProto Initialize(UnityOutputProto unityOutput, out UnityInputProto unityInput)
{
#if UNITY_EDITOR || UNITY_STANDALONE_WIN || UNITY_STANDALONE_OSX || UNITY_STANDALONE_LINUX
m_IsOpen = true;

}
return result.UnityInput;
#else
throw new UnityAgentsException(
"You cannot perform training on this platform.");
throw new UnityAgentsException("You cannot perform training on this platform.");
#endif
}

{
return null;
}
try
{
var message = m_Client.Exchange(WrapMessage(unityOutput, 200));

QuitCommandReceived?.Invoke();
return message.UnityInput;
}
catch
catch (Exception ex)
if (ex is RpcException rpcException)
{
// Log more verbose errors if they're something the user can possibly do something about.
switch (rpcException.Status.StatusCode)
{
case StatusCode.Unavailable:
// This can happen when python disconnects. Ignore it to avoid noisy logs.
break;
case StatusCode.ResourceExhausted:
// This happens if the message body is too large. There's no way to
// gracefully handle this, but at least we can show the message and the
// user can try to reduce the number of agents or observation sizes.
Debug.LogError($"GRPC Exception: {rpcException.Message}. Disconnecting from trainer.");
break;
default:
// Other unknown errors. Log at INFO level.
Debug.Log($"GRPC Exception: {rpcException.Message}. Disconnecting from trainer.");
break;
}
}
else
{
// Fall-through for other error types
Debug.LogError($"Communication Exception: {ex.Message}. Disconnecting from trainer.");
}
m_IsOpen = false;
QuitCommandReceived?.Invoke();
return null;

129
com.unity.ml-agents/Runtime/Inference/ApplierImpl.cs


using System;
using System.Collections.Generic;
using System.Linq;
using Unity.MLAgents.Inference.Utils;

{
readonly int[] m_ActionSize;
readonly Multinomial m_Multinomial;
readonly ITensorAllocator m_Allocator;
readonly int[] m_StartActionIndices;
readonly float[] m_CdfBuffer;
m_Allocator = allocator;
m_StartActionIndices = Utilities.CumSum(m_ActionSize);
// Scratch space for computing the cumulative distribution function.
// In order to reuse it, make it the size of the largest branch.
var largestBranch = Mathf.Max(m_ActionSize);
m_CdfBuffer = new float[largestBranch];
//var tensorDataProbabilities = tensorProxy.Data as float[,];
var idActionPairList = actionIds as List<int> ?? actionIds.ToList();
var batchSize = idActionPairList.Count;
var actionValues = new float[batchSize, m_ActionSize.Length];
var startActionIndices = Utilities.CumSum(m_ActionSize);
for (var actionIndex = 0; actionIndex < m_ActionSize.Length; actionIndex++)
{
var nBranchAction = m_ActionSize[actionIndex];
var actionProbs = new TensorProxy()
{
valueType = TensorProxy.TensorType.FloatingPoint,
shape = new long[] { batchSize, nBranchAction },
data = m_Allocator.Alloc(new TensorShape(batchSize, nBranchAction))
};
for (var batchIndex = 0; batchIndex < batchSize; batchIndex++)
{
for (var branchActionIndex = 0;
branchActionIndex < nBranchAction;
branchActionIndex++)
{
actionProbs.data[batchIndex, branchActionIndex] =
tensorProxy.data[batchIndex, startActionIndices[actionIndex] + branchActionIndex];
}
}
var outputTensor = new TensorProxy()
{
valueType = TensorProxy.TensorType.FloatingPoint,
shape = new long[] { batchSize, 1 },
data = m_Allocator.Alloc(new TensorShape(batchSize, 1))
};
Eval(actionProbs, outputTensor, m_Multinomial);
for (var ii = 0; ii < batchSize; ii++)
{
actionValues[ii, actionIndex] = outputTensor.data[ii, 0];
}
actionProbs.data.Dispose();
outputTensor.data.Dispose();
}
var agentIndex = 0;
for (var i = 0; i < actionIds.Count; i++)
{

var discreteBuffer = actionBuffer.DiscreteActions;
for (var j = 0; j < m_ActionSize.Length; j++)
{
discreteBuffer[j] = (int)actionValues[agentIndex, j];
ComputeCdf(tensorProxy, agentIndex, m_StartActionIndices[j], m_ActionSize[j]);
discreteBuffer[j] = m_Multinomial.Sample(m_CdfBuffer, m_ActionSize[j]);
}
}
agentIndex++;

/// <summary>
/// Draw samples from a multinomial distribution based on log-probabilities specified
/// in tensor src. The samples will be saved in the dst tensor.
/// Compute the cumulative distribution function for a given agent's action
/// given the log-probabilities.
/// The results are stored in m_CdfBuffer, which is the size of the largest action's number of branches.
/// <param name="src">2-D tensor with shape batch_size x num_classes</param>
/// <param name="dst">Allocated tensor with size batch_size x num_samples</param>
/// <param name="multinomial">Multinomial object used to sample values</param>
/// <exception cref="NotImplementedException">
/// Multinomial doesn't support integer tensors
/// </exception>
/// <exception cref="ArgumentException">Issue with tensor shape or type</exception>
/// <exception cref="ArgumentNullException">
/// At least one of the tensors is not allocated
/// </exception>
public static void Eval(TensorProxy src, TensorProxy dst, Multinomial multinomial)
/// <param name="logProbs"></param>
/// <param name="batch">Index of the agent being considered</param>
/// <param name="channelOffset">Offset into the tensor's channel.</param>
/// <param name="branchSize"></param>
internal void ComputeCdf(TensorProxy logProbs, int batch, int channelOffset, int branchSize)
if (src.DataType != typeof(float))
{
throw new NotImplementedException("Only float tensors are currently supported");
}
if (src.valueType != dst.valueType)
{
throw new ArgumentException(
"Source and destination tensors have different types!");
}
if (src.data == null || dst.data == null)
{
throw new ArgumentNullException();
}
if (src.data.batch != dst.data.batch)
// Find the class maximum
var maxProb = float.NegativeInfinity;
for (var cls = 0; cls < branchSize; ++cls)
throw new ArgumentException("Batch size for input and output data is different!");
maxProb = Mathf.Max(logProbs.data[batch, cls + channelOffset], maxProb);
var cdf = new float[src.data.channels];
for (var batch = 0; batch < src.data.batch; ++batch)
// Sum the log probabilities and compute CDF
var sumProb = 0.0f;
for (var cls = 0; cls < branchSize; ++cls)
// Find the class maximum
var maxProb = float.NegativeInfinity;
for (var cls = 0; cls < src.data.channels; ++cls)
{
maxProb = Mathf.Max(src.data[batch, cls], maxProb);
}
// Sum the log probabilities and compute CDF
var sumProb = 0.0f;
for (var cls = 0; cls < src.data.channels; ++cls)
{
sumProb += Mathf.Exp(src.data[batch, cls] - maxProb);
cdf[cls] = sumProb;
}
// Generate the samples
for (var sample = 0; sample < dst.data.channels; ++sample)
{
dst.data[batch, sample] = multinomial.Sample(cdf);
}
sumProb += Mathf.Exp(logProbs.data[batch, cls + channelOffset] - maxProb);
m_CdfBuffer[cls] = sumProb;
}
}
}
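To make the new CDF construction concrete, here is a standalone sketch (a hypothetical helper, not part of the change) of the same max-subtracted exponentiation that `ComputeCdf` performs over one branch:

```csharp
using System;

internal static class CdfSketch
{
    // Subtract the max log-prob for numerical stability, exponentiate,
    // and accumulate into an (unnormalized) cumulative mass function.
    public static float[] CdfFromLogProbs(float[] logProbs)
    {
        var maxLogProb = float.NegativeInfinity;
        foreach (var lp in logProbs)
        {
            maxLogProb = Math.Max(lp, maxLogProb);
        }
        var cdf = new float[logProbs.Length];
        var sum = 0f;
        for (var i = 0; i < logProbs.Length; i++)
        {
            sum += (float)Math.Exp(logProbs[i] - maxLogProb);
            cdf[i] = sum;
        }
        // e.g. the logs of {0.1, 0.2, 0.7} yield roughly {0.14, 0.43, 1.43};
        // Multinomial.Sample only needs monotonicity, not normalization.
        return cdf;
    }
}
```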

22
com.unity.ml-agents/Runtime/Inference/ModelRunner.cs


using System;
using System.Collections.Generic;
using Unity.Barracuda;
using UnityEngine.Profiling;

public ModelRunner(
NNModel model,
ActionSpec actionSpec,
InferenceDevice inferenceDevice = InferenceDevice.CPU,
InferenceDevice inferenceDevice,
int seed = 0)
{
Model barracudaModel;

D.logEnabled = m_Verbose;
barracudaModel = ModelLoader.Load(model);
var executionDevice = inferenceDevice == InferenceDevice.GPU
? WorkerFactory.Type.ComputePrecompiled
: WorkerFactory.Type.CSharp;
WorkerFactory.Type executionDevice;
switch (inferenceDevice)
{
case InferenceDevice.CPU:
executionDevice = WorkerFactory.Type.CSharp;
break;
case InferenceDevice.GPU:
executionDevice = WorkerFactory.Type.ComputePrecompiled;
break;
case InferenceDevice.Burst:
executionDevice = WorkerFactory.Type.CSharpBurst;
break;
default:
executionDevice = WorkerFactory.Type.CSharpBurst;
break;
}
m_Engine = WorkerFactory.CreateWorker(executionDevice, barracudaModel, m_Verbose);
}
else

17
com.unity.ml-agents/Runtime/Inference/Utils/Multinomial.cs


        /// to be monotonic (always increasing). If the CMF is scaled, then the last entry in
        /// the array will be 1.0.
        /// </param>
-       /// <returns>A sampled index from the CMF ranging from 0 to cmf.Length-1.</returns>
-       public int Sample(float[] cmf)
+       /// <param name="branchSize">The number of possible branches, i.e. the effective size of the cmf array.</param>
+       /// <returns>A sampled index from the CMF ranging from 0 to branchSize-1.</returns>
+       public int Sample(float[] cmf, int branchSize)
        {
-           var p = (float)m_Random.NextDouble() * cmf[cmf.Length - 1];
+           var p = (float)m_Random.NextDouble() * cmf[branchSize - 1];
            var cls = 0;
            while (cmf[cls] < p)
            {
                ++cls;
            }
            return cls;
        }

+       /// <summary>
+       /// Samples from the Multinomial distribution defined by the provided cumulative
+       /// mass function.
+       /// </summary>
+       /// <returns>A sampled index from the CMF ranging from 0 to cmf.Length-1.</returns>
+       public int Sample(float[] cmf)
+       {
+           return Sample(cmf, cmf.Length);
+       }
    }
}
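
The two-argument overload exists because the CDF buffer is sized for the largest branch, so only the first branchSize entries are meaningful. A NumPy sketch of the same inverse-CDF linear scan (assumes the compute_cdf sketch shown earlier; not the package's API):

```python
import numpy as np

def sample(cdf: np.ndarray, branch_size: int, rng: np.random.Generator) -> int:
    """Linear-scan inverse-CDF sampling over the first branch_size entries."""
    p = rng.random() * cdf[branch_size - 1]  # scale the uniform draw by the normalizer
    cls = 0
    while cdf[cls] < p:
        cls += 1
    return cls

rng = np.random.default_rng(2018)
cdf = np.cumsum([0.1, 0.2, 0.7])
print(sample(cdf, branch_size=3, rng=rng))
```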

12
com.unity.ml-agents/Runtime/Policies/BarracudaPolicy.cs


    public enum InferenceDevice
    {
        /// <summary>
-       /// CPU inference
+       /// CPU inference. Corresponds to WorkerFactory.Type.CSharp in Barracuda.
+       /// Burst is recommended instead; this is kept for legacy compatibility.
        /// </summary>
        CPU = 0,

        /// <summary>
-       /// GPU inference
+       /// GPU inference. Corresponds to WorkerFactory.Type.ComputePrecompiled in Barracuda.
        /// </summary>
-       GPU = 1
+       GPU = 1,
+
+       /// <summary>
+       /// CPU inference using Burst. Corresponds to WorkerFactory.Type.CSharpBurst in Barracuda.
+       /// </summary>
+       Burst = 2,
    }

2
com.unity.ml-agents/Runtime/Policies/BehaviorParameters.cs


        }

        [HideInInspector, SerializeField]
-       InferenceDevice m_InferenceDevice;
+       InferenceDevice m_InferenceDevice = InferenceDevice.Burst;

        /// <summary>
        /// How inference is performed for this Agent's model.

14
com.unity.ml-agents/Runtime/SideChannels/SideChannel.cs


using System.Collections.Generic;
using System;
using UnityEngine;
namespace Unity.MLAgents.SideChannels
{

        internal void ProcessMessage(byte[] msg)
        {
-           using (var incomingMsg = new IncomingMessage(msg))
-           {
-               OnMessageReceived(incomingMsg);
-           }
+           try
+           {
+               using (var incomingMsg = new IncomingMessage(msg))
+               {
+                   OnMessageReceived(incomingMsg);
+               }
+           }
+           catch (Exception ex)
+           {
+               // Catch all errors in the sidechannel processing, so that a single
+               // bad SideChannel implementation doesn't take everything down with it.
+               Debug.LogError($"Error processing SideChannel message: {ex}.\nThe message will be skipped.");
+           }
        }
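
The same isolate-and-log pattern is worth applying on the Python side, where on_message_received is the mirror of the C# handler above. A minimal sketch against the documented mlagents_envs side-channel API (the channel UUID is arbitrary here, and the try/except is our addition, not library behavior):

```python
import uuid
from mlagents_envs.side_channel.side_channel import (
    SideChannel,
    IncomingMessage,
    OutgoingMessage,
)

class StringChannel(SideChannel):
    def __init__(self) -> None:
        # Must match the channel ID registered on the C# side.
        super().__init__(uuid.UUID("621f0a70-4f87-11ea-a6bf-784f4387d1f7"))

    def on_message_received(self, msg: IncomingMessage) -> None:
        try:
            print("Received:", msg.read_string())
        except Exception as ex:  # mirror the C# catch-all: log and skip the message
            print(f"Error processing SideChannel message: {ex}. Skipping.")

    def send_string(self, data: str) -> None:
        msg = OutgoingMessage()
        msg.write_string(data)
        super().queue_message_to_send(msg)
```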

4
com.unity.ml-agents/Runtime/Timer.cs


                SaveJsonTimers(fs);
                fs.Close();
            }
-           catch (IOException)
+           catch (SystemException)
            {
-               // It's possible we don't have write access to the directory.
+               // We may not have write access to the directory.
                Debug.LogWarning($"Unable to save timers to file {filename}");
            }
#endif

19
com.unity.ml-agents/Tests/Editor/Communicator/RpcCommunicatorTests.cs


        {
            var unityVerStr = "1.0.0";
            var pythonVerStr = "1.0.0";
-           var pythonPackageVerStr = "0.16.0";
            // (A series of compatibility-check assertions follows; the diff drops the
            // pythonPackageVerStr argument from each call, leaving only unityVerStr
            // and pythonVerStr.)
        }
    }

198
com.unity.ml-agents/Tests/Editor/DiscreteActionOutputApplierTest.cs


using System;
using System.Collections.Generic;
using UnityEngine;
using Unity.MLAgents.Actuators;
using Unity.MLAgents.Inference.Utils;
namespace Unity.MLAgents.Tests
{

-        public void TestEvalP()
-        {
-            var m = new Multinomial(2018);
-            var src = new TensorProxy
-            {
-                data = new Tensor(1, 3, new[] { 0.1f, 0.2f, 0.7f }),
-                valueType = TensorProxy.TensorType.FloatingPoint
-            };
-            var dst = new TensorProxy
-            {
-                data = new Tensor(1, 3),
-                valueType = TensorProxy.TensorType.FloatingPoint
-            };
-            DiscreteActionOutputApplier.Eval(src, dst, m);
-            float[] reference = { 2, 2, 1 };
-            for (var i = 0; i < dst.data.length; i++)
-            {
-                Assert.AreEqual(reference[i], dst.data[i]);
-                ++i;
-            }
-        }
-        [Test]
-        public void TestEvalLogits()
-        {
-            var m = new Multinomial(2018);
-            var src = new TensorProxy
-            {
-                data = new Tensor(
-                    1,
-                    3,
-                    new[] { Mathf.Log(0.1f) - 50, Mathf.Log(0.2f) - 50, Mathf.Log(0.7f) - 50 }),
-                valueType = TensorProxy.TensorType.FloatingPoint
-            };
-            var dst = new TensorProxy
-            {
-                data = new Tensor(1, 3),
-                valueType = TensorProxy.TensorType.FloatingPoint
-            };
-            DiscreteActionOutputApplier.Eval(src, dst, m);
-            float[] reference = { 2, 2, 2 };
-            for (var i = 0; i < dst.data.length; i++)
-            {
-                Assert.AreEqual(reference[i], dst.data[i]);
-                ++i;
-            }
-        }
-        [Test]
-        public void TestEvalBatching()
-        {
-            var m = new Multinomial(2018);
-            var src = new TensorProxy
-            {
-                data = new Tensor(2, 3, new[]
-                {
-                    Mathf.Log(0.1f) - 50, Mathf.Log(0.2f) - 50, Mathf.Log(0.7f) - 50,
-                    Mathf.Log(0.3f) - 25, Mathf.Log(0.4f) - 25, Mathf.Log(0.3f) - 25
-                }),
-                valueType = TensorProxy.TensorType.FloatingPoint
-            };
-            var dst = new TensorProxy
-            {
-                data = new Tensor(2, 3),
-                valueType = TensorProxy.TensorType.FloatingPoint
-            };
-            DiscreteActionOutputApplier.Eval(src, dst, m);
-            float[] reference = { 2, 2, 2, 0, 1, 0 };
-            for (var i = 0; i < dst.data.length; i++)
-            {
-                Assert.AreEqual(reference[i], dst.data[i]);
-                ++i;
-            }
-        }
-        [Test]
-        public void TestSrcInt()
-        {
-            var m = new Multinomial(2018);
-            var src = new TensorProxy
-            {
-                valueType = TensorProxy.TensorType.Integer
-            };
-            Assert.Throws<NotImplementedException>(
-                () => DiscreteActionOutputApplier.Eval(src, null, m));
-        }
-        [Test]
-        public void TestDstInt()
-        {
-            var m = new Multinomial(2018);
-            var src = new TensorProxy
-            {
-                valueType = TensorProxy.TensorType.FloatingPoint
-            };
-            var dst = new TensorProxy
-            {
-                valueType = TensorProxy.TensorType.Integer
-            };
-            Assert.Throws<ArgumentException>(
-                () => DiscreteActionOutputApplier.Eval(src, dst, m));
-        }
-        [Test]
-        public void TestSrcDataNull()
-        {
-            var m = new Multinomial(2018);
-            var src = new TensorProxy
-            {
-                valueType = TensorProxy.TensorType.FloatingPoint
-            };
-            var dst = new TensorProxy
-            {
-                valueType = TensorProxy.TensorType.FloatingPoint
-            };
-            Assert.Throws<ArgumentNullException>(
-                () => DiscreteActionOutputApplier.Eval(src, dst, m));
-        }
-        [Test]
-        public void TestDstDataNull()
-        {
-            var m = new Multinomial(2018);
-            var src = new TensorProxy
-            {
-                valueType = TensorProxy.TensorType.FloatingPoint,
-                data = new Tensor(0, 1)
-            };
-            var dst = new TensorProxy
-            {
-                valueType = TensorProxy.TensorType.FloatingPoint
-            };
-            Assert.Throws<ArgumentNullException>(
-                () => DiscreteActionOutputApplier.Eval(src, dst, m));
-        }
-        [Test]
-        public void TestUnequalBatchSize()
-        {
-            var m = new Multinomial(2018);
-            var src = new TensorProxy
-            {
-                valueType = TensorProxy.TensorType.FloatingPoint,
-                data = new Tensor(1, 1)
-            };
-            var dst = new TensorProxy
-            {
-                valueType = TensorProxy.TensorType.FloatingPoint,
-                data = new Tensor(2, 1)
-            };
-            Assert.Throws<ArgumentException>(
-                () => DiscreteActionOutputApplier.Eval(src, dst, m));
-        }
+        [Test]
+        public void TestDiscreteApply()
+        {
+            var actionSpec = ActionSpec.MakeDiscrete(3, 2);
+            const float smallLogProb = -1000.0f;
+            const float largeLogProb = -1.0f;
+
+            var logProbs = new TensorProxy
+            {
+                data = new Tensor(
+                    2,
+                    5,
+                    new[]
+                    {
+                        smallLogProb, smallLogProb, largeLogProb, // Agent 0, branch 0
+                        smallLogProb, largeLogProb, // Agent 0, branch 1
+                        largeLogProb, smallLogProb, smallLogProb, // Agent 1, branch 0
+                        largeLogProb, smallLogProb, // Agent 1, branch 1
+                    }),
+                valueType = TensorProxy.TensorType.FloatingPoint
+            };
+
+            var applier = new DiscreteActionOutputApplier(actionSpec, 2020, null);
+            var agentIds = new List<int> { 42, 1337 };
+            var actionBuffers = new Dictionary<int, ActionBuffers>();
+            actionBuffers[42] = new ActionBuffers(actionSpec);
+            actionBuffers[1337] = new ActionBuffers(actionSpec);
+
+            applier.Apply(logProbs, agentIds, actionBuffers);
+            Assert.AreEqual(2, actionBuffers[42].DiscreteActions[0]);
+            Assert.AreEqual(1, actionBuffers[42].DiscreteActions[1]);
+            Assert.AreEqual(0, actionBuffers[1337].DiscreteActions[0]);
+            Assert.AreEqual(0, actionBuffers[1337].DiscreteActions[1]);
+        }
    }
}
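
The expected actions in TestDiscreteApply are effectively deterministic: a gap of roughly 999 nats between smallLogProb and largeLogProb puts all of the softmax mass on the favored index. A quick NumPy check (an aside, not part of the test suite):

```python
import numpy as np

logits = np.array([-1000.0, -1000.0, -1.0])  # Agent 0, branch 0
p = np.exp(logits - logits.max())
p /= p.sum()
print(p)  # ~[0. 0. 1.]: sampling index 2 is certain up to ~exp(-999)
```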

13
com.unity.ml-agents/Tests/Editor/ModelRunnerTest.cs


        [Test]
        public void TestCreation()
        {
+           var inferenceDevice = InferenceDevice.Burst;
-           var modelRunner = new ModelRunner(continuousONNXModel, GetContinuous2vis8vec2actionActionSpec());
+           var modelRunner = new ModelRunner(continuousONNXModel, GetContinuous2vis8vec2actionActionSpec(), inferenceDevice);
-           modelRunner = new ModelRunner(discreteONNXModel, GetDiscrete1vis0vec_2_3action_recurrModelActionSpec());
+           modelRunner = new ModelRunner(discreteONNXModel, GetDiscrete1vis0vec_2_3action_recurrModelActionSpec(), inferenceDevice);
-           modelRunner = new ModelRunner(hybridONNXModel, GetHybrid0vis53vec_3c_2dActionSpec());
+           modelRunner = new ModelRunner(hybridONNXModel, GetHybrid0vis53vec_3c_2dActionSpec(), inferenceDevice);
-           modelRunner = new ModelRunner(continuousNNModel, GetContinuous2vis8vec2actionActionSpec());
+           modelRunner = new ModelRunner(continuousNNModel, GetContinuous2vis8vec2actionActionSpec(), inferenceDevice);
-           modelRunner = new ModelRunner(discreteNNModel, GetDiscrete1vis0vec_2_3action_recurrModelActionSpec());
+           modelRunner = new ModelRunner(discreteNNModel, GetDiscrete1vis0vec_2_3action_recurrModelActionSpec(), inferenceDevice);
            modelRunner.Dispose();
        }

        public void TestRunModel()
        {
            var actionSpec = GetDiscrete1vis0vec_2_3action_recurrModelActionSpec();
-           var modelRunner = new ModelRunner(discreteONNXModel, actionSpec);
+           var modelRunner = new ModelRunner(discreteONNXModel, actionSpec, InferenceDevice.Burst);
            var info1 = new AgentInfo();
            info1.episodeId = 1;
            modelRunner.PutObservations(info1, new[] { sensor_21_20_3.CreateSensor() }.ToList());

2
docs/Installation.md


installing ML-Agents. Activate your virtual environment and run from the command line:

```sh
-pip3 install torch==1.7.0 -f https://download.pytorch.org/whl/torch_stable.html
+pip3 install torch~=1.7.1 -f https://download.pytorch.org/whl/torch_stable.html
```
Note that on Windows, you may also need Microsoft's

13
docs/Learning-Environment-Design-Agents.md


#### Observable Fields and Properties

Another approach is to define the relevant observations as fields or properties
on your Agent class, and annotate them with an `ObservableAttribute`. For
-example, in the 3DBall example above, the rigid body velocity could be observed:
+example, in the Ball3DHardAgent, the difference between positions could be observed:

```csharp
-public class Ball3DAgent : Agent {
+public class Ball3DHardAgent : Agent {

-    [Observable]
-    public Vector3 RigidBodyVelocity
-    {
-        get { return m_BallRb.velocity; }
-    }
+    [Observable(numStackedObservations: 9)]
+    Vector3 PositionDelta
+    {
+        get
+        {
+            return ball.transform.position - gameObject.transform.position;
+        }
+    }
}
```

2
docs/Training-Configuration-File.md


- LSTM does not work well with continuous actions. Please use
  discrete actions for better results.
- Since the memories must be sent back and forth between Python and Unity, an
  overly large `memory_size` will slow down training.
- Adding a recurrent layer increases the complexity of the neural network; it is
  recommended to decrease `num_layers` when using recurrent layers.
- It is required that `memory_size` be divisible by 2.
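
For a concrete sense of these constraints, the trainer exposes them as `NetworkSettings.MemorySettings`; a short sketch of configuring them programmatically rather than via YAML (values are illustrative):

```python
from mlagents.trainers.settings import NetworkSettings

# memory_size must be even: the vector is split into two halves
# (hidden state and cell state) for the LSTM.
memory = NetworkSettings.MemorySettings(memory_size=128, sequence_length=64)
network_settings = NetworkSettings(memory=memory)
```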

392
ml-agents/mlagents/trainers/buffer.py


+from collections import defaultdict
+from collections.abc import MutableMapping
+import enum
+import itertools
+from typing import BinaryIO, DefaultDict, List, Tuple, Union, Optional
-from typing import List, BinaryIO
-import itertools
from mlagents_envs.exception import UnityException

    pass

-class AgentBuffer(dict):
+class BufferKey(enum.Enum):
ACTION_MASK = "action_mask"
CONTINUOUS_ACTION = "continuous_action"
CONTINUOUS_LOG_PROBS = "continuous_log_probs"
DISCRETE_ACTION = "discrete_action"
DISCRETE_LOG_PROBS = "discrete_log_probs"
DONE = "done"
ENVIRONMENT_REWARDS = "environment_rewards"
MASKS = "masks"
MEMORY = "memory"
PREV_ACTION = "prev_action"
ADVANTAGES = "advantages"
DISCOUNTED_RETURNS = "discounted_returns"
class ObservationKeyPrefix(enum.Enum):
OBSERVATION = "obs"
NEXT_OBSERVATION = "next_obs"
class RewardSignalKeyPrefix(enum.Enum):
# Reward signals
REWARDS = "rewards"
VALUE_ESTIMATES = "value_estimates"
RETURNS = "returns"
ADVANTAGE = "advantage"
AgentBufferKey = Union[
BufferKey, Tuple[ObservationKeyPrefix, int], Tuple[RewardSignalKeyPrefix, str]
]
class RewardSignalUtil:
    @staticmethod
    def rewards_key(name: str) -> AgentBufferKey:
        return RewardSignalKeyPrefix.REWARDS, name

    @staticmethod
    def value_estimates_key(name: str) -> AgentBufferKey:
        return RewardSignalKeyPrefix.VALUE_ESTIMATES, name

    @staticmethod
    def returns_key(name: str) -> AgentBufferKey:
        return RewardSignalKeyPrefix.RETURNS, name

    @staticmethod
    def advantage_key(name: str) -> AgentBufferKey:
        return RewardSignalKeyPrefix.ADVANTAGE, name
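
A short sketch of how the typed keys replace the old raw strings (values made up):

```python
import numpy as np
from mlagents.trainers.buffer import (
    AgentBuffer,
    BufferKey,
    ObservationKeyPrefix,
    RewardSignalUtil,
)

buf = AgentBuffer()
buf[BufferKey.ENVIRONMENT_REWARDS].append(np.array([1.0], dtype=np.float32))
buf[(ObservationKeyPrefix.OBSERVATION, 0)].append(np.zeros(3, dtype=np.float32))
buf[RewardSignalUtil.rewards_key("gail")].append(np.array([0.5], dtype=np.float32))
# A typo is now a key-type error (when CHECK_KEY_TYPES_AT_RUNTIME is on)
# rather than a silently created new string-keyed field.
```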
# (AgentBufferField is promoted from the old nested AgentBuffer.AgentBufferField to a
# top-level class; apart from the marked change to get_batch, its body is unchanged.)
class AgentBufferField(list):
    """
    AgentBufferField is a list of numpy arrays. When an agent collects a field, you can add it to its
    AgentBufferField with the append method.
    """

    def __init__(self):
        self.padding_value = 0
        super().__init__()

    def __str__(self):
        return str(np.array(self).shape)

    def append(self, element: np.ndarray, padding_value: float = 0.0) -> None:
        """
        Adds an element to this list. Also lets you change the padding
        type, so that it can be set on append (e.g. action_masks should
        be padded with 1.)
        :param element: The element to append to the list.
        :param padding_value: The value used to pad when get_batch is called.
        """
        super().append(element)
        self.padding_value = padding_value

    def extend(self, data: np.ndarray) -> None:
        """
        Adds a list of np.arrays to the end of the list of np.arrays.
        :param data: The np.array list to append.
        """
        self += list(np.array(data, dtype=np.float32))

    def set(self, data):
        """
        Sets the list of np.array to the input data
        :param data: The np.array list to be set.
        """
        # Make sure we convert incoming data to float32 if it's a float
        dtype = None
        if data is not None and len(data) and isinstance(data[0], float):
            dtype = np.float32
        self[:] = []
        self[:] = list(np.array(data, dtype=dtype))

    def get_batch(
        self,
        batch_size: int = None,
-       training_length: int = 1,
+       training_length: Optional[int] = 1,
        sequential: bool = True,
    ) -> np.ndarray:
        """
        Retrieve the last batch_size elements of length training_length
        from the list of np.array
        :param batch_size: The number of elements to retrieve. If None:
        All elements will be retrieved.
        :param training_length: The length of the sequence to be retrieved. If
        None: only takes one element.
        :param sequential: If true and training_length is not None: the elements
        will not repeat in the sequence. [a,b,c,d,e] with training_length = 2 and
        sequential=True gives [[0,a],[b,c],[d,e]]. If sequential=False gives
        [[a,b],[b,c],[c,d],[d,e]]
        """
+       if training_length is None:
+           training_length = 1
        if sequential:
            # The sequences will not have overlapping elements (this involves padding)
            leftover = len(self) % training_length
            # leftover is the number of elements in the first sequence (this sequence might need 0 padding)
            if batch_size is None:
                # retrieve the maximum number of elements
                batch_size = len(self) // training_length + 1 * (leftover != 0)
            # The maximum number of sequences taken from a list of length len(self) without overlapping
            # with padding is equal to batch_size
            if batch_size > (len(self) // training_length + 1 * (leftover != 0)):
                raise BufferException(
                    "The batch size and training length requested for get_batch where"
                    " too large given the current number of data points."
                )
            if batch_size * training_length > len(self):
                padding = np.array(self[-1], dtype=np.float32) * self.padding_value
                return np.array(
                    [padding] * (training_length - leftover) + self[:], dtype=np.float32
                )
            else:
                return np.array(
                    self[len(self) - batch_size * training_length :], dtype=np.float32
                )
        else:
            # The sequences will have overlapping elements
            if batch_size is None:
                # retrieve the maximum number of elements
                batch_size = len(self) - training_length + 1
            # The number of sequences of length training_length taken from a list of len(self) elements
            # with overlapping is equal to batch_size
            if (len(self) - training_length + 1) < batch_size:
                raise BufferException(
                    "The batch size and training length requested for get_batch where"
                    " too large given the current number of data points."
                )
            tmp_list: List[np.ndarray] = []
            for end in range(len(self) - batch_size + 1, len(self) + 1):
                tmp_list += self[end - training_length : end]
            return np.array(tmp_list, dtype=np.float32)

    def reset_field(self) -> None:
        """
        Resets the AgentBufferField
        """
        self[:] = []
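
To make the docstring's [a,b,c,d,e] example concrete, here is what the two modes of get_batch return for a five-element field of scalars (a sketch run against the class above):

```python
import numpy as np
from mlagents.trainers.buffer import AgentBufferField

f = AgentBufferField()
for x in [1.0, 2.0, 3.0, 4.0, 5.0]:
    f.append(np.array([x], dtype=np.float32))

# Non-overlapping length-2 sequences, zero-padded at the front:
# [0,1],[2,3],[4,5]
print(f.get_batch(batch_size=3, training_length=2, sequential=True).squeeze())

# Overlapping length-2 sequences ending at the last element:
# [2,3],[3,4],[4,5]
print(f.get_batch(batch_size=3, training_length=2, sequential=False).squeeze())
```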
class AgentBuffer(MutableMapping):
"""
AgentBuffer contains a dictionary of AgentBufferFields. Each agent has his own AgentBuffer.
The keys correspond to the name of the field. Example: state, action
"""
    # Whether or not to validate the types of keys at runtime
    # This should be off for training, but enabled for testing
    CHECK_KEY_TYPES_AT_RUNTIME = False

    def __init__(self):
        super().__init__()
        self._fields: DefaultDict[AgentBufferKey, AgentBufferField] = defaultdict(
            AgentBufferField
        )
    def __str__(self):
-       return ", ".join(["'{}' : {}".format(k, str(self[k])) for k in self.keys()])
+       return ", ".join(
+           ["'{}' : {}".format(k, str(self[k])) for k in self._fields.keys()]
+       )

    def reset_agent(self) -> None:
-       for k in self.keys():
-           self[k].reset_field()
+       for f in self._fields.values():
+           f.reset_field()

-   def __getitem__(self, key):
-       if key not in self.keys():
-           self[key] = self.AgentBufferField()
-       return super().__getitem__(key)
@staticmethod
def _check_key(key):
if isinstance(key, BufferKey):
return
if isinstance(key, tuple):
key0, key1 = key
if isinstance(key0, ObservationKeyPrefix):
if isinstance(key1, int):
return
raise KeyError(f"{key} has type ({type(key0)}, {type(key1)})")
if isinstance(key0, RewardSignalKeyPrefix):
if isinstance(key1, str):
return
raise KeyError(f"{key} has type ({type(key0)}, {type(key1)})")
raise KeyError(f"{key} is a {type(key)}")
-   def check_length(self, key_list: List[str]) -> bool:
@staticmethod
def _encode_key(key: AgentBufferKey) -> str:
"""
Convert the key to a string representation so that it can be used for serialization.
"""
if isinstance(key, BufferKey):
return key.value
prefix, suffix = key
return f"{prefix.value}:{suffix}"
@staticmethod
def _decode_key(encoded_key: str) -> AgentBufferKey:
"""
Convert the string representation back to a key after serialization.
"""
# Simple case: convert the string directly to a BufferKey
try:
return BufferKey(encoded_key)
except ValueError:
pass
# Not a simple key, so split into two parts
prefix_str, _, suffix_str = encoded_key.partition(":")
# See if it's an ObservationKeyPrefix first
try:
return ObservationKeyPrefix(prefix_str), int(suffix_str)
except ValueError:
pass
# If not, it had better be a RewardSignalKeyPrefix
try:
return RewardSignalKeyPrefix(prefix_str), suffix_str
except ValueError:
raise ValueError(f"Unable to convert {encoded_key} to an AgentBufferKey")
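
The encoding is reversible by construction, which is what lets HDF5 datasets written by save_to_file round-trip through load_from_file. For example (a sketch):

```python
from mlagents.trainers.buffer import AgentBuffer, ObservationKeyPrefix

key = (ObservationKeyPrefix.NEXT_OBSERVATION, 0)
encoded = AgentBuffer._encode_key(key)
print(encoded)                            # "next_obs:0"
assert AgentBuffer._decode_key(encoded) == key
```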
def __getitem__(self, key: AgentBufferKey) -> AgentBufferField:
if self.CHECK_KEY_TYPES_AT_RUNTIME:
self._check_key(key)
return self._fields[key]
def __setitem__(self, key: AgentBufferKey, value: AgentBufferField) -> None:
if self.CHECK_KEY_TYPES_AT_RUNTIME:
self._check_key(key)
self._fields[key] = value
def __delitem__(self, key: AgentBufferKey) -> None:
if self.CHECK_KEY_TYPES_AT_RUNTIME:
self._check_key(key)
self._fields.__delitem__(key)
def __iter__(self):
return self._fields.__iter__()
def __len__(self) -> int:
return self._fields.__len__()
def __contains__(self, key):
if self.CHECK_KEY_TYPES_AT_RUNTIME:
self._check_key(key)
return self._fields.__contains__(key)
def check_length(self, key_list: List[AgentBufferKey]) -> bool:
"""
Some methods will require that some fields have the same length.
check_length will return true if the fields in key_list

if self.CHECK_KEY_TYPES_AT_RUNTIME:
for k in key_list:
self._check_key(k)
-           if key not in self.keys():
+           if key not in self._fields:
return False
if (length is not None) and (length != len(self[key])):
return False

-   def shuffle(self, sequence_length: int, key_list: List[str] = None) -> None:
+   def shuffle(
+       self, sequence_length: int, key_list: List[AgentBufferKey] = None
+   ) -> None:
"""
Shuffles the fields in key_list in a consistent way: The reordering will
be the same across fields.

-           key_list = list(self._fields.keys()) if False else list(self.keys())
+           key_list = list(self._fields.keys())
if not self.check_length(key_list):
raise BufferException(
"Unable to shuffle if the fields are not of same length"

:return: Dict of mini batch.
"""
mini_batch = AgentBuffer()
-       for key in self:
-           mini_batch[key] = self[key][start:end]
+       for key, field in self._fields.items():
+           # slicing AgentBufferField returns a List[Any]
+           mini_batch[key] = field[start:end]  # type: ignore
return mini_batch
def sample_mini_batch(

"""
with h5py.File(file_object, "w") as write_file:
            for key, data in self.items():
-               write_file.create_dataset(key, data=data, dtype="f", compression="gzip")
+               write_file.create_dataset(
+                   self._encode_key(key), data=data, dtype="f", compression="gzip"
+               )
def load_from_file(self, file_object: BinaryIO) -> None:
"""

            for key in list(read_file.keys()):
-               self[key] = AgentBuffer.AgentBufferField()
-               self[key].extend(read_file[key][()])
+               decoded_key = self._decode_key(key)
+               self[decoded_key] = AgentBufferField()
+               self[decoded_key].extend(read_file[key][()])
def truncate(self, max_length: int, sequence_length: int = 1) -> None:
"""

def resequence_and_append(
self,
target_buffer: "AgentBuffer",
-       key_list: List[str] = None,
+       key_list: List[AgentBufferKey] = None,
batch_size: int = None,
training_length: int = None,
) -> None:

3
ml-agents/mlagents/trainers/cli_utils.py


action=RaiseRemovedWarning,
help="(Removed) Use the TensorFlow framework.",
)
argparser.add_argument(
"--results-dir", default="results", help="Results base directory"
)
eng_conf = argparser.add_argument_group(title="Engine Configuration")
eng_conf.add_argument(

16
ml-agents/mlagents/trainers/demo_loader.py


import os
from typing import List, Tuple
import numpy as np
-from mlagents.trainers.buffer import AgentBuffer
+from mlagents.trainers.buffer import AgentBuffer, BufferKey
from mlagents_envs.communicator_objects.agent_info_action_pair_pb2 import (
AgentInfoActionPairProto,
)

else:
current_obs = list(current_decision_step.values())[0].obs
-        demo_raw_buffer["done"].append(next_done)
-        demo_raw_buffer["rewards"].append(next_reward)
+        demo_raw_buffer[BufferKey.DONE].append(next_done)
+        demo_raw_buffer[BufferKey.ENVIRONMENT_REWARDS].append(next_reward)
for i, obs in enumerate(current_obs):
demo_raw_buffer[ObsUtil.get_name_at(i)].append(obs)
if (

        if behavior_spec.action_spec.continuous_size > 0:
-           demo_raw_buffer["continuous_action"].append(
+           demo_raw_buffer[BufferKey.CONTINUOUS_ACTION].append(
-           demo_raw_buffer["discrete_action"].append(
+           demo_raw_buffer[BufferKey.DISCRETE_ACTION].append(
-           demo_raw_buffer["continuous_action"].append(
+           demo_raw_buffer[BufferKey.CONTINUOUS_ACTION].append(
-           demo_raw_buffer["discrete_action"].append(
+           demo_raw_buffer[BufferKey.DISCRETE_ACTION].append(
-       demo_raw_buffer["prev_action"].append(previous_action)
+       demo_raw_buffer[BufferKey.PREV_ACTION].append(previous_action)
if next_done:
demo_raw_buffer.resequence_and_append(
demo_processed_buffer, batch_size=None, training_length=sequence_length

40
ml-agents/mlagents/trainers/ghost/trainer.py


next_learning_team = self.controller.get_learning_team
-       # CASE 1: Current learning team is managed by this GhostTrainer.
-       # If the learning team changes, the following loop over queues will push the
-       # new policy into the policy queue for the new learning agent if
-       # that policy is managed by this GhostTrainer. Otherwise, it will save the current snapshot.
-       # CASE 2: Current learning team is managed by a different GhostTrainer.
-       # If the learning team changes to a team managed by this GhostTrainer, this loop
-       # will push the current_snapshot into the correct queue. Otherwise,
-       # it will continue skipping and swap_snapshot will continue to handle
-       # pushing fixed snapshots
-       # Case 3: No team change. The if statement just continues to push the policy
+       # Case 1: No team change. The if statement just continues to push the policy
        # into the correct queue (or not if not learning team).
        for brain_name in self._internal_policy_queues:
            internal_policy_queue = self._internal_policy_queues[brain_name]

            except AgentManagerQueue.Empty:
-               pass
-           if next_learning_team in self._team_to_name_to_policy_queue:
+               continue
+           if (
+               self._learning_team == next_learning_team
+               and next_learning_team in self._team_to_name_to_policy_queue
+           ):
                name_to_policy_queue = self._team_to_name_to_policy_queue[
                    next_learning_team
                ]

                policy = self.get_policy(behavior_id)
                policy.load_weights(self.current_policy_snapshot[brain_name])
                name_to_policy_queue[brain_name].put(policy)
+       # CASE 2: Current learning team is managed by this GhostTrainer.
+       # If the learning team changes, the following loop over queues will push the
+       # new policy into the policy queue for the new learning agent if
+       # that policy is managed by this GhostTrainer. Otherwise, it will save the current snapshot.
+       # CASE 3: Current learning team is managed by a different GhostTrainer.
+       # If the learning team changes to a team managed by this GhostTrainer, this loop
+       # will push the current_snapshot into the correct queue. Otherwise,
+       # it will continue skipping and swap_snapshot will continue to handle
+       # pushing fixed snapshots
+       if (
+           self._learning_team != next_learning_team
+           and next_learning_team in self._team_to_name_to_policy_queue
+       ):
+           name_to_policy_queue = self._team_to_name_to_policy_queue[
+               next_learning_team
+           ]
+           for brain_name in name_to_policy_queue:
+               behavior_id = create_name_behavior_id(brain_name, next_learning_team)
+               policy = self.get_policy(behavior_id)
+               policy.load_weights(self.current_policy_snapshot[brain_name])
+               name_to_policy_queue[brain_name].put(policy)
# Note save and swap should be on different step counters.
# We don't want to save unless the policy is learning.

41
ml-agents/mlagents/trainers/learn.py


from mlagents.trainers.environment_parameter_manager import EnvironmentParameterManager
from mlagents.trainers.trainer import TrainerFactory
from mlagents.trainers.directory_utils import validate_existing_directories
-from mlagents.trainers.stats import (
-    TensorboardWriter,
-    StatsReporter,
-    GaugeWriter,
-    ConsoleWriter,
-)
+from mlagents.trainers.stats import StatsReporter
from mlagents.trainers.cli_utils import parser
from mlagents_envs.environment import UnityEnvironment
from mlagents.trainers.settings import RunOptions

add_metadata as add_timer_metadata,
)
from mlagents_envs import logging_util
from mlagents.plugins.stats_writer import register_stats_writer_plugins
logger = logging_util.get_logger(__name__)

checkpoint_settings = options.checkpoint_settings
env_settings = options.env_settings
engine_settings = options.engine_settings
-    base_path = "results"
-    write_path = os.path.join(base_path, checkpoint_settings.run_id)
-    maybe_init_path = (
-        os.path.join(base_path, checkpoint_settings.initialize_from)
-        if checkpoint_settings.initialize_from is not None
-        else None
-    )
-    run_logs_dir = os.path.join(write_path, "run_logs")
+    run_logs_dir = checkpoint_settings.run_logs_dir

-        write_path,
+        checkpoint_settings.write_path,
-        maybe_init_path,
+        checkpoint_settings.maybe_init_path,
)
# Make run logs directory
os.makedirs(run_logs_dir, exist_ok=True)

)
-    # Configure Tensorboard Writers and StatsReporter
-    tb_writer = TensorboardWriter(
-        write_path, clear_past_data=not checkpoint_settings.resume
-    )
-    gauge_write = GaugeWriter()
-    console_writer = ConsoleWriter()
-    StatsReporter.add_writer(tb_writer)
-    StatsReporter.add_writer(gauge_write)
-    StatsReporter.add_writer(console_writer)
+    stats_writers = register_stats_writer_plugins(options)
+    for sw in stats_writers:
+        StatsReporter.add_writer(sw)
if env_settings.env_path is None:
port = None

trainer_factory = TrainerFactory(
trainer_config=options.behaviors,
-        output_path=write_path,
+        output_path=checkpoint_settings.write_path,
-        init_path=maybe_init_path,
+        init_path=checkpoint_settings.maybe_init_path,
-        write_path,
+        checkpoint_settings.write_path,
checkpoint_settings.run_id,
env_parameter_manager,
not checkpoint_settings.inference,

tc.start_learning(env_manager)
finally:
env_manager.close()
-        write_run_options(write_path, options)
+        write_run_options(checkpoint_settings.write_path, options)
write_timing_tree(run_logs_dir)
write_training_status(run_logs_dir)

22
ml-agents/mlagents/trainers/ppo/optimizer_torch.py


from typing import Dict, cast
from mlagents.torch_utils import torch
-from mlagents.trainers.buffer import AgentBuffer
+from mlagents.trainers.buffer import AgentBuffer, BufferKey, RewardSignalUtil
from mlagents_envs.timers import timed
from mlagents.trainers.policy.torch_policy import TorchPolicy

old_values = {}
for name in self.reward_signals:
            old_values[name] = ModelUtils.list_to_tensor(
-               batch[f"{name}_value_estimates"]
+               batch[RewardSignalUtil.value_estimates_key(name)]
            )
-           returns[name] = ModelUtils.list_to_tensor(batch[f"{name}_returns"])
+           returns[name] = ModelUtils.list_to_tensor(
+               batch[RewardSignalUtil.returns_key(name)]
+           )
n_obs = len(self.policy.behavior_spec.observation_specs)
current_obs = ObsUtil.from_buffer(batch, n_obs)

-       act_masks = ModelUtils.list_to_tensor(batch["action_mask"])
-       actions = AgentAction.from_dict(batch)
+       act_masks = ModelUtils.list_to_tensor(batch[BufferKey.ACTION_MASK])
+       actions = AgentAction.from_buffer(batch)
-           ModelUtils.list_to_tensor(batch["memory"][i])
-           for i in range(0, len(batch["memory"]), self.policy.sequence_length)
+           ModelUtils.list_to_tensor(batch[BufferKey.MEMORY][i])
+           for i in range(0, len(batch[BufferKey.MEMORY]), self.policy.sequence_length)
]
if len(memories) > 0:
memories = torch.stack(memories).unsqueeze(0)

memories=memories,
seq_len=self.policy.sequence_length,
)
-       old_log_probs = ActionLogProbs.from_dict(batch).flatten()
+       old_log_probs = ActionLogProbs.from_buffer(batch).flatten()
-       loss_masks = ModelUtils.list_to_tensor(batch["masks"], dtype=torch.bool)
+       loss_masks = ModelUtils.list_to_tensor(batch[BufferKey.MASKS], dtype=torch.bool)
-           ModelUtils.list_to_tensor(batch["advantages"]),
+           ModelUtils.list_to_tensor(batch[BufferKey.ADVANTAGES]),
log_probs,
old_log_probs,
loss_masks,

33
ml-agents/mlagents/trainers/ppo/trainer.py


from mlagents_envs.logging_util import get_logger
from mlagents_envs.base_env import BehaviorSpec
from mlagents.trainers.buffer import BufferKey, RewardSignalUtil
from mlagents.trainers.trainer.rl_trainer import RLTrainer
from mlagents.trainers.policy import Policy
from mlagents.trainers.policy.torch_policy import TorchPolicy

)
        for name, v in value_estimates.items():
-           agent_buffer_trajectory[f"{name}_value_estimates"].extend(v)
+           agent_buffer_trajectory[RewardSignalUtil.value_estimates_key(name)].extend(
+               v
+           )
self._stats_reporter.add_stat(
f"Policy/{self.optimizer.reward_signals[name].name.capitalize()} Value Estimate",
np.mean(v),

        self.collected_rewards["environment"][agent_id] += np.sum(
-           agent_buffer_trajectory["environment_rewards"]
+           agent_buffer_trajectory[BufferKey.ENVIRONMENT_REWARDS]
        )
-           agent_buffer_trajectory[f"{name}_rewards"].extend(evaluate_result)
+           agent_buffer_trajectory[RewardSignalUtil.rewards_key(name)].extend(
+               evaluate_result
+           )
            # Report the reward signals
            self.collected_rewards[name][agent_id] += np.sum(evaluate_result)

for name in self.optimizer.reward_signals:
bootstrap_value = value_next[name]
-           local_rewards = agent_buffer_trajectory[f"{name}_rewards"].get_batch()
+           local_rewards = agent_buffer_trajectory[
+               RewardSignalUtil.rewards_key(name)
+           ].get_batch()
-               f"{name}_value_estimates"
+               RewardSignalUtil.value_estimates_key(name)
            ].get_batch()
local_advantage = get_gae(

)
local_return = local_advantage + local_value_estimates
# This is later use as target for the different value estimates
-           agent_buffer_trajectory[f"{name}_returns"].set(local_return)
-           agent_buffer_trajectory[f"{name}_advantage"].set(local_advantage)
+           agent_buffer_trajectory[RewardSignalUtil.returns_key(name)].set(
+               local_return
+           )
+           agent_buffer_trajectory[RewardSignalUtil.advantage_key(name)].set(
+               local_advantage
+           )
tmp_advantages.append(local_advantage)
tmp_returns.append(local_return)

)
global_returns = list(np.mean(np.array(tmp_returns, dtype=np.float32), axis=0))
-       agent_buffer_trajectory["advantages"].set(global_advantages)
-       agent_buffer_trajectory["discounted_returns"].set(global_returns)
+       agent_buffer_trajectory[BufferKey.ADVANTAGES].set(global_advantages)
+       agent_buffer_trajectory[BufferKey.DISCOUNTED_RETURNS].set(global_returns)
# Append to update buffer
agent_buffer_trajectory.resequence_and_append(
self.update_buffer, training_length=self.policy.sequence_length

int(self.hyperparameters.batch_size / self.policy.sequence_length), 1
)
-       advantages = self.update_buffer["advantages"].get_batch()
-       self.update_buffer["advantages"].set(
+       advantages = self.update_buffer[BufferKey.ADVANTAGES].get_batch()
+       self.update_buffer[BufferKey.ADVANTAGES].set(
(advantages - advantages.mean()) / (advantages.std() + 1e-10)
)
num_epoch = self.hyperparameters.num_epoch

24
ml-agents/mlagents/trainers/sac/optimizer_torch.py


from mlagents.trainers.torch.agent_action import AgentAction
from mlagents.trainers.torch.action_log_probs import ActionLogProbs
from mlagents.trainers.torch.utils import ModelUtils
-from mlagents.trainers.buffer import AgentBuffer
+from mlagents.trainers.buffer import AgentBuffer, BufferKey, RewardSignalUtil
from mlagents_envs.timers import timed
from mlagents_envs.base_env import ActionSpec, ObservationSpec
from mlagents.trainers.exception import UnityTrainerException

"""
rewards = {}
for name in self.reward_signals:
-           rewards[name] = ModelUtils.list_to_tensor(batch[f"{name}_rewards"])
+           rewards[name] = ModelUtils.list_to_tensor(
+               batch[RewardSignalUtil.rewards_key(name)]
+           )
n_obs = len(self.policy.behavior_spec.observation_specs)
current_obs = ObsUtil.from_buffer(batch, n_obs)

# Convert to tensors
next_obs = [ModelUtils.list_to_tensor(obs) for obs in next_obs]
-       act_masks = ModelUtils.list_to_tensor(batch["action_mask"])
-       actions = AgentAction.from_dict(batch)
+       act_masks = ModelUtils.list_to_tensor(batch[BufferKey.ACTION_MASK])
+       actions = AgentAction.from_buffer(batch)
-           ModelUtils.list_to_tensor(batch["memory"][i])
-           for i in range(0, len(batch["memory"]), self.policy.sequence_length)
+           ModelUtils.list_to_tensor(batch[BufferKey.MEMORY][i])
+           for i in range(0, len(batch[BufferKey.MEMORY]), self.policy.sequence_length)
-               batch["memory"][i][self.policy.m_size // 2 :]
+               batch[BufferKey.MEMORY][i][self.policy.m_size // 2 :]
-           for i in range(offset, len(batch["memory"]), self.policy.sequence_length)
+           for i in range(
+               offset, len(batch[BufferKey.MEMORY]), self.policy.sequence_length
+           )
]
if len(memories_list) > 0:

memories=next_memories,
sequence_length=self.policy.sequence_length,
)
-       masks = ModelUtils.list_to_tensor(batch["masks"], dtype=torch.bool)
-       dones = ModelUtils.list_to_tensor(batch["done"])
+       masks = ModelUtils.list_to_tensor(batch[BufferKey.MASKS], dtype=torch.bool)
+       dones = ModelUtils.list_to_tensor(batch[BufferKey.DONE])
q1_loss, q2_loss = self.sac_q_loss(
q1_stream, q2_stream, target_values, dones, rewards, masks

7
ml-agents/mlagents/trainers/sac/trainer.py


from mlagents_envs.logging_util import get_logger
from mlagents_envs.timers import timed
from mlagents_envs.base_env import BehaviorSpec
from mlagents.trainers.buffer import BufferKey, RewardSignalUtil
from mlagents.trainers.policy import Policy
from mlagents.trainers.trainer.rl_trainer import RLTrainer
from mlagents.trainers.policy.torch_policy import TorchPolicy

# Evaluate all reward functions for reporting purposes
        self.collected_rewards["environment"][agent_id] += np.sum(
-           agent_buffer_trajectory["environment_rewards"]
+           agent_buffer_trajectory[BufferKey.ENVIRONMENT_REWARDS]
        )
for name, reward_signal in self.optimizer.reward_signals.items():
evaluate_result = (

last_step_obs = last_step.obs
for i, obs in enumerate(last_step_obs):
agent_buffer_trajectory[ObsUtil.get_name_at_next(i)][-1] = obs
-           agent_buffer_trajectory["done"][-1] = False
+           agent_buffer_trajectory[BufferKey.DONE][-1] = False
# Append to update buffer
agent_buffer_trajectory.resequence_and_append(

)
# Get rewards for each reward
for name, signal in self.optimizer.reward_signals.items():
-           sampled_minibatch[f"{name}_rewards"] = (
+           sampled_minibatch[RewardSignalUtil.rewards_key(name)] = (
signal.evaluate(sampled_minibatch) * signal.strength
)

18
ml-agents/mlagents/trainers/settings.py


import os.path
import warnings
import attr

force: bool = parser.get_default("force")
train_model: bool = parser.get_default("train_model")
inference: bool = parser.get_default("inference")
results_dir: str = parser.get_default("results_dir")
@property
def write_path(self) -> str:
return os.path.join(self.results_dir, self.run_id)
@property
def maybe_init_path(self) -> Optional[str]:
return (
os.path.join(self.results_dir, self.initialize_from)
if self.initialize_from is not None
else None
)
@property
def run_logs_dir(self) -> str:
return os.path.join(self.write_path, "run_logs")
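
These properties centralize path construction that learn.py previously rebuilt by hand. A quick sketch of what they yield (CheckpointSettings is constructed directly here only for illustration):

```python
from mlagents.trainers.settings import CheckpointSettings

cs = CheckpointSettings(run_id="3DBall", results_dir="results")
print(cs.write_path)    # results/3DBall
print(cs.run_logs_dir)  # results/3DBall/run_logs
```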
@attr.s(auto_attribs=True)

7
ml-agents/mlagents/trainers/stats.py


def write_stats(
self, category: str, values: Dict[str, StatsSummary], step: int
) -> None:
"""
Callback to record training information
:param category: Category of the statistics. Usually this is the behavior name.
:param values: Dictionary of statistics.
:param step: The current training step.
:return:
"""
pass
def add_property(

6
ml-agents/mlagents/trainers/tests/__init__.py


np.array = np_array_no_float64
np.zeros = np_zeros_no_float64
np.ones = np_ones_no_float64
if os.getenv("TEST_ENFORCE_BUFFER_KEY_TYPES"):
from mlagents.trainers.buffer import AgentBuffer
AgentBuffer.CHECK_KEY_TYPES_AT_RUNTIME = True

9
ml-agents/mlagents/trainers/tests/mock_brain.py


from typing import List, Tuple
import numpy as np
-from mlagents.trainers.buffer import AgentBuffer
+from mlagents.trainers.buffer import AgentBuffer, AgentBufferKey
from mlagents.trainers.torch.action_log_probs import LogProbsTuple
from mlagents.trainers.trajectory import Trajectory, AgentExperience
from mlagents_envs.base_env import (

return Trajectory(
steps=steps_list, agent_id=agent_id, behavior_id=behavior_id, next_obs=obs
)
def copy_buffer_fields(
buffer: AgentBuffer, src_key: AgentBufferKey, dst_keys: List[AgentBufferKey]
) -> None:
for dst_key in dst_keys:
buffer[dst_key] = buffer[src_key]
def simulate_rollout(

60
ml-agents/mlagents/trainers/tests/test_buffer.py


import numpy as np
-from mlagents.trainers.buffer import AgentBuffer
+from mlagents.trainers.buffer import (
+    AgentBuffer,
+    AgentBufferField,
+    BufferKey,
+    ObservationKeyPrefix,
+    RewardSignalKeyPrefix,
+)
+from mlagents.trainers.trajectory import ObsUtil
def assert_array(a, b):

def construct_fake_buffer(fake_agent_id):
b = AgentBuffer()
for step in range(9):
-        b["vector_observation"].append(
+        b[ObsUtil.get_name_at(0)].append(
[
100 * fake_agent_id + 10 * step + 1,
100 * fake_agent_id + 10 * step + 2,

-        b["action"].append(
+        b[BufferKey.CONTINUOUS_ACTION].append(
[100 * fake_agent_id + 10 * step + 4, 100 * fake_agent_id + 10 * step + 5]
)
return b

agent_1_buffer = construct_fake_buffer(1)
agent_2_buffer = construct_fake_buffer(2)
agent_3_buffer = construct_fake_buffer(3)
-    a = agent_1_buffer["vector_observation"].get_batch(
+    a = agent_1_buffer[ObsUtil.get_name_at(0)].get_batch(
-    a = agent_2_buffer["vector_observation"].get_batch(
+    a = agent_2_buffer[ObsUtil.get_name_at(0)].get_batch(
batch_size=2, training_length=3, sequential=True
)
assert_array(

]
),
)
-    a = agent_2_buffer["vector_observation"].get_batch(
+    a = agent_2_buffer[ObsUtil.get_name_at(0)].get_batch(
batch_size=2, training_length=3, sequential=False
)
assert_array(

agent_3_buffer.resequence_and_append(
update_buffer, batch_size=None, training_length=2
)
-    assert len(update_buffer["action"]) == 20
+    assert len(update_buffer[BufferKey.CONTINUOUS_ACTION]) == 20
-    assert np.array(update_buffer["action"]).shape == (20, 2)
+    assert np.array(update_buffer[BufferKey.CONTINUOUS_ACTION]).shape == (20, 2)
-    assert np.array(c["action"]).shape == (1, 2)
+    assert np.array(c[BufferKey.CONTINUOUS_ACTION]).shape == (1, 2)
def fakerandint(values):

# Test non-LSTM
mb = update_buffer.sample_mini_batch(batch_size=4, sequence_length=1)
assert mb.keys() == update_buffer.keys()
-    assert np.array(mb["action"]).shape == (4, 2)
+    assert np.array(mb[BufferKey.CONTINUOUS_ACTION]).shape == (4, 2)
# Test LSTM
# We need to check if we ever get a breaking start - this will maximize the probability

-    assert np.array(mb["action"]).shape == (19, 2)
+    assert np.array(mb[BufferKey.CONTINUOUS_ACTION]).shape == (19, 2)
def test_num_experiences():

-    assert len(update_buffer["action"]) == 0
+    assert len(update_buffer[BufferKey.CONTINUOUS_ACTION]) == 0
assert update_buffer.num_experiences == 0
agent_1_buffer.resequence_and_append(
update_buffer, batch_size=None, training_length=2

)
-    assert len(update_buffer["action"]) == 20
+    assert len(update_buffer[BufferKey.CONTINUOUS_ACTION]) == 20
assert update_buffer.num_experiences == 20

update_buffer.truncate(4, sequence_length=3)
assert update_buffer.num_experiences == 3
for buffer_field in update_buffer.values():
-        assert isinstance(buffer_field, AgentBuffer.AgentBufferField)
+        assert isinstance(buffer_field, AgentBufferField)
def test_key_encode_decode():
keys = (
list(BufferKey)
+ [(k, 42) for k in ObservationKeyPrefix]
+ [(k, "gail") for k in RewardSignalKeyPrefix]
)
for k in keys:
assert k == AgentBuffer._decode_key(AgentBuffer._encode_key(k))
def test_buffer_save_load():
original = construct_fake_buffer(3)
import io
write_buffer = io.BytesIO()
original.save_to_file(write_buffer)
loaded = AgentBuffer()
loaded.load_from_file(write_buffer)
assert len(original) == len(loaded)
for k in original.keys():
assert np.allclose(original[k], loaded[k])

9
ml-agents/mlagents/trainers/tests/test_demo_loader.py


get_demo_files,
write_delimited,
)
from mlagents.trainers.buffer import BufferKey
BEHAVIOR_SPEC = create_mock_3dball_behavior_specs()

_, demo_buffer = demo_to_buffer(path_prefix + "/test.demo", 1, BEHAVIOR_SPEC)
assert (
-        len(demo_buffer["continuous_action"]) == total_expected - 1
-        or len(demo_buffer["discrete_action"]) == total_expected - 1
+        len(demo_buffer[BufferKey.CONTINUOUS_ACTION]) == total_expected - 1
+        or len(demo_buffer[BufferKey.DISCRETE_ACTION]) == total_expected - 1
)

_, demo_buffer = demo_to_buffer(path_prefix + "/test_demo_dir", 1, BEHAVIOR_SPEC)
assert (
-        len(demo_buffer["continuous_action"]) == total_expected - 1
-        or len(demo_buffer["discrete_action"]) == total_expected - 1
+        len(demo_buffer[BufferKey.CONTINUOUS_ACTION]) == total_expected - 1
+        or len(demo_buffer[BufferKey.DISCRETE_ACTION]) == total_expected - 1
)

29
ml-agents/mlagents/trainers/tests/test_trajectory.py


from mlagents.trainers.tests.mock_brain import make_fake_trajectory
from mlagents.trainers.tests.dummy_config import create_observation_specs_with_shapes
from mlagents_envs.base_env import ActionSpec
from mlagents.trainers.buffer import BufferKey, ObservationKeyPrefix
VEC_OBS_SIZE = 6
ACTION_SIZE = 4

length = 15
    wanted_keys = [
-       "next_obs_0",
-       "next_obs_1",
-       "obs_0",
-       "obs_1",
-       "memory",
-       "masks",
-       "done",
-       "continuous_action",
-       "discrete_action",
-       "continuous_log_probs",
-       "discrete_log_probs",
-       "action_mask",
-       "prev_action",
-       "environment_rewards",
+       (ObservationKeyPrefix.OBSERVATION, 0),
+       (ObservationKeyPrefix.OBSERVATION, 1),
+       (ObservationKeyPrefix.NEXT_OBSERVATION, 0),
+       (ObservationKeyPrefix.NEXT_OBSERVATION, 1),
+       BufferKey.MEMORY,
+       BufferKey.MASKS,
+       BufferKey.DONE,
+       BufferKey.CONTINUOUS_ACTION,
+       BufferKey.DISCRETE_ACTION,
+       BufferKey.CONTINUOUS_LOG_PROBS,
+       BufferKey.DISCRETE_LOG_PROBS,
+       BufferKey.ACTION_MASK,
+       BufferKey.PREV_ACTION,
+       BufferKey.ENVIRONMENT_REWARDS,
]
wanted_keys = set(wanted_keys)
trajectory = make_fake_trajectory(

32
ml-agents/mlagents/trainers/tests/torch/test_ghost.py


from mlagents.trainers.behavior_id_utils import BehaviorIdentifiers
from mlagents.trainers.ppo.trainer import PPOTrainer
from mlagents.trainers.agent_processor import AgentManagerQueue
from mlagents.trainers.buffer import BufferKey, RewardSignalUtil
from mlagents.trainers.tests.mock_brain import copy_buffer_fields
from mlagents.trainers.tests.test_trajectory import make_fake_trajectory
from mlagents.trainers.settings import TrainerSettings, SelfPlaySettings
from mlagents.trainers.tests.dummy_config import create_observation_specs_with_shapes

VECTOR_ACTION_SPACE = 1
VECTOR_OBS_SPACE = 8
DISCRETE_ACTION_SPACE = [3, 3, 3, 2]
-BUFFER_INIT_SAMPLES = 513
+BUFFER_INIT_SAMPLES = 10241
NUM_AGENTS = 12

# clear
policy_queue1.get_nowait()
mock_specs = mb.setup_test_behavior_specs(
False,
False,
vector_action_space=VECTOR_ACTION_SPACE,
vector_obs_space=VECTOR_OBS_SPACE,
)
-    buffer["extrinsic_rewards"] = buffer["environment_rewards"]
-    buffer["extrinsic_returns"] = buffer["environment_rewards"]
-    buffer["extrinsic_value_estimates"] = buffer["environment_rewards"]
-    buffer["curiosity_rewards"] = buffer["environment_rewards"]
-    buffer["curiosity_returns"] = buffer["environment_rewards"]
-    buffer["curiosity_value_estimates"] = buffer["environment_rewards"]
-    buffer["advantages"] = buffer["environment_rewards"]
+    copy_buffer_fields(
+        buffer,
+        src_key=BufferKey.ENVIRONMENT_REWARDS,
+        dst_keys=[
+            BufferKey.ADVANTAGES,
+            RewardSignalUtil.rewards_key("extrinsic"),
+            RewardSignalUtil.returns_key("extrinsic"),
+            RewardSignalUtil.value_estimates_key("extrinsic"),
+            RewardSignalUtil.rewards_key("curiosity"),
+            RewardSignalUtil.returns_key("curiosity"),
+            RewardSignalUtil.value_estimates_key("curiosity"),
+        ],
+    )
trainer.trainer.update_buffer = buffer
# when ghost trainer advance and wrapped trainer buffers full

15
ml-agents/mlagents/trainers/tests/torch/test_policy.py


from mlagents.trainers.torch.utils import ModelUtils
from mlagents.trainers.trajectory import ObsUtil
from mlagents.trainers.torch.agent_action import AgentAction
from mlagents.trainers.buffer import BufferKey
VECTOR_ACTION_SPACE = 2
VECTOR_OBS_SPACE = 8

TrainerSettings(), use_rnn=rnn, use_discrete=discrete, use_visual=visual
)
buffer = mb.simulate_rollout(64, policy.behavior_spec, memory_size=policy.m_size)
-    act_masks = ModelUtils.list_to_tensor(buffer["action_mask"])
-    agent_action = AgentAction.from_dict(buffer)
+    act_masks = ModelUtils.list_to_tensor(buffer[BufferKey.ACTION_MASK])
+    agent_action = AgentAction.from_buffer(buffer)
-        ModelUtils.list_to_tensor(buffer["memory"][i])
-        for i in range(0, len(buffer["memory"]), policy.sequence_length)
+        ModelUtils.list_to_tensor(buffer[BufferKey.MEMORY][i])
+        for i in range(0, len(buffer[BufferKey.MEMORY]), policy.sequence_length)
]
if len(memories) > 0:
memories = torch.stack(memories).unsqueeze(0)

TrainerSettings(), use_rnn=rnn, use_discrete=discrete, use_visual=visual
)
buffer = mb.simulate_rollout(64, policy.behavior_spec, memory_size=policy.m_size)
-    act_masks = ModelUtils.list_to_tensor(buffer["action_mask"])
+    act_masks = ModelUtils.list_to_tensor(buffer[BufferKey.ACTION_MASK])
-        ModelUtils.list_to_tensor(buffer["memory"][i])
-        for i in range(0, len(buffer["memory"]), policy.sequence_length)
+        ModelUtils.list_to_tensor(buffer[BufferKey.MEMORY][i])
+        for i in range(0, len(buffer[BufferKey.MEMORY]), policy.sequence_length)
]
if len(memories) > 0:
memories = torch.stack(memories).unsqueeze(0)

68
ml-agents/mlagents/trainers/tests/torch/test_ppo.py


from mlagents.trainers.ppo.optimizer_torch import TorchPPOOptimizer
from mlagents.trainers.policy.torch_policy import TorchPolicy
from mlagents.trainers.tests import mock_brain as mb
from mlagents.trainers.tests.mock_brain import copy_buffer_fields
from mlagents.trainers.tests.test_trajectory import make_fake_trajectory
from mlagents.trainers.settings import NetworkSettings
from mlagents.trainers.tests.dummy_config import ( # noqa: F401

)
from mlagents_envs.base_env import ActionSpec
from mlagents.trainers.buffer import BufferKey, RewardSignalUtil
@pytest.fixture

memory_size=optimizer.policy.m_size,
)
# Mock out reward signal eval
-    update_buffer["advantages"] = update_buffer["environment_rewards"]
-    update_buffer["extrinsic_returns"] = update_buffer["environment_rewards"]
-    update_buffer["extrinsic_value_estimates"] = update_buffer["environment_rewards"]
+    copy_buffer_fields(
+        update_buffer,
+        BufferKey.ENVIRONMENT_REWARDS,
+        [
+            BufferKey.ADVANTAGES,
+            RewardSignalUtil.returns_key("extrinsic"),
+            RewardSignalUtil.value_estimates_key("extrinsic"),
+        ],
+    )
return_stats = optimizer.update(
update_buffer,

memory_size=optimizer.policy.m_size,
)
# Mock out reward signal eval
-    update_buffer["advantages"] = update_buffer["environment_rewards"]
-    update_buffer["extrinsic_returns"] = update_buffer["environment_rewards"]
-    update_buffer["extrinsic_value_estimates"] = update_buffer["environment_rewards"]
-    update_buffer["curiosity_returns"] = update_buffer["environment_rewards"]
-    update_buffer["curiosity_value_estimates"] = update_buffer["environment_rewards"]
+    copy_buffer_fields(
+        update_buffer,
+        src_key=BufferKey.ENVIRONMENT_REWARDS,
+        dst_keys=[
+            BufferKey.ADVANTAGES,
+            RewardSignalUtil.returns_key("extrinsic"),
+            RewardSignalUtil.value_estimates_key("extrinsic"),
+            RewardSignalUtil.returns_key("curiosity"),
+            RewardSignalUtil.value_estimates_key("curiosity"),
+        ],
+    )
optimizer.update(
update_buffer,
num_sequences=update_buffer.num_experiences // optimizer.policy.sequence_length,

BUFFER_INIT_SAMPLES, optimizer.policy.behavior_spec
)
# Mock out reward signal eval
-    update_buffer["advantages"] = update_buffer["environment_rewards"]
-    update_buffer["extrinsic_returns"] = update_buffer["environment_rewards"]
-    update_buffer["extrinsic_value_estimates"] = update_buffer["environment_rewards"]
-    update_buffer["gail_returns"] = update_buffer["environment_rewards"]
-    update_buffer["gail_value_estimates"] = update_buffer["environment_rewards"]
-    update_buffer["continuous_log_probs"] = np.ones_like(
-        update_buffer["continuous_action"]
-    )
+    copy_buffer_fields(
+        update_buffer,
+        src_key=BufferKey.ENVIRONMENT_REWARDS,
+        dst_keys=[
+            BufferKey.ADVANTAGES,
+            RewardSignalUtil.returns_key("extrinsic"),
+            RewardSignalUtil.value_estimates_key("extrinsic"),
+            RewardSignalUtil.returns_key("gail"),
+            RewardSignalUtil.value_estimates_key("gail"),
+        ],
+    )
+    update_buffer[BufferKey.CONTINUOUS_LOG_PROBS] = np.ones_like(
+        update_buffer[BufferKey.CONTINUOUS_ACTION]
+    )
optimizer.update(
update_buffer,

# Check if buffer size is too big
update_buffer = mb.simulate_rollout(3000, optimizer.policy.behavior_spec)
# Mock out reward signal eval
-    update_buffer["advantages"] = update_buffer["environment_rewards"]
-    update_buffer["extrinsic_returns"] = update_buffer["environment_rewards"]
-    update_buffer["extrinsic_value_estimates"] = update_buffer["environment_rewards"]
-    update_buffer["gail_returns"] = update_buffer["environment_rewards"]
-    update_buffer["gail_value_estimates"] = update_buffer["environment_rewards"]
+    copy_buffer_fields(
+        update_buffer,
+        src_key=BufferKey.ENVIRONMENT_REWARDS,
+        dst_keys=[
+            BufferKey.ADVANTAGES,
+            RewardSignalUtil.returns_key("extrinsic"),
+            RewardSignalUtil.value_estimates_key("extrinsic"),
+            RewardSignalUtil.returns_key("gail"),
+            RewardSignalUtil.value_estimates_key("gail"),
+        ],
+    )
optimizer.update(
update_buffer,
num_sequences=update_buffer.num_experiences // optimizer.policy.sequence_length,

3
ml-agents/mlagents/trainers/tests/torch/test_reward_providers/test_curiosity.py


import numpy as np
import pytest
from mlagents.torch_utils import torch
from mlagents.trainers.buffer import BufferKey
from mlagents.trainers.torch.components.reward_providers import (
CuriosityRewardProvider,
create_reward_provider,

for _ in range(200):
curiosity_rp.update(buffer)
prediction = curiosity_rp._network.predict_action(buffer)[0]
-    target = torch.tensor(buffer["continuous_action"][0])
+    target = torch.tensor(buffer[BufferKey.CONTINUOUS_ACTION][0])
error = torch.mean((prediction - target) ** 2).item()
assert error < 0.001

18
ml-agents/mlagents/trainers/tests/torch/test_reward_providers/utils.py


import numpy as np
-from mlagents.trainers.buffer import AgentBuffer
+from mlagents.trainers.buffer import AgentBuffer, BufferKey
from mlagents_envs.base_env import BehaviorSpec
from mlagents.trainers.trajectory import ObsUtil

action_buffer = behavior_spec.action_spec.random_action(1)
action = {}
if behavior_spec.action_spec.continuous_size > 0:
-        action["continuous_action"] = action_buffer.continuous
+        action[BufferKey.CONTINUOUS_ACTION] = action_buffer.continuous
-        action["discrete_action"] = action_buffer.discrete
+        action[BufferKey.DISCRETE_ACTION] = action_buffer.discrete
for _ in range(number):
for i, obs in enumerate(curr_obs):

-        buffer["actions"].append(action)
+        # TODO
+        # buffer[AgentBufferKey.ACTIONS].append(action)
-        buffer["reward"].append(np.ones(1, dtype=np.float32) * reward)
-        buffer["masks"].append(np.ones(1, dtype=np.float32))
-    buffer["done"] = np.zeros(number, dtype=np.float32)
+        # TODO was "rewards"
+        buffer[BufferKey.ENVIRONMENT_REWARDS].append(
+            np.ones(1, dtype=np.float32) * reward
+        )
+        buffer[BufferKey.MASKS].append(np.ones(1, dtype=np.float32))
+    buffer[BufferKey.DONE] = np.zeros(number, dtype=np.float32)
return buffer

13
ml-agents/mlagents/trainers/tests/torch/test_sac.py


import pytest
from mlagents.torch_utils import torch
from mlagents.trainers.buffer import BufferKey, RewardSignalUtil
from mlagents.trainers.sac.optimizer_torch import TorchSACOptimizer
from mlagents.trainers.policy.torch_policy import TorchPolicy
from mlagents.trainers.tests import mock_brain as mb

BUFFER_INIT_SAMPLES, optimizer.policy.behavior_spec, memory_size=24
)
# Mock out reward signal eval
-    update_buffer["extrinsic_rewards"] = update_buffer["environment_rewards"]
+    update_buffer[RewardSignalUtil.rewards_key("extrinsic")] = update_buffer[
+        BufferKey.ENVIRONMENT_REWARDS
+    ]
return_stats = optimizer.update(
update_buffer,
num_sequences=update_buffer.num_experiences // optimizer.policy.sequence_length,

)
# Mock out reward signal eval
-    update_buffer["extrinsic_rewards"] = update_buffer["environment_rewards"]
-    update_buffer["curiosity_rewards"] = update_buffer["environment_rewards"]
+    update_buffer[RewardSignalUtil.rewards_key("extrinsic")] = update_buffer[
+        BufferKey.ENVIRONMENT_REWARDS
+    ]
+    update_buffer[RewardSignalUtil.rewards_key("curiosity")] = update_buffer[
+        BufferKey.ENVIRONMENT_REWARDS
+    ]
return_stats = optimizer.update_reward_signals(
{"curiosity": update_buffer}, num_sequences=update_buffer.num_experiences
)

15
ml-agents/mlagents/trainers/torch/action_log_probs.py


-from typing import List, Optional, NamedTuple, Dict
+from typing import List, Optional, NamedTuple
+from mlagents.trainers.buffer import AgentBuffer, BufferKey
from mlagents_envs.base_env import _ActionTupleBase

return torch.cat(self._to_tensor_list(), dim=1)
@staticmethod
-def from_dict(buff: Dict[str, np.ndarray]) -> "ActionLogProbs":
+def from_buffer(buff: AgentBuffer) -> "ActionLogProbs":
"""
A static method that accesses continuous and discrete log probs fields in an AgentBuffer
and constructs the corresponding ActionLogProbs from the retrieved np arrays.

if "continuous_log_probs" in buff:
continuous = ModelUtils.list_to_tensor(buff["continuous_log_probs"])
if "discrete_log_probs" in buff:
discrete_tensor = ModelUtils.list_to_tensor(buff["discrete_log_probs"])
if BufferKey.CONTINUOUS_LOG_PROBS in buff:
continuous = ModelUtils.list_to_tensor(buff[BufferKey.CONTINUOUS_LOG_PROBS])
if BufferKey.DISCRETE_LOG_PROBS in buff:
discrete_tensor = ModelUtils.list_to_tensor(
buff[BufferKey.DISCRETE_LOG_PROBS]
)
# This will keep discrete_list = None which enables flatten()
if discrete_tensor.shape[1] > 0:
discrete = [

14
ml-agents/mlagents/trainers/torch/agent_action.py


-from typing import List, Optional, NamedTuple, Dict
+from typing import List, Optional, NamedTuple
import numpy as np
+from mlagents.trainers.buffer import AgentBuffer, BufferKey
from mlagents.trainers.torch.utils import ModelUtils
from mlagents_envs.base_env import ActionTuple

return action_tuple
@staticmethod
-def from_dict(buff: Dict[str, np.ndarray]) -> "AgentAction":
+def from_buffer(buff: AgentBuffer) -> "AgentAction":
"""
A static method that accesses continuous and discrete action fields in an AgentBuffer
and constructs the corresponding AgentAction from the retrieved np arrays.

if "continuous_action" in buff:
continuous = ModelUtils.list_to_tensor(buff["continuous_action"])
if "discrete_action" in buff:
if BufferKey.CONTINUOUS_ACTION in buff:
continuous = ModelUtils.list_to_tensor(buff[BufferKey.CONTINUOUS_ACTION])
if BufferKey.DISCRETE_ACTION in buff:
buff["discrete_action"], dtype=torch.long
buff[BufferKey.DISCRETE_ACTION], dtype=torch.long
)
discrete = [
discrete_tensor[..., i] for i in range(discrete_tensor.shape[-1])
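
The list comprehension at the end of this hunk splits the buffered discrete actions into one tensor per branch. A small runnable illustration of that slicing:

import torch

# Three samples, two discrete branches per sample.
discrete_tensor = torch.tensor([[0, 2], [1, 0], [3, 1]], dtype=torch.long)
discrete = [discrete_tensor[..., i] for i in range(discrete_tensor.shape[-1])]
print(discrete[0])  # tensor([0, 1, 3]) -- branch 0 across the batch
print(discrete[1])  # tensor([2, 0, 1]) -- branch 1 across the batch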

2
ml-agents/mlagents/trainers/torch/components/bc/module.py


# Convert to tensors
tensor_obs = [ModelUtils.list_to_tensor(obs) for obs in np_obs]
act_masks = None
-expert_actions = AgentAction.from_dict(mini_batch_demo)
+expert_actions = AgentAction.from_buffer(mini_batch_demo)
if self.policy.behavior_spec.action_spec.discrete_size > 0:
act_masks = ModelUtils.list_to_tensor(

16
ml-agents/mlagents/trainers/torch/components/reward_providers/curiosity_reward_provider.py


from typing import Dict, NamedTuple
from mlagents.torch_utils import torch, default_device
-from mlagents.trainers.buffer import AgentBuffer
+from mlagents.trainers.buffer import AgentBuffer, BufferKey
from mlagents.trainers.torch.components.reward_providers.base_reward_provider import (
BaseRewardProvider,
)

Uses the current state embedding and the action of the mini_batch to predict
the next state embedding.
"""
-actions = AgentAction.from_dict(mini_batch)
+actions = AgentAction.from_buffer(mini_batch)
flattened_action = self._action_flattener.forward(actions)
forward_model_input = torch.cat(
(self.get_current_state(mini_batch), flattened_action), dim=1

action prediction (given the current and next state).
"""
predicted_action = self.predict_action(mini_batch)
-actions = AgentAction.from_dict(mini_batch)
+actions = AgentAction.from_buffer(mini_batch)
_inverse_loss = 0
if self._action_spec.continuous_size > 0:
sq_difference = (

_inverse_loss += torch.mean(
ModelUtils.dynamic_partition(
sq_difference,
-ModelUtils.list_to_tensor(mini_batch["masks"], dtype=torch.float),
+ModelUtils.list_to_tensor(
+    mini_batch[BufferKey.MASKS], dtype=torch.float
+),
2,
)[1]
)

ModelUtils.dynamic_partition(
cross_entropy,
ModelUtils.list_to_tensor(
mini_batch["masks"], dtype=torch.float
mini_batch[BufferKey.MASKS], dtype=torch.float
), # use masks not action_masks
2,
)[1]

return torch.mean(
ModelUtils.dynamic_partition(
self.compute_reward(mini_batch),
-ModelUtils.list_to_tensor(mini_batch["masks"], dtype=torch.float),
+ModelUtils.list_to_tensor(
+    mini_batch[BufferKey.MASKS], dtype=torch.float
+),
2,
)[1]
)
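
All three call sites above feed the 0/1 masks into ModelUtils.dynamic_partition and keep partition 1, i.e. they average the loss over unmasked steps only. A sketch of the equivalent computation with plain boolean indexing (dynamic_partition itself is internal to ML-Agents):

import torch

sq_difference = torch.tensor([0.5, 0.1, 0.9, 0.2])
masks = torch.tensor([1.0, 0.0, 1.0, 1.0])  # 1 = real step, 0 = padding

# Partition 1 of a two-way split by the mask, i.e. the unmasked entries.
partition_1 = sq_difference[masks.bool()]
print(torch.mean(partition_1))  # tensor(0.5333) = (0.5 + 0.9 + 0.2) / 3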

4
ml-agents/mlagents/trainers/torch/components/reward_providers/extrinsic_reward_provider.py


import numpy as np
from typing import Dict
-from mlagents.trainers.buffer import AgentBuffer
+from mlagents.trainers.buffer import AgentBuffer, BufferKey
from mlagents.trainers.torch.components.reward_providers.base_reward_provider import (
BaseRewardProvider,
)

def evaluate(self, mini_batch: AgentBuffer) -> np.ndarray:
-return np.array(mini_batch["environment_rewards"], dtype=np.float32)
+return np.array(mini_batch[BufferKey.ENVIRONMENT_REWARDS], dtype=np.float32)
def update(self, mini_batch: AgentBuffer) -> Dict[str, np.ndarray]:
return {}

12
ml-agents/mlagents/trainers/torch/components/reward_providers/gail_reward_provider.py


import numpy as np
from mlagents.torch_utils import torch, default_device
-from mlagents.trainers.buffer import AgentBuffer
+from mlagents.trainers.buffer import AgentBuffer, BufferKey
from mlagents.trainers.torch.components.reward_providers.base_reward_provider import (
BaseRewardProvider,
)

Creates the action Tensor. In continuous case, corresponds to the action. In
the discrete case, corresponds to the concatenation of one hot action Tensors.
"""
-return self._action_flattener.forward(AgentAction.from_dict(mini_batch))
+return self._action_flattener.forward(AgentAction.from_buffer(mini_batch))
def get_state_inputs(self, mini_batch: AgentBuffer) -> List[torch.Tensor]:
"""

inputs = self.get_state_inputs(mini_batch)
if self._settings.use_actions:
actions = self.get_action_input(mini_batch)
-dones = torch.as_tensor(mini_batch["done"], dtype=torch.float).unsqueeze(1)
+dones = torch.as_tensor(
+    mini_batch[BufferKey.DONE], dtype=torch.float
+).unsqueeze(1)
action_inputs = torch.cat([actions, dones], dim=1)
hidden, _ = self.encoder(inputs, action_inputs)
else:

expert_action = self.get_action_input(expert_batch)
action_epsilon = torch.rand(policy_action.shape)
policy_dones = torch.as_tensor(
-    policy_batch["done"], dtype=torch.float
+    policy_batch[BufferKey.DONE], dtype=torch.float
).unsqueeze(1)
expert_dones = torch.as_tensor(
-    expert_batch["done"], dtype=torch.float
+    expert_batch[BufferKey.DONE], dtype=torch.float
).unsqueeze(1)
dones_epsilon = torch.rand(policy_dones.shape)
action_inputs = torch.cat(
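
Per the docstring of get_action_input above, discrete actions are flattened into concatenated one-hot tensors. A runnable sketch with made-up branch sizes (the real flattening is done by the action-flattener class, not shown here):

import torch
import torch.nn.functional as F

discrete = torch.tensor([[0, 2], [1, 0]])  # (batch, branches)
branch_sizes = [2, 3]                      # assumed choices per branch

# One-hot encode each branch, then concatenate along the feature axis.
flattened = torch.cat(
    [F.one_hot(discrete[:, i], n).float() for i, n in enumerate(branch_sizes)],
    dim=1,
)
print(flattened)  # shape (2, 5): [[1,0, 0,0,1], [0,1, 1,0,0]]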

43
ml-agents/mlagents/trainers/trajectory.py


from typing import List, NamedTuple
import numpy as np
-from mlagents.trainers.buffer import AgentBuffer
+from mlagents.trainers.buffer import (
+    AgentBuffer,
+    ObservationKeyPrefix,
+    AgentBufferKey,
+    BufferKey,
+)
from mlagents_envs.base_env import ActionTuple
from mlagents.trainers.torch.action_log_probs import LogProbsTuple

class ObsUtil:
@staticmethod
-def get_name_at(index: int) -> str:
-    return f"obs_{index}"
+def get_name_at(index: int) -> AgentBufferKey:
+    return ObservationKeyPrefix.OBSERVATION, index
-def get_name_at_next(index: int) -> str:
-    return f"next_obs_{index}"
+def get_name_at_next(index: int) -> AgentBufferKey:
+    return ObservationKeyPrefix.NEXT_OBSERVATION, index
@staticmethod
def from_buffer(batch: AgentBuffer, num_obs: int) -> List[np.array]:

agent_buffer_trajectory[ObsUtil.get_name_at_next(i)].append(next_obs[i])
if exp.memory is not None:
agent_buffer_trajectory["memory"].append(exp.memory)
agent_buffer_trajectory[BufferKey.MEMORY].append(exp.memory)
agent_buffer_trajectory["masks"].append(1.0)
agent_buffer_trajectory["done"].append(exp.done)
agent_buffer_trajectory[BufferKey.MASKS].append(1.0)
agent_buffer_trajectory[BufferKey.DONE].append(exp.done)
agent_buffer_trajectory["continuous_action"].append(exp.action.continuous)
agent_buffer_trajectory["discrete_action"].append(exp.action.discrete)
agent_buffer_trajectory["continuous_log_probs"].append(
agent_buffer_trajectory[BufferKey.CONTINUOUS_ACTION].append(
exp.action.continuous
)
agent_buffer_trajectory[BufferKey.DISCRETE_ACTION].append(
exp.action.discrete
)
agent_buffer_trajectory[BufferKey.CONTINUOUS_LOG_PROBS].append(
agent_buffer_trajectory["discrete_log_probs"].append(
agent_buffer_trajectory[BufferKey.DISCRETE_LOG_PROBS].append(
exp.action_probs.discrete
)

mask = 1 - np.concatenate(exp.action_mask)
agent_buffer_trajectory["action_mask"].append(mask, padding_value=1)
agent_buffer_trajectory[BufferKey.ACTION_MASK].append(
mask, padding_value=1
)
agent_buffer_trajectory["action_mask"].append(
agent_buffer_trajectory[BufferKey.ACTION_MASK].append(
agent_buffer_trajectory["prev_action"].append(exp.prev_action)
agent_buffer_trajectory["environment_rewards"].append(exp.reward)
agent_buffer_trajectory[BufferKey.PREV_ACTION].append(exp.prev_action)
agent_buffer_trajectory[BufferKey.ENVIRONMENT_REWARDS].append(exp.reward)
# Store the next visual obs as the current
obs = next_obs
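
get_name_at now returns a hashable (ObservationKeyPrefix, index) tuple instead of an f"obs_{index}" string, so per-observation columns stay parameterized without string formatting. A sketch with a stand-in enum:

from enum import Enum

class ObservationKeyPrefix(Enum):  # stand-in, not the real enum
    OBSERVATION = "obs"
    NEXT_OBSERVATION = "next_obs"

def get_name_at(index: int):
    # The (prefix, index) tuple is hashable and needs no string formatting.
    return ObservationKeyPrefix.OBSERVATION, index

buffer = {get_name_at(i): [float(i)] for i in range(2)}
print(buffer[(ObservationKeyPrefix.OBSERVATION, 1)])  # [1.0]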

8
ml-agents/setup.py


from setuptools import setup, find_packages
from setuptools.command.install import install
+from mlagents.plugins import ML_AGENTS_STATS_WRITER
import mlagents.trainers
VERSION = mlagents.trainers.__version__

"cattrs>=1.0.0,<1.1.0",
"attrs>=19.3.0",
'pypiwin32==223;platform_system=="Windows"',
"importlib_metadata; python_version<'3.8'",
],
python_requires=">=3.6.1",
entry_points={

-]
+],
+# Plugins - each plugin type should have an entry here for the default behavior
+ML_AGENTS_STATS_WRITER: [
+    "default=mlagents.plugins.stats_writer:get_default_stats_writers"
+],
},
cmdclass={"verify": VerifyVersionCommand},
)
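
The new entry_points block registers the default stats writer under the ML_AGENTS_STATS_WRITER group. A hedged sketch of how such a group is discovered at runtime via the standard library; the group string is an assumption about what the constant holds:

from importlib.metadata import entry_points

# Group string assumed to be what the ML_AGENTS_STATS_WRITER constant holds.
# Python 3.10+ selection API; on 3.8/3.9 use entry_points().get("<group>", []).
for ep in entry_points(group="mlagents.stats_writer"):
    get_writers = ep.load()  # e.g. the get_default_stats_writers callable
    print(ep.name, get_writers)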

29
ml-agents/tests/yamato/training_int_tests.py


import argparse
+import json
import os
import shutil
import sys

log_output_path = f"{get_base_output_path()}/inference.{model_extension}.txt"
+# 10 minutes for inference is more than enough
+process_timeout = 10 * 60
+# Try to gracefully exit a few seconds before that.
+model_override_timeout = process_timeout - 15
exe_path = exes[0]
args = [
exe_path,

"1",
"--mlagents-override-model-extension",
model_extension,
"--mlagents-quit-after-seconds",
str(model_override_timeout),
timeout = 15 * 60 # 15 minutes for inference is more than enough
res = subprocess.run(args, timeout=timeout)
res = subprocess.run(args, timeout=process_timeout)
end_time = time.time()
if res.returncode != 0:
print("Error running inference!")

else:
print(f"Inference succeeded! Took {end_time - start_time} seconds")
print(f"Inference finished! Took {end_time - start_time} seconds")
# Check the artifacts directory for the timers, so we can get the gauges
timer_file = f"{exe_path}_Data/ML-Agents/Timers/3DBall_timers.json"
with open(timer_file) as f:
timer_data = json.load(f)
gauges = timer_data.get("gauges", {})
rewards = gauges.get("Override_3DBall.CumulativeReward", {})
max_reward = rewards.get("max")
if max_reward is None:
print(
"Unable to find rewards in timer file. This usually indicates a problem with Barracuda or inference."
)
return False
+# We could check that the rewards are over a threshold, but since we train for so short a time,
+# the values could be highly variable. So don't do it for now.
return True
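
The new timer-file check reads the gauges that the training run writes out and looks up the override run's cumulative reward. A self-contained sketch of that lookup against an inline JSON stand-in (the reward value is made up):

import json

# Inline stand-in for the 3DBall_timers.json artifact.
timer_data = json.loads(
    '{"gauges": {"Override_3DBall.CumulativeReward": {"max": 87.5}}}'
)
gauges = timer_data.get("gauges", {})
rewards = gauges.get("Override_3DBall.CumulativeReward", {})
max_reward = rewards.get("max")
print(max_reward is not None)  # True -> inference actually produced rewards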

439
Project/Assets/ML-Agents/Examples/3DBall/TFModels/3DBallHard.onnx


(binary ONNX model file; contents not shown)
�Ӽ��޼PS�A�=ʄu��='�D�x��w�<w3'��U�=X�;;�m��e�.=)�q�~��=Rq���ټ���=� � ^6�n��!@�=�xj����:7�z;��;瑦=j������<@ �=�� >���=np:=g)<�J�;܅=�v1�T���A�<=]Z9pj��.�R�d�ֽ�ݼӻ=�T�������=*��6R���M� �+;��`>0�������ӽ��=D�=?rK=����%?)>�����Z���������=��3�t�7���<#W,���9:&f��jux=%�s�"8ν���=����~��~]���g��G ;= 9½=FI_;��>����\���½��B�:�d<�F���f�=nx�O .<�PW=+5��E5�.�0�v��<���<
����=�&<=�A�C����I���󾽏����!u;�#���ĺ�t[ҽ���=�ǀ=�A�=r����=dP$>�ܤ=���=H�-�+L� �=�I�=Z�@>����bP���� ��ni=/7�=�Q�=iJM��D|>f��LN�Z]��M��=�l<���>.������;/N=;�b��o�=���<�)潃1��3��B�<E���=���Ob=�s��̠��\�=A�=<���x��=�;#>_�<�.�<��N��Ӄ=���M������=�f����>�4����=��*<rɮ�!��=Ѹ��0�=�@�=]������0�b��^]�1�;[z�=�@k<2[��9�����I/�=Wa����<�Ky�5�=La���=z;��@>]�R=�K(�r�>��U���z��H0�kZi��p�����<ɸĽ�Q"=O�Ǽĭ6��ӑ���>��a�"C����\=���=Lֽ��=q>R�Ľyhn��d�_��=/gs>b�
��f3��e��������>>�*<�L�<�KN=�}>k�=�,�<&�A><V��4ʂ�<Do=�p�=&��=�ߒ�2}u�>�)= e=G�����5=[�<<P�����=w � �=�{���Z����Ug��z������=��b<��=�F����,=��9�쉽����K= =�\�#���#��=**�= �=��� ��<�:c�G��ܿR=���=cٽ򦳼�j��H^>�=U,;�n����*=��=�u�=�X>�S<`%��f���/{����<a�6<nu�r(4<f<N
���0;��2�>���$᾽N�zE��k�=�ڽ�5>q��<���;e��=j�"=����S�]�C��B��V{_���+�!���μpq=�t1=�`�=�"~=}���?� ���E >G~Q=�> ���q�攽��<zX��7#�� 1������cO�ο'>�*;�+��<���<x�=c���9�`g�<8���򃽉C��+��jKL����󃌽2+6��9b�������z<�-�: ý������l��n�=*x<�F�=} %��l��19=MC'��� ��~ӽL��=+�4�H��p|�=�/�9���9���<h=>@��=���=Υr>]���[��4- =C
>?��=Z_��v *��
�c���]=�wY����=z�2��}�.��n��IP=\��Qd>��̽�C�;��l=��k����=�V���|ؼ�d�=��-���<�n��`;��V�د"����4���`k �ر =& ��u�=�K�<o��?n�=��o�g���}�<f˽���V4=}f1����������b<#�=��C<׬��]S������(/>�S��q1��d�=��C��)�=�[7>�\:;M~�nZK<� =��G���=��1>{>��O�>z�޽*�<4N�=5Z�p�P<"��M� �?�7<$hb=�� ����=�C'��:}�8� >��x�GTƽ �<tߌ=&�`<�d�Yc�=���y���0�1��B�=��8=�~��q�ؽs����B%��]��8#�s��=���:�Rǽ�J�����e7��� �Y�)>�Ù=��ԽwP�=إ<�l���/��[��P��hZ^=@zɼ��7��P�=��Y=���=��==����%<~O���_�<�,Y=~�=�R=��^��H=D7�=�L ��z��eP��E�:>�r.����5�<;��(<x��=�Ӣ��Nw���=����ĭ��x�<��g������$�= �2>�� >��<��<t�^�#��=K%�v��=SQ&�2x8�1���2J�=0��<���:(����3�m���K=�r=&R�����. �����x��Ў���B�m:�]һ߂a<�r>d譽m�Hmb=�cР�-T��35<�攽 �%=��=0�9�<�;��D<�����v=�n������g�
�����=�ı�2ތ���c�����5 >����HQ^=�8
���{�� ��Rf< % >R� ��>G#Խ_{^=���=)�2�|Z��� ��0o�<�3V>W�&��<߼��Z=c�<s&�5� ��3=_��=����HD� 5��03��ǚ3=�$����@=�נ�+߽_�h���=���<��佚n�<s�=�� >��=�ں<��"�yOF�opv=3- >�Ax=m�߽�=6Z�=�� >���i=�W6�Dp%��Ԉ��K=UD �T���)�=�j=��� ���-��� ��Ay�[SK>�>�=��=�H�y=)ZW��GF>�_=y��=3[\�y���7 �<9��<߷u;��=��E�k��f�=��6�Cʾ��S�<�?>Ҍ��)��;�F>�lI�� ��XF�9���ޒF�����"< M>Oѐ�����^ֽe�9>��=� �<��:>��<��½M���,�����۽�&���X= v��N<�da�C1νB&�=��$=�ȯ=T�q=�}�P�c=��ýㆶ�/��<O%>A���qs=Hz;���=0���_��K �<`� �M�����">\"�j��39�=�6\=̌�<B�{= H���>�,����=���&l��S�ӽ��뽟�$�����?>gw�)��<�S=��x;p�>Yb��)�=��$=��弫Lv=���G�G�3��=�o׼Do�C}k>�yv>��;=\ ��ǽ�}��� �=ս�����=z�R��= ̜�x�=Zj�=�=�<ڃǽ@(�<Ƿ�<�R%���<l� �(7���`�c�Ͻ2<=*�ɼ> =���<;VO>q���?;��`D����=�0�=�{��LE��b_� ���V��)y�xZؼY����>��`���Xt����=�M~=��}_>����[*ɾ*���K*����q=�<
�2�ª�hI��Ž�?�=�1ڽ��^=bi����"�H^޽�f�� ��=r�M�\l�=OZ�=�D">����z��]��v�=���=~�ɽzZ�������5=��>Zd�=���=�jüˈU=x�=�/6�������q����ٜ�I4���I���{=Q�=IN���;>�ٷ�̰�<��Ľ`N�=���<&{�<���<��<��+>7[�)b����n�ފ/�B��=��>�
�K��:K� >��z��j:�#Ҍ��)�ـ=.OT;�Խ�Q��B�>Dth=��*>n�м]M��ms��/���UƻHI�>r���Ա�<I�輹H%�92�=R�^�A�'>eJ�tB5�hh�=6���˳�r =hʅ=�.>��(>L(�?-���5�<��u��4;=���<��>�η�ɴ�=�XP� ^0��/�=��<�I��Lw���;�V���N>���=�M=7�!>$�=?y=��ʽwxڽ��� E>�2��j0�F4=�풽�c�� z*�����4����n�<i��=1��<>� =�� ��#�̮*��/,;�����弞 H�1u(=�2��z�<��n<��s=lQ��mt�=tͼ�F>e>ӽo��=:R��}+=j�0�� ��I�=/��<��L���t=[F��3$��˾�;������ټwq�=6ϖ=��ٽ
�%>�ǁ��-=�N�f ���=Nݏ=����>���Ƽ����s�c�6��==��
>���������<P3���.�і=;�G�T��<��1�F�$<�.4=��r�u��<��4=$�<R$f=�G�Q
��oS�32>�%����x��$���֛=�E��]�R=��=P�ļ��,�}�A��!�U<�;T�=�\>�v�=GW<>? ]=^>���x�y[=�\���1:=v���]��&sK=��1=�<�Q�`�ƙ<��D;6u\��_��y��<~��= �=l��=]>|!T=���=�t�;���� �=h=* ��둼߭���{�=>��=��=���<�ٴ�5}=��=�B�\g�A)��R�>I�<�Б=�Q�����V�/�0�`�����[�=��9M^�=�@=��=�9��'�����=ͮ�;�AŻ��;�������=��`>�I�= >��)�2�z<b�>8�>X��J�=s�d��-�="E��DB�����rǀ�:�����=�ע�'�<�਽d��5����EF�����AZ�~��=��3>c�T�3�-����=�� ��u�;�Q�=ɇ�C�(�򚚽*==��=9�=�����=BZ
��8>�y�91|=�� =#��=
��=�c>۹�<���=ۄν��]��=�R�<�7f�{��=�B����:?��:c3����,�mvf��)�=��#<������"�I�d� K>�Z]߽k7�����+s�=�(E>�>3��΅����Rx=�K�����=D8g=w��~�w��Ͻ�5�u�x�M=��(>ȋ�=F��=��W���1�>c���J�ލ ��hc>�<��<�����=��g��^}
>?n� �>r8��%�=0�?=Ӡڽ���=&=E=����t>��s�m��=���<C��<l󥽰3������<Ð����Y�c7��h�=�{<gR�� �<��>6��<��=&ߤ=H��<7={V���u=�����H>b�O<b�<n;�=�ԅ=�Ȑ���߽�e���׽�^F��1�?��=���<��W<��>�ɽ���<��ҽ\�>T�����;�f�ڑ�=h�ɼ���=ˣ;>#YM�:�:xR�=M�=� <>�Ap=ہμ��a=���z�; ֦��}����w=d��=�<w=�9M=�����=P����w�P%>ie�<?)�m>�N>}����ѭ�K� =X]��1T>��R��= ��<Đ,�EFl�Ĩ�<�����,�=P��?>�rV��/=��4��<xj�<9["��'=a�;��,��j����I�=��b��C�=�̑�-2=���=�T��7g=�h�<�ޜ�%��=ÖԼq�F��/��;A�i=��=lo = �B=q����<Y �=�Wc=�E��Z%=�I'��r�Ƚv;��W��C��=1>���=
��=���7>\c=�~{=^E[>���d=�;���<�<��+;F`���[�������=8i�<���{����;Us���=��=;�<_�׻��5=��ͽ,�=efk�[�G�#d(����=����� >�Ts�a1���K1�U|�= ���Cr��9��=ϛE��� >�����P��2���(�>�:�=PH�=�������Xy���E��ü������1=���=u�+ ���ڟ�%��<����sR��3 ���;qi>*�=Q�ż���<�9�mJ<l�2=�=��A8�*Ž-�;o�>��:u]�<��>�=�%.>���q=7�;���=�-�=�T��R�8=�6�=`��=�s�����>^���B)>�A�%�>�utE=�ĕ=����I=��[��V���Z@=�0\�-���H��=q�����=з����F����:��Ƚ �A= ;i�-��c�B;��=��=種�����;�ػb}��إ�H��=ǹi���Z�Lá;G㼠����،>p�2�P=ؽtm��VX�=I���Ӎ�!��<��R=ґc= ��|����7�k�ͼ��f����=zG;�M�ǽ��n��U�<g=�A���3=J���d>)}T=��S����e�jS����N=��+=IMY���H��W���^r>댔��3� Y���<н�B���F���ߑ=aX!>�( >"%�����٠;$��=d�'=��!���Q=��=�%�K`�����=����3�����:��5�=O���z'���T=PO�=IZ)>�M�ʰ��7���=�/.<V��<-��=w��=:-=u��=
̂<(ɦ�����(��<��B�U1�=��O<��n��ѽ��>�p=?=X�2�C�=�ڔ=����q�X=���9.���@�==� �=��*=�!M��9�?ԏ��F�.��=���=X�w=��V�֘�����������\<� =H�0�;����F�<��=�Kν�����Y�>`�2>cU�;��1��uk=�P�<x:��쯼��=�OJ>��Q�\��> Yݽ/���B �6�����<�׾������N���� > 8���L�=��=�#���
>�j�=��u�����"��۽3
<V���4m�=$�G<t%��Q���U�M�>�΃=Ò��4��=I �3ںh=���\2='�^��9
>E��=�=;}�=\+;��ػ-�K��=Mb�?#= o>�9;�?�<M�;˴ǽ��>>�>�^�=�r>��&��>uf
>�ܪ����=���=���e���=Ζ�=���Bx���#�\�ٽ�i3�Y鱽^�r��h�h�� ���.i����G~;� ҽ��#= |=0���ʫ�.��=[S�=�9f�ߞ˼e�=�F9��R�Ht=��;����=��<�_�<?�/��4ϼ\�.>B�<c��;�c�����=#��<�'�=60�/ѽ�/��w0=!���~Q�Ý >�l5=��<O��ʹ��M�>9z��x��=�1B���&=EY?�H �<jA<��U�������[V����r��b�=���=���6�;;������=I��=; �<������=d?<,�=��˽�OY���G=�w�=��6=ڼ->&�N='D�=)����ٚ<���;��(����=s�G>�#>M��=]��=�:�=�%P�W��=��!��⎽UT>�^��gr����ǽB4ټ]6=>=��c��3 =�r"=��x��>�kȽ�u�=�R̼��O���>��<�Ȧ=f�>"�2=)�>��$>�n>����3���L]<�\������q�<��<�R⽷̀<[܃=��y>��C=�0�;�3 >gT=�O������:z�a I����L�=U���Qg{<m"ѽ�8ȼ��=@/�=�m&>Jf:>�O>�V�=��=h�N�EUH����=� ��i�==������<��I=�H<rj�<ڊ=�&>g��<�su=n: ��:���j�>üUgX=H�'>��
��;M��Ѿ�zY=���<��h������Z�=��=�2"��7�&�=+�Z=q�=+ ��a����aV=1�=�k���c2>�"����=�$�<哽PNս\S`��糽w�;
��<�Ǫ�� ><K��W0�����(�<�m[>8���d�=&��<\L��9���nk��+=XX�<A=��Pw�
�u��������M�����;��=�C���5/�� <d�Z���;�6���A�=��w=�T�<�0)=�}�<����Me;�c�J��Qs��y�ޤ���J��E��=O~�����=V������=�<q�"37�o��=���a� >�p�<���=j��<iYo��F@< ~>wt���� ����=�G�:5G���^;�z�<�C��(����=�Q<1���f㽺CR>�o�N�=�}�����<��@=�f�����_Ə��:����=���<�?��=�<����L��;�4��o� ����=4Mݽ����a�l=�a=�:��y�Z=�wt<CI�< ��%N�;E���Z �M��p=)���y�y��$���n==EN�;Fa/�@��<Q�o=��=`Ҹ��y��nN�=Ħ�=R�:>�����p0=�)l� ڳ=()�=���<�w�<��@�����a�G�:[,���<�9�s=��N�Wٽ��=�l�<q�>�@�=��=�P��Һ=���=���M�M>���=(m= �ュ�@��j;��K= �<����:H�=�i5>{Գ�T��>�N=�s�9ý-�z��n =���=�1%���Ȼ��
=Ϛd>T��=-�½,�0�I�<H� �d<�<V@�<S�H=U��=ɱE��š�0|�=�:�gW@��S�:���<��a�鰎>��.�=8z=�ܽ�F�="��=m����� =7t���9�󫉽G�=��� َ���=6��KNk�9嶽=N�=�I��y$�0��=fϽ�)��^*>O# ����=$¼A8=��<=<Z=����#X˽O��=��P�M�ɽ�;?�[�ѽ�T�����磽���<��=PV�=��s=�� >�� ��=�e�G�u�Ӵ_>l$S=h@��=S�=h������;�"S��Jk=��@=��)�`���OL�|���=� #�C᳽?���)��=�*=pZ��|��I�h��פ��jv�`:U=�;���==�>�Tu�y�(<�P��cA<�"���o=��<�- �=n� >A�/>��<;��=4�=�TX�҈>��l*�p�)����<�̶�fO��u��=�컄Rǽ���<dFǽ�爾�� =�)�=��d= Zź}&ɽ��>5p��2� >�9S��i<�IG<?�=�5�=�N����=¹�=+�?=�L����<PFc�(�=DVT�+X�=]� WV���,>>��=ib�������]==��u��m�=`�j��5�;T=��_>��>�6��ƙ>>���;c������+4����w>L_�<5�
��ݽe㰽<rm=��H>o?i�SH�<)_���$=�R�`��=���=!=�J�۹���=&�r=&n�2k�=�`;�J�=�"�����2�=�,�=4_j�Q���ݓ>V��<�H����G�G��S�=�?z;�"�;=0D�=�Xz:6�h>d�c�pIݽ�=>R�?�Li߽ne���fz<�ɼK��,3�����O�������C����>�}^;-5u=А2���p�XJ�=z>����>C\�=�3->��꽇.#<,+>i��B���>p�=y��g��}&>ٲ�<V I��+��iPC=�Wg�^�FVͽ���=զ7=�d1�Nn�>�g<��轑�y<����Au=7/Q>��=����q>�)��"�����a> ��=�����Ј��
��{��=w��=]�d���`�S���b=�5t=�Y�=. >��$���>�te�q�&��=�����|���,�=�f�������={��=�O/=_N�=�(�=��k��=�� =��=�:�=���=�ʽ�q9=llE=�Ø<��B> �V=A�=��=A ��U��!��
s`�]9]=�(L<��g�e�<�����=�Y=*����<=X����V>ud=���=r, ���
����۵��F
F��o_��v�;B��a���N�������=)�w=����}J=Q�^�#�ֽ/]˻��>Z�x=���y����Az�� <�,��5��كt�]�M���ڽ�G}=�a�����;��S=���<�f_�%qq�鎌����=/�c>�����,> -�� V=H�H�~�Q�ƁP>�*/�����6,'�� ��B)�w�>�-��3�ɻu�<�K�S<"�Խ��;x�a�/R�����{�(>'v�=������� �ֽ��������l������.������G�u�~�(�������>�ڒ=��'� �J�ݼ@�����
��/���>���� o8�5y0���u=XE�=��=&�������&�=B���ٹ�=��=�O�<��P���'�����qi��A=N��=��=Z�ֽ���QsY=�`޽�Z9=i�R����=i�B��9r�<�S'��#7��%2�=�>s+��ҽ >��=<0E$>T��<7�mx|�kC����U�ܢ����=���=��o<k�����"�B�CQ*=5��={��H4ػї�� ��=��꼯�����B�s=܁j�l�� q/=ScP=�Od<X�P=[�k�<���=w =�7!�='�J��X�=�h=���<�C�N�>�=�� ��>@��=e5>�$�=c���h=��c^=�C2�)}�=B�\�� �=���a{��8`=�+V=�q>��>�F�=!v�=�C�W`�=YB���V�����C���O=�>��>a���� >��սc������}��@>8r�������4=���I�>�ó�ِ��F�⽇G��&T � ���@���xq;>}%���<��C<�QF���x<����P��<6��= �y��!Q=���
��;'9j����<�]j�:d��f-���
������a6�ȾӼ����Խ�ѓ�g��=셭�٣<��A�����Hί= k;X2==��=��=p�=4������=�Ž� >L�>�v9����X�K���?����9�;�%=�ս�\/=S��m��ꢼ�g� !�<���hq�Is����H���6=dW�=�����O�=�=���-T��>>S��=�a���7<�? ����=?̼+�~>�u�<E�=Sa\�ٗ����=�Z(>��K�h�5�,c��&Լ�}�= `����=9,��t�-��[�D�6������_��)>���=���<�>��[=o罚� ���=��=�G)�0{_����<�#='G�;��<��<v�B=��)����=��U��*S=z� =֟��z�Q�rBE>���<�fm�*����O<u��_B=��J�r/�m��<P =��˽��"=��>�V�������|�a��=��i�������>�+>�&�>8��1ֽbx��Q��>74����x�=,V��?Q<?�ܽ���=@��Wz�;:W�H��Ӂ=R����q���#�=Ѻ�<��нn8$>���=*��=�)~����= *�=MƑ=D@��CWq=�z���h�<F=��ܸ}�Y�������ɔ��}V�7�����<��?���-�-z�=� ��~��É9F^|<���<�����$�=v�=FའC��O ��i��=�� �8��{�5�Q�� ��=d���>�ż�
�;G1�;�$�=���j52��R�<�*<���08{*��V�=涾��!>��OG��n/)����� (�΁׽��$=9 ��!���|<u�/>�m�J
�Z��:��=���=͆=D,@��K�j��* F���'�[e�<B�>��H�@���K�ս!� >�!�<H�V�Y>%�܈<��0>�ҍ<|�=� ��]㑾�+>/c�=.��=�ڍ=�<�=�q�=�֏<�=OU>��O=e�|���:�F.>#R{<`w��֍��1�y���B<���=�J`�l�n���#�"��3��|x�<��=�j�=]t=�>{�B!���)��� ��������‡�=}6����{'>%�,����=����*�}�ާ�=����'�ף8=���=x;r=F���[r*���=j�>0�e<�H��I�={�����<�y�)S��덣��-���>�xa�Y��<�!=��o���/�ؙ����g=����V3#�e[=�i<+�����<0�ڼ���<�=D|�<v��,R�=TP�w_<�o=�=x��=�Ӌ=Uj��S��=�'�=8Ul=&�q=)v>W��-Ln=M�M�ku����6=��>Z+½{�=�K�<N��;��轹ډ<�6�q
콺�S�=���=ԨE>­������ϡ�cH ��m4>�b�=H��=~�����+<�2����=���=`y>��)���+�=>�ݽ%��=���=�?)=����X�<a�&��Gz����=zA�<FY˽h��Ɇ#����=}��׎���5=�������F���J���c:j��+�G�=����KD�I�̽��?=��/�ソ�z�<G�T>:�"=�������)҃=[ő>Q��=w ��ZX��1�=�%��[�=��S>���:$K�=�.?<��=��ʼ��';j{�}8�=op��+����=�)����<>�v��Z>pt�=9�=U9�<Mӽ�qW=������)��#�=����9>��g=���=" =�}=E����Gk=���7*�=�t �b>Z��'��X��?�=�Oǽ�಼��>��= .�=U���P�'>���U9M>�X�=7���E�="�=sf�/v�=n& ;�~g�����ӻ<<�tK> ׽� =�"d��&��T�4>�� �2)v��"w;-��<U�������;o��=~P������w�=M���6�"���m������R��^8J�u*<����Ľ����0�=�{j=��*����<H4��½C�ʽo�>���<S�� sJ<!��'�E=��SLU���}=))�ޖ��
<-��=�m�=5�_=�cz��Ӑ<g`�������ܒ���'��-\�FN��f��=�`�8߆�=�����6�=Q2=��=B��"�=Y�����=��uy3��"�z&��+>V=��=�q�����)�;��һ:%a=�bV<�FǽlԒ�Z��< �����=��ռC�������+(=�t3=��<r)�<L��=��ν�j����,����mр<7���v$�����=J���@�=�����=��=����ݟ=r�&�������ʽ��(�������=����-�ļ����}���&� '<�9��/��=���ѡ�=X��={->��=���
M�;� �=�<= X>5V����B>$�s=e�=�L���}F�(��<���;m�4���0��S�~W(>6��=y�׽D{
�7�����=.A�A8���ǽ���P>�i�=7-=��`>3.Խ?t�� �eX=UG�=X���(
:��<3>ꥫ<�R=E!���L =�v������ݼ5K�=������'=�9�<SD<�ı�<���>_��<�E;����p�)=��<�L6�S,�=�3>u>�;��H��> ����?��9�*>Z����c�=�l뽵%�=��=}m >���<�b$�V�X���=�͝��I��a����=&�����=�?���w:���D�j�<�� G=`��<�0����<��e�h� �[����8��5� ��R �a�`<�B)<Gv4�Jg����o�\QC����& �<�(X�z07�Se�E�=���=�1 >��=�_������:��+=��ƼQ2[�-b�=�>���mд=�آ<�����7��h�0�ԨF<�d;�\&���.=�������5��<��b>qsR���6�vԺ=B���7��=5ֶ<�t0�w ���P<����<�$�G�\�ـ�=5� �s=��>S^t=|�=�,�=kW�=B� ��x�=C�=}�ͽaM �9� ��� >R~��"�<x�g< dP=�7<&�=�?�=�
>���D=i�=,/L��Ҍ��5��L����?�_o��p�<�ƙ<Wrr��@�0}U<���{N�7��=D��=����U��Wl�f��Y��gؽl6L<�`�=��X<#���Bc轝0�= .1��캝��<=䠽�+!>&�.��ˬ������%�=o%��c=�yt=#"�;�f�<�u�Y��=d�F=[�@�ܤ�����j����=+��=�= >���=\{�=�G�����<�� >�Ĕ��.�=� ���������=W<3�:>�zF=_�[��»糼4H�+㎽ �=N낼t��@\�L�)��?B=�%�<�� �,��= �>��3=�A>
�>����]�=t �=y��>�i=R׽�KR>��Ҽ�T��K%;^iܽb5N����= n�=B[��>�=��=m(�;�,>tW<]����6���D�"�=F*=Yv¼����f��!X;�0�=�?�����o1>w������<��G>�P.�>�=1�"���;��*ؽ9����>�΃=�C��A��j�L�ȼ9 �<2"j�l�=ʩ�;$��<ƽ(��Fݼ7WP�r�G=���=S=;��T�=u%�= ^=��C�t>�@c[�Ұ�=�w���xF��������Ϧ��E�=N��"$Ľ�f���>̙O=�Z��&�=*��=�:���<,ϩ=^/�=\j�;�dʼ}(I=L��=!T뽉ͼV�=� c>�n3=Pö��'I�65���%u�a��=�۽=ʧ��I�X�}�Z=SK�=� �=/����6��\S����=y���c�Ԇ�= ��8�T�<� >F:.>�u��]�=���2�>6�2>%�T=󅩽�3���v= �~=r�$���k�d��=N���#(�����J�|���;��S�J���\]��T,<�W�:@?3>��=�G��,����=�=f>[�>��6�U��<�X>&-
��r)��<��t�#�c|�;� ����<*�b�#�-��5�=�<}��g�=_�N=�~���U=�u6>���=k �����=�6d���f��l�=�#<r*>��$���>�����u>F����ʂ���C�U>=Η�<�l'�&�߼���=K�P����<ڀ���V�=iQ��%�:�������;jͨ��X�a� =^��=ѝ���� �Hs�=��=�Ȅ=o<� xJ�������s\ �a"\=�l1> �M=B�_���<�SN>D��= �>��3�}��=��ռ�_7���}<r҅�y\A��N�<5qƽ�y=�="�>󅗽 �:�5==�҉���s=��C�"�Y=L�{[ٽL1�<;�8䆽K�Y��ʙ��k��#F�'n�dK�=ن�����E�߽�ռ�qN�f�=Z+����\=
T�=����Q�̸��[L����P��@�K=�*'=h2�<�?���>�=��G;{������<hQ��} �=)��,F(>��G��=������ϼ{����:���H��¼�q౽��Q�v��=p��b=��g>B*�L>�fƻ������.0K���: .��1=����� ��Y4���� ���*>�⻖Y�v�B��[�=�>�@C>[��=�}Ժl��澽r�����=\�;�[=�B>=���eG��d.=���<d�<=�<.�K�9��m�<�i���C=(��Zr���Q>9�m;�^ͺ�����=��k�w�=��)����3�Z>�?=>l{ǽO��rWj���C=�g={�[=h�=����t0 >�7�� ��<�rM=W}J;V��='��M�S>㭜=�Ƚ��U���S�E\-=Ի����<�1�;6P�=���<~;=��=��T�>�̽�M<�� � %8=5 A�t�����=�Y^��X?��H�6Jн;��=�~B�(~:�1K9���=�d�=˨1�3+�<w͇�����5�<�U�:��>���=_�==_��=�O�c�N<*OR�� �Д�<�󆼙3�=�{��!�<�����=�ϼ���B��=-�����D�X��<؎<�}�:=mm��� >��=d��=�MN=%�>:#���>�ы=v̤��=�
�=�UW�q�W8x�">A�����ֽ�]�;�n��A�=��k=��+=���4��<��(>4Ze��*>k���RKi���X�C誽 �<���=Z�����Ù��fe=1���\ <�)s=��Q>rgV�T:">�S���Y���:m�d=�ꓻO�ѽ�I9�Q߼��e=:
���>>E���n�=�J�� =�!�;ǽV�D��~k�=t���&��3f�;P�x~Y<+N0���=����/�:����`=�0��_=ӟ:>y�-���伟�-p<rUf=����z�<U�e=�����:>�a��mH����@>?�tD���;л[�� >|��N�=�]��s=8B�����y�ʽ��ֽ��]��:>�f�=��d=��ļȾ�<mB=W����=�_�;HK���>������x=��׽ �����c�=w=�ի�����x<���Mj5��`>(p�َN�b>�=k:]<�CO=wռ�\��{<_�ݼ6e��*�:z>b�S>���=����5��xMG=�s��rm >$A=����,�o�$I>�xa=��;|Y>�˜���I=��'��Q<��Ꮍ�U;=ĕ����T�d�<Wͅ�~��<���:zRW�V�F�$R>���=X*��F��=�C轼}���҇=boT>o��<���=i�6�$��� i�ӓ�� bֽ������R�>x��e��@ܽ���;OF �I��;
��<tF�@������4,���i��<\�8�����G��=dd�=�2����-�8<>X�=͔����.�hY�=�A����=��=3 ޽'J�=�N��k�=�Խ�$��>w��v���ٿ<�*ͽŞY>ǣ<�{)@=s�ػ#��<��>+,>�=S�7E>�����{��,ڍ=�+ >��$�Ý`>a=�����Ig��̲=+{,;� �<���4������lG�V 2>|a>~�ͼ�U=C��;`Y�<T��=����<�<H�F���ɽ�N�>;��2���dw��eFH=L >�E�=��ڽǑ�A��=�&��d�=�0���C�<2�=Y�=uHG�\���=v��
��;=��=����S�����p�ٽ X���͘��o˽R��=G�=e�=:p�S��0�=;�\�AҼ𓁾������׽��(�M%��y�=��>>eA�#���=�q�</A�=��O=M���@ڗ��M2��><�g=M���0�Ը�=���h}G=h��=���=��O�[�=O;>�B>�s��=�X׻t�9�� >2��[u`��K=O4�<�HȽHc��3>? ���;>�bg=� �H|ԽM�>y�=?���k�i��k=������<�k��S4=�׽Z�p=�����=���=Y�<;�-=CJ�G��=X��=��=�r=C:n=�½K�=��<�䊽AR,<ꕅ����<�p�=� �=�C�<��>z��;yp?���_߽��Z�*�ҽ�+>��e��:|D�=:�k�%k=�xx�!-V�d���>��K>�<R�����ǭ�>y�=�^,���E=�tE��6}��#"��l�����;����R==��˽��x�#��k�s� =�/;~�<Pv>!BS�,�<UsQ<����>s(���n{�;p=�]\�<ݏ=�G.�kP
�h�=�l†��[/=DO����#�6-�=�����&=�P->5]6=��>�g����Z�K‰=-Z�Wؐ��¨<d��=j��=�p��4rJ�Ä=����U[�c��=1[�=߭�=�p��U
/�A�>�e@>�$�=����~ >�5�=R�l����<1>���������4�� �=g���D� ��K=�Z{����<
�=�`>�w�=H�P>��ս]�)�i �i�v>��R<�������1Q��d� ��+��;Y���@�?�QJҼ�=@= 2���u��
�7=X��=hޣ�y���r�H=�>��ٽ�]ּc����pɼ)񄼤�9�w\f=�ঽ���=��}=�J�=�L�=�!��3V��`
�D���P@�=dK���#�*ܾ��
��Y��:��c����=�ǽ<�6 >e;�=ER����=�O >��`�>�c:��2������ل��"��Q���I�&��<�=�5�I��� <�: ��:�=e�>���=Y ����0>�>�">�5=���=���=!�=�x�7��;�u����.$�x�?�Q�+�uV�=�1���p�;��N��k<�Ep��D���?,�*�Z�/$�W=�ۼ���D���K���V����=�.�}9���3����A���z=ξ���һ��<�܊=TL,�{*=�}��5H?=P�W��a�=gf��QI��9(o=�e=���=���6�S)=�����O�Q萾�E ��8e�f0�;BF)��Y����<�,>1� ���S=��a!�<�v��]̺���<�^�=��������= G�=C�Q��O�H���L���(��n{�,׼$ 8<�Z;�,�=d��=�h=t��=�$>��L�Ps���x��!мϱ=Oi">kT�<���c[ >,tD�/�* z>T����Y����>�0�<���� =�����O�=�
$�je+=B��`�߽�
�=�4<pj>�ؒ�R>4���f$�\�:�� n��Z>y�J�Π1=}��<��=�?@>8Ѕ=�Y>���;�8�;$d<���뻵Tx="o��{�<�U��%W3==A�;�V[�������=���jh ��~>��>��=��(� �=�ս��Ͻ�6�<���=���=��e>
Tp;�'�t��=�nB�P�K=O;J��e��������{�6;[%0� U�<}׽5�3h��J�<�i�������<�Խ�V>;�z�#<D�ケ��9�9���=��/>�[�� ��<�=�Y>�p���3����Ǎ�=ds5<|e$>��=�p.>6���?��6��)}���ͼ���;�m��A"��ZȽ��>�9l>�ɐ�� ��}4&=�y��j��(�;�ܽm�R�������ɼtK`=���;��=��n��޼���=7���h��=_���i���j�@��=���=8����н���:w4)>��<����񛲽ϨX���;������!���ս0��<aM��e������ߥI>��=O9g=�����G���N<����w=�]��� =|��"�
����=8�:=~���8�b�$x ��� =��;1>��L�������<���������o~=����x�꽚 �=p0�;�޶�W7=aC���>#i˼�D9�!�%���Z��jo��x��r!�<�pȼg���W_R>��/�‚���]���\=�� �)�)>P��=6뫼-���L��=*ջ���>�Q���\=U�!�X�:=D΃�� ���׮;!��GÂ�{��ig�=�(�=�->ip���9�:AӶ<���=�Ѷ<�#۽�uo=p��#h��m�=�g>� >$(i=l$� <<��>=�@Z=0�H>�ޠ�uϩ=�A����R=�����B=�!ǻK���)kT=t���<A8#>D#ٽz���â<�|�=�fV�,�׽�뽼��=�O�=}��=�P�<�T�<�>(ޡ��у=ԃ�k����h��;�
��_H=��S><��=+N��;�|=A���P>ȁ`>��x=������Z=�aG��/^��I7����c��=��=�n�=/�<b��=<7;������W=ި:�u��=,%=d�=<!<X풻��`�e���%}�<~�P=��<=��m��q�=P���ZR�9څ�V��p?�<C��< Q��)=�D�(���g�<�?��D_>�T�=������������wI>JA��s�=0!=���F>�ϛ�5;�>63� 1�=�;�=��r<�ˇ=/f�=X��L���cd�����ڹ��S�>�%�=oE=�S�� ��=����WL���`���1=�e>eYC������>��8�s��}��<;_>>x�<�3e� {�����=nIh=��̽����2�=��i=����q���S�=ķ��ʉ=��
"���=q 2����=p��x���T,�=��J=�|C�$Lj�X��4��=ϔ{�����%��=�>�=G�+=�c�=��\>r����X���U�^���TZ=�l�=*��*����-��tg����=�4y��)½���<���z���*V��WK>��M>�d��e�=�F���Uj��c��1eB�`9i=�����p�촞<"��=���J�= �'��׶;GFP=���Za=�ʜ�,߆�ZG<x��������<KQ���b <��I<(�2=/�V��m�=F^&��o�>���=wG���g?>x^޽"��=E*�=9%�\��<�X>����bI=<�=穽��!�k��=t=���W =�"C=��ݼ��=��=�)�;��@>�* �o]J=(�Q�Q��=5�>��׽��������;�� E=�"x= ZQ=�j��� =PRj>�`�=Gn����T����.�RՋ�z܃��-9U��=O�~���ۻ�*�{�꼟v�=�OJ=� �<Ŋ%=���<�*���4<L��=-�ĻP��"b>�1�=7.�Fl>k۔<�(����=��ּN��;wI��?�J�����>x߫=3�
��hs=S��=��"�6���ŭѽ�\�>�ӭ<S��=�2(;�P�<�6=>���=��ֽ�S��P9=�����f�=�!��4d*�O� =KI�=N�-�>L�K4O>Iq="�A�����=D�=5�=� =�<�= 0=���=���n�ʼ�����*��S��i���. >��T����<�ŋ=�Z=2"���ז���^���ȼ$>��㺮�=��>l��=������� н���0 >-�\��==��<�&X�����a�JHн�IG;$sh�����e�<[꽭x��y��=�j�=�Z}���S�?yA��u=�����=�ż���=t� >����=a����>G����~ݼ��ؽ�au���=S[/=��v>���<��+��wE�>�A��t�;�T���|�<��ܽX�8=s�w<�s�=� ��f`���Z~:ܵP�h8Խ;n��asV��1"������ѽd;��k6>�>[�!��+C��ㇼ��g�o)�7 ���<�E���ؽ�g��=���:�,�>Bcӽ�c="��=�����?�=�ta�?��=:����C)��l�z���0iL=4�����<��">���=5dL=T�����=���=�s%����ɽh�>��:�]���P��=/�c�G���L�人�8�p�=��2���>���
I뽣ݽ% =)��;z;1<��U�j��;`���g�gI}<һ��� �=���:}€�Nu�=<����I�ܙͽ��3=@�y<>F>=sﰼg��=܈˼I�=�r�=��$�F�=�5=�~=�~�=(����r�<�?ݼ{Y��*�>���=�н���7�ӽZ;���f[$�畽��K�=ƽνӉ�<F����[�= ��*����Ҽa�*����;r=sK����H<�a��i��N�H�o��<vz���:���~�ط�f~�<�������==��=?��=�><�Q�<�ܩ� ��@ސ;Y+�=������Z��������wL!�l���R���%0<�y�<�gٽ�3��HP�=�r<=��n=̿�S/����\�"E��[�����������sp>����l�<��=:�:�w�;������=?5����ڽ���=^�=螂=�T=�3ܽ�0&�z�"����������=YL>�vK=Q��|��=��U=/��l�� ��c���W�>L��=*u&=q�+>Oּ�%��YSr=� ���ͽ1�%���&��y����=C�ʮ彝��;Md>m�d>���=�x+�x��F�;���u�A�OL���F��/N>LH>�=�<8�ŽV��=��9��I�:m�+��b�<�!�=�A��)d���ݽǜ?>��;�g>�}���d=��=�k ��g� ~��2>�:�$q���=�d� <�T��u��nw�4i��)�>r]
��� ����M3ν�">�hj�>-<�(���&&�8i�~7���U��A���ur<���<'�=���=�E�� ���>�p];��F=x� >��v=Dx��f�b����<�3Ὡ�;k��<��<�L<>@>?=t�"��;�.�9�<b޽�p.�s���6���3���sg��,
�(���=f�ս"X�xMY�U�/���$=��O>t]�=P�^< �=�Aս *=L�+��B�:� ���;9��;�B���ս͢�=JdC��$���R^<�q��@�~8=Ws�<8"ջu� =v0��'g<[y��Z �O��˄������G����P ��K��])�k��=8˷���V��P >���,�=a7M=°a=�p�=%����I=�YZ���J���ټ��&��Uh��ھ����=0P�=��Z���k=��+>w��=&�<.�I��>�������8��<;_c>��<�k��H� ��[����=f�5=� >p`;� >�˦=-�Q�g�K����=ڧ���={�˽���{=y����*��C�#=L�=���;�=���+�`�>d8=a�ֽ,!�<1��ٶh�'�x=B�:Cl:��ؽ_�,>d��:������=��M>�٣=p��=�T����`>��� B �\�ǽ��d���Y�ٽp���D�G�':�<%|Q<��e�����=����<!���uQ�I > ���c��� ��b/=
��;!oؼ�=�Y�9�<�+ �y8<���%ą<�i�=��E��2��UM�=p�x��{�t���/�=\�4���%�����K�=��h���<���ZR��_��u�h���&>H�u=~�Q<].��[>�b���Ǽ*�X�`���ۻ�x|��ot>�$��5<)`���OI=S��=� E>��9���<LP��ɵ�=2�=���<��=}8)���f���<>�0��y��6��=�L���<�+�;smr<lO����O=��a<PQ�=�IL<�赽��I=Z;��2�u=;��=qɽ@ջǂ�=6��=�����E����c>=����I4�=#D>�7%=53���� ��@�=ù�=�Up�<0�����|v!���0<_�=�T��#��<�f��� ����=2�k�W�=��(�ʽM�/=%9c�F���m���m��`>p܆��k�=�k��|A��)>~��;���� ��o'>��#g"��a��K ��W��� ����� �}�@�J;g>��r0=n����p=(y��L'/�E}���E�=t‘<��>�x�=�����׼��/��z�=���<�['�A4� ��=�ܠ=rO>� �=�*�BȜ��I��m�7�x�'�~k=RP<<�Nw:x3�=+��<�����g[=Z�">42>�Y>�C4=?�����I<�g������=��1=K�9��ū�5���6Q��s�<E�2>���G;�=���;�:=) ����;#�9�X��jĽ#Ÿ<�M=<�[�=����"_���)=P+����=�@��]�բ�~h�=������ؼ ��� >f��=� �=�(�=�������i'ѺR�=��m<R)�o��=�`н��~�*h�����=�0^���2�d�;��%>�`�=���<o���@�K>؈���<<��Ƚ�<{�3��a�]½�}��^9�:�<&t-=�>�="��=0�w��Q�<#�=F���TL(�6�Ͻ
�� ��=�����=�F��`"$>���= � ��>z��=}��=�y����I��l>�W =��^=1�p�gm=�Xc�$�/:[=vCY=�m���)�U�p= �`<<8���is=TR2>����v^=^�ǼA$��\�Ľ��V=���=�[Ҽ�2>s�>s^>k�ս��k=��ؼf�;2Iʽkb;>8ª�!����b<�򽽸�<L]<g ���U���<���� ���<+� >~�>;"8��3����<i3�=YB��� �X�=�g�;��<����!?=�&��X�ѻ/.Ҽ3�>@:��u>ن�lP�'�p=�W1>�T9�N�8=�Dɽ�R!�!�ּG�X�˯�=To|��|��R��6� >Ѐ�=���=���=u9���N�N�P�]��=Q[�����ٮ���˽���=5#==� ׼���Bˌ��&�.׹Ct=��;�>�<B>�`J�WN�=�&�;F�+��˽�<�
T �F��/f=V�Ѽ�H/��՗�������G����o�=�cR�Z��(��j���7U۽4��Ⲁ=~;�=�&:>D��=gP!=妳����=�����<>rﺻ�\ҽ�E�I�>�L��D=jD=:޼9�O=δ�=���=��+�U n�~-�=���:>����=�v�����?~�=[�ٽ�����@l�Q��<�c��b��]�.>��=�m�=i|=C��;��=� м� �<s����h<+�]<!8G�l�$=L�w�}�=����p��^|+����p�q��rR� PϽ)���б����
>�I��Y��=�@��D�<�
s��:�<���=��H=�N=)�=e��l���E�=j�2>NU��K���#X=���=���<�<y;�{��G���j7�t�?�>r: ��9�<d��=���=��;�� �.�=�J�;.���ZM ��$���-�<1lP�.=����u���y�;!F��ϗ�3E��r��ő�=!X>p|�ԛ��`z=]�+=�;��^.��l>���5��<@��=qo轕��=)L�Z��q��R=���<��Ƚ��������0�=����K�h��;��*��ؽ���;�`0=�ȼ��u_=4�/��y���k@<}�=���,�=���c��<��׺���=R�
����=���=�3g�<7�;�ի=��<+�ܻ�jػ�$����<�01��6�H��=�J��84<��)����=߯-=j�V=l�5>! z���¼�U�9��8<�y=�R�=&V�=A�=ҝ��Q��Z��2z=>1�;�$�}�����=��?=�c��3I�=D�����Ƚ���=8}�����e�7>ރ�pƼ�#�=�!�cj�����<_����$>����?4ڼW���<VԼ���=���j�=����V�<�����]�.{׼��=�@?=��3��-T�fN=��d=�fy�!׍=�P�<�e=}�ٽ���=� ����;���$�<.�J=SÄ�3�ȼ[Aq=� ۻ��">�R��f���U�<TH�=&ٽ�.���=�?�>g���E�=����LJJ�U'O<3Nt��Z�<ߥh=2ڥ�L�7vF=f�ؼ�6��*�=�)����<�ҭ<�S#��i��
��pG7<�8�;� T=p{>Qs�=���<m9�FM�=I��;<�6>���=�>�S�=?3=�8�;>e���c<�=3�����W>|�ν*��Qj����=� =��>�U&�A�*��Q=��=��h>��=�^a�^�<�n��<l>^�ڼIx'��C6�iw���q�maZ��ؓ�8q�=yJ=?y�>��>�)�D�� �4>�c���5�=Q�=&�<>�ԽX9S=��">�"2<QB�:Ӳ��v�= �<�y���`�;���=��>��0�<�VZ�< �ս���=;C3�8��=�U�=1�<���<�4>s�_����<*Q0=�������e���C�6�F쭽�3?��T��ϥ9�z=�;)>�hg=N>�oȻq�<����)�=���%�V= oý�Ȕ=16�;�*>���D$�<��=���<���xG�<���=B���+_��FE�(⃽�7�=hEJ���<��=_:�<,蝼��B>�>�����G�<=�λ���2:+>Q{P��u��c=��=t�ۼTؑ�����
�>�Z�;N����a�=�3o���<��=eԁ�ͣ�=6MV�V>���ҽ��̽�w =�]G�:���WS�=��N=��E=-�~��=gz�5�c�H> �T�Žei����>c���N3���R<�Z���2˽^��� o����=gS�=�@ƽ}2"=dP<w��=�5=�3�˽���;�H>�!�;>'�">Oh���<>9��0Q|>==�~��LT�>�G(=������=�_S=�<<V�>���*k��RC��*ϼ=�u��IA�=Z
��w�=�ؽ�h�<�E�=>Aּ�=�� L�>���#�>��=�M|���v�=�K�<�w>
O=�M9>vL4� �ӽN+^=�\ܽH��=if��1��=v�> vD�D�R=O��=���=~�Q<*�>ĩ��7PB��ؖ�\>�uw7=tӢ=�>�=�o����T��盼�� >�
>���=���Gܯ��">��<�=m A������R=(�p=�Ku=�ҽE�C�g��=j6��ͨ ��<%.���S8� ���3>H�V=�1�=W�>�U�<tC��zW=���=�E�4�v��y+齒}���d�9����<F'J<�;�>�x*���)=��>������=_��vv��Ǩ=�dj���1�V����#��ܼ����h�K=�i�=���=�^ƽ�g >�"n=�j�5��<h��Ľ!=�>0<���X=���쒌�VV�|WD��HD�RET=�m̽/��=�؏���� �=W�����>�3̼��>�w��
(>L��=�=����=�D�����=�@��g�l�6^+��s�=3,�=BZ�u���F��V�=yn�=ٛ�;��A��9��_K�=���=�6=C8�;�O��Cs7��*�=1� ��I�>�����<���=�� =����)q��Y��=�
?�)��<r�U��S���S <%���L�<jOS=0��=�����Ț<UV�<l%�=��M=Ò�=���^T�< $%�uK����]���¼|#ý3O,��W>� �=�`�rk��[���z�߽����?�F=�W0�����'���v�;�;�ы��J;I�=�\Ƚ�Ľ�ѿ�`Sd���>��H��]=��a=Zl���XѽՔ�=���<�� H�>Q��<�2=���=lfc<����L����>��Gz�=�I< �}=,G]�?� =}O�;t�R=�5�=�㚻���=]S:=��<�l�<~(�<��ּ��Z����=��=:� �� �=f:���]=��t�� ���7A=�~�,.=>��1�� K��3�=�d�RYc=S4ֽ�Qs�q�����R�x���u�=M�����=���#)�=�ƽ��N<�P���ӆ=�SJ����9�A��6<ɳ>Il�=���;�1�=��.��$=���=�[���,߽Ľ�\w�f�: > �<��<<�7�����H��=2X�=�uQ�y��=F��=�`�=�<T=4v���=ѺH�᛼�ߊ���$��:w=�ub=���<�n���=�\�������>N@�2����۽=h�����g��=�E���d�j��=r�=�h����/�ȑ%�=�,�� ��_��d��=CQ�=���R�e`=��>]&�`�ɽ���=�'-�A����R��T�潩'��;��<
�V=�B =��=����S�=������ 4缶`�A�=#��<�����==C��=tx�<=|�U�<�� =C]�=�{��@b�<ql�w�k=����Y��<����q����g>L�<>\��<r�罶�=G�����=����<9^=�W���c>~ �=g�(�ru�=U�������3�ǽ�� �YL��S�<=#�;=�„<���u9{����b6�=#7�lWd���k���1�սR�q�=�D��&�b>�żY�k��5>��>�����7 >�;��)�.��I�<�vx�ht�j�!�E\�=����H����#�
=ꚧ�z�ʽ����&����TQ=y�ӽ��4>1%7�/t�b*�<���<�.��cӻ����=�P�Z�C��ݼM���s߃�t>��J��(��p�>�� =�p?>�7<���<`}�l�s+Z<_�=J�G���#>^���'ýb�n�潛r��b]�D��=c�/����A=�=߿��3�=FMO� ����59��T�=X�f��J�=w��=P
L�>��=�tz:4j>n�<�=`���:�=���=g�=�;.=鎮=G,��f)}=��T��� =��=�i�=��=p5�쐪=�����'�=�>�=���=x�ֽ�fD=���<��?>�1�<h�G�1���tc��J=���Iq�5d�t̽�̀���q��O�=r͆="%��a%�D��!ϽE��=�,ǽ�u�<�d<�<��O�=A5<D��>m�
=��u�x,1���׼����`��=��=��
=�+Ͻ�x���8�=��X;x�~=J>M<M���C>=��<��>�ͼሽWC����*�1ƽ�Nt�����2����=��-շ�4�����=c@=�|�=9��=���=�o�� �=���<��"�efb�b&=ϳ�=�6>�����1:3ɬ�lI����<�Ǔ�8$ս��`���Z=L����������
�џ�<сg�l;@>�>xq<N{��� �=�x���9>��"=j-�=��=�g�<{];���>^��=�U<�|;�缳�`�R>���Z8�i��:w$>f���"%����ؽ#X��*��;!8=�k����<�= [�A��=.��= غ<ب���3�=��(<��ͽġ[;� 9�M{=4:��ȍ=D_%>.�5�oVĽVtk=�"�;���)J=<:|=bL��O�='N�=�F�<��Y=Uk��_g�=_ 4���<q��=�.��z�����F=;[�=��;��p>�ut�˪!�od���a{=�t :2EݼI6N��4=�����[��K�E�<=��.>�O<��[=,���!�C=��`���ٽ?��T�.>0 �1KY> �N�7!Y=L#*<<G�=���<$�>��;�����w=��<]��� �;��C=��l=�8f=Y�ͽ�7�<�`�=x0ʽ ���ҡ�> D�i'���==��!�q�b��?��M���J�<��;O�=b$��Pf����l=O�/=�2���#=��R����Y����ڽ{��:[;�QW��)>3���R=�7=�$˽2F���%��[+������ξ�缟\6>1��WN#=��;8 �)�<(�����[%�;V����8�@�=��] ��-��[���M� ���h=���02�@Ԉ�>�O�S��������]�n�Q=��=B���G�m�w�<gI >@�<��>��1�{�]��5��<�d :=��<R<Ϫ�=H�弎�缫������E�=���=u_+=W:�=�e�r�����=�t=>ܬ�������E=��=�H��N���SE<3c��3�=z��TH>�k�<Sy��_=66������� ��Ĵ<?�=�d��8ը=lg�=,'Ӽ�s�=_��=�c��$��<(7\��">�I˼RƎ=N!R=Hs�;�9���p�ا�m���S�=^j:t�ͽ�K��<L�P�#>U���$4ռ������|;�y�e��=���\��<�t�=�Z{;@Q{�X�=�����=VS=�!�=��2>�Ei>�X��4> L^�¯��Y���R�<�[�=�x*>|���q6r>ǔU=�����(=L��G�=�k��ɭy�Q�{=O��<����Y�8��=�jB=�X����&<~�`���<̧>dܽHɽ#���ĕ߻�\��u<���t�=��ӽ���=�2��,v=��5<״T����= D�=䦲=l�c=�4�;Ay=��=�L���i�K&ݽ3���ލ�i>�6�N�����ʽ�����XF=�yϽP��p9=���
�����=*W��1�<�i��ƅ��#K<x��=i��pz��`;����/�x� 8d��=w��=���T$�'�b����=�^�=І>�H��Kr��K9�H�&��Ԃ>j⨽f�9=)����߽J�ս��k=n>�=��<�[�<~�)����(d�_���o'�=m4��L���.D�`*�<���<ݡ�=���gdg=e+�� <=^�.=9���K޼�A>kH�r>�=�3⽣�����=w�=�+�=��3=���aݼ�3�=� _������Oż�������L"�<Ț������xh=���k���K/��X�~��k?>�_>/ "<$���u�=(��<�n=���<���=���'v=6���P�=8����o<�¡=�9=.3����#>>�a�=��%>�8��l� >����)Fͽj<��м!9\�'��T ������T/=,���2��N�<sS>F-�����=� f<�L ���+>���;j�W�}��=P:�=��ܽ&��4]����<,�a�ɢ��� +��> �=>�<�b�=Ŷ�H�Z�ƈۻ��">'��V��=��>�@�p_�=��=?�=]ҙ����=tI�� ��< s�$Y���*�=���2ъ��x�=��=���=��<���<�=��P=�]�=J��O���=�K޽;샽�m=���=�.�=!m�=&\���=
�)��,�����<�m={ڨ�3k����l�/.�=J�_<:� F�jפ��)�<ng>�i<>�@b=e}�=��=s���.��}�1=���Q=�m@=�f���>�� #�=��=M�9=.1�<�Й=�=(R�<do=�Ā=Y���Ά �JX�=��0_�<v�>��k�K���4�ʼ��ܽ�__=JE���{½&\2�(��<�$>� >���=��{��� ���G���=bh�=�>�ե@<���� *�ņn=i�1�%bf=����LL?��Y��O��n�>�\=�I4�/��=�Q�"���ǼfK=��T3=6ý��/����=!����2��84�S꽖���.�<;�����=�Q��iӼ;�
�����������@:��ѽ�=&‽@M,�d ��j����=��=�=Ol=I!���0켃$����^�=��=�Ҡ��YF��j)�G�#;��_<Y>���=����3*G�����8������=c1<\S
�sK'>�b>`}3=�qU=�7���ּhm=: ���@!�������=L�����<���=�
>H?>��-�� �=�:i�H> �:��l�<�޸<;h ���4:�W.��) =-��=5�< �����R=9�X= >u[�K�!�i�<tc=�<=L罡��=T�h����=�EҽhZ���l=o�q��e<��� ��=���=O]��.Z�������0<X9s<�� ���s=��G<S<]=�,G� �f�V
���:�<.��wr=RǨ�Rr=��T=���<,Մ= ڦ�a�r=�h~�+�� ��<F�۽Ƴ����� v�=�&e��-]<���;iў��|���e�����<ܢ�<�T���+=Mx=W�R<� �0���A�>��-�t�A��ʽh�\��@<C�Q<��=/���jiW��د=��W<T��<Ƴ���=�V/<r��=m�0>�&���=�)��P'{�h71=����������D>��*�����!��<����#U��
�=SϽ"ʽU��=_Z=�O�=�;�<H��20F>bi����z�M3�=�;�=��=�ѽ�J�=^�Q=->�=��W�?�:=P�F�=r�Fd)>��<�pd�ˆ)���ѽ�7�=�?=���;B�\;�t���GF��*�<wܻ��&=��2�u���sA߽D�+�řp=k��<�A����x�L�}=�L=�k���q<�f������}��<]ԡ=+f=psN����<4�=]g=��|���=X�G>֊0=��g��������K=.ݡ�-��a�׻*ǃ=�ё�6=HLx=�%��:MF����<[��<d6"<"�+�AN�ո =���f��=�=>��K�Ļ�>k��ɡs=�I[�9��=�ON�U�$���)>�ۂ>1�1= q3�>(>0+�=����?�-=��=Y���H
�j�=��ܽف�=ݒ������=��
=i�����y�=>9���M
>&��=4�.=��<�,�<�̳�˜
>�� >fN.�4Ê���f���>��e>��ӽ�ٷ���(<�@l�����-�=�=>��L��CH�b���6��=�l":{�,��4�J���|�2�P��z �=���=(�$<@ap����<?n=\9��� 7>Eb�=���<���<,�4=i\'���� �<꫰��*�n�/<Tȥ<
E>������ý*�s�L����t=Ҳ:>�z��{s/=���= M)��i�<�U{;�qɻ~��Ᏹ="�==�8���q�<*�4>�4>��&>L��=� '�5�>��>��a=�� ������=𽸽*.��aP�!��=:�ۼ{"w=�3�c���]*��6)=7ӽ����fN�)��������G�n��<��F��j�=+��۔_=� �<`�=�=����h���o�9��ó�4f=��=ncm�t)�=@۹���Q�AX��L�}��B��ڝ4�r��<Ŝ&<3O\�����K����=? r�<���'��"�p<�B�����f�xV���M����=�(���Q(�]O,<���=�3�=�,��~��=�y��OR<Q&�<�@�=�s=S�{�������ĽN_�=�`1�/��<������$����=Ffo=���=�*H����!�;���=4�g=��ƽ6s=Hxt�9>�V>&l>���;o���?�=�
�^}���X��9O� 7j;V�A>�"�=��=&G�=��ɽ�P�=�d�=�^��G��_
�����z >���=����uW<�wh�#f��ʿټ�,��Q"���g=���>:��[�=�4�=�:=I̥��?���!�;�}$������7��{�l��='Cּ9����
>^��=@� ><D >P!w��6����.��7=�1սU9��5���)}=P�#>7q�=�� >$����a<
�)��Wݽ?�2=��R=�d��󁽌��� M�;=��=�y=>4&��
A=b��=�����L�#>�򡼮X:�U:=˓����yw0����;���=�h��� ��<�U��<k*<= ��Lw��
M��cĽݻ&���+�:'�Q>�����
��HȻ�:���w"���!�3/�=�-�=����`��+5��,�Ǽ��>U�K=�_�֙Z��ၼ1&Z>~%��m��������s��{AӽXc�����1@��D*�=�y���Y�<��;���:��d=���;�D���x�"A� �齹���j*�V��=Ё]=dͽ�:�<o�t=�k^�����*Oؽ�<ӽw�ὖ�ʽ��漼1�=0>��T�$-�����=��;>Z�=��ؼ�;<>���$Tp=р>+P]=�ů=��<Y\̼j0*>��=��x� u���=I^�;9.=�炻��=���=�3=�~f�>Lͼ����?��=�<�=w/>fS>��<K��hX%=���=�J�!�=�齛5=o�.�-�T�V�E�c�!>bC������9(=�KR��*�=+���G ���M=�$�d�=R�M<��o=�5㽷�<&���i��������>�/�] �<�4J�[�νO�J����>#꽖�5>G��;�=� �
�=B��=�;��D( =���>�5�=B���q���ly >Z��=I؆�r'<�����DR��-���S9���$=���=г������dP�Pb��<�[ݼ�\��+��<���ܼU��='�� �4��䋽_�3� �="�<��0�<��0��y�t��=L�p=���=�����8�ʩ.�!�3��;��>�N��� �Z��=�o
�EΈ�[��=[Ն�Sh�=l�;=Vy���^��ő�ԥ����z=/~����=�ʽ`P����<�?��`ʸ=U�:=��=��'���������6>��5�}M#>b)='q�=� �=� ;#��:��=���<2 ���|�����<i���7��(�=ҥ�=� �(P��<��5>7��O�g=߿'��wռ8�5=�C>jkd=��=&�m���=f*�<�\p=
��=�Ys<t��p���}���E�=�4�=̯:=�>��P�ý��=]� =6<�=~Ƽ=A�����=>*���f%���?`=�/'��{�=@���>��Q�lѽ�����7�X��:I�
>9�4�e�0<�Z=��>p����`����Z<N\�=Cp�<���=���r��=����>�n�=hނ�k�$���=c �=�2��ke=�
a����>y}μ�9`�H| =�8���5��j>�cv=ȵ�L�=M�)�������7���#�5���{۽f遽�޼�c�<(�\= g�=����H��;Bzm<�P'�+��<�����=' �ŝ�����#�>P ���1������,_�{��<)�n��qn���=sD�=*��g�=^o=N����G�l�� � �n�<�5�=c�=>��j�F);<u�->ә��-%=vV����:�z)�&�<׋=MĿ=n�߽7� �4�罹ኼ�K�Q��;�H,������[C<r��"b�=�h��Z�W�&�}�$�1>��ֽ��<�x�S� <��/>,���<B����<W�=�6>�_�=�س���=�\�;փ<=-�<0�=��=�E��4`�=S��f����|=�Gҽ�����@,=^o��{��>��d�v��꠼<�k�<�
�[=�0�*�.=�ڥ�;+>e�t���=�f�=��<�(����n<�P�sQ};ʂ�<ڥ��O �=�!����=�u��u9=m�ͽ9�߽�
=��8� �>O=�G������-��\uѽ�v�=�_>�>��7�5>Z�0��'�Q,��^^I�
��<��8�@�m�{' ��Ě���}<&�/���<���=v�&�.��|v����� ����=��z��E�=�=��E>i�+�� G�����t:⽸���>�6=�庽��7�@Ž=�;� ?�����]C>��u�\%;��5�=�؛�V�v�m�,�$ �=�Ø�-)"���6_ ��Ᵹ�>7������Ʊ��i�/�Z���Y����^"����Ҽo�������`�:���Z Ͻ�s%�#L�泤���-���A��D��z�=;*K�ԋ/=>&�=�>=�"��� v=#��=z:ý���=��ƽzn==
0��TS��>ץ=�A%�~W�����2>��Q�?����S��Ym���<aZ=��׼B`=���=� ���[`=:JO��P�=�l9�3���;� ���99����{�ȼ`�B������=B�=���=y\ڼ�CM��q�=��<Z��<Ͱ��o�=H~-=q�O>��=0��=�4e<)�=���.�=Z��<��ýB�U�Y咽G����Zc>���<�gp����6�`��Hg<ԩ>꫺<�������++>.¢��ѝ������ <��:<�C>.�4��7���=�Z"p��>?���~��VC<=C=d���}�>1�>�um<���<!U�sd&�{�F=n� ��t9�L6�<�! �><�=Q��<_@�=��:���>]?>]�<;�� >��=�g��9����S*�� �1�� G�1�޻˄�=��|�N�=C�<31>Kϼ��L<���<�6=��X;�Av���Xz�=S����jS=��"���<�'q��o'���=U`�=���='v�<ȁ�=}�
�B�U>W >�=FA��׼=M����� 3A=8?n=����#���&_�;�E�fSJ=�_>�n=>}��2���E9F=���=��6>���=���=��B�!�����"=5����S�����bX<��}�����=Q�L��B�9!)=㤼��=>�|9���<ߦ������~7U�d���� 2=���<�}<����$>�ǎ�t��;]#���?�=ܕ�=��ֺ��;���=��3����=%L�=Q)>����Rc)=�ǥ��>�>U�;e��Ө&�'^>~m� 0>s�t=��=;|*=�}�� q"��I�ͣ�8"+�ܖ�=��0<���=�����/����xFT�U֭���H��*E=�=5���=9͘�� >�T�Ms�<���=V�">d�=�G�<)X��C�=ms=�F�`>|��rT=K�=:ג�
����=�Q ;�� �$�=�E���-�ω�<a]ȼ�B�=s�9��E��A�Dt߼��<z�r>�F<-Du�Y
9���)>�ϑ���>�,��zI=�����TA�[VϽnp�=oz��.d<�k�=6���I�<A��=����^�<ѫ��,��.�J�>fֽGb��*�B1network_body.processors.0.normalizer.running_meanJl�S=;f4@O�<�P=Ƿ6@�.�<[ M=�9@Л<�PH=BB;@� �<!�B=^u=@֤<r�;=̖?@y8�<b 4=��A@�4�<,�*=ĊC@�¶<g� =rLE@���<*�B1network_body.processors.1.normalizer.running_meanJH�Nh:Y�9��:�ͦ9��:�<�9���:�o�9=:�:��9�;���9�k;,��9�Z%;� �9�:7;��9*Bversion_numberJ@Z)
vector_observation

batch
-b
version_number

b
memory_size

b)
continuous_actions

batch
b,
continuous_action_output_shape

b
action

batch
b#
is_continuous_control

b!
action_output_shape

B

15
Project/Assets/ML-Agents/Examples/3DBall/TFModels/3DBallHard.onnx.meta


fileFormatVersion: 2
guid: d179c44c147aa4ffbbb725f009eca3b8
ScriptedImporter:
fileIDToRecycleName:
11400000: main obj
11400002: model data
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:
script: {fileID: 11500000, guid: 683b6cb6d0a474744822c888b46772c9, type: 3}
optimizeModel: 1
forceArbitraryBatchSize: 1
treatErrorsAsWarnings: 0
importMode: 1

58
docs/Training-Plugins.md


# Customizing Training via Plugins
ML-Agents provides support for running your own python implementations of specific interfaces during the training
process. These interfaces are currently fairly limited, but will be expanded in the future.
**Note:** Plugin interfaces should currently be considered "in beta", and they may change in future releases.
## How to Write Your Own Plugin
[This video](https://www.youtube.com/watch?v=fY3Y_xPKWNA) explains the basics of creating a plugin system with setuptools; ML-Agents' plugin system is based on the same approach.
The `ml-agents-plugin-examples` directory contains a reference implementation of each plugin interface, so it's a good
starting point.
### setup.py
If you don't already have a `setup.py` file for your python code, you'll need to add one. `ml-agents-plugin-examples`
has a [minimal example](../ml-agents-plugin-examples/setup.py) of this.
In the call to `setup()`, you'll need to add an entry to the `entry_points` dictionary for each plugin interface that you implement. The form of each entry is `{entry point name}={plugin module}:{plugin function}`. For example, in `ml-agents-plugin-examples`:
```python
entry_points={
    ML_AGENTS_STATS_WRITER: [
        "example=mlagents_plugin_examples.example_stats_writer:get_example_stats_writer"
    ]
}
```
* `ML_AGENTS_STATS_WRITER` (which is a string constant, `mlagents.stats_writer`) is the name of the plugin interface.
This must be one of the provided interfaces ([see below](#plugin-interfaces)).
* `example` is the plugin implementation name. This can be anything.
* `mlagents_plugin_examples.example_stats_writer` is the plugin module. This points to the module where the
plugin registration function is defined.
* `get_example_stats_writer` is the plugin registration function. This is called when running `mlagents-learn` (a sketch of the discovery mechanism follows this list). The arguments and expected return type for this are different for each plugin interface.
### Local Installation
Once you've defined `entry_points` in your `setup.py`, you will need to run
```
pip install -e [path to your plugin code]
```
in the same python virtual environment where you have `mlagents` installed.
## Plugin Interfaces
### StatsWriter
The `StatsWriter` class receives information from the training process, such as the average Agent reward in each summary period. By default, we log this information to the console and write it to [TensorBoard](Using-Tensorboard.md).
#### Interface
The `StatsWriter.write_stats()` method must be implemented in any derived class. It takes a `category` parameter, which is typically the behavior name of the Agents being trained, and a dictionary of `StatsSummary` values with string keys.
#### Registration
The `StatsWriter` registration function takes a `RunOptions` argument and returns a list of `StatsWriter`s. An example implementation is provided in [`mlagents_plugin_examples`](../ml-agents-plugin-examples/mlagents_plugin_examples/example_stats_writer.py).

3
ml-agents-plugin-examples/README.md


# ML-Agents Plugins
See the [Plugins documentation](../docs/Training-Plugins.md) for more information.

0
ml-agents-plugin-examples/mlagents_plugin_examples/__init__.py

27
ml-agents-plugin-examples/mlagents_plugin_examples/example_stats_writer.py


from typing import Dict, List

from mlagents.trainers.settings import RunOptions
from mlagents.trainers.stats import StatsWriter, StatsSummary


class ExampleStatsWriter(StatsWriter):
    """
    Example implementation of the StatsWriter abstract class.
    This doesn't do anything interesting, just prints the stats that it gets.
    """

    def write_stats(
        self, category: str, values: Dict[str, StatsSummary], step: int
    ) -> None:
        print(f"ExampleStatsWriter category: {category} values: {values}")


def get_example_stats_writer(run_options: RunOptions) -> List[StatsWriter]:
    """
    Registration function. This is referenced in setup.py and will
    be called by mlagents-learn when it starts to determine the
    list of StatsWriters to use. It must return a list of StatsWriters.
    """
    print("Creating a new stats writer! This is so exciting!")
    return [ExampleStatsWriter()]

0
ml-agents-plugin-examples/mlagents_plugin_examples/tests/__init__.py

13
ml-agents-plugin-examples/mlagents_plugin_examples/tests/test_stats_writer_plugin.py


import pytest

from mlagents.plugins.stats_writer import register_stats_writer_plugins
from mlagents.trainers.settings import RunOptions

from mlagents_plugin_examples.example_stats_writer import ExampleStatsWriter


@pytest.mark.check_environment_trains
def test_register_stats_writers():
    # Make sure that the ExampleStatsWriter gets returned from the list of all StatsWriters
    stats_writers = register_stats_writer_plugins(RunOptions())
    assert any(isinstance(sw, ExampleStatsWriter) for sw in stats_writers)

17
ml-agents-plugin-examples/setup.py


from setuptools import setup

from mlagents.plugins import ML_AGENTS_STATS_WRITER

setup(
    name="mlagents_plugin_examples",
    version="0.0.1",
    # Example of how to add your own registration functions that will be called
    # by mlagents-learn.
    #
    # Here, the get_example_stats_writer() function in
    # mlagents_plugin_examples/example_stats_writer.py will get registered
    # with the ML_AGENTS_STATS_WRITER plugin interface.
    entry_points={
        ML_AGENTS_STATS_WRITER: [
            "example=mlagents_plugin_examples.example_stats_writer:get_example_stats_writer"
        ]
    },
)

1
ml-agents/mlagents/plugins/__init__.py


ML_AGENTS_STATS_WRITER = "mlagents.stats_writer"

Some files were not shown because too many files changed in this diff.
