Compare commits

...
This merge request contains changes that conflict with the target branch.
/.gitignore
/docs/Example-Environments.md
/docs/Readme.md
/python/unityagents/exception.py
/python/unityagents/environment.py
/python/unityagents/__init__.py
/unity-environment/Assets/ML-Agents/Examples/3DBall/Scripts/Ball3DAgent.cs
/unity-environment/Assets/ML-Agents/Examples/GridWorld/Scripts/GridAgent.cs
/unity-environment/Assets/ML-Agents/Examples/GridWorld/GridWorld.unity
/unity-environment/Assets/ML-Agents/Scripts/Academy.cs
/unity-environment/Assets/ML-Agents/Scripts/Agent.cs
/unity-environment/Assets/ML-Agents/Scripts/Communicator.cs
/unity-environment/Assets/ML-Agents/Scripts/Monitor.cs
/unity-environment/Assets/ML-Agents/Examples/Crawler
/unity-environment/Assets/ML-Agents/Examples/Crawler.meta
/unity-environment/Assets/ML-Agents/Examples/Reacher/Scripts/FlyCamera.cs.meta
/unity-environment/Assets/ML-Agents/Examples/Area/Scripts/Area.cs
/unity-environment/Assets/ML-Agents/Examples/Area/Scripts/Area.cs.meta
/unity-environment/Assets/ML-Agents/Examples/Area/Scripts/Push/PushAgent.cs.meta
/unity-environment/Assets/ML-Agents/Examples/Area/Materials/block.mat
/unity-environment/Assets/ML-Agents/Scripts/Brain.cs
/unity-environment/Assets/ML-Agents/Scripts/CoreBrainExternal.cs
/unity-environment/Assets/ML-Agents/Scripts/CoreBrainHeuristic.cs
/unity-environment/Assets/ML-Agents/Scripts/CoreBrainInternal.cs
/unity-environment/Assets/ML-Agents/Scripts/CoreBrainPlayer.cs
/unity-environment/Assets/ML-Agents/Scripts/ExternalCommunicator.cs
/unity-environment/Assets/ML-Agents/Template/Scripts/TemplateDecision.cs
/unity-environment/Assets/ML-Agents/Examples/3DBall/Prefabs/Game.prefab
/unity-environment/Assets/ML-Agents/Examples/3DBall/Scene.unity
/unity-environment/Assets/ML-Agents/Examples/Basic/Scripts/BasicDecision.cs
/unity-environment/Assets/ML-Agents/Examples/Basic/Scripts/BasicAgent.cs
/unity-environment/Assets/ML-Agents/Examples/Tennis/Materials/racketMat.physicMaterial
/unity-environment/Assets/ML-Agents/Examples/Tennis/TFModels/Tennis.bytes
/unity-environment/Assets/ML-Agents/Examples/Tennis/Scripts/TennisAcademy.cs
/unity-environment/Assets/ML-Agents/Examples/Tennis/Scripts/hitWall.cs
/unity-environment/Assets/ML-Agents/Examples/Tennis/Scripts/TennisAgent.cs
/unity-environment/Assets/ML-Agents/Examples/Tennis/Tennis.unity
/unity-environment/README.md
/python/PPO.ipynb
/python/setup.py
/python/test_unityagents.py
/python/ppo.py
/python/unityagents/brain.py
/python/ppo/trainer.py
/python/ppo/models.py
/docs/Making-a-new-Unity-Environment.md
/docs/Using-TensorFlow-Sharp-in-Unity-(Experimental).md
/docs/best-practices.md
/docs/Getting-Started-with-Balance-Ball.md
/docs/best-practices-ppo.md
/images/push.png
/images/reacher.png
/images/wall.png
/images/crawler.png
/images/curriculum.png
/images/math.png
/images/monitor.png
/images/broadcast.png
/images/curriculum_progress.png
/unity-environment/Assets/ML-Agents/Examples/Tennis/Prefabs.meta
/unity-environment/Assets/ML-Agents/Examples/Tennis/Prefabs
/unity-environment/Assets/ML-Agents/Examples/Tennis/Scripts/TennisArea.cs
/unity-environment/Assets/ML-Agents/Examples/Tennis/Scripts/TennisArea.cs.meta
/python/curricula

30 commits

Author SHA1 Message Commit date
Arthur Juliani c1ccc4e8 Add new flags to ppo best practices 7 years ago
GitHub 4b486330 Update links throughout .md files for 0.2 release 7 years ago
Arthur Juliani 6c1c8220 Python2 fix 7 years ago
Arthur Juliani 1bf46a85 Add flags for normalization and variable layers 7 years ago
Arthur Juliani b4838a0f Version 0.2 7 years ago
GitHub 5299a0fe Merge branch 'master' into development 7 years ago
Arthur Juliani ca16a880 Fix tennis collisions 7 years ago
vincentpierre 36481ff2 removed the monitor display when training 7 years ago
Arthur Juliani 827dca28 Fix typo in model vars 7 years ago
Arthur Juliani 0f332365 Layers fix 7 years ago
vincentpierre eaf0745f fix on the test script 7 years ago
Arthur Juliani 5e75f5b7 New Tennis env and model 7 years ago
GitHub 989dea4a Merge pull request #132 from Unity-Technologies/dev-logfile 7 years ago
GitHub 470693dd Update Making-a-new-Unity-Environment.md 7 years ago
GitHub 6f77ea6c Use newer example curriculum 7 years ago
vincentpierre f346ccc7 new docs organization 7 years ago
GitHub 97b2baf1 Update broadcast.md 7 years ago
GitHub c20a5fc4 Update curriculum.md 7 years ago
Arthur Juliani 4c8567fc Add curriculum image 7 years ago
GitHub e1c3dd15 Update monitor.md 7 years ago
GitHub 128a7b78 Adjust wording 7 years ago
GitHub 3d03390a Updates best ppo practices 7 years ago
GitHub f7e33e56 Additional best practicese 7 years ago
Arthur Juliani 8eed1a37 Performance improvements during training 7 years ago
Arthur Juliani 5143beb0 Fix Area environments 7 years ago
vincentpierre e1e64059 removed the drone environment 7 years ago
Arthur Juliani 9d26767d Instantiate training buffer with trainer 7 years ago
Arthur Juliani c21a391d Various bug fixed and changes 7 years ago
vincentpierre 41ab078d replaced actions with previous_actions in the BrainInfo object 7 years ago
vincentpierre 50f91f66 use logging instead of print 7 years ago
199 files changed, with 10,449 insertions and 1,264 deletions
  1. 5
      .gitignore
  2. 8
      unity-environment/ProjectSettings/TagManager.asset
  3. 31
      unity-environment/Assets/ML-Agents/Scripts/Agent.cs
  4. 36
      unity-environment/Assets/ML-Agents/Scripts/Brain.cs
  5. 13
      unity-environment/Assets/ML-Agents/Scripts/Communicator.cs
  6. 32
      unity-environment/Assets/ML-Agents/Scripts/CoreBrainExternal.cs
  7. 22
      unity-environment/Assets/ML-Agents/Scripts/CoreBrainHeuristic.cs
  8. 56
      unity-environment/Assets/ML-Agents/Scripts/CoreBrainInternal.cs
  9. 28
      unity-environment/Assets/ML-Agents/Scripts/CoreBrainPlayer.cs
  10. 129
      unity-environment/Assets/ML-Agents/Scripts/ExternalCommunicator.cs
  11. 49
      unity-environment/Assets/ML-Agents/Scripts/Academy.cs
  12. 19
      unity-environment/Assets/ML-Agents/Template/Scripts/TemplateDecision.cs
  13. 12
      unity-environment/Assets/ML-Agents/Examples/3DBall/Scripts/Ball3DAgent.cs
  14. 29
      unity-environment/Assets/ML-Agents/Examples/3DBall/Prefabs/Game.prefab
  15. 183
      unity-environment/Assets/ML-Agents/Examples/3DBall/Scene.unity
  16. 100
      unity-environment/Assets/ML-Agents/Examples/GridWorld/GridWorld.unity
  17. 1
      unity-environment/Assets/ML-Agents/Examples/GridWorld/Scripts/GridAgent.cs
  18. 21
      unity-environment/Assets/ML-Agents/Examples/Basic/Scripts/BasicDecision.cs
  19. 96
      unity-environment/Assets/ML-Agents/Examples/Basic/Scripts/BasicAgent.cs
  20. 13
      unity-environment/Assets/ML-Agents/Examples/Tennis/Materials/ballMat.physicMaterial
  21. 2
      unity-environment/Assets/ML-Agents/Examples/Tennis/Materials/racketMat.physicMaterial
  22. 256
      unity-environment/Assets/ML-Agents/Examples/Tennis/TFModels/Tennis.bytes
  23. 16
      unity-environment/Assets/ML-Agents/Examples/Tennis/Scripts/TennisAcademy.cs
  24. 34
      unity-environment/Assets/ML-Agents/Examples/Tennis/Scripts/hitWall.cs
  25. 62
      unity-environment/Assets/ML-Agents/Examples/Tennis/Scripts/TennisAgent.cs
  26. 929
      unity-environment/Assets/ML-Agents/Examples/Tennis/Tennis.unity
  27. 18
      unity-environment/README.md
  28. 56
      python/PPO.ipynb
  29. 2
      python/setup.py
  30. 200
      python/test_unityagents.py
  31. 89
      python/ppo.py
  32. 1
      python/unityagents/__init__.py
  33. 3
      python/unityagents/brain.py
  34. 31
      python/unityagents/exception.py
  35. 247
      python/unityagents/environment.py
  36. 85
      python/ppo/trainer.py
  37. 134
      python/ppo/models.py
  38. 85
      docs/Example-Environments.md
  39. 40
      docs/Making-a-new-Unity-Environment.md
  40. 14
      docs/Using-TensorFlow-Sharp-in-Unity-(Experimental).md
  41. 25
      docs/Readme.md
  42. 11
      docs/best-practices.md
  43. 4
      docs/Getting-Started-with-Balance-Ball.md
  44. 45
      docs/best-practices-ppo.md
  45. 495
      images/push.png
  46. 1001
      images/reacher.png
  47. 695
      images/wall.png
  48. 1001
      images/crawler.png
  49. 488
      images/curriculum.png
  50. 173
      images/math.png
  51. 563
      images/monitor.png
  52. 213
      images/broadcast.png
  53. 260
      images/curriculum_progress.png
  54. 380
      unity-environment/Assets/ML-Agents/Scripts/Monitor.cs
  55. 12
      unity-environment/Assets/ML-Agents/Scripts/Monitor.cs.meta
  56. 9
      unity-environment/Assets/ML-Agents/Examples/Area.meta
  57. 9
      unity-environment/Assets/ML-Agents/Examples/Reacher.meta
  58. 9
      unity-environment/Assets/ML-Agents/Examples/Crawler.meta
  59. 10
      unity-environment/Assets/ML-Agents/Examples/Tennis/Prefabs.meta
  60. 40
      unity-environment/Assets/ML-Agents/Examples/Tennis/Scripts/TennisArea.cs
  61. 13
      unity-environment/Assets/ML-Agents/Examples/Tennis/Scripts/TennisArea.cs.meta
  62. 81
      python/unityagents/curriculum.py
  63. 87
      docs/curriculum.md
  64. 18
      docs/monitor.md
  65. 12
      docs/broadcast.md
  66. 9
      unity-environment/Assets/ML-Agents/Examples/Reacher/Materials.meta
  67. 77
      unity-environment/Assets/ML-Agents/Examples/Reacher/Materials/Goal.mat
  68. 9
      unity-environment/Assets/ML-Agents/Examples/Reacher/Materials/Goal.mat.meta
  69. 84
      unity-environment/Assets/ML-Agents/Examples/Reacher/Materials/Goal_on.mat
  70. 9
      unity-environment/Assets/ML-Agents/Examples/Reacher/Materials/Goal_on.mat.meta
  71. 76
      unity-environment/Assets/ML-Agents/Examples/Reacher/Materials/Hand.mat
  72. 9
      unity-environment/Assets/ML-Agents/Examples/Reacher/Materials/Hand.mat.meta
  73. 9
      unity-environment/Assets/ML-Agents/Examples/Reacher/Materials/Materials.meta
  74. 76
      unity-environment/Assets/ML-Agents/Examples/Reacher/Materials/Materials/checker 1.mat
  75. 9
      unity-environment/Assets/ML-Agents/Examples/Reacher/Materials/Materials/checker 1.mat.meta
  76. 76
      unity-environment/Assets/ML-Agents/Examples/Reacher/Materials/Materials/checker.mat
  77. 9
      unity-environment/Assets/ML-Agents/Examples/Reacher/Materials/Materials/checker.mat.meta
  78. 25
      unity-environment/Assets/ML-Agents/Examples/Reacher/Materials/checker.jpg
  79. 74
      unity-environment/Assets/ML-Agents/Examples/Reacher/Materials/checker.jpg.meta
  80. 9
      unity-environment/Assets/ML-Agents/Examples/Reacher/Prefabs.meta
  81. 9
      unity-environment/Assets/ML-Agents/Examples/Reacher/Prefabs/Agent.prefab.meta
  82. 821
      unity-environment/Assets/ML-Agents/Examples/Reacher/Prefabs/Agent.prefab
  83. 1001
      unity-environment/Assets/ML-Agents/Examples/Reacher/Scene.unity
  84. 8
      unity-environment/Assets/ML-Agents/Examples/Reacher/Scene.unity.meta
  85. 9
      unity-environment/Assets/ML-Agents/Examples/Reacher/Scripts.meta
  86. 9
      unity-environment/Assets/ML-Agents/Examples/Reacher/TFModels.meta
  87. 412
      unity-environment/Assets/ML-Agents/Examples/Reacher/TFModels/Reacher.bytes
  88. 8
      unity-environment/Assets/ML-Agents/Examples/Reacher/TFModels/Reacher.bytes.meta
  89. 12
      unity-environment/Assets/ML-Agents/Examples/Reacher/Scripts/FlyCamera.cs.meta
  90. 23
      unity-environment/Assets/ML-Agents/Examples/Reacher/Scripts/ReacherAcademy.cs

5
.gitignore


/unity-environment/[Oo]bj/
/unity-environment/[Bb]uild/
/unity-environment/[Bb]uilds/
/unity-environment/[Pp]ackages/
/unity-environment/[Uu]nity[Pp]ackage[Mm]anager/
# Environemnt logfile
*unity-environment.log
# Visual Studio 2015 cache directory
/unity-environment/.vs/

8
unity-environment/ProjectSettings/TagManager.asset


--- !u!78 &1
TagManager:
serializedVersion: 2
tags: []
tags:
- agent
- iWall
layers:
- Default
- TransparentFX

- UI
-
-
-
-
- invisible
- ball
-
-
-

31
unity-environment/Assets/ML-Agents/Scripts/Agent.cs


* If AgentMonitor is attached to the Agent, this value will be displayed.*/
[HideInInspector]
public float CummulativeReward;
/**< \brief Do not modify: This keeps track of the cummulative reward.*/
public float CumulativeReward;
/**< \brief Do not modify: This keeps track of the cumulative reward.*/
[HideInInspector]
public int stepCounter;

if (brain != null)
{
brain.agents.Add(id, gameObject.GetComponent<Agent>());
agentStoredAction = new float[brain.brainParameters.actionSize];
if (brain.brainParameters.actionSpaceType == StateType.continuous)
{
agentStoredAction = new float[brain.brainParameters.actionSize];
}
else
{
agentStoredAction = new float[1];
}
memory = new float[brain.brainParameters.memorySize];
}
InitializeAgent();

RemoveBrain();
brain = b;
brain.agents.Add(id, gameObject.GetComponent<Agent>());
agentStoredAction = new float[brain.brainParameters.actionSize];
if (brain.brainParameters.actionSpaceType == StateType.continuous)
{
agentStoredAction = new float[brain.brainParameters.actionSize];
}
else
{
agentStoredAction = new float[1];
}
memory = new float[brain.brainParameters.memorySize];
}

public void Reset()
{
memory = new float[brain.brainParameters.memorySize];
CummulativeReward = 0f;
CumulativeReward = 0f;
stepCounter = 0;
AgentReset();
}

{
return reward;
}
public void SetCumulativeReward()
{
CumulativeReward += reward;
//Debug.Log(reward);
}
/// Do not modify : Is used by the brain to collect done.

{
AgentStep(agentStoredAction);
stepCounter += 1;
CummulativeReward += reward;
if ((stepCounter > maxStep) && (maxStep > 0))
{
done = true;

36
unity-environment/Assets/ML-Agents/Scripts/Brain.cs


External,
Internal
}
Player,
Heuristic,
External,
Player,
Heuristic,
External,
}
#endif

public enum StateType
{
discrete,
continuous
}
continuous}
;
/** Only need to be modified in the brain's inpector.

}
}
else
{
foreach (BrainType bt in System.Enum.GetValues(typeof(BrainType)))
{
if ((int)bt >= CoreBrains.Length)
break;
if (CoreBrains[(int)bt] == null)
{
CoreBrains[(int)bt] = ScriptableObject.CreateInstance("CoreBrain" + bt.ToString());
}
}
}
// If the length of CoreBrains does not match the number of BrainTypes,
// we increase the length of CoreBrains

{
foreach (BrainType bt in System.Enum.GetValues(typeof(BrainType)))
{
CoreBrains[(int)bt] = ScriptableObject.Instantiate(CoreBrains[(int)bt]);
if (CoreBrains[(int)bt] == null)
{
CoreBrains[(int)bt] = ScriptableObject.CreateInstance("CoreBrain" + bt.ToString());
}
else
{
CoreBrains[(int)bt] = ScriptableObject.Instantiate(CoreBrains[(int)bt]);
}
}
instanceID = gameObject.GetInstanceID();
}

Dictionary<int, List<float>> result = new Dictionary<int, List<float>>();
foreach (KeyValuePair<int, Agent> idAgent in agents)
{
idAgent.Value.SetCumulativeReward();
if ((states.Count != brainParameters.stateSize) && (brainParameters.stateSpaceType == StateType.continuous ))
if ((states.Count != brainParameters.stateSize) && (brainParameters.stateSpaceType == StateType.continuous))
if ((states.Count != 1) && (brainParameters.stateSpaceType == StateType.discrete ))
if ((states.Count != 1) && (brainParameters.stateSpaceType == StateType.discrete))
{
throw new UnityAgentsException(string.Format(@"The number of states does not match for agent {0}:
Was expecting 1 discrete states but received {1}.", idAgent.Value.gameObject.name, states.Count));

13
unity-environment/Assets/ML-Agents/Scripts/Communicator.cs


public string AcademyName;
/**< \brief The name of the Academy. If the communicator is External,
* it will be the name of the Academy GameObject */
public Dictionary<string, float> resetParameters;
public string apiNumber;
/**< \brief The API number for the communicator. */
public string logPath;
/**< \brief The location of the logfile*/
public Dictionary<string, float> resetParameters;
/**< \brief A list of the External brains names sent via socket*/
/**< \brief A list of the all the brains names sent via socket*/
public List<string> externalBrainNames;
/**< \brief A list of the External brains names sent via socket*/
}
public enum ExternalCommand

/// Implement this method to allow brains to subscribe to the
/// decisions made outside of Unity
void SubscribeBrain(Brain brain);
/// First contact between Communicator and external process
bool CommunicatorHandShake();
/// Implement this method to initialize the communicator
void InitializeCommunicator();

32
unity-environment/Assets/ML-Agents/Scripts/CoreBrainExternal.cs


{
if (brain.gameObject.transform.parent.gameObject.GetComponent<Academy>().communicator == null)
{
coord = new ExternalCommunicator(brain.gameObject.transform.parent.gameObject.GetComponent<Academy>());
brain.gameObject.transform.parent.gameObject.GetComponent<Academy>().communicator = coord;
coord.SubscribeBrain(brain);
coord = null;
throw new UnityAgentsException(string.Format("The brain {0} was set to" +
" External mode" +
" but Unity was unable to read the" +
" arguments passed at launch.", brain.gameObject.name));
else
else if (brain.gameObject.transform.parent.gameObject.GetComponent<Academy>().communicator is ExternalCommunicator)
if (brain.gameObject.transform.parent.gameObject.GetComponent<Academy>().communicator is ExternalCommunicator)
{
coord = (ExternalCommunicator)brain.gameObject.transform.parent.gameObject.GetComponent<Academy>().communicator;
coord.SubscribeBrain(brain);
}
coord = (ExternalCommunicator)brain.gameObject.transform.parent.gameObject.GetComponent<Academy>().communicator;
coord.SubscribeBrain(brain);
}
/// Uses the communicator to retrieve the actions, memories and values and

brain.SendActions(coord.GetDecidedAction(brain.gameObject.name));
brain.SendMemories(coord.GetMemories(brain.gameObject.name));
brain.SendValues(coord.GetValues(brain.gameObject.name));
if (coord != null)
{
brain.SendActions(coord.GetDecidedAction(brain.gameObject.name));
brain.SendMemories(coord.GetMemories(brain.gameObject.name));
brain.SendValues(coord.GetValues(brain.gameObject.name));
}
}
/// Uses the communicator to send the states, observations, rewards and

coord.giveBrainInfo(brain);
if (coord != null)
{
coord.giveBrainInfo(brain);
}
}
/// Nothing needs to appear in the inspector

22
unity-environment/Assets/ML-Agents/Scripts/CoreBrainHeuristic.cs


/// CoreBrain which decides actions using developer-provided Decision.cs script.
public class CoreBrainHeuristic : ScriptableObject, CoreBrain
{
[SerializeField]
private bool broadcast = true;
ExternalCommunicator coord;
public Decision decision;
/**< Reference to the Decision component used to decide the actions */

public void InitializeCoreBrain()
{
decision = brain.gameObject.GetComponent<Decision>();
if ((brain.gameObject.transform.parent.gameObject.GetComponent<Academy>().communicator == null)
|| (!broadcast))
{
coord = null;
}
else if (brain.gameObject.transform.parent.gameObject.GetComponent<Academy>().communicator is ExternalCommunicator)
{
coord = (ExternalCommunicator)brain.gameObject.transform.parent.gameObject.GetComponent<Academy>().communicator;
coord.SubscribeBrain(brain);
}
}
/// Uses the Decision Component to decide that action to take

/// Nothing needs to be implemented, the states are collected in DecideAction
public void SendState()
{
if (coord!=null)
{
coord.giveBrainInfo(brain);
}
}
/// Displays an error if no decision component is attached to the brain

EditorGUILayout.LabelField("", GUI.skin.horizontalSlider);
broadcast = EditorGUILayout.Toggle("Broadcast", broadcast);
if (brain.gameObject.GetComponent<Decision>() == null)
{
EditorGUILayout.HelpBox("You need to add a 'Decision' component to this gameObject", MessageType.Error);

56
unity-environment/Assets/ML-Agents/Scripts/CoreBrainInternal.cs


public class CoreBrainInternal : ScriptableObject, CoreBrain
{
[SerializeField]
private bool broadcast = true;
[System.Serializable]
private struct TensorFlowAgentPlaceholder
{

FloatingPoint
};
FloatingPoint}
;
public string name;
public tensorType valueType;

}
ExternalCommunicator coord;
/// Modify only in inspector : Reference to the Graph asset
public TextAsset graphModel;

}
#endif
if ((brain.gameObject.transform.parent.gameObject.GetComponent<Academy>().communicator == null)
|| (!broadcast))
{
coord = null;
}
else if (brain.gameObject.transform.parent.gameObject.GetComponent<Academy>().communicator is ExternalCommunicator)
{
coord = (ExternalCommunicator)brain.gameObject.transform.parent.gameObject.GetComponent<Academy>().communicator;
coord.SubscribeBrain(brain);
}
if (graphModel != null)
{

currentBatchSize = brain.agents.Count;
if (currentBatchSize == 0)
{
if (coord != null)
{
coord.giveBrainInfo(brain);
}
return;
}

i++;
}
}
#endif
if (coord != null)
{
coord.giveBrainInfo(brain);
}
#endif
}

// Create the state tensor
if (hasState)
{
runner.AddInput(graph[graphScope + StatePlacholderName][0], inputState);
if (brain.brainParameters.stateSpaceType == StateType.discrete)
{
int[,] discreteInputState = new int[currentBatchSize, 1];
for (int i = 0; i < currentBatchSize; i++)
{
discreteInputState[i, 0] = (int)inputState[i, 0];
}
runner.AddInput(graph[graphScope + StatePlacholderName][0], discreteInputState);
}
else
{
runner.AddInput(graph[graphScope + StatePlacholderName][0], inputState);
}
}
// Create the observation tensors

}
if (hasRecurrent)
{
runner.AddInput(graph[graphScope + RecurrentInPlaceholderName][0], inputOldMemories);
runner.Fetch(graph[graphScope + RecurrentOutPlaceholderName][0]);
}
TFTensor[] networkOutput;
try
{

{
Dictionary<int, float[]> new_memories = new Dictionary<int, float[]>();
runner.AddInput(graph[graphScope + RecurrentInPlaceholderName][0], inputOldMemories);
runner.Fetch(graph[graphScope + RecurrentOutPlaceholderName][0]);
float[,] recurrent_tensor = networkOutput[1].GetValue() as float[,];
int i = 0;

{
#if ENABLE_TENSORFLOW && UNITY_EDITOR
EditorGUILayout.LabelField("", GUI.skin.horizontalSlider);
broadcast = EditorGUILayout.Toggle("Broadcast", broadcast);
SerializedObject serializedBrain = new SerializedObject(this);
GUILayout.Label("Edit the Tensorflow graph parameters here");
SerializedProperty tfGraphModel = serializedBrain.FindProperty("graphModel");

28
unity-environment/Assets/ML-Agents/Scripts/CoreBrainPlayer.cs


/// CoreBrain which decides actions using Player input.
public class CoreBrainPlayer : ScriptableObject, CoreBrain
{
[SerializeField]
private bool broadcast = true;
[System.Serializable]
private struct DiscretePlayerAction

public int index;
public float value;
}
ExternalCommunicator coord;
[SerializeField]
/// Contains the mapping from input to continuous actions

private DiscretePlayerAction[] discretePlayerActions;
[SerializeField]
private int defaultAction = -1;
private int defaultAction = 0;
/// Reference to the brain that uses this CoreBrainPlayer
public Brain brain;

/// Nothing to implement
public void InitializeCoreBrain()
{
if ((brain.gameObject.transform.parent.gameObject.GetComponent<Academy>().communicator == null)
|| (!broadcast))
{
coord = null;
}
else if (brain.gameObject.transform.parent.gameObject.GetComponent<Academy>().communicator is ExternalCommunicator)
{
coord = (ExternalCommunicator)brain.gameObject.transform.parent.gameObject.GetComponent<Academy>().communicator;
coord.SubscribeBrain(brain);
}
}
/// Uses the continuous inputs or dicrete inputs of the player to

/// decisions
public void SendState()
{
if (coord != null)
{
coord.giveBrainInfo(brain);
}
else
{
//The states are collected in order to debug the CollectStates method.
brain.CollectStates();
}
}
/// Displays continuous or discrete input mapping in the inspector

EditorGUILayout.LabelField("", GUI.skin.horizontalSlider);
broadcast = EditorGUILayout.Toggle("Broadcast", broadcast);
SerializedObject serializedBrain = new SerializedObject(this);
if (brain.brainParameters.actionSpaceType == StateType.continuous)
{

129
unity-environment/Assets/ML-Agents/Scripts/ExternalCommunicator.cs


using System.Linq;
using System.Net.Sockets;
using System.Text;
using System.IO;
/// Responsible for communication with Python API.

const int messageLength = 12000;
StreamWriter logWriter;
string logPath;
const string api = "API-2";
public List<bool> dones { get; set; }
}

public Dictionary<string, List<float>> value { get; set; }
}

public Dictionary<string, float> parameters { get; set; }
public bool train_model { get; set; }
}

hasSentState[brain.gameObject.name] = false;
}
/// Contains the logic for the initializtation of the socket.
public void InitializeCommunicator()
{
public bool CommunicatorHandShake(){
try
{
ReadArgs();

throw new UnityAgentsException("One of the brains was set isExternal" +
" but Unity was unable to read the" +
" arguments passed at launch");
return false;
return true;
}
/// Contains the logic for the initializtation of the socket.
public void InitializeCommunicator()
{
Application.logMessageReceived += HandleLog;
logPath = Path.GetFullPath(".") + "/unity-environment.log";
logWriter = new StreamWriter(logPath, false);
logWriter.WriteLine(System.DateTime.Now.ToString());
logWriter.WriteLine(" ");
logWriter.Close();
messageHolder = new byte[messageLength];
// Create a TCP/IP socket.

AcademyParameters accParamerters = new AcademyParameters();
accParamerters.brainParameters = new List<BrainParameters>();
accParamerters.brainNames = new List<string>();
accParamerters.externalBrainNames = new List<string>();
accParamerters.apiNumber = api;
accParamerters.logPath = logPath;
if (b.brainType == BrainType.External)
{
accParamerters.externalBrainNames.Add(b.gameObject.name);
}
}
accParamerters.AcademyName = academy.gameObject.name;
accParamerters.resetParameters = academy.resetParameters;

void HandleLog(string logString, string stackTrace, LogType type)
{
logWriter = new StreamWriter(logPath, true);
logWriter.WriteLine(type.ToString());
logWriter.WriteLine(logString);
logWriter.WriteLine(stackTrace);
logWriter.Close();
}
/// Listens to the socket for a command and returns the corresponding
/// External Command.
public ExternalCommand GetCommand()

{
sender.Send(Encoding.ASCII.GetBytes("CONFIG_REQUEST"));
ResetParametersMessage resetParams = JsonConvert.DeserializeObject<ResetParametersMessage>(Receive());
if (academy.isInference != !resetParams.train_model)
{
academy.windowResize = true;
}
academy.isInference = !resetParams.train_model;
return resetParams.parameters;
}

}
/// Sends Academy parameters to external agent
private void SendParameters(AcademyParameters envParams)
private void SendParameters(AcademyParameters envParams)
Receive();
}
/// Receives messages from external agent

Object.DestroyImmediate(tex);
Resources.UnloadUnusedAssets();
return bytes;
}
private byte[] AppendLength(byte[] input){
byte[] newArray = new byte[input.Length + 4];
input.CopyTo(newArray, 4);
System.BitConverter.GetBytes(input.Length).CopyTo(newArray, 0);
return newArray;
}
/// Collects the information from the brains and sends it accross the socket

List<float> concatenatedRewards = new List<float>();
List<float> concatenatedMemories = new List<float>();
List<bool> concatenatedDones = new List<bool>();
List<float> concatenatedActions = new List<float>();
Dictionary<int, float[]> collectedActions = brain.CollectActions();
foreach (int id in current_agents[brainName])
{

concatenatedDones.Add(collectedDones[id]);
concatenatedActions = concatenatedActions.Concat(collectedActions[id].ToList()).ToList();
}
StepMessage message = new StepMessage()
{

rewards = concatenatedRewards,
//actions = actionDict,
actions = concatenatedActions,
sender.Send(Encoding.ASCII.GetBytes(envMessage));
sender.Send(AppendLength(Encoding.ASCII.GetBytes(envMessage)));
Receive();
int i = 0;
foreach (resolution res in brain.brainParameters.cameraResolutions)

sender.Send(TexToByteArray(brain.ObservationToTex(collectedObservations[id][i], res.width, res.height)));
sender.Send(AppendLength(TexToByteArray(brain.ObservationToTex(collectedObservations[id][i], res.width, res.height))));
Receive();
}
i++;

foreach (Brain brain in brains)
{
string brainName = brain.gameObject.name;
Dictionary<int, float[]> actionDict = new Dictionary<int, float[]>();
for (int i = 0; i < current_agents[brainName].Count; i++)
if (brain.brainType == BrainType.External)
if (brain.brainParameters.actionSpaceType == StateType.continuous)
string brainName = brain.gameObject.name;
Dictionary<int, float[]> actionDict = new Dictionary<int, float[]>();
for (int i = 0; i < current_agents[brainName].Count; i++)
actionDict.Add(current_agents[brainName][i],
agentMessage.action[brainName].GetRange(i * brain.brainParameters.actionSize, brain.brainParameters.actionSize).ToArray());
if (brain.brainParameters.actionSpaceType == StateType.continuous)
{
actionDict.Add(current_agents[brainName][i],
agentMessage.action[brainName].GetRange(i * brain.brainParameters.actionSize, brain.brainParameters.actionSize).ToArray());
}
else
{
actionDict.Add(current_agents[brainName][i],
agentMessage.action[brainName].GetRange(i, 1).ToArray());
}
else
storedActions[brainName] = actionDict;
Dictionary<int, float[]> memoryDict = new Dictionary<int, float[]>();
for (int i = 0; i < current_agents[brainName].Count; i++)
actionDict.Add(current_agents[brainName][i],
agentMessage.action[brainName].GetRange(i, 1).ToArray());
memoryDict.Add(current_agents[brainName][i],
agentMessage.memory[brainName].GetRange(i * brain.brainParameters.memorySize, brain.brainParameters.memorySize).ToArray());
}
storedActions[brainName] = actionDict;
Dictionary<int, float[]> memoryDict = new Dictionary<int, float[]>();
for (int i = 0; i < current_agents[brainName].Count; i++)
{
memoryDict.Add(current_agents[brainName][i],
agentMessage.memory[brainName].GetRange(i * brain.brainParameters.memorySize, brain.brainParameters.memorySize).ToArray());
}
storedMemories[brainName] = memoryDict;
storedMemories[brainName] = memoryDict;
Dictionary<int, float> valueDict = new Dictionary<int, float>();
for (int i = 0; i < current_agents[brainName].Count; i++)
{
valueDict.Add(current_agents[brainName][i],
agentMessage.value[brainName][i]);
Dictionary<int, float> valueDict = new Dictionary<int, float>();
for (int i = 0; i < current_agents[brainName].Count; i++)
{
valueDict.Add(current_agents[brainName][i],
agentMessage.value[brainName][i]);
}
storedValues[brainName] = valueDict;
storedValues[brainName] = valueDict;
}
}
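
A recurring change in the ExternalCommunicator.cs hunks above is the move from raw sender.Send(...) calls to sender.Send(AppendLength(...)), so every outgoing socket message now carries a 4-byte length prefix. The sketch below is only an illustration of that framing pattern, not code from this merge request: the Framing class, the ReadFramed/ReadExactly helpers, and the use of NetworkStream are assumptions made for the example.

// Minimal sketch (assumption, not part of this merge request) of length-prefixed framing.
using System;
using System.Net.Sockets;

static class Framing
{
    // Mirrors the AppendLength pattern shown in the diff: 4-byte length prefix, then payload.
    public static byte[] AppendLength(byte[] input)
    {
        byte[] framed = new byte[input.Length + 4];
        // BitConverter emits the machine's byte order (little-endian on common platforms).
        BitConverter.GetBytes(input.Length).CopyTo(framed, 0);
        input.CopyTo(framed, 4);
        return framed;
    }

    // Hypothetical receive-side counterpart: read the prefix, then exactly that many bytes.
    public static byte[] ReadFramed(NetworkStream stream)
    {
        byte[] header = ReadExactly(stream, 4);
        int length = BitConverter.ToInt32(header, 0);
        return ReadExactly(stream, length);
    }

    static byte[] ReadExactly(NetworkStream stream, int count)
    {
        byte[] buffer = new byte[count];
        int offset = 0;
        while (offset < count)
        {
            int read = stream.Read(buffer, offset, count - offset);
            if (read == 0)
                throw new InvalidOperationException("Connection closed before the full message arrived.");
            offset += read;
        }
        return buffer;
    }
}

A peer on the other end of the socket would mirror this by reading four bytes, interpreting them as the payload length, and then reading exactly that many bytes before deserializing the message.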

49
unity-environment/Assets/ML-Agents/Scripts/Academy.cs


private int frameToSkip;
[SerializeField]
private float waitTime;
[HideInInspector]
public bool isInference = true;
/**< \brief Do not modify : If true, the Academy will use inference
* settings. */
private bool _isCurrentlyInference;
private ScreenConfiguration trainingConfiguration = new ScreenConfiguration(80, 80, 1, 100.0f, 60);
private ScreenConfiguration trainingConfiguration = new ScreenConfiguration(80, 80, 1, 100.0f, -1);
[SerializeField]
private ScreenConfiguration inferenceConfiguration = new ScreenConfiguration(1280, 720, 5, 1.0f, 60);
[SerializeField]

public Communicator communicator;
/**< \brief Do not modify : pointer to the communicator currently in
* use by the Academy. */
[HideInInspector]
public bool isInference;
/**< \brief Do not modify : If true, the Academy will use inference
* settings. */
[HideInInspector]
public bool windowResize;
/**< \brief Do not modify : Used to determine if the application window
* should be resized at reset. */
* use by the Academy. */
private float timeAtStep;

GetBrains(gameObject, brains);
InitializeAcademy();
communicator = new ExternalCommunicator(this);
if (!communicator.CommunicatorHandShake())
{
communicator = null;
}
windowResize = true;
isInference = (communicator == null);
_isCurrentlyInference = !isInference;
done = true;
acceptingSteps = true;
}

private void ConfigureEngine()
{
if ((communicator != null) && (!isInference))
if ((!isInference))
QualitySettings.vSyncCount = 0;
Monitor.SetActive(false);
}
else
{

// Called before AcademyReset().
internal void Reset()
{
if (windowResize)
{
ConfigureEngine();
windowResize = false;
}
AcademyReset();
foreach (Brain brain in brains)
{

AcademyReset();
}
// Instructs all brains to collect states from their agents.

*/
void RunMdp()
{
if (((communicator == null) || isInference) && (timeAtStep + waitTime > Time.time))
if (isInference != _isCurrentlyInference)
{
ConfigureEngine();
_isCurrentlyInference = isInference;
}
if ((isInference) && (timeAtStep + waitTime > Time.time))
{
return;
}

19
unity-environment/Assets/ML-Agents/Template/Scripts/TemplateDecision.cs


using System.Collections.Generic;
using UnityEngine;
public class TemplateDecision : MonoBehaviour, Decision {
public class TemplateDecision : MonoBehaviour, Decision
{
public float[] Decide (List<float> state, List<Camera> observation, float reward, bool done, float[] memory)
{
return default(float[]);
public float[] Decide(List<float> state, List<Camera> observation, float reward, bool done, float[] memory)
{
return new float[0];
}
}
public float[] MakeMemory (List<float> state, List<Camera> observation, float reward, bool done, float[] memory)
{
return default(float[]);
public float[] MakeMemory(List<float> state, List<Camera> observation, float reward, bool done, float[] memory)
{
return new float[0];
}
}
}

12
unity-environment/Assets/ML-Agents/Examples/3DBall/Scripts/Ball3DAgent.cs


List<float> state = new List<float>();
state.Add(gameObject.transform.rotation.z);
state.Add(gameObject.transform.rotation.x);
state.Add((ball.transform.position.x - gameObject.transform.position.x) / 5f);
state.Add((ball.transform.position.y - gameObject.transform.position.y) / 5f);
state.Add((ball.transform.position.z - gameObject.transform.position.z) / 5f);
state.Add(ball.transform.GetComponent<Rigidbody>().velocity.x / 5f);
state.Add(ball.transform.GetComponent<Rigidbody>().velocity.y / 5f);
state.Add(ball.transform.GetComponent<Rigidbody>().velocity.z / 5f);
state.Add((ball.transform.position.x - gameObject.transform.position.x));
state.Add((ball.transform.position.y - gameObject.transform.position.y));
state.Add((ball.transform.position.z - gameObject.transform.position.z));
state.Add(ball.transform.GetComponent<Rigidbody>().velocity.x);
state.Add(ball.transform.GetComponent<Rigidbody>().velocity.y);
state.Add(ball.transform.GetComponent<Rigidbody>().velocity.z);
return state;
}

29
unity-environment/Assets/ML-Agents/Examples/3DBall/Prefabs/Game.prefab


- component: {fileID: 65551894134645910}
- component: {fileID: 23487775825466554}
- component: {fileID: 114980646877373948}
- component: {fileID: 114290313258162170}
m_Layer: 0
m_Name: Platform
m_TagString: Untagged

m_Enabled: 1
m_CastShadows: 0
m_ReceiveShadows: 0
m_DynamicOccludee: 1
m_MotionVectors: 1
m_LightProbeUsage: 1
m_ReflectionProbeUsage: 1

m_PreserveUVs: 1
m_IgnoreNormalsForChartDetection: 0
m_ImportantGI: 0
m_StitchLightmapSeams: 0
m_SelectedEditorRenderState: 3
m_MinimumChartSize: 4
m_AutoUVMaxDistance: 0.5

m_Enabled: 1
m_CastShadows: 1
m_ReceiveShadows: 1
m_DynamicOccludee: 1
m_MotionVectors: 1
m_LightProbeUsage: 1
m_ReflectionProbeUsage: 1

m_PreserveUVs: 1
m_IgnoreNormalsForChartDetection: 0
m_ImportantGI: 0
m_StitchLightmapSeams: 0
m_SelectedEditorRenderState: 3
m_MinimumChartSize: 4
m_AutoUVMaxDistance: 0.5

m_Enabled: 1
m_CastShadows: 1
m_ReceiveShadows: 1
m_DynamicOccludee: 1
m_MotionVectors: 1
m_LightProbeUsage: 1
m_ReflectionProbeUsage: 1

m_PreserveUVs: 1
m_IgnoreNormalsForChartDetection: 0
m_ImportantGI: 0
m_StitchLightmapSeams: 0
m_SelectedEditorRenderState: 3
m_MinimumChartSize: 4
m_AutoUVMaxDistance: 0.5

serializedVersion: 2
m_Size: {x: 1, y: 1, z: 1}
m_Center: {x: 0, y: 0, z: 0}
--- !u!114 &114290313258162170
MonoBehaviour:
m_ObjectHideFlags: 1
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
m_GameObject: {fileID: 1914042422505674}
m_Enabled: 0
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: e040eaa8759024abbbb14994dc4c55ee, type: 3}
m_Name:
m_EditorClassIdentifier:
fixedPosition: 1
verticalOffset: 10
DisplayBrainName: 1
DisplayBrainType: 1
DisplayFrameCount: 1
DisplayCurrentReward: 0
DisplayMaxReward: 1
DisplayState: 0
DisplayAction: 0
--- !u!114 &114980646877373948
MonoBehaviour:
m_ObjectHideFlags: 1

reward: 0
done: 0
value: 0
CummulativeReward: 0
CumulativeReward: 0
stepCounter: 0
agentStoredAction: []
memory: []

183
unity-environment/Assets/ML-Agents/Examples/3DBall/Scene.unity


m_ReflectionIntensity: 1
m_CustomReflection: {fileID: 0}
m_Sun: {fileID: 0}
m_IndirectSpecularColor: {r: 0.37311918, g: 0.3807398, b: 0.35872716, a: 1}
m_IndirectSpecularColor: {r: 0.3731316, g: 0.38074902, b: 0.3587254, a: 1}
--- !u!157 &3
LightmapSettings:
m_ObjectHideFlags: 0

m_PVRDirectSampleCount: 32
m_PVRSampleCount: 500
m_PVRBounces: 2
m_PVRFiltering: 0
m_PVRFilterTypeDirect: 0
m_PVRFilterTypeIndirect: 0
m_PVRFilterTypeAO: 0
m_PVRFilteringAtrousColorSigma: 1
m_PVRFilteringAtrousNormalSigma: 1
m_PVRFilteringAtrousPositionSigma: 1
m_PVRFilteringAtrousPositionSigmaDirect: 0.5
m_PVRFilteringAtrousPositionSigmaIndirect: 2
m_PVRFilteringAtrousPositionSigmaAO: 1
m_LightingDataAsset: {fileID: 0}
m_UseShadowmask: 0
--- !u!196 &4

manualTileSize: 0
tileSize: 256
accuratePlacement: 0
debug:
m_Flags: 0
--- !u!114 &40419387
--- !u!114 &1195891
MonoBehaviour:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}

m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: 41e9bda8f3cf1492fa74926a530f6f70, type: 3}
m_Name: (Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)
m_Script: {fileID: 11500000, guid: 8b23992c8eb17439887f5e944bf04a40, type: 3}
m_Name: (Clone)(Clone)(Clone)(Clone)
continuousPlayerActions:
- key: 276
index: 0
value: 1
- key: 275
index: 0
value: -1
- key: 273
index: 1
value: 1
- key: 274
index: 1
value: -1
discretePlayerActions: []
defaultAction: -1
broadcast: 1
graphModel: {fileID: 4900000, guid: e28cc81d8dc98464b952e295ae9850fc, type: 3}
graphScope:
graphPlaceholders:
- name: epsilon
valueType: 1
minValue: 0
maxValue: 0
BatchSizePlaceholderName: batch_size
StatePlacholderName: state
RecurrentInPlaceholderName: recurrent_in
RecurrentOutPlaceholderName: recurrent_out
ObservationPlaceholderName: []
ActionPlaceholderName: action
brain: {fileID: 667765197}
--- !u!1001 &119733639
Prefab:

m_RemovedComponents: []
m_ParentPrefab: {fileID: 100100000, guid: ff026d63a00abdc48ad6ddcff89aba04, type: 2}
m_IsPrefabParent: 0
--- !u!114 &225656088
MonoBehaviour:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 0}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: 35813a1be64e144f887d7d5f15b963fa, type: 3}
m_Name: (Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)
m_EditorClassIdentifier:
brain: {fileID: 667765197}
--- !u!1001 &292233615
Prefab:
m_ObjectHideFlags: 0

m_RemovedComponents: []
m_ParentPrefab: {fileID: 100100000, guid: ff026d63a00abdc48ad6ddcff89aba04, type: 2}
m_IsPrefabParent: 0
--- !u!114 &454511406
MonoBehaviour:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 0}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: 8b23992c8eb17439887f5e944bf04a40, type: 3}
m_Name: (Clone)(Clone)
m_EditorClassIdentifier:
graphModel: {fileID: 4900000, guid: bde8f790a5181fe419cc282c62090fc9, type: 3}
graphScope:
graphPlaceholders:
- name: epsilon
valueType: 1
minValue: 0
maxValue: 0
BatchSizePlaceholderName: batch_size
StatePlacholderName: state
RecurrentInPlaceholderName: recurrent_in
RecurrentOutPlaceholderName: recurrent_out
ObservationPlaceholderName: []
ActionPlaceholderName: action
brain: {fileID: 667765197}
--- !u!1001 &458019493
Prefab:
m_ObjectHideFlags: 0

m_Enabled: 1
m_CastShadows: 1
m_ReceiveShadows: 1
m_DynamicOccludee: 1
m_MotionVectors: 1
m_LightProbeUsage: 1
m_ReflectionProbeUsage: 1

m_PreserveUVs: 1
m_IgnoreNormalsForChartDetection: 0
m_ImportantGI: 0
m_StitchLightmapSeams: 0
m_SelectedEditorRenderState: 3
m_MinimumChartSize: 4
m_AutoUVMaxDistance: 0.5

-
actionSpaceType: 1
stateSpaceType: 1
brainType: 0
brainType: 3
- {fileID: 40419387}
- {fileID: 2069635888}
- {fileID: 225656088}
- {fileID: 454511406}
instanceID: 11612
- {fileID: 1235501299}
- {fileID: 878319284}
- {fileID: 859680324}
- {fileID: 1195891}
instanceID: 19744
--- !u!1001 &764818074
Prefab:
m_ObjectHideFlags: 0

m_RemovedComponents: []
m_ParentPrefab: {fileID: 100100000, guid: ff026d63a00abdc48ad6ddcff89aba04, type: 2}
m_IsPrefabParent: 0
--- !u!114 &859680324
MonoBehaviour:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 0}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: 35813a1be64e144f887d7d5f15b963fa, type: 3}
m_Name: (Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)
m_EditorClassIdentifier:
brain: {fileID: 667765197}
--- !u!114 &878319284
MonoBehaviour:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 0}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: 943466ab374444748a364f9d6c3e2fe2, type: 3}
m_Name: (Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)
m_EditorClassIdentifier:
broadcast: 1
brain: {fileID: 0}
--- !u!114 &1235501299
MonoBehaviour:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 0}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: 41e9bda8f3cf1492fa74926a530f6f70, type: 3}
m_Name: (Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)
m_EditorClassIdentifier:
broadcast: 1
continuousPlayerActions:
- key: 276
index: 0
value: 1
- key: 275
index: 0
value: -1
- key: 273
index: 1
value: 1
- key: 274
index: 1
value: -1
discretePlayerActions: []
defaultAction: -1
brain: {fileID: 667765197}
--- !u!1001 &1318922267
Prefab:
m_ObjectHideFlags: 0

m_OcclusionCulling: 1
m_StereoConvergence: 10
m_StereoSeparation: 0.022
m_StereoMirrorMode: 0
--- !u!4 &1397918845
Transform:
m_ObjectHideFlags: 0

maxSteps: 0
frameToSkip: 4
waitTime: 0
isInference: 0
trainingConfiguration:
width: 128
height: 72

done: 0
episodeCount: 0
currentStep: 0
isInference: 0
windowResize: 1
--- !u!1 &1746325439
GameObject:
m_ObjectHideFlags: 0

m_Lightmapping: 4
m_AreaSize: {x: 1, y: 1}
m_BounceIntensity: 3.12
m_FalloffTable:
m_Table[0]: 0
m_Table[1]: 0
m_Table[2]: 0
m_Table[3]: 0
m_Table[4]: 0
m_Table[5]: 0
m_Table[6]: 0
m_Table[7]: 0
m_Table[8]: 0
m_Table[9]: 0
m_Table[10]: 0
m_Table[11]: 0
m_Table[12]: 0
m_ColorTemperature: 6570
m_UseColorTemperature: 0
m_ShadowRadius: 0

m_Father: {fileID: 0}
m_RootOrder: 1
m_LocalEulerAnglesHint: {x: 34.15, y: 0, z: 0}
--- !u!114 &2069635888
MonoBehaviour:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 0}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: 943466ab374444748a364f9d6c3e2fe2, type: 3}
m_Name: (Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)
m_EditorClassIdentifier:
brain: {fileID: 0}

100
unity-environment/Assets/ML-Agents/Examples/GridWorld/GridWorld.unity


m_ReflectionIntensity: 1
m_CustomReflection: {fileID: 0}
m_Sun: {fileID: 0}
m_IndirectSpecularColor: {r: 0.43667555, g: 0.4842717, b: 0.56452394, a: 1}
m_IndirectSpecularColor: {r: 0.43668893, g: 0.4842832, b: 0.56452656, a: 1}
--- !u!157 &3
LightmapSettings:
m_ObjectHideFlags: 0

targetFrameRate: 60
defaultResetParameters:
- key: gridSize
value: 3
value: 5
- key: numObstacles
value: 1
- key: numGoals

m_Lightmapping: 4
m_AreaSize: {x: 1, y: 1}
m_BounceIntensity: 1
m_FalloffTable:
m_Table[0]: 0
m_Table[1]: 0
m_Table[2]: 0
m_Table[3]: 0
m_Table[4]: 0
m_Table[5]: 0
m_Table[6]: 0
m_Table[7]: 0
m_Table[8]: 0
m_Table[9]: 0
m_Table[10]: 0
m_Table[11]: 0
m_Table[12]: 0
m_ColorTemperature: 6570
m_UseColorTemperature: 0
m_ShadowRadius: 0

m_Father: {fileID: 0}
m_RootOrder: 1
m_LocalEulerAnglesHint: {x: 90, y: 0, z: 0}
--- !u!114 &224457101
--- !u!114 &201074924
MonoBehaviour:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}

m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: 943466ab374444748a364f9d6c3e2fe2, type: 3}
m_Name: (Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)
m_Name: (Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)
m_EditorClassIdentifier:
brain: {fileID: 0}
--- !u!1 &231883441

m_StereoConvergence: 10
m_StereoSeparation: 0.022
m_StereoMirrorMode: 0
--- !u!114 &594701073
MonoBehaviour:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 0}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: 41e9bda8f3cf1492fa74926a530f6f70, type: 3}
m_Name: (Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)
m_EditorClassIdentifier:
continuousPlayerActions: []
discretePlayerActions:
- key: 273
value: 0
- key: 274
value: 1
- key: 276
value: 2
- key: 275
value: 3
defaultAction: -1
brain: {fileID: 1535917239}
--- !u!1 &742849316
GameObject:
m_ObjectHideFlags: 0

m_Father: {fileID: 0}
m_RootOrder: 3
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
--- !u!114 &775711703
MonoBehaviour:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 0}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: 41e9bda8f3cf1492fa74926a530f6f70, type: 3}
m_Name: (Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)
m_EditorClassIdentifier:
continuousPlayerActions: []
discretePlayerActions:
- key: 273
value: 0
- key: 274
value: 1
- key: 276
value: 2
- key: 275
value: 3
defaultAction: -1
brain: {fileID: 1535917239}
--- !u!1 &959566328
GameObject:
m_ObjectHideFlags: 0

m_Father: {fileID: 486401524}
m_RootOrder: 2
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
--- !u!114 &980448580
MonoBehaviour:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 0}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: 35813a1be64e144f887d7d5f15b963fa, type: 3}
m_Name: (Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)
m_EditorClassIdentifier:
brain: {fileID: 1535917239}
--- !u!1 &1045409640
GameObject:
m_ObjectHideFlags: 0

m_Father: {fileID: 486401524}
m_RootOrder: 1
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
--- !u!114 &1083197345
MonoBehaviour:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 0}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: 35813a1be64e144f887d7d5f15b963fa, type: 3}
m_Name: (Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)(Clone)
m_EditorClassIdentifier:
brain: {fileID: 1535917239}
--- !u!1 &1208586857
GameObject:
m_ObjectHideFlags: 0

stateSpaceType: 1
brainType: 0
CoreBrains:
- {fileID: 775711703}
- {fileID: 224457101}
- {fileID: 1083197345}
- {fileID: 594701073}
- {fileID: 201074924}
- {fileID: 980448580}
instanceID: 22306
instanceID: 12718
--- !u!1 &1553342942
GameObject:
m_ObjectHideFlags: 0

1
unity-environment/Assets/ML-Agents/Examples/GridWorld/Scripts/GridAgent.cs


// to be implemented by the developer
public override void AgentStep(float[] act)
{
reward = -0.01f;
int action = Mathf.FloorToInt(act[0]);

21
unity-environment/Assets/ML-Agents/Examples/Basic/Scripts/BasicDecision.cs


using System.Collections.Generic;
using UnityEngine;
public class BasicDecision : MonoBehaviour, Decision {
public class BasicDecision : MonoBehaviour, Decision
{
public float[] Decide(List<float> state, List<Camera> observation, float reward, bool done, float[] memory)
{
return new float[1]{ 1f };
public float[] Decide (List<float> state, List<Camera> observation, float reward, bool done, float[] memory)
{
return default(float[]);
}
}
public float[] MakeMemory(List<float> state, List<Camera> observation, float reward, bool done, float[] memory)
{
return new float[0];
public float[] MakeMemory (List<float> state, List<Camera> observation, float reward, bool done, float[] memory)
{
return default(float[]);
}
}
}

96
unity-environment/Assets/ML-Agents/Examples/Basic/Scripts/BasicAgent.cs


public class BasicAgent : Agent
{
public int position;
public int smallGoalPosition;
public int largeGoalPosition;
public GameObject largeGoal;
public GameObject smallGoal;
public int minPosition;
public int maxPosition;
public int position;
public int smallGoalPosition;
public int largeGoalPosition;
public GameObject largeGoal;
public GameObject smallGoal;
public int minPosition;
public int maxPosition;
public override List<float> CollectState()
{
List<float> state = new List<float>();
state.Add(position);
return state;
}
public override List<float> CollectState()
{
List<float> state = new List<float>();
state.Add(position);
return state;
}
public override void AgentStep(float[] act)
{
float movement = act[0];
int direction = 0;
if (movement == 0) { direction = -1; }
if (movement == 1) { direction = 1; }
public override void AgentStep(float[] act)
{
float movement = act[0];
int direction = 0;
if (movement == 0) { direction = -1; }
if (movement == 1) { direction = 1; }
position += direction;
if (position < minPosition) { position = minPosition; }
if (position > maxPosition) { position = maxPosition; }
position += direction;
if (position < minPosition) { position = minPosition; }
if (position > maxPosition) { position = maxPosition; }
gameObject.transform.position = new Vector3(position, 0f, 0f);
gameObject.transform.position = new Vector3(position, 0f, 0f);
if (position == smallGoalPosition)
{
done = true;
reward = 0.1f;
}
reward -= 0.01f;
if (position == largeGoalPosition)
{
done = true;
reward = 1f;
}
}
if (position == smallGoalPosition)
{
done = true;
reward = 0.1f;
}
public override void AgentReset()
{
position = 0;
minPosition = -10;
maxPosition = 10;
smallGoalPosition = -3;
largeGoalPosition = 7;
smallGoal.transform.position = new Vector3(smallGoalPosition, 0f, 0f);
largeGoal.transform.position = new Vector3(largeGoalPosition, 0f, 0f);
}
if (position == largeGoalPosition)
{
done = true;
reward = 1f;
}
}
public override void AgentOnDone()
{
public override void AgentReset()
{
position = 0;
minPosition = -10;
maxPosition = 10;
smallGoalPosition = -3;
largeGoalPosition = 7;
smallGoal.transform.position = new Vector3(smallGoalPosition, 0f, 0f);
largeGoal.transform.position = new Vector3(largeGoalPosition, 0f, 0f);
}
}
public override void AgentOnDone()
{
}
}

13
unity-environment/Assets/ML-Agents/Examples/Tennis/Materials/ballMat.physicMaterial


%YAML 1.1
%TAG !u! tag:unity3d.com,2011:
--- !u!134 &13400000
PhysicMaterial:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_Name: ballMat
dynamicFriction: 0
staticFriction: 0
bounciness: 1
frictionCombine: 1
bounceCombine: 3

2
unity-environment/Assets/ML-Agents/Examples/Tennis/Materials/racketMat.physicMaterial


m_Name: racketMat
dynamicFriction: 0
staticFriction: 0
bounciness: 1
bounciness: 0
frictionCombine: 1
bounceCombine: 3

256
unity-environment/Assets/ML-Agents/Examples/Tennis/TFModels/Tennis.bytes


Binary file (TensorFlow graph); contents not shown.
O>��<�Ǝ>��T=�c�i�>��̽�=w��v����>;g��K/�u#Q>�6޽u��=�r3>��>��V��7o�6g-=�74>�;�;7�;���ڽ݇z>(�9=,Ti9������Y�h���;1�ԾP�8�'#��j�>:.�=<H*>)�>���=2�ξ�#ѽ�����O�=ϊ����=o�>�~>��Z��;x�crӾ�n>�<��=B5�=.�=G��=蜊=P��H䘾��B�\�>]���✼+�A>�R��w/��j��9����� ���S�>œ9��[��P �)H�=� >�|���o�>�.=F�������E=`Q�<�Ҽ��|��c���q���>��=pTz=o�=��=�⭽*�m>�[=Y�=�٠=����]�o�j�J=��ܽa�־�=B��>�)>|��=Ra�>�9�>�z8>9�N��>W�7����([�=N��=�� �,I>���>SX�<���ؾ�d�>j�����9������b��y�ud��LJ|��=���=�E�>&������ly ��C<�zQ�Ӧ=��O=�� ��k(�ۏ=4����5�<E��=�A۾ն;>���>�ug>l���6��@;]�,>^�b����=��z�q'P>`I��Ր>\�ڽ: �7�Ƚ�b>��
>�6>J})=�B!>ւ����;�� �����dͼ���<�>3�W>��;�+�⽚��*�]6���U>��*>lEA��^>��R=�(�i'׾���F :=B*�>�����6>N?>����9<��[�������:�#k���>���<6=N�M����4�>W =�
>Vr���漄��<�,�n(k���U� Vq<#ft>�v��0/$>M�ѽ~<�����bOr��3���v~�Uy����=:�I����h��=���=��u>���d��%�<��U�ke�Au]�?�l*�=H�=� >��#>����ڼ�b�(̽Ę#��%t=�<��������Ӗ��"�漽vվ���; q�<a�[>��_��U�<�H�>����9���@��=�=�=�L������~��=��=��>�@��~��>���<�~˽�a����>�&��'����3��+��=e��=Z�><���<Ws >�PL=<"7>�H�>'��>5��=�I�>Q:�<�酾�Rҽ����׽�����M�>ԫC�I>y>a��>���>W]�y��^H޽�_!�qª>_�)>.��<����j>μK���I��!���� �C�&>�^x��&��=��ཛྷ�r�����J�e;?tԽY�l����>
��=3 �=b�t��[E>A�����)>'���O�o8>{�þ���<%�>Td:>�j=�=��$���AS>c�#����1!�ԅ<=Ɏ>A�=qS������|���-Y=�ؒ��Z>�n�<*�=��н�I>�9'>Hx=��G��,��}9�K���F��"(=�TM=M����;��<_ϑ�9�a�v��w�B=�oL� =��U�>�]>����b�Omz=����j ���+�+��>�`4>�)s=�X�=нz�p� 5��[D=`63��v���l��:�$R>�O���H�������������<Kj��/��<�/ >��=����{���i� >�>F;�an�=��^>��s>��=>n��=E .>}b����^>��>�g]>x�k>$X�3�$�G��<@ �=N��<{�n>&ȫ<��<����+D+=i�
>;������>�
�~�
��:F>߶���`>�S��iJ��t�=�����3��N>��@>�R���=��>������½F�B������=[��>&>U������� ?g�>&���᠂>�M��f޾E��=���=_��N�ܻŁ�ŏ���Ah=^>ne�-�C>����g�>�6'>5��� =��E� 4�=#7�> �K��)ʽ�# ��������<���=�O���o>���>,��<J��>��Z��dW�18_��� >��>,�[�}�>���>��=�x���yᾆ���Pnd>��<=���<FcȾ�?���[Ͼ�~��nnV�U�>��<>���E�j�EJ�>:7�>�)�>kː<V����> ?�3�`��>Z��>��3��,X>��1>���_b&�����>➄>l�ξMT�>~2�>}=>�z�=��>{��D>����~k���y���n���E���!��g$�d�?B)C>���>3��>��>�O>̏�f��=�C�<��=Ye����N���>�0ѽ_�������8��k6��h�>�+�>"�̾$� ���/�1'k�F�Y�̼�_�>Y�Q>�ֈ������@��� �>{��;f=�e��>���C�H�"�= �=���=��ѽl��<8 C>�ap���=�'��=�u�=p߷=�4��~f�4w�=����Q���.��>I�o����=4ߖ=9�E:>4��>�0�=5<��y.>>�½�K����<+�0����>�=�>?qF�2ԭ��2[>\'>;1=􈬾DHR>6��=a]>�= �J�%� ��=�I��u�>g3�ʚ�<�[e��v>>���ۢ��)���S� >�,�=MY���{I>��޽x0=�*M�p
t>������:>���=�*>�.X>�A1��)�=�P=-W>l�m=���π<(ok�#!��X>�{�=�@G�V�_�p�5>U�@�N� ��&.������i>����������~��g�=�Č>iD>�i���k��=���=!�׽ԼA>�*s�o)N�Q�>'D�=�%���弖]e=lV����ƽ����,����7>�p>�����|�=4��;_��=����6��=�A����yo���>���<Y��>jϥ>W1�������%>QΥ=�Z�>�|w;�"n���L�׀�>�xt>�K?�����iM���\�<ۘ켚�%���������*���%^D>���<��=9Y<>
c�c��� �z������/��>^'<T �>�<�<y��>�>i�w>Ĩ��:~>W>�xi�穼�E�I�>.�?Veӽ��=�𾾷���W��R9*>���>v-�����=H ��Y`?��g�;MI�>�P�=��<>q >6�&<>A1>+���=���
#�9��񚀾u�>CFN<�b�<�RC���L��x�H��ߋ��������L��C�;���*u���>V�K��I>UZ�=�#�.�r������ >�ǔ>
��>�%H>��e>l�S<򄽽 �;9�=�v��Tڠ>c�ؾ!D�}#�9Yxf=m]��Bn0>l��=�Q:>��:݇s=��ۻ���>�ŗ�9��M�N�CQ���YS=q�d��D�>�'��1Ƚ���<fd���<$�^�G<QQ9����=q{G>fV�t�4�8����'�>�Z컺���"<=j7 >)�\��#?� P\>����2<�N����r�Q���<o���Ԋ���<��Ka>����$ >q+�P�F>,�
>�q>�4:>�j!=�1�S�������~X�<懐>��!>*�^>��ػv�S>fg�<"����R�=!m_>l$>�٥=���=���=�ɪ����=bg�s5����c�F>�l�����<�̶�%Ԧ=t���P�<>_���c�<O*>3�<R�1�
��Yc̼Y{��+'=�0>2�=6&��s���^���O�)�U=?uɼ� ɾ�ui���U<���<2G�=7�M>��G����=����8 >�~���T�2p���#>Ӈ�<��8�
Ye=��>'Ɠ<��>ዻ�N�u�뚂=By��{i�Z�#���ӽ��,��v��A��=�)L�,���R��+��� K�� ��5��=B��>4�>;���[���\Ǿ!7�S{S>Ì�> ,�[h�,�d>1S���j����{�>�h�=n��=���> 4B�Z[�<���؋�=�a�>��,>әc=�[�<O
��m^>�S>���7엽��w=��b=��/=����'�a>� c>Ł>���">ns7=����#0H<[�J>P��;쳇��Fǽ�,���\>,n�zR/��;�;"�>$�X���B��8n= 5A>$F��(�i=E:��Qx���0�_�6>��-�1-� ���݅��ѻY������>ؽ=|ꇾ��ݾ7��;t��j��=8�,>21�<�����!>&ŏ>�_�=���=��a�~��e��]*_>��<}�</�R>��#<r�<�%�>$�.=�=�U1���$>���F<��8���=��x>�U,��rP>2��=��l�I�(>JQ5�?N��1�U�!�<�޽���>�_��K=��_�1V����;�.%>ˋ9<[j>����*ֽ̘6��E�����=��>��c�N���Z���M�����">�b�=+wp>[�Ľ%ɼ�P���;����"�s�T��=�=��VľXh���>;�ʾh[�=�#��᤽�R5>����A �>���#�">q�l=�P��'g�=�0[=db��Yf���L>k��km�=��Q��x+>H(�>��O>�9>b!b��Ƞ>��$<]J:����"_>��=���@��<��>>Q���u��='K�>�R�>w;;>�0��z >} ,���>Cd�=������%>�r�>E;���<+=L΅� n,>.���"�^��=�J����h>AL�=l��< ��C&y�����*�����v=���>�E �ƥ�>Ⳕ>0�����>lb��z,>�����gz�o����
>c�����ØǾd���:�;GS�>������0����<PJܽ�x�>p� ?��=-�>�X�@Kػ�5ǽf��=j/��Go�5-Y�K(>��=}��>8�/>��?N���� ����`�{A�;�A���c�<v{o����=�����=N�Y�J?f�(���(�y>eνu���9=>(b���B7>.�U�g[p>E��>�ț� ��>E�I>Gi�<,�'�]�g�]\�<�8 ��:^>Y��N=�J �4f�=��>;�=cxA=Ն�����>ʾ�>�>��>�����#�=�W �S��=��v�wbN����5p�}�羏|��LԽn��>��`� �d>���<Y�=��R���=hQϾj�Խ�Ծ�ڼ�!=p�1=Udf��{�=$��=r�Y���E��_Z>��`�a�Z<mV?z�ݽ\�N�U��ċ}>��>��=�e�=*�(<��>��P�X�u�u�P>����ܒ���lE�ʴ����M�~b�:.��>ҋ/�
�]�>!��C�T����<�I�=K*��/��#��<!��>¢��� 7=^�9�4?<����(<��V�>�둽@��� Ľ������>�{?�T�>�mD>> ?�P��W�<1RP��c�=^��Sg�>u���>���=�jb�Բ�7�8>�A��=�=o'־� %>�>%�E>L��<����6b �(������=�����?$��7�>E5����)�� �=�1>��m�Ϥ����*+=jJ��W�ֽ�5F<g)��V#�<�J,<�|>�=�>jg�>PT>���=��T>��>~_�>�����x�=e$#�`z��8q���
�����=��u=W���5�!6�=<>���<�2�>���=a���e�í��;ھRS��IU��]�=�g>lyz>L�輠�q=X��=+�>þ��z���}�HkT>�=1����rX=U�4���>��l����>%>�pI>��;��=W<�^��<c�F���y��&^�Y\ >���>|�(�;�`�)����KV����٢�>s,v�I�<Kὸ�&��mb>28t=e騽w����0�=#���X ����������+ze���=8�>�M
��r���7 � =��l>�r��kgE��$��6~��6ڒ����<%�e�Dl�>,�>H��o��=H��BT�=[�q=P�f�ԷY>�B�;����-��;~uŽ��ɽ�3Ľ�3>e�6>�b@=3>�2��<l���� ��KL>w栽���=uQ >:K>����\q�J>r��O͞:,�*=��>��3���=��>��y���"�:�4>��.>�>ڏ��W>@�x>� A<� =X�.���>Q݋��|��\�@����5>����"=��o=��V>u�<Bn[<C� <�Q��Tc�:�s�=�@���)�5h>u<�=_o=�K�8�>PG��-�=�)�H�*�Jd������C�� _>�㲽�彬�P�@�:# �+l�=�ʆ��𦽣F$�Q����<>��
���V���>��󼳁�=,�f�\��0���2*>�.M=��뽂Q�=��R>�c">�=�&��=�W�<��=��U����IR��e>�M�h,�= �<+F�=^���M��<*(>!�#=d>޾@>4ې��L���)>�U>��>A�m>Ӡ����'���<��9>���>�<�\��(hS>AJ�n��>ٍ�`
">T��>�ﭾu�> {�:��->�n?>��ҽ������=����=U�5B>���:����2>_=�>�2��yP�>{���"Y>ʠ> �I>r���3=��Z>[���C2�����v�B>h�½ۃ�����=�;9��/�=���=�TA>j>���>83�>��ȼ���=��
�ja���)> d����>q^E��o>S�>��>[���D�=_C��8%3�N �>��P�G��>�*��p"�=�|� �%��H�_n8>��>o<%���=��>�y^=�>�de<M�|�r�T�f}�o��%=�>�d�=�w����?��/�‘���j�=�rg>��><G�/9�=��A������<�>�t�����=O ���4����څ>q�>�w�>�jO���d=S�H>(y==�u��߆�<1��g'�=z�V���=6C���lJ<y=н���=�)�>[&���e��λ#>܍�=�;a>S{�=� ޽���k1�<��0=��u>�����;2ܾ�����.*�Ć��!�>$#!���k�����}^��' ��b��=�[)=`4�=%��z"��ڎ�>y����u�>+"��s>���<��2��)�= H����vNj�f��=�Lݼ��:�W>��z�U>֡Z=��]��>˾���X�8>��м�(��?{>�k�ov��y�&<��C��0�=N_����>p��]AM>J�!>6�����#��n½�``���4��, >$�����M�\>B�O��;���Z�=���>�[��==�����>R>���n��=�> -<��>�`��8^>`أ=-d>LՂ=K�;���=�=�ӑ=���=�W�;+`�����>��K>E�>}�>�N��.?x�uK=̜�>��4�\:L�]���~�=`-���ڽ�X����|���7�&�i<�W�=��">-.{��`�e{>��9>��!��0������V5c<�禽�L�>�R�=@�ƽ]�C>����NO�=���$�6����=��⽬">�K��jlj>|)>�<t=�]�<P������˦>��>������j�`���Q�Ȝ���t�����>�m��d����C��Hn>X��<%�%>����&&�� ��>���>a�>�L�>: �=�v���/>C-�>Mv�=�˶�;O���\=��:�t>�ý�g�>9/>,q�=ǘٽ����䄾S�>3(ü?��<CG�E*��a˾���.�>���>5`>6�>`��>Ƕ<��>[�H=�)��O-W�e�C�j6��*䃾��?U�>�[�=x4�:2ľn-�g�C=[��>�?��T�ξ.H����Ɗ=��<@�>n˼U�2��W4�dbs���,?�㡼�"�=��{=z�V�?2.2��?A?���=���IZ��6�>' �>�̾\K��se�F/ý*N?f��F��=�H�=��>b>�g?�(>̃ ?���=˓���X��Ȱ�p ��e�?���Ў?0u.�ܮJ?��>?�s?yG������>�D<�; >�D;�Go;|! >s��S�8�%�(�ps��Z���|�U?�,��?�ý���=�"�3����Ǿh5l����>�Jd���j�)>�=��y<�^���p��۴���=�u�>����N*���ͽK;�m���D�=-,���x��-7>�l;��־�!��L���|�ž��
��.���� >۷̽��hl�BT���>�k?�ؿ>�X�=6�8=�f��/A��M��l}�>����<nNþU��>��z>�����ܽ񤉾�#&���ɽ%m��T��>)s=%A�<H>�:;��Q�n>�~��2@>3 <��J>���Q�>,�%��=�W�<��mx�<�;n>�ࣽ=��ؽb���v��Ɋ�����=�kj�Jҥ����mZ>+�2>���=�v�M�2>h�>���}���8 �=-o=#H�T�y�Yz�<iO�xF����T��"۽�3>+��� 9C=�m�= �'��h�LɾD�
=8�3��5ξ#ʾ��q>��>aB�=��:M?�=�%x�f"����<D���#h���+�8]����3=�VT�V�>q�'>��<Q�T=hN���0�=�!�9��=Z���$;C> jh=�=a.�q��Ki ����=eD4>�,|=�w;��=u�>t��=��=\��}�<)S.?��S>�\�O�t��]�>Cs���?>��)�+ڧ������k�>�ݽn�8��L1>'>4�L=��=��J>�R�>��(2���2B�� ��Y���+Մ>�}˽S��v�Q� ̼�_���/>���=2E�N����>p��>D��>h� �{y�����>�ɚ��>D=N�5��f�>զ�>×˾ås=̥=D��k����� =�oa�ɱ����J>#k��ܮ�=�l����<������<�q۽��x��y�=ID{<}� dž��G>Q�>l�P>Ē,�mp�=�`>x?�=������>�� ��&^;���u.�v|%>JՎ=p�����_�ɩ=��w�>*2>�ܟ��;������L =�i>g�>�.>�&=�/�� D�����7�=�)���h>�{a>�rٽ�&}���k� E��c�U>P\>{�ǽ����*�S��XZ�2�=j�I>U��<�P1>c#>p<�=�I��3�=1.>�I%���c>���f ��� >Ƽ=�\'�R�������8=�B�= 悽'j=�X���=�<a�����=�^a��� >99{=��Q= ��<��<�#u*=���>V�=5�M,½F�E���=`�I�4u�=�.L> P��F ���o>�.i�N����Ͻղ
�3��=�:�F�=�b->4N�;'>s:'���&:������4>�G>���=_
�S��=�g=�x�4Z��GR�>:���a�}��67= C�>��|>Ź����}>���=��P�����T<�;<-;=�G*��lM��%�=/.�=v���ʽU����֓����=!`�<��$D��X!�m��>t��>�i�� �W0 >�� >cS}�����υ=ر>�0�=���<�1�>@�f=k҃=7vx�o�m�C��:�U<>n���� �>���>��J�����(!�����>��M��+k=�3���ʜ�|�����=�!"����>�n�<-Y>/nڽ��{�z�\����_�=�2Ľ˽�!:>ax��~��E��=�f�>�#>�y��t��<����63V>#���b�U��� �/�'����n>�?�3>����O3o>�>��a>�>�.V>��m>�a>6�=I������O�>��+�� >��%>�׎; S=����ٌ=i�3>�p>Y�>� �=);���pN>��j=�~6>w2�>�@��'�>15?�,͡=$ν#^�<����\Y���^�J�i�������7�>� �>�^�?��>: �M��<�^�>it=>�N�>e�"=���>��پ��D>�q�>���:s`��������y>Ywg=��[�Sq�ң�>�`�=�sy=!��� ����l���`��>���0Eս�u��嚾���;��>I�;�=>��>ΰI��6>��<J��>k�*�y�T=���<�j�� ����O��F>uٽ�����G��O3�R��;��#=5��>�#��p�E��*=����.X�>t�F�5+s�\�<�-�<[E >�K��Eq�E��q�x�=��� ��E�>�(��S�w>��=�g�<{b>��㼣�">���֚�>���<����=��{>����Q%���/�=x�J=�V���q��r�=N��=ՠ�<]ӽ�/���{z>��^��s�����=ٗ:��z�����aʜ��>�2���!>\S>O��� ��=������=?��@�=�ds�=����ȩ�h�b�O� ���6>R���� *�#�<��]>����c*�&��=�T��+Q��Ҡ�z�����=����>���׼�<�
�=+\>�r�c�==~9X�-vi�W14>�Ψ>K��=�1d�<������� ��1=��%>�6>��Ͻ�ߚ�y������=��{>�e��x�p�q+�`��=����m_����<�ើ�fN����G��>����~V��M=A;n=".���?(���=���=�"ѽ�n*��>�����>�1��� >�S����p�o�����s>�y��%� >9�>
dense_1/kernelConst*��
value��B��@@"��!�սT'>~����̽I�>2�o��� �G� ������=�=����^�� �~o��o ~=�����he=E����� [=>V(=�LL=,��=�S���d?>$��=��#=�ms�8=���x>@�ڼE�> ��j>ܽy�����e�|K>oө���3<�˽���=2�>>X�q����=�||>�c�=�/�^iZ�����t ����=�*:�g�=S�ɾO�e�F��=?��;ea�>������fk�u�.�������?>טнf�B>��n=/���U������h���UP�=�z�+�����#����'}=@W�����_�� ����{X===�µ���F>�y�=��'������>�0j>7
<A,�=�ȴ���ݾ��<�4)=@�a=q�P�J�|�����=����=���>rm~������I6>KOJ���Z>u�K���>��{=�s�=m��=�A(�� ۾2���Ǿ���6=7H�'+��4�ݽ��[=(N�ړ��'i�����™��!{��4<�'������S"&<I]]=#��v�I>U7�<��˽��0>����y䧼J_��LR=m�K�%G{=DҦ<#9^>U���D��/>��_>�<�>=�������>�[G9��.�<��>�UJ>2b�>�R�>%|�=�K����>c�>���=� 1�׈���t:;��=}&��y�$�x2�۳�>X�9>�C>�=J�o��p�A��=7������
Ž�$>��=�]k��€�����7�=�lN�k�����'���.=��=CM�=�Y>o�˽a������{Td��cT=姺���=_嶽�z�Y������=F�y�2,e>=s=_��>���>��O>+$���F'>`��;ι�<�ׁ=�{��z|h��(�����=�s��5-;��<I��=�䣻� ���J��� Z=������7� g�>��Q=-�f�x=;���\㉽�j�=��Y�Z�_>#{>q��=���=�M�;��߀ѽs*��{�> �>
=�MZM><�S�ٓ���0�> \P�}d��f���%� ���<�j��|]+�D�<�@,;O> >��=�ە=��P>�y����O�#�<���ّ[<� =�����v�<J0P�<s��c�=�`p�E���q�䧏>��q>Z�J>Լ >&� >�ӟ���t��NG����=$��h��r�=V���-]<YDI=*Y�>�i�<FN>�����bQ����˒N��k�����=�mC�e:5>�o�=D�`>`o����0�
$ݼ)��V�4>��=��7>Ʉ��C!,>�P����-��.��2{�;)$ =k��=¤d>���<�,�W�����۵=���<�"=� �=1a�=AW>pDA;N��=$½s�.��l�=-�<>�";79X��2��2��<q !�e"���?��� >�\:�ޱ���L���|=d�o���>=)�>LK�k�=�Pl>�&�=�F�=ݧ">j =}����xX>�T�;J����߮���<a
=�T�>l�:�H
��L��$:������;��V��Zx�r��3�w����;vB�fƽ&�D���-��.>�+����P�Y>Lҟ=W�@�"��>��@>q�>=����3��H��%T>g��q���_�x�4��=3v�<�
o>���=����|w�<F#�=������>�@���;v{=�����.�=-������>'�7>�"`=@e<dF�; ����]F>�������<$e;�H��%U�;�BU>��н��a�,>T��>A��m�=�],��g �
�d>Gpʾ�/нr�1>a�2�L)������f���l�<�ƪ</�Y��;�G�=\������<O�=�
��"7����'=��R���=6��M� >5e�wT���m�XC�$C�>x�\�]>^���A��@�_>L���_y�=���=�T���H߼�~��߽�a�[�N�?G>�I��1���[��=/Yt������!�RI�>�ҽ];֟_>3�A>�h��&i:��Y;��I�=�?�=7{ؽ��7�Ζ��XMȽb8y�C��"����'�~Q]�������S=�,S��;����<�o>G2!>(��<-Bc>q�=@T����1��*�=�gG>h�2<V��=ԥ�=�Ľ���n!���4��W��mнhWV��l��s�=?|��է]�?ޜ�q����1齯�>���(v����= %��p�=��=�� ��W����>դ��Z^>��g��7(=X�K=K]ҽ�q&�> ='���A����&>�+J;*@=�*�<GM� &v�G��<�Ν=����t��k;��R��=�.=���=�$�;���<�
S���6��b>�4A�}�=;˖>�S#=qN�����$�ꓷ�d�M��L�o�=}��_*� <⼟R�>R�=*�޺��=q��eM��Ԋ>}�u��9�6�;�b��h�d�F�E� >-��Jm��[�=~�^�����|vw=�=K=������ ��s����>���� Iu=�2�� ]=o �>��=�v��������N>�E@���\�栲=k�>7&5<�A�> l��S >�m�<^z�����K> D�=�sN��_�<#RW>o5q>�ʊ�;��=���=N[3>B��<3�I>�n<I�=�f���/d���=�'>T��=���<�� >���8��k� >�p����g�L��8����C>~���e���R>]w�c ���m*<>�c����J,����?�<��,=��8����=p��l��'���0e����H�k�]�/ >��<>�y=���<�0��a>��>��Ƒ>��,�h��G?w���I���=�&&�A.��n�=��[�yvʼL�'=��= H���
��`‘=���#�3��� `��5��<a�{_0=/p=��<i�J;�G>[���v�&�4CU�P��=m\ּӃ:��"��L�w�1��^=r~�6hw=����;��;���On��LF��`G�;�ꆽBo���=h�)<�~���٦<���=s���>=�`*�
���b��<�w�<� l��6�<&K��J���h�R=�<�E�=zo<�Q�����=�\��b�)=c[7=*�= ��=���<�
-=�Q >-L'��\���!a=�lŽpmS=�I�+�> ��=�ʽ@����&�W���z<�I�>���<þ�<��E>1�=��r=�,3>Mn>K�>��K��M�����=c�F>�{=�ߗ>������ż��;^u�<�����;������dT��ʽ�夼 �)ׄ<�^�>@����
'�_�O>����� >�f->א�=�U�=�ժ�_�$���9��:�;t�侩�����սҍ�=^�ܻ��üBg�>�Q���P=#�j>�ֺ��;ݽ�re��w\�T�Z=7����� >Ǧ��l��)>b���W�>e����
P=�݄;�X==��<�b���H>;�k=mD��� �(�=\:����A>�b���3��\ݜ=�M�= JW�Qɟ�L>�'#>"��<@f>h7=���½55�*����;���w=�16��@���r�ث��R�p��bG>��=P���5��R�����>��.�����=��}{���r��(��<p�����>4>��=�R���,>�>0n��^�%��ⰽ�� �i�.�5 ���(�a��<R|G=����������?=ٺ���Լ�t>��Y>b�*���.�2>��x<P��=����j}$�s|���_�M'�������H�� ����6�6+ �a���C+�=�/�=�j������{� mI�����>q>�<cn�=O ��}��/(M>X'��P�=��Ҿ�2o<+��z��=s_Ͻ���^���b����[������{�{=PY�=]�F��I�=�"��J��n�v>g;9��=>�ӽ}m�c�p�<46="�P;�62>�B뼶W�<�x[��g�;Iĕ<p�(;"�p��! �%@R�.3v>4,E���=ǔ>�h����T*�0��=
�>���>��:=L����i���d�����s&=��@�-��=��Y�_~��4>mv=_o����<֌l>Ze>��������(A��e;>���>����EX�E4�t�E����=e�ھ�Wb�܄+>H?����<`�E>��[=�W+����<NJ���Ko�6�;��=>d�<�:(�&)s��e>?6�=?6=mo�=s>]><C��9�T��JT>�Y=� ��r�1�9�><`>�!�� ƀ>�:a=wq��2P��(�=8�c�(U!>�=q㜾���(��3��;&Wn�".�Z���o?<�K��e>�=޺��P���ӕ��i>E�=��>>݄�=�Eս�;jp�o>N�H��%�>o�%���$��$>5���2��=DU �#�$����<L����Iڽ�kH��s��Υ<�ؗ>Hpo��*�>j�=�ٽ�/�=��>{�r=
ŋ>��%���ڼEf�=;�z������c�����<g�t�ij�Em>��7��F�=�> =o���)[�~��=ΞO=L/>�6��|��:S%9=�): '���=�sB=|�{��\T��8
?�]c>0�ѽ D
���=�R��c*���-�՜X>C<�;�u�=�W~��g3�E���}*=���=(��<�z��׷��#����0��o=bi���b&<��p=A�i��6�ۙ���Y,�yw5��٧=뵽=�T���#�<�02=դǼ!�u"����}� ~>�!=��;�IͽA9��V�>�ϸ�+�ʽ`�B>�UZ����hy�pD�� ��=�[��������=���V����H��Z*=WB ��ޏ;�r��}=f������#�=���>gP>���7�^���="۴���m>�)�=�.�����Z��=a�[=D���U�׼]�ɾG��=7�'=%�Y�i��<�����=抣�������b�m����؇<�� �> ��<tu�����= ��BR�����>��=��e�)��=�������<����ʽ?�=� ��|�<�'�x��������]>BH���I>�j�> �ҍ�TX���d�`�g����=Ԕ9>�7B;i�cg�ц������PX>:[�=VF��Re뽕�����Y��h=I �;2�J�~֕>�q���G�����o���W��n1��Хu��O����޽�7���َ�P�+���˽�I.>}\>������=�ya�=�z/>R��<I<�ב�k �;R� <��>�u{>VI�h��kc��T�^=�.s:�A�=���>I%�=j0���;s=t�=�3��\���h=�*i=Z��>��=�ߟ<��j��;�
=�;L���^��ݮ>���8�^���Ž}���'N>V��<�����a>�X���G��]��O;=w@�u���K:=`۲�������;>�ة�D%>Ε"=v ����>˜���h߽q�\�1h>�
���=��>�w�<WF�=�څ�c7u�1A�=�d5<<��<'9O=�*.���8��C>���1��Ma������?Ӽj}�<��>a��<������]d]��?r�>� >�+%�%ҡ=�n���<*�<>&e=��i=xte��m����>����� �� 5�=aj1>�
�=[������Y��;d`��8��;��o q< �(>��H=ż�=~9��-����2W=��t>�q��:������=�u>=r-�6ʓ��:>�C�=u��������>/T��� x��y�^v?�=�>�G��-�>���<dV��0�=,x��[>F�]����:��=�@)=n���n�=�^�[�)���h>����c� `���J>�C�:����
�$+��p�0��� >�wM>�� ;�����E�=D� ��r��ږ�?I�>f[K��5A���>&��������p>kɈ>�K=�G �fBҽڻ3> .I���=d[�>��� �=��
>V�a>C��ג�= r`>�ʚ�۸��7O=]<�<��"�=*�=щ>�7�;�0�Up[>7�u<Z"ݼ3f����a��4+��;c�q��=_=�� �����ڝ-=Z[>�w�>=/�)���w%�cq}�3��Q�*�!�ݽ0�Y;�\>��> 1%> ���%�=1Ȁ>O�I>~8�=��*<�+��@c>�;�=fLP>��%>R���\��=f��>���>��\>��ٽʛ����> �=���<�2���������F��=\ν�'`=d�:�^�Z���D,��PK+=������p=z��<�ӽ�B=חy=�׾�*�=W<�[�=2X�O����\�� ��� =ַ ��Žx�k=����]0c���ݽ��f>����� �;�<*<�2���.1>B�
�=W�=�9Ľ4J(��[|�Q�ͽ�w�=j9D<y�S=�1� ���֜�=�5=):���� =�ؽsw�gtp��p�12���W�� ڼe�C������
�=��=���=o, �v��>�V �L�=h"����<@�}���ク�/��w|�W��=�a���H�=�B>t3�=�;��v�����=�%k>�Y��؏��0i�78g=i������Z޽L,�<��/==��<d4�>&�>��y=�#�=���Aڻ�ݥ6����=6)�{Uὅ�8>vg$<!.>��[>Ӟ�=�����^>v��;�\�P��>�����x> ׾���3�=��b>'�`;�3����=D�<�jW�|�X>F��=�S��>��e>M �<:�ּ�Gp���:<�ʇ��a�hK^=��=�ۈ�>���y�K����=�1½`T�=�$�=�6�>�g�=�AV�C9�F=p�4>�K��%�=Y��<�g���>#!=�>����֓F=?y#<9��<:s���u}��
�����칼t7��x��=Y�=�}Ѽ���s�����N>͉�<F�X����=��=(S���I>C�'��Pb<@9O<$4�'�#�5�G�a+>O� ��P���νE�9��ʉ=�����w=>�.>��=�sy�I{�> n!�w��=�Pu>l���/� >�)�=���Q��>SN2�� n��<������Ԯ��Ż>���=��߾-ͮ=�]����4ȫ>���6L�;���d�<s�5>�M>"��+M>_"�<^0>l>���%��5�t9��>>�in� �]�0{G��q��ˍ���=]��=� M��l���q8>�L���w�=Х�=�����q>����]��=W��=�����<,I/�C݃>�$A�W>W��D>�����,6�c7c���=DB���Y��4���>�g�=�z�] #�̳9>#H�>�;��B��@��>����)�׽�|���e>6�=n�*�uk�=;?a��FD�P�&�Vv�= W>=�?G\g�@ߕ=�e%�φ�=�]�X![��Fܽ�AC>l$=��H>�"�=$����L�����;6U��Q*>��=2�<���>�46�dƇ=b���ﴻ����=if�>d��=��=��`=��>
�>�.=hɽ���lG�p�>���[׾�2t=H��<���=�K
=��=ʤ��kB�<��>�nڼ�(>֡�������>��&�=�~S= P<=��7�>��=��>f�>�BD>�������=� ��d��>x���<FC>d�>^��=QS���ξ�����!��'ν<�t=P�H�%ho=����κ����=B�ܾ��1���)>-:��j�X�;6����D8=𗽏% � �x���=��(=��ͽ���� *����Վ�?�d��</=Z���>=�(���`;����=�K�=�2����i�D��=q��=<�꽼=ν��Ƚ~�%��cż#��;� 1��q/�d���5�9L ��n�A�R�\m>F�>��]�nĶ>�i�����n[V���)��i�`��=[���߽i]��+ �}p���RX>KT�=�>HK� ~6>~�s=QOѾ�$�<���;�=��F=��C>>`ܼqT^�ߖý�4�>���a!r>k�R>�_I>Nk�����L�>!��=K��<$�=� }����=C�>A�w=ߊw=Yq�=&>F�����=e���=��E���<�j>�A�=�H��T���Gq=�F�>�FG���=�G�=$ 0��Z-=M����>몡>����+�����������پ��s>����B;���AS=�+>�^��(�ҽ0����z&;wJ
�hf>�����ʿ�L�ٽbZ>��(>�-�:qu��C �: x�b� ��8K<m���6s���ؽ罅F �La�=�EL���L� �ۼ�=ˌ>S6]����>1f>3 ��t=���ۛ�=����X��.���6��:�>A�W>=��[��-����� NS���\�f����\R=������<��Z�g�3���v=�vM>�ah����Wش>5��>��Z=I3�=CMt����� b��=]��=��gȽ�-�<d�U�}�ý�?L���U�oP�����=���=�n>ی3>�����l�=������l���0�o$�������ཥ۸��=.�c>b���J�׺e��F^��D�=�2>={i���v��]��>�^S>=��=�D�MY½IŔ<��<�z >#��>Ȋ��6O��׫<zN��9(�C��=ڙ(>��x��3 ����=C�p� G��W��=�?�� ��@�=1�"�\\�<+�(�k��2��=�ڼN 1>��=��=EY�w�����;����Ws�V��>�<��<&&[���Žow��玻?�@=6+�>u��>���<KC�4����>�����>.a�>��>���>]�!�( *�U�K�uF�=M���#�=Eۘ=�[�����k���d�=����ar����=P�(> M̼��c�Y�80H<�xX=��H��N��㿶=��n�\=Hn<�N�=�'(>,,=� ���R<��;:k�=����j�t���?<�LJ�f�X�8�ս�Q�<��=�v�<?P=�˕�w�= ��<��F=;��C����#���M�����o�o=�:�:�pb=XVN��v2<��:��K<���==v�s+*���8�'S.=}VϽ� <�3%=�9>s�2=&��=c��<��>9�����
�t��=�1>0�Q>5`�\��=�������=c-g>���=bD��U�s��F>���<�.�=I����5>��> w�)&�>CD������">\�<���@;�=�*׾m�g�OI>q*7=�A�=��*=V��"��Df�t|>]�:}4v>.�=>\�^����=��%�y�>�>w7�=�d�b#��Ê>���qg���==��=�4��y�� p���L���A�=W������ �\�1���?����=��e=�v=n ���E>3b���>b;e���ǽ���=�SH=щ>����>��=-�<R�e=��>��=&�F;WB>p�-�:�!<QJ��M�>حu��½�9,���6;�G�0����׈׼2�D;������<����
,־Z�����������Ƀ����=�����a�. �2�e�0�+>+=]�"=X��<�:�=�[A>/ >-Rټv�}�<�=P��=�����>�� �����f�=tT>��B� ��<7#/���C�{L!�\3?�+W���2?���ؽ�8�<Z�'>���=��~=匔=%�����ʾY<wz2�71��<=�Ed8�� ��5=�+ƾ��}��i�<�ۉ���i�����I0>��1�����T�����y}J>�_B���=�1c�x�(��"��K~ཻ��GΕ��"�>cg��Ǧ�����GS������뽄kK�����U=�Ϧ�1�=yPν��E��O �Ek��7���e����=�r>x�����=]jP�,�[>� ���&P��������k�� �$>Q�>`���^}> �t�����g>>̼6>~۟�"L�A�˽2o�=�9���3>wc���>5j7>�Y>m*,�����U>H>%7�<‹��M�<x�>����>ʖ�<_�����+�9,���H��sz� ��=I?�<i�u�`���H���ӽ[��=����=*��� W>������ξDmX>���=�IѼ�>i�EH�;��M�=��ǽ���s\ ���f<X$:�u�$�t~=����t����=̽�9�=#���z�罏�>��X��@̽tG1=i2���;��s��]��=c���m7>l/�!�>lx=>��m=�b�=j'>3<�=�ҽ ��טO=ҟ�>ޗ>8R�=�5U>xW>4�̽�g��Q���b?�=�X�<ң>��>O�c� (�����=)}(�[��>��:>莾P���>�$=+凾��齨�K=��=��������<j-0���c=�ђ=P���6t��Zg��tCq=u1|=fZ��$<>�+�u� >�u{=.�$��7=����^>�)u��_��J�E��G�註����>V`�>�ִ�����ժ>۝�<����z�?>
e���� >�g�=�VR��/�A����۽���=�!��jvֽT�
��}�=�+�<����?
6��&�=��d=Pz7=�mG>r�M�;��<���U�#>ǧռ�ê<EN��g�A����:�$��h�;>��>oZ��b
��p���3g�~���/<zO��n��=�O�=T�ս<�?> ��Z�<o{����X�3�O��>���h���@ŽC� >�W
��g�=�� ;�D>�ߞ>��j�.}$�O��=�����q�´�=���r�<-�>�x���P�P2��춾�2a<T<��闽՛�>�z>��<��=�,ռ��t>�ȯ=3���L̃=�G��͈�����1)�h�>c�N���6>�뼽�C�=�u�>jf�=Z 3=�;�=����'
�<b֩<�,�����=P�#>!��<���=A��<U������꽛`��k�X��>��.�a��=�z ����;���͵=��U����
����i> ��uݼ��k-�X�z=���hov�C ����[�g���� ?��3��oɼ�L���[�u�m���8=�h,=O[�_M�=I��<9 �<N�=�� ��},<)>W=��̾
S�q��=9x�<�ȕ�t
��8�= �g�e�D<���=��s=W1D=uo��Am >��Έʽ�FW=��>�aI�=L{�=ӥ�=+9�N@��mN=�L�;p�l&��&���h��;���>����~5�����=z���9�;=�넼F���(�=O=>��'��㿽����A�e�/�=�:.<��*�X�>h=���Xy>�Ϲ�3��<���=7�����>1�<��'>X���R���R>�����]w�Zי=�̈́=��>rÜ>Ci�<��C=4þ2�a�C�#<�B�=�3�X���v��= H�>H��=���:��>X�>�nh=��E�0��'E�Ƕ��~e�6ZC�9iڽu�w<�7�~?�>x�>!
�����[�}� � >����������LW��4�E�hR��2�>�ES�n��<o�� �C��[���h>�q���*-����j)F=�+8�M'��)d=8O�=�������<�y/���0�qm��w�0����4�<��*>�UA>,C���y>v�+�(-�<K��=�H<�!J�=G� >{�C��DüN�����U��K�=�ߑ�4����ƽ6��=���=^켜����ys�k��=���;k�Ɉ=�3�<�Ye��N�;�l��uJ�k3=��ѽ���;�i= �н���< x&�+�F=q�g=�9�=�8��p'���H���=�DT�+vl=ae�=�A���`��4��*XB�)gX=8��=���N���b_=�?��/W�={��
��;��������4t�q�x=�bH<w��<�$�=�!�;H�`�9�����������=Ӧ�=`½B��=�$���i���<j�j�����vC>��q��]�����+���̽;"=�n#�<����C+9�5��ӓν��=�e>�=@-V����<��Q���w���Z>"B0>!D^=�R|>9��=2 s�F�`�����WY�ʛ����"���,:#��`�=�6y�d�R�t����(5=����zY�l[���<ӽ�ڇ>�
�=®�=sm�y����or=�~�>a��>MJ[<��<�(C>�N>�zW= &>:�:]$�>10�=
��������H�<MQB�E�Y�j� �p���M>�߼�ǖ�kL4>�7V> ��>��(��'` >b& >� �<f��;�i>�#�=xF���P���I�=��[>�z���'X�� ��Q>S��g����=���=>V��#��>\�=��[=#C�>0�C=n��<�%�� O��q7>�·=+�G�h�=���m�q>��,��u4>������>7yɽb�5���T� �F=�K(>�bN��x��N�s��=��3>��= J��x;� ׭�S�N>ᬌ<����
����<�"�׽q$>XLD>�ߦ<0\=v1�<�Q=�~�5N���� ��n>$� >}a=��8�9�`>R�%�9���l(4�����@�;�4M�#k����=4˥:��=K#ٽ���2�z>?w����\`ս3�>�=]�gT'�� \>�M[��. >��F=;����t>%�=���<�={
�G��="O�=hX�;�����> ���1�}���Ƚ@�ǽ�׵=>� ��B���n����&��B�:��=��L�z������=�2����#>X�4>��`=z�~>Y�����0����� >*ż�$�=��ҽ�ǁ����=��E<,�*���>����x�v� �[=6��x��e���"_�y��>���=s�[>8h���Ê�N��3��J�l>Mf����Ծ,da�Ɔf���O>�J�1��=6ɛ=��[=:�=t?�;�⏾�9׾�?��,v=}C�=�h>i>ڈ�� H�m�Ⱦ���=�7Z�5�v�W��=2 �=�QȾ��=��4����<�=��N<�/��82>��>ZT�=���3$F��l��afF��x�>�\�R*M������;���m�M��=���y�=!�@��.�|cK�$D�=V�ν�Lü(�g;��o��ɴ<�0>�q�=?9>s���]�<W�H�ł+�_#��S��<�Ze>,
�<c-�D%>V���T���U케�4��żɝ.=��D���J�s�>��<��e��"�����;��>���mv��C�>"*��/p�="G�=XD�=hy��경l���՗��>��I�7�>m��=�����08>j�˽��%�+�K��+&>��D>�z�=>�b=�]x>H�>�e>X ���b6>GG)>B��;ɔu>�FE� �c�ҍ���ѳ��)>3O>q3�=}՜�.{�ƈ9�l.�=߬< V�+ޜ��U���<�t�<h=��
>_C�>s)">��ֽ��뼣�d��F>�_�>��@>Ђ�=w���:�p�T�=�M�=����ǒ�b��>��3=���;������y��K(�����=���=Z>n>:y����=f ��4�=hCټ��,>��6=�T��F��n���W�<!]�=����S,��r�՛G����t]�=��;h�<�������g->� �=a�=�����>��+>�~F;<����;�TS=�{��Gý3�=h ������'ʽ0ݲ<�O����@���p�D;>*� ��
b�3N>��iڽ�rO=�ϭ=��������;Tb=e���ӰK=���;O��:� >>��>!Ƞ<m�5�>��YĊ�� +=dl���u��8ͽ����«���h���=��ͽ����Hs�C_�4X>���<�S>�r��Rg���-k=���=�60>2�S�9�M=���������8%y>��6>���<��B��I��d��=1��(��Nq�=Tס<9����튾Fu ;��>���`��<�T�=ួ��?=�eA�՞��<��׽ڼ���5aV=W�:>�>J�u�ý��|����7�%�Y�$����;��w����=�(<�4J=õ���b��(=�^мc�����=�&罦�N�iϾ²}=�5�=�k���Y�>+>�K�<H$���t�bFs��� ���<Nޫ=W� =��=�W���h�=�h�> ��5�> �>t����5���K�=Y�=�|޽�r���q�=�&+>n0�<��r�9n����5���ҽTd�=�۾*��<���f� ��=F>���6��"��>&�_>dM>F����d>};���N+�ˍ����[�ԧC>ugx=fDd�C��%s$=�üfl>�h�=bU��o�>�D��<�Q���=�=B� >�=�O��$C����=�z���^˽��M��x�j>�uB��������u�4�t������'�g���>����Z���&%=�f>�ؠ>�Oq=�{�]�=�.���T��ڗ=pM�>!����^>�%=�&F=#���$n�[>�=l�[=�w�>������������ׅ> C���I>��>��>X�>>�-�<��>):=��3>?�>)��=ZV��ʽja�<
]�;dC�S�=��>���=ŲY>2�9��`x�h�>�*��X:[='f�=���Z�L=k�x������^=#����$=�)���_�!��I�>���;�� ��]>���<�9ɾʖ�<3���:�� �>���<�����&R�6eE�P0��~�>A/���&[=p!>��&>��=� ׽x�>���<� ��Ⱦi�(.�<������jX>���>k�����z>���<2�?>�8��F�|�I���{>�����=����"<�; 3E=V�^=�>���<U�=�}��LCB�S�r�_�=V��;�W��.��=6Dl��̈>�����ג��PL�N�u�d�+>�Ʊ<"�W�)�A�������_=��Ž������r�ҋ=�k�>s>� ?.��=�[��C�<�P=y� >Ɋ��I�N>�_��i�V��7>�퇾0EP�9����L�<���=I��=
�h�����l`�g��>��.=%ie�0S>$�=�Z���խ>�c�=%9��$o�=����� ����O;�Ow���=��>Nc�Z�$�D
#>`� >�m�����>_m� &�=��<"�x�i�>ijz���>��Q���f�V�����������}�@� I����>7u�=%͆�� �H>���Lx���~��-����U�_�q���>4��>\n��S{��C�<�>�i�ݰ6��BT>e3\<3+=�%�x�ӽ�,�;`�=E�=��l��
/����>R6�>��e���~�nM��<���.�=�G]=�gi�����a]�=���_��q�=&?R=��;=W<>��<���=��r=W�5=�_
>���=Od)�z� ���M>�*L���6���Qg�=gb^�HԽ�J�
�<� ��>����>�!!>�Cվ�tv��ہ=�j�=�ؚ�����,]f>�">�Aa���v=LU;=�����Q
>��ǽ�� >zyy�
x+�Rqd�/�w���i���<�&"�%�>��*�ԙ9>L��q�P>S`���w.�oR��S�>f�t=>>��Q>2��6?*>qٝ�58һ&_c��
���)ҽ��<���B���u�S�"��f>�Bh>%� =��g<� >�[��>���<�o�=��PO2��;ľ� >��w>����ؖ�/�u=DŽR�Cya=J�>�����(>�5m��v]><?m6�=^=�=4Ŧ�J\3=���=趽�� >u��>�s�����>�V��%ͧ�?ؓ>0Z >Aך<hf�]O>0���ƀ���>��;���Ž�� ���=��T����=x�1�aLS>Z�X�I�� �>W?�<�K�<�;`���;���>e��=KJ~�"#�==�@>(��@�����B>С@=�Ͽ=�ku� �x�赂> �<�؁=��{�w��q�C>�:�� �<��>�Y��_>��:��=%K��qVU>K(���G漅N��yF=9c�=��H>���_xA����9�g��>���=ˍ1<�"~<� �>�NԽ�X���`����R=y�սU㒾?�>���7�H]�̀�<����̍=ZU|=�"������>���>��7��^�c<t�-=��h>��=mJw;vQ�=�D>o9���U�>��f��u$�׻���e>��>�a,>�����������%���;���>F�F>�1�4��=�B�>sA � �O��4@�1M=�ͮ> �4��ca�Yق��<����>k�ǽ�������NL+>DܽD�H�Z�">ɾ<��/�q;ٚ)>Iu;>kO�=�jI����Uf�RVV=T���!���Y�>��< ���o=�$��E��=4U�=F��;��(��^/>��>=!��py=hѽ=�� ��_��.�5�ގ���&>���<h�<n�;<r_� |�=Gy:��J1���>��=CTh�S2ͽ�qX�F����S�����=DD�>Q�>.꒽5&��� ��6>,9ͼҚ���:��:���V�P>:����� ��VR>� ���ûE�������ϰH>"*=P���PI���9p����
����n۽X��C�������9u�=������=*
dtype0
dense_1/kernel/readIdentitydense_1/kernel*
T0*!
dense_1/kernel/readIdentitydense_1/kernel*!
loc:@dense_1/kernel
loc:@dense_1/kernel*
T0
transpose_a(*
T0
T0*
transpose_a(
dense_2/kernelConst*
dtype0*�
value�B�@"�gT�b��>2zq��b(>%�ν�6�����>��ͽ\��=x�~����>įҽ�\?$2�5�t��u'����=Z��f>�>�W�����>� 5��9��Q>����`=�����?�/=Q�>}>1mp���?����\=,��������3�����>��Q�q�z?Lྫ��i)��|ѣ� u[� �>R/I��h���ٻ�����T�>"���ܹ=4s >�o���pu>�:쾤��>-<h��ɫ>�����c�/�]>�t��k̝>��>�Y`=몧��>�� =`*<��o������d�>&�>n*}=U2�=i��R�H����>����b���a\>����i< ����?��<!"D��o�>�m����ŽVÉ����=��>0��>U1'=�Hk�M�a�Bb�>e��+l�(��>
�>�AžWKe����>-�"�*�=wl���܋>�}�>M ¾LK�
��>*St>%�c>��ü��'��ײ<��>`�4=���M�"g�>���?d�=_�A���=�־P�j>o񝾣@�> ����S+�W3���P��;}�>܀�<��� j4>�G}=b}���5?��Ѿ�G۽]H'�\:�I�9>�R>1����O-?����OC��5����>W�½�X!�� ��,�>�y��eT��L�f>i>�9��>�� >�am��&��wm����H=�f�>��F>�%�Cl� 8�>�N�=%���w�=��ļUXz>���=�(���n>f}>�p> Ǿ�ݽ��ƽrk�=�M�;/YG>��>j�Ҿ���>��#��>`�>�Y�h0��%�=��>T}����=��Y���C>" ̾A.�>� �͘>��3�|n>X���Z���R��c�/?���>�5��1�~��8P����=�(ʾ(�>���w�ɾD
�>�fE=��Ž��J�c^��Ia<��M?k�龰��>H�ʾ}Z�=�#��K����=g 2?0��8�/>2 >�˰<�þ��0�;M(>�:�>`'W>^N��V�H>h]���5���;��p>��Q�
dense_2/kernelConst*�
value�B�@"����;�<�<s㚻 �W=��;�O�</��<�{�<�nO;t b;�o <��:��O<�4�<�!��}�<ktȻ
�<"üM�1<�Դ;��.<X\�����;0Xf�z�[�� ü�Dx����;�즻j�_;U��:�~�;
}<�V��Q �<R�Ĺ�XK��L%<$���>��9l��<}ㅻE�*;-3ڼH�;-�N�,&�;��h;����{�8H�:2
��������S���r� ���i���g =�AR<�ֆ�/����䬼xX#=w�9��;�ض����81�€��EO<������� J9;t�|��TR���ݻ8������Իk���G�#��9���;>N#;"5�;xw�,�nr�;sH��3�����>R��f��>@?ǽ�o ���/;kcû���;��#�m���������k<}��*Fi���߻��h��o��K�z�ǻ�2f��==��мW� ��|B=�4?���<�*d�2��=��"��;It׼O���;h�v�#�t{�>��O>�@�;����h?�4`��m<��9�h�:�ᄻ����;p>Yg���n�w<>SZ
���a=;J�=z�X=,m�2�>cF�=M�!<�u������٧�<8r�;���<5�;��5�<֔����h�h-���/�9񌤺rk����=Q����1�qg-��Y��l�y��=�L�=я���<�Έ�3�<J񫼷��<�G�;Q����<�(^�[�X�~�k�>�|�=֢4����\�<󅭼���<��¼䎯<��_;�#b��:'= /O��j1=k�x;j@p<] ����;<�P<���;�(�^��;d��>�̼d���[~< �ܼ�=[;^MQ��Y7<k�� ��5��=��+�N� ��7>థ�ԓ�<�:�qF9O~ݻ���<�'���廻��ʼV B�������<%T>c��G�>9��=G��$�Y=���>�^k=�����9�g�����<M���܊l<� �;��<, ���`�<��?�O�=H�� p��_���4�NѻS���*
dtype0
[
dense_2/kernel/readIdentitydense_2/kernel*
T0*!

dense_3/MatMulMatMul dense_2/Eludense_2/kernel/read*
transpose_a(*
transpose_b(*
T0
0
action_probsSoftmaxdense_3/MatMul*
T0
M
#multinomial/Multinomial/num_samplesConst*

multinomial/Multinomial Multinomialdense_3/MatMul#multinomial/Multinomial/num_samples*
multinomial/Multinomial Multinomialdense_3/MatMul#multinomial/Multinomial/num_samples*
T0*
seed2*
seed*
seed2*
T0
seed
T0 
T0
�
dense_3/kernelConst*�
value�B�@"���2<i�K�sR��i:ߺɠ���Q���c�;�
}��|�����;mǘ;��<)H��Yq ��4�x�7��9���0�=k��;��<�*�9�σ����I��9)�>Rp:��E�r�=�Π�QS���V��!����Z! =0't��$��y6<U��;��9�ɒ�x�b�A�k=���d��,�>��;jf�=�]L��I;>��*�^��Vp<󰽺�����K��R$��!����e=��������Q�5�;�ǁ�*
dtype0
[
dense_3/kernel/readIdentitydense_3/kernel*
T0*!
_class
loc:@dense_3/kernel
i
dense_4/MatMulMatMul dense_2/Eludense_3/kernel/read*
T0*
transpose_a(*
transpose_b(
3
value_estimateIdentitydense_4/MatMul*
T0

16
unity-environment/Assets/ML-Agents/Examples/Tennis/Scripts/TennisAcademy.cs


public class TennisAcademy : Academy
{
[Header("Specific to Tennis")]
public GameObject ball;
float ballOut = Random.Range(4f, 11f);
int flip = Random.Range(0, 2);
if (flip == 0)
{
ball.transform.position = new Vector3(-ballOut, 5f, 5f);
}
else
{
ball.transform.position = new Vector3(ballOut, 5f, 5f);
}
ball.GetComponent<Rigidbody>().velocity = new Vector3(0f, 0f, 0f);
ball.transform.localScale = new Vector3(1, 1, 1) * resetParameters["ballSize"];
}
}

34
unity-environment/Assets/ML-Agents/Examples/Tennis/Scripts/hitWall.cs


public class hitWall : MonoBehaviour
{
public GameObject areaObject;
int lastAgentHit;
// Use this for initialization

}
// Update is called once per frame
void Update()
{
TennisAgent agentA = GameObject.Find("AgentA").GetComponent<TennisAgent>();
TennisAgent agentB = GameObject.Find("AgentB").GetComponent<TennisAgent>();
TennisAcademy academy = GameObject.Find("Academy").GetComponent<TennisAcademy>();
TennisArea area = areaObject.GetComponent<TennisArea>();
TennisAgent agentA = area.agentA.GetComponent<TennisAgent>();
TennisAgent agentB = area.agentB.GetComponent<TennisAgent>();
academy.done = true;
if (collision.gameObject.name == "wallA")
{
if (lastAgentHit == 0)

agentA.score += 1;
}
}
area.MatchReset();
agentA.done = true;
agentB.done = true;
}
if (collision.gameObject.tag == "agent")

if (lastAgentHit != 0)
{
agentA.reward = 0.1f;
agentB.reward = 0.05f;
agentA.reward += 0.1f;
agentB.reward += 0.05f;
}
else
{
agentA.reward += 0.01f;
}
lastAgentHit = 0;
}

{
agentB.reward = 0.1f;
agentA.reward = 0.05f;
agentB.reward += 0.1f;
agentA.reward += 0.05f;
}
else
{
agentB.reward += 0.01f;
}
lastAgentHit = 1;
}

62
unity-environment/Assets/ML-Agents/Examples/Tennis/Scripts/TennisAgent.cs


public override List<float> CollectState()
{
List<float> state = new List<float>();
state.Add(invertMult * gameObject.transform.position.x / 8f);
state.Add(gameObject.transform.position.y / 2f);
state.Add(invertMult * gameObject.GetComponent<Rigidbody>().velocity.x / 10f);
state.Add(gameObject.GetComponent<Rigidbody>().velocity.y / 10f);
state.Add(invertMult * gameObject.transform.position.x);
state.Add(gameObject.transform.position.y);
state.Add(invertMult * gameObject.GetComponent<Rigidbody>().velocity.x);
state.Add(gameObject.GetComponent<Rigidbody>().velocity.y);
state.Add(invertMult * ball.transform.position.x / 8f);
state.Add(ball.transform.position.y / 8f);
state.Add(invertMult * ball.GetComponent<Rigidbody>().velocity.x / 10f);
state.Add(ball.GetComponent<Rigidbody>().velocity.y / 10f);
state.Add(invertMult * ball.transform.position.x);
state.Add(ball.transform.position.y);
state.Add(invertMult * ball.GetComponent<Rigidbody>().velocity.x);
state.Add(ball.GetComponent<Rigidbody>().velocity.y);
return state;
}

int action = Mathf.FloorToInt(act[0]);
if (act[0] == 0f)
if (action == 0)
if (act[0] == 1f)
if (action == 1)
if (act[0] == 2f)
{
moveX = 0.0f;
}
if (act[0] == 3f)
if (action == 2 && gameObject.transform.position.y + transform.parent.transform.position.y < -1.5f)
gameObject.GetComponent<Rigidbody>().velocity = new Vector3(GetComponent<Rigidbody>().velocity.x, moveY * 12f, 0f);
if (gameObject.transform.position.y > -1.9f)
if (action == 3)
GetComponent<Rigidbody>().velocity = new Vector3(GetComponent<Rigidbody>().velocity.x * 0.5f, GetComponent<Rigidbody>().velocity.y, 0f);
}
else
{
gameObject.GetComponent<Rigidbody>().velocity = new Vector3(0f, moveY * 12f, 0f);
moveX = 0f;
gameObject.transform.position = new Vector3(gameObject.transform.position.x + moveX, gameObject.transform.position.y, 5f);
gameObject.GetComponent<Rigidbody>().velocity = new Vector3(moveX * 50f, GetComponent<Rigidbody>().velocity.y, 0f);
if (gameObject.transform.position.x > -(invertMult) * 11f)
if (gameObject.transform.position.x + transform.parent.transform.position.x < -(invertMult) * 1f)
gameObject.transform.position = new Vector3(-(invertMult) * 11f, gameObject.transform.position.y, 5f);
}
if (gameObject.transform.position.x < -(invertMult) * 2f)
{
gameObject.transform.position = new Vector3(-(invertMult) * 2f, gameObject.transform.position.y, 5f);
gameObject.transform.position = new Vector3(-(invertMult) * 1f + transform.parent.transform.position.x, gameObject.transform.position.y, gameObject.transform.position.z);
if (gameObject.transform.position.x < -(invertMult) * 11f)
if (gameObject.transform.position.x + transform.parent.transform.position.x > -(invertMult) * 1f)
gameObject.transform.position = new Vector3(-(invertMult) * 11f, gameObject.transform.position.y, 5f);
gameObject.transform.position = new Vector3(-(invertMult) * 1f + transform.parent.transform.position.x, gameObject.transform.position.y, gameObject.transform.position.z);
if (gameObject.transform.position.x > -(invertMult) * 2f)
{
gameObject.transform.position = new Vector3(-(invertMult) * 2f, gameObject.transform.position.y, 5f);
}
}
if (gameObject.transform.position.y < -2f)
{
gameObject.transform.position = new Vector3(gameObject.transform.position.x, -2f, 5f);
}
scoreText.GetComponent<Text>().text = score.ToString();

invertMult = 1f;
}
gameObject.transform.position = new Vector3(-(invertMult) * 7f, -1.5f, 5f);
gameObject.transform.position = new Vector3(-(invertMult) * 7f, -1.5f, 0f) + transform.parent.transform.position;
gameObject.GetComponent<Rigidbody>().velocity = new Vector3(0f, 0f, 0f);
}
}

929
unity-environment/Assets/ML-Agents/Examples/Tennis/Tennis.unity
File diff too large to display.

18
unity-environment/README.md


For more information on each of these environments, see this [documentation page](../docs/Example-Environments.md).
Within `ML-Agents/Template` there also exists:
* **Template** - An empty Unity scene with a single _Academy_, _Brain_, and _Agent_. Designed to be used as a template for new environments.
## Agents SDK Package
The Unity packages containing the Agents SDK for Unity 2017.1 can be downloaded here:
* [ML-Agents package without TensorflowSharp](https://s3.amazonaws.com/unity-agents/ML-AgentsNoPlugin.unitypackage)
* [ML-Agents package with TensorflowSharp](https://s3.amazonaws.com/unity-agents/ML-AgentsWithPlugin.unitypackage)
## Agents SDK
The Unity packages containing the Agents SDK for Unity 2017.1 can be downloaded here:
* [ML-Agents package without TensorflowSharp](https://s3.amazonaws.com/unity-agents/0.2/ML-AgentsNoPlugin.unitypackage)
* [ML-Agents package with TensorflowSharp](https://s3.amazonaws.com/unity-agents/0.2/ML-AgentsWithPlugin.unitypackage)
For information on the use of each script, see the comments and documentation within the files themselves, or read the [documentation](../../../wiki).
## Creating your own Unity Environment
For information on how to create a new Unity Environment, see the walkthrough [here](../docs/Making-a-new-Unity-Environment.md). If you have questions or run into issues, please feel free to create issues through the repo, and we will do our best to address them.

1. Make sure you are using Unity 2017.1 or newer.
2. Make sure the TensorflowSharp plugin is in your Asset folder. A Plugins folder which includes TF# can be downloaded [here](https://s3.amazonaws.com/unity-agents/TFSharpPlugin.unitypackage).
2. Make sure the TensorflowSharp [plugin](https://s3.amazonaws.com/unity-agents/0.2/TFSharpPlugin.unitypackage) is in your Asset folder.
4. For each of the platforms you target (**`PC, Mac and Linux Standalone`**, **`iOS`** or **`Android`**):
2. Set `Scripting Runtime Version` to `Experimental (.NET 4.6 Equivalent)`
3. In `Scripting Defined Symbols`, add the flag `ENABLE_TENSORFLOW`
5. Restart the Unity Editor.

56
python/PPO.ipynb


"summary_freq = 10000 # Frequency at which to save training statistics.\n",
"save_freq = 50000 # Frequency at which to save model.\n",
"env_name = \"environment\" # Name of the training environment file.\n",
"curriculum_file = None\n",
"\n",
"### Algorithm-specific parameters for tuning\n",
"gamma = 0.99 # Reward discount rate.\n",

"num_epoch = 5 # Number of gradient descent steps per batch of experiences.\n",
"num_layers = 2 # Number of hidden layers between state/observation encoding and value/policy layers.\n",
"batch_size = 64 # How many experiences per gradient descent update step."
"batch_size = 64 # How many experiences per gradient descent update step.\n",
"normalize = False\n",
"\n",
"### Logging dictionary for hyperparameters\n",
"hyperparameter_dict = {'max_steps':max_steps, 'run_path':run_path, 'env_name':env_name,\n",
" 'curriculum_file':curriculum_file, 'gamma':gamma, 'lambd':lambd, 'time_horizon':time_horizon,\n",
" 'beta':beta, 'num_epoch':num_epoch, 'epsilon':epsilon, 'buffe_size':buffer_size,\n",
" 'leaning_rate':learning_rate, 'hidden_units':hidden_units, 'batch_size':batch_size}"
]
},
{

{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"metadata": {
"collapsed": true
},
"env = UnityEnvironment(file_name=env_name)\n",
"env = UnityEnvironment(file_name=env_name, curriculum=curriculum_file)\n",
"brain_name = env.brain_names[0]"
"brain_name = env.external_brain_names[0]"
]
},
{

"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": true,
"scrolled": true
},
"outputs": [],

"if curriculum_file == \"None\":\n",
" curriculum_file = None\n",
"\n",
"\n",
"def get_progress():\n",
" if curriculum_file is not None:\n",
" if env._curriculum.measure_type == \"progress\":\n",
" return steps / max_steps\n",
" elif env._curriculum.measure_type == \"reward\":\n",
" return last_reward\n",
" else:\n",
" return None\n",
" else:\n",
" return None\n",
"\n",
" beta=beta, max_step=max_steps)\n",
" beta=beta, max_step=max_steps, \n",
" normalize=normalize, num_layers=num_layers)\n",
"\n",
"is_continuous = (env.brains[brain_name].action_space_type == \"continuous\")\n",
"use_observations = (env.brains[brain_name].number_observations > 0)\n",

" saver.restore(sess, ckpt.model_checkpoint_path)\n",
" else:\n",
" sess.run(init)\n",
" steps = sess.run(ppo_model.global_step)\n",
" steps, last_reward = sess.run([ppo_model.global_step, ppo_model.last_reward]) \n",
" info = env.reset(train_mode=train_model)[brain_name]\n",
" trainer = Trainer(ppo_model, sess, info, is_continuous, use_observations, use_states)\n",
" info = env.reset(train_mode=train_model, progress=get_progress())[brain_name]\n",
" trainer = Trainer(ppo_model, sess, info, is_continuous, use_observations, use_states, train_model)\n",
" if train_model:\n",
" trainer.write_text(summary_writer, 'Hyperparameters', hyperparameter_dict, steps)\n",
" info = env.reset(train_mode=train_model)[brain_name]\n",
" info = env.reset(train_mode=train_model, progress=get_progress())[brain_name]\n",
" new_info = trainer.take_action(info, env, brain_name)\n",
" new_info = trainer.take_action(info, env, brain_name, steps, normalize)\n",
" info = new_info\n",
" trainer.process_experiences(info, time_horizon, gamma, lambd)\n",
" if len(trainer.training_buffer['actions']) > buffer_size and train_model:\n",

" # Write training statistics to tensorboard.\n",
" trainer.write_summary(summary_writer, steps)\n",
" trainer.write_summary(summary_writer, steps, env._curriculum.lesson_number)\n",
" if len(trainer.stats['cumulative_reward']) > 0:\n",
" mean_reward = np.mean(trainer.stats['cumulative_reward'])\n",
" sess.run(ppo_model.update_reward, feed_dict={ppo_model.new_reward: mean_reward})\n",
" last_reward = sess.run(ppo_model.last_reward)\n",
" # Final save Tensorflow model\n",
" if steps != 0 and train_model:\n",
" save_model(sess, model_path=model_path, steps=steps, saver=saver)\n",

2
python/setup.py


required = f.read().splitlines()
setup(name='unityagents',
version='0.1.1',
version='0.2.0',
description='Unity Machine Learning Agents',
license='Apache License 2.0',
author='Unity Technologies',

200
python/test_unityagents.py


import pytest
import socket
import mock
import struct
import json
from unityagents import UnityEnvironment, UnityEnvironmentException, UnityActionException, BrainInfo, BrainParameters
from unityagents import UnityEnvironment, UnityEnvironmentException, UnityActionException, BrainInfo, BrainParameters, Curriculum
def append_length(input):
return struct.pack("I", len(input.encode())) + input.encode()
"externalBrainNames": ["RealFakeBrain"],
"logPath":"RealFakePath",
"apiNumber":"API-2",
"brainParameters": [{
"stateSize": 3,
"actionSize": 2,

dummy_reset = [
'CONFIG_REQUEST'.encode(),
append_length(
'''
{
"brain_name": "RealFakeBrain",

"actions": null,
"actions": [1,2,3,4],
}'''.encode(),
}'''),
'''
append_length('''
"actions": null,
"actions": [1,2,3,4,5,6],
}'''.encode(),
}'''),
'''
append_length('''
"actions": null,
"actions": [1,2,3,4,5,6],
}'''.encode(),
}'''),
mock_socket.return_value.accept.return_value = (mock_socket, 0)
mock_socket.recv.return_value.decode.return_value = dummy_start
env = UnityEnvironment(' ')
with pytest.raises(UnityActionException):
env.step([0])
assert env.brain_names[0] == 'RealFakeBrain'
env.close()
with mock.patch('glob.glob') as mock_glob:
mock_glob.return_value = ['FakeLaunchPath']
mock_socket.return_value.accept.return_value = (mock_socket, 0)
mock_socket.recv.return_value.decode.return_value = dummy_start
env = UnityEnvironment(' ')
with pytest.raises(UnityActionException):
env.step([0])
assert env.brain_names[0] == 'RealFakeBrain'
env.close()
mock_socket.return_value.accept.return_value = (mock_socket, 0)
mock_socket.recv.return_value.decode.return_value = dummy_start
env = UnityEnvironment(' ')
brain = env.brains['RealFakeBrain']
mock_socket.recv.side_effect = dummy_reset
brain_info = env.reset()
env.close()
assert not env.global_done
assert isinstance(brain_info, dict)
assert isinstance(brain_info['RealFakeBrain'], BrainInfo)
assert isinstance(brain_info['RealFakeBrain'].observations, list)
assert isinstance(brain_info['RealFakeBrain'].states, np.ndarray)
assert len(brain_info['RealFakeBrain'].observations) == brain.number_observations
assert brain_info['RealFakeBrain'].states.shape[0] == len(brain_info['RealFakeBrain'].agents)
assert brain_info['RealFakeBrain'].states.shape[1] == brain.state_space_size
with mock.patch('glob.glob') as mock_glob:
mock_glob.return_value = ['FakeLaunchPath']
mock_socket.return_value.accept.return_value = (mock_socket, 0)
mock_socket.recv.return_value.decode.return_value = dummy_start
env = UnityEnvironment(' ')
brain = env.brains['RealFakeBrain']
mock_socket.recv.side_effect = dummy_reset
brain_info = env.reset()
env.close()
assert not env.global_done
assert isinstance(brain_info, dict)
assert isinstance(brain_info['RealFakeBrain'], BrainInfo)
assert isinstance(brain_info['RealFakeBrain'].observations, list)
assert isinstance(brain_info['RealFakeBrain'].states, np.ndarray)
assert len(brain_info['RealFakeBrain'].observations) == brain.number_observations
assert brain_info['RealFakeBrain'].states.shape[0] == len(brain_info['RealFakeBrain'].agents)
assert brain_info['RealFakeBrain'].states.shape[1] == brain.state_space_size
mock_socket.return_value.accept.return_value = (mock_socket, 0)
mock_socket.recv.return_value.decode.return_value = dummy_start
env = UnityEnvironment(' ')
brain = env.brains['RealFakeBrain']
mock_socket.recv.side_effect = dummy_reset
brain_info = env.reset()
mock_socket.recv.side_effect = dummy_step
brain_info = env.step([0] * brain.action_space_size * len(brain_info['RealFakeBrain'].agents))
with pytest.raises(UnityActionException):
env.step([0])
brain_info = env.step([0] * brain.action_space_size * len(brain_info['RealFakeBrain'].agents))
with pytest.raises(UnityActionException):
env.step([0] * brain.action_space_size * len(brain_info['RealFakeBrain'].agents))
env.close()
assert env.global_done
assert isinstance(brain_info, dict)
assert isinstance(brain_info['RealFakeBrain'], BrainInfo)
assert isinstance(brain_info['RealFakeBrain'].observations, list)
assert isinstance(brain_info['RealFakeBrain'].states, np.ndarray)
assert len(brain_info['RealFakeBrain'].observations) == brain.number_observations
assert brain_info['RealFakeBrain'].states.shape[0] == len(brain_info['RealFakeBrain'].agents)
assert brain_info['RealFakeBrain'].states.shape[1] == brain.state_space_size
assert not brain_info['RealFakeBrain'].local_done[0]
assert brain_info['RealFakeBrain'].local_done[2]
with mock.patch('glob.glob') as mock_glob:
mock_glob.return_value = ['FakeLaunchPath']
mock_socket.return_value.accept.return_value = (mock_socket, 0)
mock_socket.recv.return_value.decode.return_value = dummy_start
env = UnityEnvironment(' ')
brain = env.brains['RealFakeBrain']
mock_socket.recv.side_effect = dummy_reset
brain_info = env.reset()
mock_socket.recv.side_effect = dummy_step
brain_info = env.step([0] * brain.action_space_size * len(brain_info['RealFakeBrain'].agents))
with pytest.raises(UnityActionException):
env.step([0])
brain_info = env.step([0] * brain.action_space_size * len(brain_info['RealFakeBrain'].agents))
with pytest.raises(UnityActionException):
env.step([0] * brain.action_space_size * len(brain_info['RealFakeBrain'].agents))
env.close()
assert env.global_done
assert isinstance(brain_info, dict)
assert isinstance(brain_info['RealFakeBrain'], BrainInfo)
assert isinstance(brain_info['RealFakeBrain'].observations, list)
assert isinstance(brain_info['RealFakeBrain'].states, np.ndarray)
assert len(brain_info['RealFakeBrain'].observations) == brain.number_observations
assert brain_info['RealFakeBrain'].states.shape[0] == len(brain_info['RealFakeBrain'].agents)
assert brain_info['RealFakeBrain'].states.shape[1] == brain.state_space_size
assert not brain_info['RealFakeBrain'].local_done[0]
assert brain_info['RealFakeBrain'].local_done[2]

with mock.patch('socket.socket') as mock_socket:
mock_socket.return_value.accept.return_value = (mock_socket, 0)
mock_socket.recv.return_value.decode.return_value = dummy_start
env = UnityEnvironment(' ')
assert env._loaded
env.close()
assert not env._loaded
mock_socket.close.assert_called_once()
with mock.patch('glob.glob') as mock_glob:
mock_glob.return_value = ['FakeLaunchPath']
mock_socket.return_value.accept.return_value = (mock_socket, 0)
mock_socket.recv.return_value.decode.return_value = dummy_start
env = UnityEnvironment(' ')
assert env._loaded
env.close()
assert not env._loaded
mock_socket.close.assert_called_once()
dummy_curriculum= json.loads('''{
"measure" : "reward",
"thresholds" : [10, 20, 50],
"min_lesson_length" : 3,
"signal_smoothing" : true,
"parameters" :
{
"param1" : [0.7, 0.5, 0.3, 0.1],
"param2" : [100, 50, 20, 15],
"param3" : [0.2, 0.3, 0.7, 0.9]
}
}''')
bad_curriculum= json.loads('''{
"measure" : "reward",
"thresholds" : [10, 20, 50],
"min_lesson_length" : 3,
"signal_smoothing" : false,
"parameters" :
{
"param1" : [0.7, 0.5, 0.3, 0.1],
"param2" : [100, 50, 20],
"param3" : [0.2, 0.3, 0.7, 0.9]
}
}''')
def test_curriculum():
open_name = '%s.open' % __name__
with mock.patch('json.load') as mock_load:
with mock.patch(open_name, create=True) as mock_open:
mock_open.return_value = 0
mock_load.return_value = bad_curriculum
with pytest.raises(UnityEnvironmentException):
curriculum = Curriculum('test_unityagents.py', {"param1":1,"param2":1,"param3":1})
mock_load.return_value = dummy_curriculum
with pytest.raises(UnityEnvironmentException):
curriculum = Curriculum('test_unityagents.py', {"param1":1,"param2":1})
curriculum = Curriculum('test_unityagents.py', {"param1":1,"param2":1,"param3":1})
assert curriculum.get_lesson_number() == 0
curriculum.set_lesson_number(1)
assert curriculum.get_lesson_number() == 1
curriculum.get_lesson(10)
assert curriculum.get_lesson_number() == 1
curriculum.get_lesson(30)
curriculum.get_lesson(30)
assert curriculum.get_lesson_number() == 1
assert curriculum.lesson_length == 3
assert curriculum.get_lesson(30) == {'param1': 0.3, 'param2': 20, 'param3': 0.7}
assert curriculum.lesson_length == 0
assert curriculum.get_lesson_number() == 2
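The test above encodes the lesson-progression rule: a lesson only advances once the measured signal clears the current threshold and the lesson has lasted longer than min_lesson_length measurements. The sketch below is an illustrative re-implementation of that rule, not the actual unityagents.Curriculum (which additionally applies signal smoothing and config validation); it is included only to make the asserted behaviour concrete.

class SimpleCurriculum:
    """Illustrative only: mimics the behaviour exercised by test_curriculum."""
    def __init__(self, config):
        self.thresholds = config["thresholds"]
        self.min_lesson_length = config["min_lesson_length"]
        self.parameters = config["parameters"]
        self.lesson_number = 0
        self.lesson_length = 0

    def get_lesson(self, measure):
        # Count one more measurement taken during the current lesson.
        self.lesson_length += 1
        if self.lesson_number < len(self.thresholds):
            # Advance only when the measure clears the current threshold and
            # the lesson has lasted longer than min_lesson_length measurements.
            if (measure > self.thresholds[self.lesson_number]
                    and self.lesson_length > self.min_lesson_length):
                self.lesson_length = 0
                self.lesson_number += 1
        # Return the reset parameters for the (possibly new) current lesson.
        return {k: v[self.lesson_number] for k, v in self.parameters.items()}

curric = SimpleCurriculum({"thresholds": [10, 20, 50], "min_lesson_length": 3,
                           "parameters": {"param1": [0.7, 0.5, 0.3, 0.1],
                                          "param2": [100, 50, 20, 15],
                                          "param3": [0.2, 0.3, 0.7, 0.9]}})
curric.lesson_number = 1
for measure in (10, 30, 30, 30):
    lesson = curric.get_lesson(measure)
print(curric.lesson_number, lesson)  # 2 {'param1': 0.3, 'param2': 20, 'param3': 0.7}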

89
python/ppo.py


Options:
--help Show this message.
--max-steps=<n> Maximum number of steps to run environment [default: 1e6].
--batch-size=<n> How many experiences per gradient descent update step [default: 64].
--beta=<n> Strength of entropy regularization [default: 2.5e-3].
--buffer-size=<n> How large the experience buffer should be before gradient descent [default: 2048].
--curriculum=<file> Curriculum json file for environment [default: None].
--epsilon=<n> Acceptable threshold around ratio of old and new policy probabilities [default: 0.2].
--gamma=<n> Reward discount rate [default: 0.99].
--hidden-units=<n> Number of units in hidden layer [default: 64].
--keep-checkpoints=<n> How many model checkpoints to keep [default: 5].
--lambd=<n> Lambda parameter for GAE [default: 0.95].
--learning-rate=<rate> Model learning rate [default: 3e-4].
--load Whether to load the model or randomly initialize [default: False].
--max-steps=<n> Maximum number of steps to run environment [default: 1e6].
--normalize Whether to normalize the state input using running statistics [default: False].
--num-epoch=<n> Number of gradient descent steps per batch of experiences [default: 5].
--num-layers=<n> Number of hidden layers between state/observation and outputs [default: 2].
--load Whether to load the model or randomly initialize [default: False].
--train Whether to train model, or only run inference [default: True].
--save-freq=<n> Frequency at which to save model [default: 50000].
--gamma=<n> Reward discount rate [default: 0.99].
--lambd=<n> Lambda parameter for GAE [default: 0.95].
--beta=<n> Strength of entropy regularization [default: 1e-3].
--num-epoch=<n> Number of gradient descent steps per batch of experiences [default: 5].
--epsilon=<n> Acceptable threshold around ratio of old and new policy probabilities [default: 0.2].
--buffer-size=<n> How large the experience buffer should be before gradient descent [default: 2048].
--learning-rate=<rate> Model learning rate [default: 3e-4].
--hidden-units=<n> Number of units in hidden layer [default: 64].
--batch-size=<n> How many experiences per gradient descent update step [default: 64].
--keep-checkpoints=<n> How many model checkpoints to keep [default: 5].
--worker-id=<n> Number to add to communication port (5005). Used for asynchronous agent scenarios [default: 0].
--train Whether to train model, or only run inference [default: False].
--worker-id=<n> Number to add to communication port (5005). Used when running multiple environments [default: 0].
'''
options = docopt(_USAGE)
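For reference, a minimal sketch of how the new flags parse with docopt; the usage string and argv below are hypothetical and trimmed to just the options added in this change. Boolean flags such as --normalize come back as True/False, while valued options come back as strings and are cast explicitly, matching the casts further down in ppo.py.

from docopt import docopt

_USAGE = """Usage:
  ppo (<env>) [options]

Options:
  --normalize          Whether to normalize the state input using running statistics [default: False].
  --num-layers=<n>     Number of hidden layers between state/observation and outputs [default: 2].
  --curriculum=<file>  Curriculum json file for environment [default: None].
"""

# Hypothetical invocation: python ppo.py 3DBall --normalize --num-layers=3
options = docopt(_USAGE, argv=["3DBall", "--normalize", "--num-layers=3"])
normalize = options["--normalize"]          # True (plain flag -> bool)
num_layers = int(options["--num-layers"])   # 3 (valued option -> cast from str)
curriculum_file = str(options["--curriculum"])
if curriculum_file == "None":               # docopt returns the literal default string
    curriculum_file = None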

env_name = options['<env>']
keep_checkpoints = int(options['--keep-checkpoints'])
worker_id = int(options['--worker-id'])
curriculum_file = str(options['--curriculum'])
if curriculum_file == "None":
curriculum_file = None
# Algorithm-specific parameters for tuning
gamma = float(options['--gamma'])

num_epoch = int(options['--num-epoch'])
num_layers = int(options['--num-layers'])
normalize = options['--normalize']
env = UnityEnvironment(file_name=env_name, worker_id=worker_id)
env = UnityEnvironment(file_name=env_name, worker_id=worker_id, curriculum=curriculum_file)
brain_name = env.brain_names[0]
brain_name = env.external_brain_names[0]
tf.reset_default_graph()

beta=beta, max_step=max_steps)
beta=beta, max_step=max_steps,
normalize=normalize, num_layers=num_layers)
is_continuous = (env.brains[brain_name].action_space_type == "continuous")
use_observations = (env.brains[brain_name].number_observations > 0)

init = tf.global_variables_initializer()
saver = tf.train.Saver(max_to_keep=keep_checkpoints)
def get_progress():
if curriculum_file is not None:
if env._curriculum.measure_type == "progress":
return steps / max_steps
elif env._curriculum.measure_type == "reward":
return last_reward
else:
return None
else:
return None
if ckpt == None:
print('The model {0} could not be found. Make sure you specified the right '
'--run-path'.format(model_path))
steps = sess.run(ppo_model.global_step)
steps, last_reward = sess.run([ppo_model.global_step, ppo_model.last_reward])
info = env.reset(train_mode=train_model)[brain_name]
trainer = Trainer(ppo_model, sess, info, is_continuous, use_observations, use_states)
info = env.reset(train_mode=train_model, progress=get_progress())[brain_name]
trainer = Trainer(ppo_model, sess, info, is_continuous, use_observations, use_states, train_model)
if train_model:
trainer.write_text(summary_writer, 'Hyperparameters', options, steps)
info = env.reset(train_mode=train_model)[brain_name]
info = env.reset(train_mode=train_model, progress=get_progress())[brain_name]
trainer.reset_buffers(info, total=True)
new_info = trainer.take_action(info, env, brain_name)
new_info = trainer.take_action(info, env, brain_name, steps, normalize)
info = new_info
trainer.process_experiences(info, time_horizon, gamma, lambd)
if len(trainer.training_buffer['actions']) > buffer_size and train_model:

# Write training statistics to tensorboard.
trainer.write_summary(summary_writer, steps)
trainer.write_summary(summary_writer, steps, env._curriculum.lesson_number)
steps += 1
sess.run(ppo_model.increment_step)
if train_model:
steps += 1
sess.run(ppo_model.increment_step)
if len(trainer.stats['cumulative_reward']) > 0:
mean_reward = np.mean(trainer.stats['cumulative_reward'])
sess.run(ppo_model.update_reward, feed_dict={ppo_model.new_reward: mean_reward})
last_reward = sess.run(ppo_model.last_reward)
export_graph(model_path, env_name)
graph_name = (env_name.strip()
.replace('.app', '').replace('.exe', '').replace('.x86_64', '').replace('.x86', ''))
graph_name = os.path.basename(os.path.normpath(graph_name))
export_graph(model_path, graph_name)
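The last change above derives the exported graph's name from the environment path instead of passing the raw path through. For a hypothetical build path it behaves like this:

import os

# Illustrative only: hypothetical build path, not taken from the diff.
env_name = "./builds/3DBall.app"
graph_name = (env_name.strip()
              .replace('.app', '').replace('.exe', '').replace('.x86_64', '').replace('.x86', ''))
graph_name = os.path.basename(os.path.normpath(graph_name))
print(graph_name)  # 3DBall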

1
python/unityagents/__init__.py


from .environment import *
from .brain import *
from .exception import *
from .curriculum import *

3
python/unityagents/brain.py


class BrainInfo:
def __init__(self, observation, state, memory=None, reward=None, agents=None, local_done=None):
def __init__(self, observation, state, memory=None, reward=None, agents=None, local_done=None, action =None):
"""
Describes experience at current step of all agents linked to a brain.
"""

self.rewards = reward
self.local_done = local_done
self.agents = agents
self.previous_actions = action
class BrainParameters:
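With the added `action` argument, every `BrainInfo` returned by the environment now also carries the actions taken on the previous step. A minimal sketch of reading the new field (the build name is an assumed placeholder):

```python
# Sketch: inspecting the new previous_actions field on BrainInfo.
from unityagents import UnityEnvironment

env = UnityEnvironment(file_name="3DBall")  # placeholder build name
brain_name = env.external_brain_names[0]
info = env.reset(train_mode=True)[brain_name]
print(info.previous_actions)  # per-agent actions from the previous step
env.close()
```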

31
python/unityagents/exception.py


import logging
logger = logging.getLogger("unityagents")
class UnityEnvironmentException(Exception):
"""
Related to errors starting and closing environment.

Related to errors with sending actions.
"""
pass
class UnityTimeOutException(Exception):
"""
Related to errors with communication timeouts.
"""
def __init__(self, message, log_file_path = None):
if log_file_path is not None:
try:
with open(log_file_path, "r") as f:
printing = False
unity_error = '\n'
for l in f:
l=l.strip()
if (l == 'Exception') or (l=='Error'):
printing = True
unity_error += '----------------------\n'
if (l == ''):
printing = False
if printing:
unity_error += l + '\n'
logger.info(unity_error)
logger.error("An error might have occured in the environment. "
"You can check the logfile for more information at {}".format(log_file_path))
except:
logger.error("An error might have occured in the environment. "
"No unity-environment.log file could be found.")
super(UnityTimeOutException, self).__init__(message)
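Training scripts can catch the new exception to surface the log excerpt; a minimal sketch (the build name is a placeholder):

```python
# Sketch: handling the new timeout exception from calling code.
from unityagents import UnityEnvironment, UnityTimeOutException

try:
    env = UnityEnvironment(file_name="3DBall")  # placeholder build name
except UnityTimeOutException as e:
    # The exception message points at unity-environment.log when one is found.
    print("Unity environment timed out: {}".format(e))
```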

247
python/unityagents/environment.py


import os
import socket
import subprocess
import struct
from .exception import UnityEnvironmentException, UnityActionException
from .exception import UnityEnvironmentException, UnityActionException, UnityTimeOutException
from .curriculum import Curriculum
logger = logging.getLogger(__name__)
logger = logging.getLogger("unityagents")
base_port=5005):
base_port=5005, curriculum=None):
"""
Starts a new unity environment and establishes a connection with the environment.
Notice: Currently communication between Unity and Python takes place over an open socket without authentication.

atexit.register(self.close)
self.port = base_port + worker_id
self._buffer_size = 120000
self._buffer_size = 12000
self._python_api = "API-2"
self._loaded = False
self._open_socket = False

"or use a different worker number.".format(str(worker_id)))
cwd = os.getcwd()
try:
true_filename = os.path.basename(os.path.normpath(file_name))
launch_string = ""
if platform == "linux" or platform == "linux2":
candidates = glob.glob(os.path.join(cwd, file_name) + '.x86_64')
if len(candidates) == 0:
candidates = glob.glob(os.path.join(cwd, file_name) + '.x86')
if len(candidates) > 0:
launch_string = candidates[0]
else:
raise UnityEnvironmentException("Couldn't launch new environment. Provided filename "
"does not match any environments in {}. ".format(cwd))
elif platform == 'darwin':
launch_string = os.path.join(cwd, file_name + '.app', 'Contents', 'MacOS', true_filename)
elif platform == 'win32':
launch_string = os.path.join(cwd, file_name + '.exe')
file_name = (file_name.strip()
.replace('.app', '').replace('.exe', '').replace('.x86_64', '').replace('.x86', ''))
true_filename = os.path.basename(os.path.normpath(file_name))
launch_string = None
if platform == "linux" or platform == "linux2":
candidates = glob.glob(os.path.join(cwd, file_name) + '.x86_64')
if len(candidates) == 0:
candidates = glob.glob(os.path.join(cwd, file_name) + '.x86')
if len(candidates) == 0:
candidates = glob.glob(file_name + '.x86_64')
if len(candidates) == 0:
candidates = glob.glob(file_name + '.x86')
if len(candidates) > 0:
launch_string = candidates[0]
elif platform == 'darwin':
candidates = glob.glob(os.path.join(cwd, file_name + '.app', 'Contents', 'MacOS', true_filename))
if len(candidates) == 0:
candidates = glob.glob(os.path.join(file_name + '.app', 'Contents', 'MacOS', true_filename))
if len(candidates) > 0:
launch_string = candidates[0]
elif platform == 'win32':
candidates = glob.glob(os.path.join(cwd, file_name + '.exe'))
if len(candidates) == 0:
candidates = glob.glob(file_name + '.exe')
if len(candidates) > 0:
launch_string = candidates[0]
if launch_string is None:
self.close()
raise UnityEnvironmentException("Couldn't launch the {0} environment. "
"Provided filename does not match any environments."
.format(true_filename))
else:
except os.error:
self.close()
raise UnityEnvironmentException("Couldn't launch new environment. "
"Provided filename does not match any \environments in {}."
.format(cwd))
self._socket.settimeout(30)
self._conn.setblocking(1)
self._conn.settimeout(30)
raise UnityTimeOutException(
"The Unity environment took too long to respond. Make sure {} does not need user interaction to "
"launch and that the Academy and the external Brain(s) are attached to objects in the Scene."
.format(str(file_name)))
if "apiNumber" not in p:
self._unity_api = "API-1"
else:
self._unity_api = p["apiNumber"]
if self._unity_api != self._python_api:
"The Unity environment took too long to respond. Make sure {} does not need user interaction to launch "
"and that the Academy and the external Brain(s) are attached to objects in the Scene.".format(
str(file_name)))
"The API number is not compatible between Unity and python. Python API : {0}, Unity API : "
"{1}.\nPlease go to https://github.com/Unity-Technologies/ml-agents to download the latest version "
"of ML-Agents.".format(self._python_api, self._unity_api))
self._data = {}
self._global_done = None
self._academy_name = p["AcademyName"]
self._log_path = p["logPath"]
self._brains = {}
self._brain_names = p["brainNames"]
self._external_brain_names = p["externalBrainNames"]
self._external_brain_names = [] if self._external_brain_names is None else self._external_brain_names
self._num_brains = len(self._brain_names)
self._num_external_brains = len(self._external_brain_names)
self._resetParameters = p["resetParameters"]
self._curriculum = Curriculum(curriculum, self._resetParameters)
for i in range(self._num_brains):
self._brains[self._brain_names[i]] = BrainParameters(self._brain_names[i], p["brainParameters"][i])
self._loaded = True
logger.info("\n'{}' started successfully!".format(self._academy_name))
if (self._num_external_brains == 0):
logger.warning(" No External Brains found in the Unity Environment. "
"You will not be able to pass actions to your agent(s).")
self._data = {}
self._global_done = None
self._academy_name = p["AcademyName"]
self._num_brains = len(p["brainParameters"])
self._brains = {}
self._brain_names = p["brainNames"]
self._resetParameters = p["resetParameters"]
for i in range(self._num_brains):
self._brains[self._brain_names[i]] = BrainParameters(self._brain_names[i], p["brainParameters"][i])
self._conn.send(b".")
self._loaded = True
logger.info("\n'{}' started successfully!".format(self._academy_name))
@property
def logfile_path(self):
return self._log_path
@property
def brains(self):

@property
def number_brains(self):
return self._num_brains
@property
def number_external_brains(self):
return self._num_external_brains
@property
def external_brain_names(self):
return self._external_brain_names
@staticmethod
def _process_pixels(image_bytes=None, bw=False):
"""

"\n\t\t".join([str(k) + " -> " + str(self._resetParameters[k])
for k in self._resetParameters])) + '\n' + \
'\n'.join([str(self._brains[b]) for b in self._brains])
def _recv_bytes(self):
try:
s = self._conn.recv(self._buffer_size)
message_length = struct.unpack("I", bytearray(s[:4]))[0]
s = s[4:]
while len(s) != message_length:
s += self._conn.recv(self._buffer_size)
except socket.timeout as e:
raise UnityTimeOutException("The environment took too long to respond.", self._log_path)
return s
def _get_state_image(self, bw):
"""

"""
s = self._conn.recv(self._buffer_size)
s = self._recv_bytes()
s = self._process_pixels(image_bytes=s, bw=bw)
self._conn.send(b"RECEIVED")
return s

Receives dictionary of state information from socket, and confirms.
:return:
"""
state = self._conn.recv(self._buffer_size).decode('utf-8')
state = self._recv_bytes().decode('utf-8')
def reset(self, train_mode=True, config=None):
def reset(self, train_mode=True, config=None, progress=None):
config = config or {}
old_lesson = self._curriculum.get_lesson_number()
if config is None:
config = self._curriculum.get_lesson(progress)
if old_lesson != self._curriculum.get_lesson_number():
logger.info("\nLesson changed. Now in Lesson {0} : \t{1}"
.format(self._curriculum.get_lesson_number(),
', '.join([str(x) + ' -> ' + str(config[x]) for x in config])))
elif config != {}:
logger.info("\nAcademy Reset with parameters : \t{0}"
.format(', '.join([str(x) + ' -> ' + str(config[x]) for x in config])))
for k in config:
if (k in self._resetParameters) and (isinstance(config[k], (int, float))):
self._resetParameters[k] = config[k]
elif not isinstance(config[k], (int, float)):
raise UnityEnvironmentException(
"The value for parameter '{0}'' must be an Integer or a Float.".format(k))
else:
raise UnityEnvironmentException("The parameter '{0}' is not a valid parameter.".format(k))
self._conn.recv(self._buffer_size)
try:
self._conn.recv(self._buffer_size)
except socket.timeout as e:
raise UnityTimeOutException("The environment took too long to respond.", self._log_path)
for k in config:
if (k in self._resetParameters) and (isinstance(config[k], (int, float))):
self._resetParameters[k] = config[k]
elif not isinstance(config[k], (int, float)):
raise UnityEnvironmentException(
"The value for parameter '{0}'' must be an Integer or a Float.".format(k))
else:
raise UnityEnvironmentException("The parameter '{0}' is not a valid parameter.".format(k))
return self._get_state()
else:
raise UnityEnvironmentException("No Unity environment is loaded.")

raise UnityActionException("Brain {0} has an invalid state. "
"Expecting {1} {2} state but received {3}."
.format(b, n_agent if self._brains[b].state_space_type == "discrete"
else str(self._brains[b].state_space_size * n_agent),
self._brains[b].state_space_type,
len(state_dict["states"])))
# actions = state_dict["actions"]
if n_agent > 0:
actions = np.array(state_dict["actions"]).reshape((n_agent, -1))
else:
actions = np.array([])
observations = []
for o in range(self._brains[b].number_observations):

observations.append(np.array(obs_n))
self._data[b] = BrainInfo(observations, states, memories, rewards, agents, dones)
self._data[b] = BrainInfo(observations, states, memories, rewards, agents, dones, actions)
self._global_done = self._conn.recv(self._buffer_size).decode('utf-8') == 'True'
try:
self._global_done = self._conn.recv(self._buffer_size).decode('utf-8') == 'True'
except socket.timeout as e:
raise UnityTimeOutException("The environment took too long to respond.", self._log_path)
return self._data

:param memory: a dictionary of lists of of memories.
:param value: a dictionary of lists of of value estimates.
"""
self._conn.recv(self._buffer_size)
try:
self._conn.recv(self._buffer_size)
except socket.timeout as e:
raise UnityTimeOutException("The environment took too long to respond.", self._log_path)
action_message = {"action": action, "memory": memory, "value": value}
self._conn.send(json.dumps(action_message).encode('utf-8'))

arr = [float(x) for x in arr]
return arr
def step(self, action, memory=None, value=None):
def step(self, action=None, memory=None, value=None):
"""
Provides the environment with an action, moves the environment dynamics forward accordingly, and returns
observation, state, and reward information to the agent.

:return: A Data structure corresponding to the new state of the environment.
"""
action = {} if action is None else action
if self._num_brains > 1:
if self._num_external_brains == 1:
action = {self._external_brain_names[0]: action}
elif self._num_external_brains > 1:
action = {self._brain_names[0]: action}
raise UnityActionException(
"There are no external brains in the environment, "
"step cannot take an action input")
if self._num_brains > 1:
if self._num_external_brains == 1:
memory = {self._external_brain_names[0]: memory}
elif self._num_external_brains > 1:
memory = {self._brain_names[0]: memory}
raise UnityActionException(
"There are no external brains in the environment, "
"step cannot take a memory input")
if self._num_brains > 1:
if self._num_external_brains == 1:
value = {self._external_brain_names[0]: value}
elif self._num_external_brains > 1:
value = {self._brain_names[0]: value}
raise UnityActionException(
"There are no external brains in the environment, "
"step cannot take a value input")
for b in self._brain_names:
for brain_name in list(action.keys()) + list(memory.keys()) + list(value.keys()):
if brain_name not in self._external_brain_names:
raise UnityActionException(
"The name {0} does not correspond to an external brain "
"in the environment".format(brain_name))
for b in self._external_brain_names:
n_agent = len(self._data[b].agents)
if b not in action:
raise UnityActionException("You need to input an action for the brain {0}".format(b))

raise UnityActionException(
"There was a mismatch between the provided memory and environment's expectation: "
"The brain {0} expected {1} memories but was given {2}"
.format(b, self._brains[b].memory_space_size * n_agent, len(memory[b])))
if not ((self._brains[b].action_space_type == "discrete" and len(action[b]) == n_agent) or
(self._brains[b].action_space_type == "continuous" and len(
action[b]) == self._brains[b].action_space_size * n_agent)):

.format(b, n_agent if self._brains[b].action_space_type == "discrete" else
str(action[b])))
self._conn.send(b"STEP")
self._send_action(action, memory, value)
return self._get_state()

self._socket.close()
self._loaded = False
else:
raise UnityEnvironmentException("No Unity environment is loaded.")
raise UnityEnvironmentException("No Unity environment is loaded.")

85
python/ppo/trainer.py


class Trainer(object):
def __init__(self, ppo_model, sess, info, is_continuous, use_observations, use_states):
def __init__(self, ppo_model, sess, info, is_continuous, use_observations, use_states, training):
Responsible for collecting experinces and training PPO model.
Responsible for collecting experiences and training PPO model.
:param ppo_model: Tensorflow graph defining model.
:param sess: Tensorflow session.
:param info: Environment BrainInfo object.

stats = {'cumulative_reward': [], 'episode_length': [], 'value_estimate': [],
'entropy': [], 'value_loss': [], 'policy_loss': [], 'learning_rate': []}
self.stats = stats
self.is_training = training
self.reset_buffers(info, total=True)
self.history_dict = empty_all_history(info)
def take_action(self, info, env, brain_name):
def running_average(self, data, steps, running_mean, running_variance):
"""
Computes new running mean and variances.
:param data: New piece of data.
:param steps: Total number of data so far.
:param running_mean: TF op corresponding to stored running mean.
:param running_variance: TF op corresponding to stored running variance.
:return: New mean and variance values.
"""
mean, var = self.sess.run([running_mean, running_variance])
current_x = np.mean(data, axis=0)
new_mean = mean + (current_x - mean) / (steps + 1)
new_variance = var + (current_x - new_mean) * (current_x - mean)
return new_mean, new_variance
def take_action(self, info, env, brain_name, steps, normalize):
"""
Decides actions given state/observation information, and takes them in environment.
:param info: Current BrainInfo from environment.

"""
epsi = None
feed_dict = {self.model.batch_size: len(info.states)}
run_list = [self.model.output, self.model.probs, self.model.value, self.model.entropy,
self.model.learning_rate]
if self.is_continuous:
epsi = np.random.randn(len(info.states), env.brains[brain_name].action_space_size)
feed_dict[self.model.epsilon] = epsi

feed_dict[self.model.state_in] = info.states
actions, a_dist, value, ent, learn_rate = self.sess.run([self.model.output, self.model.probs,
self.model.value, self.model.entropy,
self.model.learning_rate],
feed_dict=feed_dict)
if self.is_training and env.brains[brain_name].state_space_type == "continuous" and self.use_states and normalize:
new_mean, new_variance = self.running_average(info.states, steps, self.model.running_mean,
self.model.running_variance)
feed_dict[self.model.new_mean] = new_mean
feed_dict[self.model.new_variance] = new_variance
run_list = run_list + [self.model.update_mean, self.model.update_variance]
actions, a_dist, value, ent, learn_rate, _, _ = self.sess.run(run_list, feed_dict=feed_dict)
else:
actions, a_dist, value, ent, learn_rate = self.sess.run(run_list, feed_dict=feed_dict)
self.stats['value_estimate'].append(value)
self.stats['entropy'].append(ent)
self.stats['learning_rate'].append(learn_rate)

history['cumulative_reward'] = 0
history['episode_steps'] = 0
def reset_buffers(self, brain_info=None, total=False):
"""
Resets either all training buffers or local training buffers
:param brain_info: The BrainInfo object containing agent ids.
:param total: Whether to completely clear buffer.
"""
if not total:
for key in self.history_dict:
self.history_dict[key] = empty_local_history(self.history_dict[key])
else:
self.history_dict = empty_all_history(agent_info=brain_info)
def update_model(self, batch_size, num_epoch):
"""
Uses training_buffer to update model.

self.stats['value_loss'].append(total_v)
self.stats['policy_loss'].append(total_p)
self.training_buffer = vectorize_history(empty_local_history({}))
for key in self.history_dict:
self.history_dict[key] = empty_local_history(self.history_dict[key])
def write_summary(self, summary_writer, steps):
def write_summary(self, summary_writer, steps, lesson_number):
print("Mean Reward: {0}".format(np.mean(self.stats['cumulative_reward'])))
if len(self.stats['cumulative_reward']) > 0:
mean_reward = np.mean(self.stats['cumulative_reward'])
print("Step: {0}. Mean Reward: {1}. Std of Reward: {2}."
.format(steps, mean_reward, np.std(self.stats['cumulative_reward'])))
summary = tf.Summary()
for key in self.stats:
if len(self.stats[key]) > 0:

summary.value.add(tag='Info/Lesson', simple_value=lesson_number)
def write_text(self, summary_writer, key, input_dict, steps):
"""
Saves text to Tensorboard.
Note: Only works on tensorflow r1.2 or above.
:param summary_writer: writer associated with Tensorflow session.
:param key: The name of the text.
:param input_dict: A dictionary that will be displayed in a table on Tensorboard.
:param steps: Number of environment steps in training process.
"""
try:
s_op = tf.summary.text(key,
tf.convert_to_tensor(([[str(x), str(input_dict[x])] for x in input_dict]))
)
s = self.sess.run(s_op)
summary_writer.add_summary(s, steps)
except:
print("Cannot write text summary for Tensorboard. Tensorflow version must be r1.2 or above.")
pass
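When `--normalize` is set, `running_average` above maintains streaming estimates of the state mean and variance, which the model then uses to whiten and clip incoming states. A standalone NumPy sketch of the same arithmetic (array shapes are illustrative):

```python
import numpy as np

def update_running_stats(states, steps, mean, variance):
    """One streaming update, mirroring Trainer.running_average."""
    current_x = np.mean(states, axis=0)
    new_mean = mean + (current_x - mean) / (steps + 1)
    new_variance = variance + (current_x - new_mean) * (current_x - mean)
    return new_mean, new_variance

mean, var = np.zeros(3), np.ones(3)
for step in range(10):
    states = np.random.randn(4, 3)  # 4 agents, state size 3
    mean, var = update_running_stats(states, step, mean, var)

# The model then normalizes states as in create_continuous_state_encoder:
normalized = np.clip((states - mean) / np.sqrt(var / (10 + 1)), -5, 5)
```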

134
python/ppo/models.py


from unityagents import UnityEnvironmentException
def create_agent_model(env, lr=1e-4, h_size=128, epsilon=0.2, beta=1e-3, max_step=5e6):
def create_agent_model(env, lr=1e-4, h_size=128, epsilon=0.2, beta=1e-3, max_step=5e6, normalize=False, num_layers=2):
"""
Takes a Unity environment and model-specific hyper-parameters and returns the
appropriate PPO agent model for the environment.

:return: a sub-class of PPOAgent tailored to the environment.
:param max_step: Total number of training steps.
"""
if num_layers < 1: num_layers = 1
return ContinuousControlModel(lr, brain, h_size, epsilon, max_step)
return ContinuousControlModel(lr, brain, h_size, epsilon, max_step, normalize, num_layers)
return DiscreteControlModel(lr, brain, h_size, epsilon, beta, max_step)
return DiscreteControlModel(lr, brain, h_size, epsilon, beta, max_step, normalize, num_layers)
def save_model(sess, saver, model_path="./", steps=0):

print("Saved Model")
def export_graph(model_path, env_name="env", target_nodes="action"):
def export_graph(model_path, env_name="env", target_nodes="action,value_estimate,action_probs"):
"""
Exports latest saved model to .bytes format for Unity embedding.
:param model_path: path of model checkpoints.

class PPOModel(object):
def create_visual_encoder(self, o_size_h, o_size_w, bw, h_size, num_streams, activation):
def __init__(self):
self.normalize = False
def create_global_steps(self):
"""Creates TF ops to track and increment global training step."""
self.global_step = tf.Variable(0, name="global_step", trainable=False, dtype=tf.int32)
self.increment_step = tf.assign(self.global_step, self.global_step + 1)
def create_reward_encoder(self):
"""Creates TF ops to track and increment recent average cumulative reward."""
self.last_reward = tf.Variable(0, name="last_reward", trainable=False, dtype=tf.float32)
self.new_reward = tf.placeholder(shape=[], dtype=tf.float32, name='new_reward')
self.update_reward = tf.assign(self.last_reward, self.new_reward)
def create_visual_encoder(self, o_size_h, o_size_w, bw, h_size, num_streams, activation, num_layers):
"""
Builds a set of visual (CNN) encoders.
:param o_size_h: Height observation size.

name='observation_0')
streams = []
for i in range(num_streams):
self.conv1 = tf.layers.conv2d(self.observation_in, 32, kernel_size=[3, 3], strides=[2, 2],
self.conv1 = tf.layers.conv2d(self.observation_in, 16, kernel_size=[8, 8], strides=[4, 4],
self.conv2 = tf.layers.conv2d(self.conv1, 64, kernel_size=[3, 3], strides=[2, 2],
self.conv2 = tf.layers.conv2d(self.conv1, 32, kernel_size=[4, 4], strides=[2, 2],
hidden = tf.layers.dense(c_layers.flatten(self.conv2), h_size, use_bias=False, activation=activation)
hidden = c_layers.flatten(self.conv2)
for j in range(num_layers):
hidden = tf.layers.dense(hidden, h_size, use_bias=False, activation=activation)
def create_continuous_state_encoder(self, s_size, h_size, num_streams, activation):
def create_continuous_state_encoder(self, s_size, h_size, num_streams, activation, num_layers):
"""
Builds a set of hidden state encoders.
:param s_size: state input size.

:return: List of hidden layer tensors.
"""
self.state_in = tf.placeholder(shape=[None, s_size], dtype=tf.float32, name='state')
if self.normalize:
self.running_mean = tf.get_variable("running_mean", [s_size], trainable=False, dtype=tf.float32,
initializer=tf.zeros_initializer())
self.running_variance = tf.get_variable("running_variance", [s_size], trainable=False, dtype=tf.float32,
initializer=tf.ones_initializer())
self.normalized_state = tf.clip_by_value((self.state_in - self.running_mean) / tf.sqrt(
self.running_variance / (tf.cast(self.global_step, tf.float32) + 1)), -5, 5, name="normalized_state")
self.new_mean = tf.placeholder(shape=[s_size], dtype=tf.float32, name='new_mean')
self.new_variance = tf.placeholder(shape=[s_size], dtype=tf.float32, name='new_variance')
self.update_mean = tf.assign(self.running_mean, self.new_mean)
self.update_variance = tf.assign(self.running_variance, self.new_variance)
else:
self.normalized_state = self.state_in
hidden_1 = tf.layers.dense(self.state_in, h_size, use_bias=False, activation=activation)
hidden_2 = tf.layers.dense(hidden_1, h_size, use_bias=False, activation=activation)
streams.append(hidden_2)
hidden = self.normalized_state
for j in range(num_layers):
hidden = tf.layers.dense(hidden, h_size, use_bias=False, activation=activation)
streams.append(hidden)
def create_discrete_state_encoder(self, s_size, h_size, num_streams, activation):
def create_discrete_state_encoder(self, s_size, h_size, num_streams, activation, num_layers):
"""
Builds a set of hidden state encoders from discrete state input.
:param s_size: state input size (discrete).

state_in = tf.reshape(self.state_in, [-1])
state_onehot = c_layers.one_hot_encoding(state_in, s_size)
streams = []
hidden = state_onehot
hidden = tf.layers.dense(state_onehot, h_size, use_bias=False, activation=activation)
for j in range(num_layers):
hidden = tf.layers.dense(hidden, h_size, use_bias=False, activation=activation)
streams.append(hidden)
return streams

:param lr: Learning rate
:param max_step: Total number of training steps.
"""
r_theta = probs / old_probs
decay_epsilon = tf.train.polynomial_decay(epsilon, self.global_step,
max_step, 1e-2,
power=1.0)
r_theta = probs / (old_probs + 1e-10)
p_opt_b = tf.clip_by_value(r_theta, 1 - epsilon, 1 + epsilon) * self.advantage
p_opt_b = tf.clip_by_value(r_theta, 1 - decay_epsilon, 1 + decay_epsilon) * self.advantage
self.loss = self.policy_loss + self.value_loss - beta * tf.reduce_mean(entropy)
decay_beta = tf.train.polynomial_decay(beta, self.global_step,
max_step, 1e-5,
power=1.0)
self.loss = self.policy_loss + self.value_loss - decay_beta * tf.reduce_mean(entropy)
self.global_step = tf.Variable(0, trainable=False, name='global_step', dtype=tf.int32)
self.learning_rate = tf.train.polynomial_decay(lr, self.global_step,
max_step, 1e-10,
power=1.0)

self.increment_step = tf.assign(self.global_step, self.global_step + 1)
def __init__(self, lr, brain, h_size, epsilon, max_step):
def __init__(self, lr, brain, h_size, epsilon, max_step, normalize, num_layers):
super(ContinuousControlModel, self).__init__()
self.normalize = normalize
self.create_global_steps()
self.create_reward_encoder()
h_size, w_size = brain.camera_resolutions[0]['height'], brain.camera_resolutions[0]['width']
height_size, width_size = brain.camera_resolutions[0]['height'], brain.camera_resolutions[0]['width']
hidden_visual = self.create_visual_encoder(h_size, w_size, bw, h_size, 2, tf.nn.tanh)
hidden_visual = self.create_visual_encoder(height_size, width_size, bw, h_size, 2, tf.nn.tanh, num_layers)
hidden_state = self.create_continuous_state_encoder(s_size, h_size, 2, tf.nn.tanh)
hidden_state = self.create_continuous_state_encoder(s_size, h_size, 2, tf.nn.tanh, num_layers)
hidden_state = self.create_discrete_state_encoder(s_size, h_size, 2, tf.nn.tanh)
hidden_state = self.create_discrete_state_encoder(s_size, h_size, 2, tf.nn.tanh, num_layers)
if hidden_visual is None and hidden_state is None:
raise Exception("No valid network configuration possible. "

self.batch_size = tf.placeholder(shape=None, dtype=tf.int32, name='batch_size')
self.mu = tf.layers.dense(hidden_policy, a_size, activation=None, use_bias=False,
kernel_initializer=c_layers.variance_scaling_initializer(factor=0.1))
self.log_sigma_sq = tf.Variable(tf.zeros([a_size]))
kernel_initializer=c_layers.variance_scaling_initializer(factor=0.01))
self.log_sigma_sq = tf.get_variable("log_sigma_squared", [a_size], dtype=tf.float32,
initializer=tf.zeros_initializer())
self.sigma_sq = tf.exp(self.log_sigma_sq)
self.epsilon = tf.placeholder(shape=[None, a_size], dtype=tf.float32, name='epsilon')

a = tf.exp(-1 * tf.pow(tf.stop_gradient(self.output) - self.mu, 2) / (2 * self.sigma_sq))
b = 1 / tf.sqrt(2 * self.sigma_sq * np.pi)
self.probs = a * b
self.probs = tf.multiply(a, b, name="action_probs")
self.value = tf.identity(self.value, name="value_estimate")
self.old_probs = tf.placeholder(shape=[None, a_size], dtype=tf.float32, name='old_probabilities')

class DiscreteControlModel(PPOModel):
def __init__(self, lr, brain, h_size, epsilon, beta, max_step):
def __init__(self, lr, brain, h_size, epsilon, beta, max_step, normalize, num_layers):
super(DiscreteControlModel, self).__init__()
self.create_global_steps()
self.create_reward_encoder()
self.normalize = normalize
h_size, w_size = brain.camera_resolutions[0]['height'], brain.camera_resolutions[0]['width']
height_size, width_size = brain.camera_resolutions[0]['height'], brain.camera_resolutions[0]['width']
hidden_visual = self.create_visual_encoder(h_size, w_size, bw, h_size, 1, tf.nn.elu)[0]
hidden_visual = self.create_visual_encoder(height_size, width_size, bw, h_size, 1, tf.nn.elu, num_layers)[0]
hidden_state = self.create_continuous_state_encoder(s_size, h_size, 1, tf.nn.elu)[0]
hidden_state = self.create_continuous_state_encoder(s_size, h_size, 1, tf.nn.elu, num_layers)[0]
hidden_state = self.create_discrete_state_encoder(s_size, h_size, 1, tf.nn.elu)[0]
hidden_state = self.create_discrete_state_encoder(s_size, h_size, 1, tf.nn.elu, num_layers)[0]
if hidden_visual is None and hidden_state is None:
raise Exception("No valid network configuration possible. "

self.batch_size = tf.placeholder(shape=None, dtype=tf.int32, name='batch_size')
self.policy = tf.layers.dense(hidden, a_size, activation=None, use_bias=False,
kernel_initializer=c_layers.variance_scaling_initializer(factor=0.1))
self.probs = tf.nn.softmax(self.policy)
self.action = tf.multinomial(self.policy, 1)
self.output = tf.identity(self.action, name='action')
self.value = tf.layers.dense(hidden, 1, activation=None, use_bias=False)
kernel_initializer=c_layers.variance_scaling_initializer(factor=0.01))
self.probs = tf.nn.softmax(self.policy, name="action_probs")
self.output = tf.multinomial(self.policy, 1)
self.output = tf.identity(self.output, name="action")
self.value = tf.layers.dense(hidden, 1, activation=None, use_bias=False,
kernel_initializer=c_layers.variance_scaling_initializer(factor=1.0))
self.value = tf.identity(self.value, name="value_estimate")
self.entropy = -tf.reduce_sum(self.probs * tf.log(self.probs + 1e-10), axis=1)

self.old_responsible_probs = tf.reduce_sum(self.old_probs * self.selected_actions, axis=1)
self.create_ppo_optimizer(self.responsible_probs, self.old_responsible_probs,
self.value, self.entropy, beta, epsilon, lr, max_step)
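The optimizer changes above anneal `epsilon` and `beta` linearly toward small final values over `max_step` and clip the probability ratio with the decayed `epsilon`. A framework-free sketch of that loss (the unclipped `r_theta * advantage` term is not shown in this diff but is part of the standard PPO objective; values and shapes are illustrative):

```python
import numpy as np

def ppo_loss(probs, old_probs, advantage, entropy, value_loss,
             global_step, max_step, epsilon=0.2, beta=1e-3):
    """Clipped surrogate with linearly decayed epsilon/beta (sketch)."""
    frac = min(global_step / float(max_step), 1.0)
    decay_epsilon = epsilon + (1e-2 - epsilon) * frac  # polynomial decay, power=1
    decay_beta = beta + (1e-5 - beta) * frac
    r_theta = probs / (old_probs + 1e-10)
    p_opt_a = r_theta * advantage                      # standard unclipped term
    p_opt_b = np.clip(r_theta, 1 - decay_epsilon, 1 + decay_epsilon) * advantage
    policy_loss = -np.mean(np.minimum(p_opt_a, p_opt_b))
    return policy_loss + value_loss - decay_beta * np.mean(entropy)
```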

85
docs/Example-Environments.md


# Example Learning Environments
### About Example Environments
Unity ML Agents currently contains three example environments which demonstrate various features of the platform. In the coming months more will be added. We are also actively open to adding community contributed environments as examples, as long as they are small, simple, demonstrate a unique feature of the platform, and provide a unique non-trivial challenge to modern RL algorithms. Feel free to submit these environments with a Pull-Request explaining the nature of the environment and task.
Unity ML Agents contains a set of example environments which demonstrate various features of the platform. In the coming months more will be added. We are also actively open to adding community contributed environments as examples, as long as they are small, simple, demonstrate a unique feature of the platform, and provide a unique non-trivial challenge to modern RL algorithms. Feel free to submit these environments with a Pull-Request explaining the nature of the environment and task.
## Basic
* Set-up: A linear movement task where the agent must move left or right to rewarding states.
* Goal: Move to the most rewarding state.
* Agents: The environment contains one agent linked to a single brain.
* Agent Reward Function:
* +0.1 for arriving at suboptimal state.
* +1.0 for arriving at optimal state.
* Brains: One brain with the following state/action space.
* State space: (Discrete) One variable corresponding to current state.
* Action space: (Discrete) Two possible actions (Move left, move right).
* Observations: 0
* Reset Parameters: None
## 3DBall

* Observations: None
* Reset Parameters: One, corresponding to size of ball.
## Area
### Push Area
![Push](../images/push.png)
* Set-up: A platforming environment where the agent can push a block around.
* Goal: The agent must push the block to the goal.
* Agents: The environment contains one agent linked to a single brain.
* Agent Reward Function:
* -0.01 for every step.
* +1.0 if the block touches the goal.
* -1.0 if the agent falls off the platform.
* Brains: One brain with the following state/action space.
* State space: (Continuous) 15 variables corresponding to position and velocities of agent, block, and goal.
* Action space: (Discrete) Size of 6, corresponding to movement in cardinal directions, jumping, and no movement.
* Observations: None.
* Reset Parameters: One, corresponding to number of steps in training. Used to adjust size of elements for Curriculum Learning.
### Wall Area
![Wall](../images/wall.png)
* Set-up: A platforming environment where the agent can jump over a wall.
* Goal: The agent must use the block to scale the wall and reach the goal.
* Agents: The environment contains one agent linked to a single brain.
* Agent Reward Function:
* -0.01 for every step.
* +1.0 if the agent touches the goal.
* -1.0 if the agent falls off the platform.
* Brains: One brain with the following state/action space.
* State space: (Continuous) 16 variables corresponding to position and velocities of agent, block, and goal, plus the height of the wall.
* Action space: (Discrete) Size of 6, corresponding to movement in cardinal directions, jumping, and no movement.
* Observations: None.
* Reset Parameters: One, corresponding to number of steps in training. Used to adjust size of the wall for Curriculum Learning.
## Reacher
![Reacher](../images/reacher.png)
* Set-up: Double-jointed arm which can move to target locations.
* Goal: The agent must move its hand to the goal location, and keep it there.
* Agents: The environment contains 32 agents linked to a single brain.
* Agent Reward Function (independent):
* +0.1 Each step agent's hand is in goal location.
* Brains: One brain with the following state/action space.
* State space: (Continuous) 26 variables corresponding to position, rotation, velocity, and angular velocities of the two arm rigidbodies.
* Action space: (Continuous) Size of 4, corresponding to torque applicable to two joints.
* Observations: None
* Reset Parameters: Two, corresponding to goal size, and goal movement speed.
## Crawler
![Crawler](../images/crawler.png)
* Set-up: A creature with 4 arms and 4 forearms.
* Goal: The agent must move its body along the x axis without falling.
* Agents: The environment contains 3 agents linked to a single brain.
* Agent Reward Function (independent):
* +1 times velocity in the x direction
* -1 for falling.
* -0.01 times the action squared
* -0.05 times y position change
* -0.05 times velocity in the z direction
* Brains: One brain with the following state/action space.
* State space: (Continuous) 117 variables corresponding to position, rotation, velocity, and angular velocities of each limb plus the acceleration and angular acceleration of the body.
* Action space: (Continuous) Size of 12, corresponding to torque applicable to 12 joints.
* Observations: None
* Reset Parameters: None

40
docs/Making-a-new-Unity-Environment.md


## Setting up the Unity Project
1. Open an existing Unity project, or create a new one and import the RL interface package:
* [ML-Agents package without TensorflowSharp](https://s3.amazonaws.com/unity-agents/ML-AgentsNoPlugin.unitypackage)
* [ML-Agents package with TensorflowSharp](https://s3.amazonaws.com/unity-agents/ML-AgentsWithPlugin.unitypackage)
1. Open an existing Unity project, or create a new one and import the RL interface package:
* [ML-Agents package without TensorflowSharp](https://s3.amazonaws.com/unity-agents/0.2/ML-AgentsNoPlugin.unitypackage)
* [ML-Agents package with TensorflowSharp](https://s3.amazonaws.com/unity-agents/0.2/ML-AgentsWithPlugin.unitypackage)
2. Rename `TemplateAcademy.cs` (and the contained class name) to the desired name of your new academy class. All Template files are in the folder `Assets -> Template -> Scripts`. Typical naming convention is `YourNameAcademy`.

6. If you will be using Tensorflow Sharp in Unity, you must:
1. Make sure you are using Unity 2017.1 or newer.
2. Make sure the TensorflowSharp plugin is in your Asset folder. It can be downloaded [here](https://s3.amazonaws.com/unity-agents/TFSharpPlugin.unitypackage).
2. Make sure the TensorflowSharp [plugin](https://s3.amazonaws.com/unity-agents/0.2/TFSharpPlugin.unitypackage) is in your Asset folder.
4. For each of the platforms you target (**`PC, Mac and Linux Standalone`**, **`iOS`** or **`Android`**):
2. Select `Scripting Runtime Version` to `Experimental (.NET 4.6 Equivalent)`
3. In `Scripting Defined Symbols`, add the flag `ENABLE_TENSORFLOW`
5. Note that some of these changes will require a Unity Restart

* `Target Frame Rate` Frequency of frame rendering. If environment utilizes observations, increase this during training, and set to `60` during inference. If no observations are used, this can be set to `1` during training.
* **`Default Reset Parameters`** You can set the default configuration to be passed at reset. This will be a mapping from strings to float values that you can call in the academy with `resetParameters["YourDefaultParameter"]`
3. Within **`InitializeAcademy()`**, you can define the initialization of the Academy. Note that this command is run only once at the beginning of the training session.
3. Within **`InitializeAcademy()`**, you can define the initialization of the Academy. Note that this command is run only once at the beginning of the training session. Do **not** use `Awake()`, `Start()` or `OnEnable()`.
3. Within **`AcademyStep()`**, you can define the environment logic each step. Use this function to modify the environment for the agents that will live in it.

For each Brain game object in your academy :
2. In the inspector tab, you can modify the characteristics of the brain in **`Brain Parameters`**
* `State Size` Number of variables within the state provided to the agent(s).
* `Action Size` The number of possible actions for each individual agent to take.
* `Memory Size` The number of floats the agents will remember each step.

* `Heuristic` : You can have your brain automatically react to the observations and states in a customizable way. You will need to drag a `Decision` script into `YourNameBrain`. To create a custom reaction, you must :
* Rename `TemplateDecision.cs` (and the contained class name) to the desired name of your new reaction. Typical naming convention is `YourNameDecision`.
* Implement `Decide`: Given the state, observation and memory of an agent, this function must return an array of floats corresponding to the actions taken by the agent. If the action space type is discrete, the array must be of size 1.
* Optionally, implement `MakeMemory`: Given the state, observation and memory of an agent, this function must return an array of floats corresponding to the new memories of the agent.
* `Internal` : Note that you must have Tensorflow Sharp setup (see top of this page). Here are the fields that must be completed:
* `Graph Model` : This must be the `bytes` file corresponding to the pretrained Tensorflow graph. (You must first drag this file into your Resources folder and then from the Resources folder into the inspector)
* `Graph Scope` : If you set a scope while training your tensorflow model, all your placeholder names will have a prefix. You must specify that prefix here.

* `Name` : Corresponds to the name of the placeholder.
* `Value Type` : Either Integer or Floating Point.
* `Min Value` and `Max Value` : Specify the minimum and maximum values (inclusive) the placeholder can take. The value will be sampled from the uniform distribution at each step. If you want this value to be fixed, set both `Min Value` and `Max Value` to the same number.
## Implementing `YourNameAgent`
1. Rename `TemplateAgent.cs` (and the contained class name) to the desired name of your new agent. Typical naming convention is `YourNameAgent`.

5. If `Reset On Done` is checked, `Reset()` will be called when the agent is done. Else, `AgentOnDone()` will be called. Note that if `Reset On Done` is unchecked, the agent will remain "done" until the Academy resets. This means that it will not take actions in the environment.
6. Implement the following functions in `YourNameAgent.cs` :
* `InitializeAgent()` : Use this method to initialize your agent. This method is called then the agent is created.
* `InitializeAgent()` : Use this method to initialize your agent. This method is called when the agent is created. Do **not** use `Awake()`, `Start()` or `OnEnable()`.
* `AgentStep()` : This function will be called every frame; you must define what your agent will do given the input actions. You must also specify the rewards and whether or not the agent is done. To do so, modify the public fields of the agent `reward` and `done`.
* `AgentReset()` : This function is called at start, when the Academy resets and when the agent is done (if `Reset On Done` is checked).
* `AgentOnDone()` : If `Reset On Done` is not checked, this function will be called when the agent is done. `Reset()` will only be called when the Academy resets.

Small negative rewards are also typically used each step in scenarios where the optimal agent behavior is to complete an episode as quickly as possible.
Note that the reward is reset to 0 at every step; you must add to the reward (`reward += rewardIncrement`). If you use `skipFrame` in the Academy and set your rewards instead of incrementing them, you might lose information since the reward is sent at every step, not at every frame.
## Agent Monitor
* You can add the script `AgentMonitor.cs` to any gameObject with a component `YourNameAgent.cs`. In the inspector of this component, you will see:
* `Fixed Position` : If this box is checked, the monitor will be on the left corner of the screen and will remain here. Note that you can only have one agent with a fixed monitor or multiple monitors will overlap.
* `Vertical Offset`: If `Fixed Position` is unchecked, the monitor will follow the Agent on the screen. Use `Vertical Offset` to decide how far above the agent the monitor should be.
* `Display Brain Name` : If this box is checked, the name of the brain will appear in the monitor. (Can be useful if you have similar agents using different brains).
* `Display Brain Type` : If this box is checked, the type of the brain of the agent will be displayed.
* `Display FrameCount` : If this box is checked, the number of frames that elapsed since the agent was reset will be displayed.
* `Display Current Reward`: If this box is checked, the current reward of the agent will be displayed.
* `Display Max Reward` : If this box is checked, the maximum reward obtained during this training session will be displayed.
* `Display State` : If this box is checked, the current state of the agent will be displayed.
* `Display Action` : If this box is checked, the current action the agent performs will be displayed.
If you passed a `value` from an external brain, the value will be displayed as a bar (green if value is positive / red if value is negative) above the monitor. The bar's maximum value is set to 1 by default but if the value of the agent is above this number, it becomes the new maximum.

14
docs/Using-TensorFlow-Sharp-in-Unity-(Experimental).md


## Requirements
* Unity 2017.1 or above
* Unity Tensorflow Plugin ([Download here](https://s3.amazonaws.com/unity-agents/TFSharpPlugin.unitypackage))
* Unity Tensorflow Plugin ([Download here](https://s3.amazonaws.com/unity-agents/0.2/TFSharpPlugin.unitypackage))
In order to bring a fully trained agent back into Unity, you will need to make sure the nodes of your graph have appropriate names. You can give names to nodes in Tensorflow :
```python
variable = tf.identity(variable, name="variable_name")
```

Go to `Edit` -> `Player Settings` and add `ENABLE_TENSORFLOW` to the `Scripting Define Symbols` for each type of device you want to use (**`PC, Mac and Linux Standalone`**, **`iOS`** or **`Android`**).
Set the Brain you used for training to `Internal`. Drag `your_name_graph.bytes` into Unity and then drag it into the `Graph Model` field in the Brain. If you used a scope when training your graph, specify it in the `Graph Scope` field. Specify the names of the nodes you used in your graph. If you followed these instructions well, the agents in your environment that use this brain will use your fully trained network to make decisions.
* Once you build for iOS in the editor, Xcode will launch.
* In `General` -> `Linked Frameworks and Libraries`:
* Add a framework called `Framework.accelerate`
* Remove the library `libtensorflow-core.a`

* Drag the library `libtensorflow-core.a` from the `Project Navigator` on the left under `Libraries/ML-Agents/Plugins/iOS` into the flag list.
# Using TensorflowSharp without ML-Agents
Beyond controlling an in-game agent, you may desire to use TensorFlowSharp for more general computation. The below instructions describe how to generally embed Tensorflow models without using the ML-Agents framework.

Put the file `your_name_graph.bytes` into Resources.
In your C# script :
At the top, add the line
```csharp
using Tensorflow;
```

TensorFlowSharp.Android.NativeBinding.Init();
#endif
```
Put your graph as a text asset in the variable `graphModel`. You can do so in the inspector by making `graphModel` a public variable and dragging your asset into the inspector, or by loading it from the Resources folder :
```csharp
TextAsset graphModel = Resources.Load (your_name_graph) as TextAsset;
```

25
docs/Readme.md


# Unity ML Agents Documentation
## Basic
## About
* [Example Environments](Example-Environments.md)
## Tutorials
* [Example Environments](Example-Environments.md)
* [Making a new Unity Environment](Making-a-new-Unity-Environment.md)
* [How to use the Python API](Unity-Agents---Python-API.md)
## Advanced
* [How to make a new Unity Environment](Making-a-new-Unity-Environment.md)
* [Best practices when designing an Environment](best-practices.md)
* [Best practices when training using PPO](best-practices-ppo.md)
* [How to organize the Scene](Organizing-the-Scene.md)
* [How to use the Python API](Unity-Agents---Python-API.md)
* [How to use TensorflowSharp inside Unity [Experimental]](Using-TensorFlow-Sharp-in-Unity-(Experimental).md)
## Features
* [Scene Organization](Organizing-the-Scene.md)
* [Curriculum Learning](curriculum.md)
* [Broadcast](broadcast.md)
* [Monitor](monitor.md)
* [TensorflowSharp in Unity [Experimental]](Using-TensorFlow-Sharp-in-Unity-(Experimental).md)
## Best Practices
* [Best practices when creating an Environment](best-practices.md)
* [Best practices when training using PPO](best-practices-ppo.md)
## Help
* [Limitations & Common Issues](Limitations-&-Common-Issues.md)

11
docs/best-practices.md


## General
* It is often helpful to begin with the simplest version of the problem, to ensure the agent can learn it. From there increase
complexity over time.
complexity over time. This can either be done manually, or via Curriculum Learning, where a set of lessons which progressively increase in difficulty are presented to the agent ([learn more here](../docs/curriculum.md)).
* For locomotion tasks, a small positive reward (+0.1) for forward progress is typically used.
* If you want the agent to finish a task quickly, it is often helpful to provide a small penalty every step (-0.1).
* For locomotion tasks, a small positive reward (+0.1) for forward velocity is typically used.
* If you want the agent to finish a task quickly, it is often helpful to provide a small penalty every step (-0.05) that the agent does not complete the task. In this case completion of the task should also coincide with the end of the episode.
* Overly-large negative rewards can cause undesirable behavior where an agent learns to avoid any behavior which might produce the negative reward, even if it is also behavior which can eventually lead to a positive reward.
* The magnitude of each state variable should be normalized to around 1.0.
* Rotation information on GameObjects should be recorded as `state.Add(transform.rotation.eulerAngles.y/180.0f-1.0f);` rather than `state.Add(transform.rotation.y);`.
* Positional information of relevant GameObjects should be encoded in relative coordinates wherever possible. This is often relative to the agent position.
* Be sure to set the action-space-size to the number of used actions, and not greater, as doing the latter can interfere with the efficiency of the training process.

4
docs/Getting-Started-with-Balance-Ball.md


Because TensorFlowSharp support is still experimental, it is disabled by default. In order to enable it, you must follow these steps. Please note that the `Internal` Brain mode will only be available once completing these steps.
1. Make sure you are using Unity 2017.1 or newer.
2. Make sure the TensorFlowSharp plugin is in your `Assets` folder. A Plugins folder which includes TF# can be downloaded [here](https://s3.amazonaws.com/unity-agents/TFSharpPlugin.unitypackage). Double click and import it once downloaded.
2. Make sure the TensorFlowSharp plugin is in your `Assets` folder. A Plugins folder which includes TF# can be downloaded [here](https://s3.amazonaws.com/unity-agents/0.2/TFSharpPlugin.unitypackage). Double click and import it once downloaded.
4. For each of the platforms you target (**`PC, Mac and Linux Standalone`**, **`iOS`** or **`Android`**):
1. Go into `Other Settings`.
2. Select `Scripting Runtime Version` to `Experimental (.NET 4.6 Equivalent)`
3. In `Scripting Defined Symbols`, add the flag `ENABLE_TENSORFLOW`

45
docs/best-practices-ppo.md


### Batch Size
`batch_size` corresponds to how many experiences are used for each gradient descent update. This should always be a fraction
of the `buffer_size`. If you are using a continuous action space, this value should be large. If you are using a discrete action space, this value should be smaller.
of the `buffer_size`. If you are using a continuous action space, this value should be large (in 1000s). If you are using a discrete action space, this value should be smaller (in 10s).
Typical Range (Continuous): `512` - `5120`

### Beta
### Beta (Used only in Discrete Control)
`beta` corresponds to the strength of the entropy regularization. This ensures that discrete action space agents properly
explore during training. Increasing this will ensure more random actions are taken. This should be adjusted such that
the entropy (measurable from TensorBoard) slowly decreases alongside increases in reward. If entropy drops too quickly,
increase `beta`. If entropy drops too slowly, decrease `beta`.
`beta` corresponds to the strength of the entropy regularization, which makes the policy "more random." This ensures that discrete action space agents properly explore during training. Increasing this will ensure more random actions are taken. This should be adjusted such that the entropy (measurable from TensorBoard) slowly decreases alongside increases in reward. If entropy drops too quickly, increase `beta`. If entropy drops too slowly, decrease `beta`.
Typical Range: `1e-4` - `1e-2`

This should be a multiple of `batch_size`.
This should be a multiple of `batch_size`. Typically larger buffer sizes correspond to more stable training updates.
`epsilon` corresponds to the acceptable threshold between the old and new policies during gradient descent updating.
`epsilon` corresponds to the acceptable threshold of divergence between the old and new policies during gradient descent updating. Setting this value small will result in more stable updates, but will also slow the training process.
Typical Range: `0.1` - `0.3`

### Number of Epochs
`num_epoch` is the number of passes through the experience buffer during gradient descent. The larger the batch size, the
larger it is acceptable to make this.
larger it is acceptable to make this. Decreasing this will ensure more stable updates, at the cost of slower learning.
Typical Range: `3` - `10`
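Taken together, `buffer_size`, `batch_size`, and `num_epoch` determine how many gradient updates each filled buffer produces; a quick back-of-the-envelope sketch (values are placeholders, not recommendations):

```python
buffer_size = 2048   # experiences collected before an update
batch_size = 512     # experiences per gradient step (continuous-control scale)
num_epoch = 5        # passes over the buffer

updates_per_buffer = num_epoch * (buffer_size // batch_size)
print(updates_per_buffer)  # 20
```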

In cases where there are frequent rewards within an episode, or episodes are prohibitively large, this can be a smaller number. For most stable training however, this number should be large enough to capture all the important behavior within a sequence of an agent's actions.
### Max Steps
`max_steps` corresponds to how many steps of the simulation (multiplied by frame-skip) are run during the training process. This value should be increased for more complex problems.
Typical Range: `5e5 - 1e7`
### Normalize
`normalize` corresponds to whether normalization is applied to the state inputs. This normalization is based on the running average and variance of the states.
Normalization can be helpful in cases with complex continuous control problems, but may be harmful with simpler discrete control problems.
### Number of Layers
`num_layers` corresponds to how many hidden layers are present after the state input, or after the CNN encoding of the observation. For simple problems,
fewer layers are likely to train faster and more efficiently. More layers may be necessary for more complex control problems.
Typical range: `1` - `3`
## Training Statistics
To view training statistics, use Tensorboard. For information on launching and using Tensorboard, see [here](./Getting-Started-with-Balance-Ball.md#observing-training-progress).

The general trend in reward should consistently increase over time. Small ups and downs are to be expected.
The general trend in reward should consistently increase over time. Small ups and downs are to be expected. Depending on the complexity of the task, a significant increase in reward may not present itself until millions of steps into the training process.
This corresponds to how random the decisions of a brain are. This should consistently decrease during training. If it decreases too soon or not at all, `beta` should be adjusted (when using discrete action space).
### Learning Rate

### Value Estimate
These values should increase with the reward. They correspond to how much future reward the agent predicts itself receiving at any given point.
### Value Loss

495
images/push.png

Width: 2550 | Height: 1494 | Size: 192 KiB

1001
images/reacher.png
File diff is too large to display.

695
images/wall.png

Width: 2444 | Height: 1424 | Size: 255 KiB

1001
images/crawler.png
File diff is too large to display.

488
images/curriculum.png

Width: 2069 | Height: 449 | Size: 116 KiB

173
images/math.png
File diff is too large to display.

563
images/monitor.png

Width: 2372 | Height: 1186 | Size: 146 KiB

213
images/broadcast.png

Width: 550 | Height: 550 | Size: 64 KiB

260
images/curriculum_progress.png

Width: 1441 | Height: 619 | Size: 96 KiB

380
unity-environment/Assets/ML-Agents/Scripts/Monitor.cs


using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.UI;
using Newtonsoft.Json;
using System.Linq;
/** The type of monitor the information must be displayed in.
* <slider> corresponds to a single rectangle whose width is given
* by a float between -1 and 1. (green is positive, red is negative)
* <hist> corresponds to n vertical sliders.
* <text> is a text field.
* <bar> is a rectangle of fixed length to represent the proportions
* of a list of floats.
*/
public enum MonitorType
{
slider,
hist,
text,
bar
}
/** Monitor is used to display information. Use the log function to add
* information to your monitor.
*/
public class Monitor : MonoBehaviour
{
static bool isInstanciated;
static GameObject canvas;
private struct DisplayValue
{
public float time;
public object value;
public MonitorType monitorDisplayType;
}
static Dictionary<Transform, Dictionary<string, DisplayValue>> displayTransformValues;
static private Color[] barColors;
[HideInInspector]
static public float verticalOffset = 3f;
/**< \brief This float represents how high above the target the monitors will be. */
static GUIStyle keyStyle;
static GUIStyle valueStyle;
static GUIStyle greenStyle;
static GUIStyle redStyle;
static GUIStyle[] colorStyle;
static bool initialized;
/** Use the Monitor.Log static function to attach information to a transform.
* If displayType is <text>, value can be any object.
* If displayType is <slider>, value must be a float.
* If displayType is <hist>, value must be a List or Array of floats.
* If displayType is <bar>, value must be a List or Array of positive floats.
* Note that <slider> and <hist> cap values between -1 and 1.
* @param key The name of the information you wish to Log.
* @param value The value you want to display.
* @param displayType The type of display.
* @param target The transform you want to attach the information to.
*/
public static void Log(
string key,
object value,
MonitorType displayType = MonitorType.text,
Transform target = null)
{
if (!isInstanciated)
{
InstanciateCanvas();
isInstanciated = true;
}
if (target == null)
{
target = canvas.transform;
}
if (!displayTransformValues.Keys.Contains(target))
{
displayTransformValues[target] = new Dictionary<string, DisplayValue>();
}
Dictionary<string, DisplayValue> displayValues = displayTransformValues[target];
if (value == null)
{
RemoveValue(target, key);
return;
}
if (!displayValues.ContainsKey(key))
{
DisplayValue dv = new DisplayValue();
dv.time = Time.timeSinceLevelLoad;
dv.value = value;
dv.monitorDisplayType = displayType;
displayValues[key] = dv;
while (displayValues.Count > 20)
{
string max = displayValues.Aggregate((l, r) => l.Value.time < r.Value.time ? l : r).Key;
RemoveValue(target, max);
}
}
else
{
DisplayValue dv = displayValues[key];
dv.value = value;
displayValues[key] = dv;
}
}
/** Remove a value from a monitor
* @param target The transform to which the information is attached
* @param key The key of the information you want to remove
*/
public static void RemoveValue(Transform target, string key)
{
if (target == null)
{
target = canvas.transform;
}
if (displayTransformValues.Keys.Contains(target))
{
if (displayTransformValues[target].ContainsKey(key))
{
displayTransformValues[target].Remove(key);
if (displayTransformValues[target].Keys.Count == 0)
{
displayTransformValues.Remove(target);
}
}
}
}
/** Remove all information from a monitor
* @param target The transform to which the information is attached
*/
public static void RemoveAllValues(Transform target)
{
if (target == null)
{
target = canvas.transform;
}
if (displayTransformValues.Keys.Contains(target))
{
displayTransformValues.Remove(target);
}
}
/** Use SetActive to enable or disable the Monitor via script
* @param active Set the Monitor's status to the value of active
*/
public static void SetActive(bool active){
if (!isInstanciated)
{
InstanciateCanvas();
isInstanciated = true;
}
canvas.SetActive(active);
}
private static void InstanciateCanvas()
{
canvas = GameObject.Find("AgentMonitorCanvas");
if (canvas == null)
{
canvas = new GameObject();
canvas.name = "AgentMonitorCanvas";
canvas.AddComponent<Monitor>();
}
displayTransformValues = new Dictionary<Transform, Dictionary< string , DisplayValue>>();
}
private float[] ToFloatArray(object input)
{
try
{
return JsonConvert.DeserializeObject<float[]>(
JsonConvert.SerializeObject(input, Formatting.None));
}
catch
{
}
try
{
return new float[1]
{JsonConvert.DeserializeObject<float>(
JsonConvert.SerializeObject(input, Formatting.None))
};
}
catch
{
}
return new float[0];
}
void OnGUI()
{
if (!initialized)
{
Initialize();
initialized = true;
}
var toIterate = displayTransformValues.Keys.ToList();
foreach (Transform target in toIterate)
{
if (target == null)
{
displayTransformValues.Remove(target);
continue;
}
float widthScaler = (Screen.width / 1000f);
float keyPixelWidth = 100 * widthScaler;
float keyPixelHeight = 20 * widthScaler;
float paddingwidth = 10 * widthScaler;
float scale = 1f;
Vector2 origin = new Vector3(0, Screen.height);
if (!(target == canvas.transform))
{
Vector3 cam2obj = target.position - Camera.main.transform.position;
scale = Mathf.Min(1, 20f / (Vector3.Dot(cam2obj, Camera.main.transform.forward)));
Vector3 worldPosition = Camera.main.WorldToScreenPoint(target.position + new Vector3(0, verticalOffset, 0));
origin = new Vector3(worldPosition.x - keyPixelWidth * scale, Screen.height - worldPosition.y);
}
keyPixelWidth *= scale;
keyPixelHeight *= scale;
paddingwidth *= scale;
keyStyle.fontSize = (int)(keyPixelHeight * 0.8f);
if (keyStyle.fontSize < 2)
{
continue;
}
Dictionary<string, DisplayValue> displayValues = displayTransformValues[target];
int index = 0;
foreach (string key in displayValues.Keys.OrderBy(x => -displayValues[x].time))
{
keyStyle.alignment = TextAnchor.MiddleRight;
GUI.Label(new Rect(origin.x, origin.y - (index + 1) * keyPixelHeight, keyPixelWidth, keyPixelHeight), key, keyStyle);
if (displayValues[key].monitorDisplayType == MonitorType.text)
{
valueStyle.alignment = TextAnchor.MiddleLeft;
GUI.Label(new Rect(
origin.x + paddingwidth + keyPixelWidth,
origin.y - (index + 1) * keyPixelHeight,
keyPixelWidth, keyPixelHeight),
JsonConvert.SerializeObject(displayValues[key].value, Formatting.None), valueStyle);
}
else if (displayValues[key].monitorDisplayType == MonitorType.slider)
{
float sliderValue = 0f;
if (displayValues[key].value.GetType() == typeof(float))
{
sliderValue = (float)displayValues[key].value;
}
else
{
Debug.LogError(string.Format("The value for {0} could not be displayed as " +
"a slider because it is not a number.", key));
}
sliderValue = Mathf.Min(1f, sliderValue);
GUIStyle s = greenStyle;
if (sliderValue < 0)
{
sliderValue = Mathf.Min(1f, -sliderValue);
s = redStyle;
}
GUI.Box(new Rect(
origin.x + paddingwidth + keyPixelWidth,
origin.y - (index + 0.9f) * keyPixelHeight,
keyPixelWidth * sliderValue, keyPixelHeight * 0.8f),
GUIContent.none, s);
}
else if (displayValues[key].monitorDisplayType == MonitorType.hist)
{
float histWidth = 0.15f;
float[] vals = ToFloatArray(displayValues[key].value);
for (int i = 0; i < vals.Length; i++)
{
float value = Mathf.Min(vals[i], 1);
GUIStyle s = greenStyle;
if (value < 0)
{
value = Mathf.Min(1f, -value);
s = redStyle;
}
GUI.Box(new Rect(
origin.x + paddingwidth + keyPixelWidth + (keyPixelWidth * histWidth + paddingwidth / 2) * i,
origin.y - (index + 0.1f) * keyPixelHeight,
keyPixelWidth * histWidth, -keyPixelHeight * value),
GUIContent.none, s);
}
}
else if (displayValues[key].monitorDisplayType == MonitorType.bar)
{
float[] vals = ToFloatArray(displayValues[key].value);
float valsSum = 0f;
float valsCum = 0f;
foreach (float f in vals)
{
valsSum += Mathf.Max(f, 0);
}
if (valsSum == 0)
{
Debug.LogError(string.Format("The Monitor value for key {0} must be "
+ "a list or array of positive values and cannot be empty.", key));
}
else
{
for (int i = 0; i < vals.Length; i++)
{
float value = Mathf.Max(vals[i], 0) / valsSum;
GUI.Box(new Rect(
origin.x + paddingwidth + keyPixelWidth + keyPixelWidth * valsCum,
origin.y - (index + 0.9f) * keyPixelHeight,
keyPixelWidth * value, keyPixelHeight * 0.8f),
GUIContent.none, colorStyle[i % colorStyle.Length]);
valsCum += value;
}
}
}
index++;
}
}
}
private void Initialize()
{
keyStyle = GUI.skin.label;
valueStyle = GUI.skin.label;
valueStyle.clipping = TextClipping.Overflow;
valueStyle.wordWrap = false;
barColors = new Color[6]{ Color.magenta, Color.blue, Color.cyan, Color.green, Color.yellow, Color.red };
colorStyle = new GUIStyle[barColors.Length];
for (int i = 0; i < barColors.Length; i++)
{
Texture2D texture = new Texture2D(1, 1, TextureFormat.ARGB32, false);
texture.SetPixel(0, 0, barColors[i]);
texture.Apply();
GUIStyle staticRectStyle = new GUIStyle();
staticRectStyle.normal.background = texture;
colorStyle[i] = staticRectStyle;
}
greenStyle = colorStyle[3];
redStyle = colorStyle[5];
}
}

12
unity-environment/Assets/ML-Agents/Scripts/Monitor.cs.meta


fileFormatVersion: 2
guid: e59a31a1cc2f5464d9a61bef0bc9a53b
timeCreated: 1508031727
licenseType: Free
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

9
unity-environment/Assets/ML-Agents/Examples/Area.meta


fileFormatVersion: 2
guid: dd0ac6aeac49a4adcb3e8db0f7280fc0
folderAsset: yes
timeCreated: 1506303336
licenseType: Pro
DefaultImporter:
userData:
assetBundleName:
assetBundleVariant:

9
unity-environment/Assets/ML-Agents/Examples/Reacher.meta


fileFormatVersion: 2
guid: 605a889b6a7da4449a954adbd51b3c3b
folderAsset: yes
timeCreated: 1508533646
licenseType: Pro
DefaultImporter:
userData:
assetBundleName:
assetBundleVariant:

9
unity-environment/Assets/ML-Agents/Examples/Crawler.meta


fileFormatVersion: 2
guid: 0efc731e39fd04495bee94884abad038
folderAsset: yes
timeCreated: 1509574928
licenseType: Free
DefaultImporter:
userData:
assetBundleName:
assetBundleVariant:

10
unity-environment/Assets/ML-Agents/Examples/Tennis/Prefabs.meta


fileFormatVersion: 2
guid: cbd3b3ae7cdbe42eaa03e192885900cf
folderAsset: yes
timeCreated: 1511815356
licenseType: Pro
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

40
unity-environment/Assets/ML-Agents/Examples/Tennis/Scripts/TennisArea.cs


using System.Collections;
using System.Collections.Generic;
using UnityEngine;
public class TennisArea : MonoBehaviour {
public GameObject ball;
public GameObject agentA;
public GameObject agentB;
// Use this for initialization
void Start () {
}
// Update is called once per frame
void Update () {
}
public void MatchReset() {
float ballOut = Random.Range(4f, 11f);
int flip = Random.Range(0, 2);
if (flip == 0)
{
ball.transform.position = new Vector3(-ballOut, 5f, 0f) + transform.position;
}
else
{
ball.transform.position = new Vector3(ballOut, 5f, 0f) + transform.position;
}
ball.GetComponent<Rigidbody>().velocity = new Vector3(0f, 0f, 0f);
ball.transform.localScale = new Vector3(1, 1, 1);
}
void FixedUpdate() {
Vector3 rgV = ball.GetComponent<Rigidbody>().velocity;
ball.GetComponent<Rigidbody>().velocity = new Vector3(Mathf.Clamp(rgV.x, -9f, 9f), Mathf.Clamp(rgV.y, -9f, 9f), rgV.z);
}
}

13
unity-environment/Assets/ML-Agents/Examples/Tennis/Scripts/TennisArea.cs.meta


fileFormatVersion: 2
guid: bc15854a4efe14dceb84a3183ca4c896
timeCreated: 1511824270
licenseType: Pro
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

81
python/unityagents/curriculum.py


import json
import numpy as np
from .exception import UnityEnvironmentException
class Curriculum(object):
def __init__(self, location, default_reset_parameters):
"""
Initializes a Curriculum object.
:param location: Path to JSON defining curriculum.
:param default_reset_parameters: Set of reset parameters for environment.
"""
self.lesson_number = 0
self.lesson_length = 0
self.measure_type = None
if location is None:
self.data = None
else:
try:
with open(location) as data_file:
self.data = json.load(data_file)
except FileNotFoundError:
raise UnityEnvironmentException(
"The file {0} could not be found.".format(location))
except UnicodeDecodeError:
raise UnityEnvironmentException("There was an error decoding {}".format(location))
self.smoothing_value = 0
for key in ['parameters', 'measure', 'thresholds',
'min_lesson_length', 'signal_smoothing']:
if key not in self.data:
raise UnityEnvironmentException("{0} does not contain a "
"{1} field.".format(location, key))
parameters = self.data['parameters']
self.measure_type = self.data['measure']
self.max_lesson_number = len(self.data['thresholds'])
for key in parameters:
if key not in default_reset_parameters:
raise UnityEnvironmentException(
"The parameter {0} in Curriculum {1} is not present in "
"the Environment".format(key, location))
for key in parameters:
if len(parameters[key]) != self.max_lesson_number + 1:
raise UnityEnvironmentException(
"The parameter {0} in Curriculum {1} must have {2} values "
"but {3} were found".format(key, location,
self.max_lesson_number + 1, len(parameters[key])))
@property
def measure(self):
return self.measure_type
def get_lesson_number(self):
return self.lesson_number
def set_lesson_number(self, value):
self.lesson_length = 0
self.lesson_number = max(0, min(value, self.max_lesson_number))
def get_lesson(self, progress):
"""
Returns reset parameters which correspond to current lesson.
:param progress: Measure of progress (either reward or percentage steps completed).
:return: Dictionary containing reset parameters.
"""
if self.data is None or progress is None:
return {}
if self.data["signal_smoothing"]:
progress = self.smoothing_value * 0.25 + 0.75 * progress
self.smoothing_value = progress
self.lesson_length += 1
if self.lesson_number < self.max_lesson_number:
if ((progress > self.data['thresholds'][self.lesson_number]) and
(self.lesson_length > self.data['min_lesson_length'])):
self.lesson_length = 0
self.lesson_number += 1
config = {}
parameters = self.data["parameters"]
for key in parameters:
config[key] = parameters[key][self.lesson_number]
return config

87
docs/curriculum.md


# Training with Curriculum Learning
## Background
Curriculum learning is a way of training a machine learning model where more difficult
aspects of a problem are gradually introduced in such a way that the model is always
optimally challenged. Here is a link to the original paper which introduces the idea
formally. More generally, this idea has been around much longer, for it is how we humans
typically learn. If you imagine any childhood primary school education, there is an
ordering of classes and topics. Arithmetic is taught before algebra, for example.
Likewise, algebra is taught before calculus. The skills and knowledge learned in the
earlier subjects provide a scaffolding for later lessons. The same principle can be
applied to machine learning, where training on easier tasks can provide a scaffolding
for harder tasks in the future.
![Math](../images/math.png)
_Example of a mathematics curriculum. Lessons progress from simpler topics to more
complex ones, with each building on the last._
When we think about how Reinforcement Learning actually works, the primary learning
signal is a scalar reward received occasionally throughout training. In more complex
or difficult tasks, this reward can often be sparse, and rarely achieved. For example,
imagine a task in which an agent needs to scale a wall to arrive at a goal. The starting
point when training an agent to accomplish this task will be a random policy. That
starting policy will have the agent running in circles, and will likely never, or very
rarely, scale the wall properly to achieve the reward. If we start with a simpler
task, such as moving toward an unobstructed goal, then the agent can easily learn to
accomplish the task. From there, we can slowly add to the difficulty of the task by
increasing the size of the wall, until the agent can complete the initially
near-impossible task of scaling the wall. We are including just such an environment with
ML-Agents 0.2, called Wall Area.
![Wall](../images/curriculum.png)
_Demonstration of a curriculum training scenario in which a progressively taller wall
obstructs the path to the goal._
To see this in action, observe the two learning curves below. Each displays the reward
over time for an agent trained using PPO with the same set of training hyperparameters.
The difference is that the agent on the left was trained using the full-height wall
version of the task, and the right agent was trained using the curriculum version of
the task. As you can see, without using curriculum learning the agent has a lot of
difficulty. We think that by using well-crafted curricula, agents trained using
reinforcement learning will be able to accomplish tasks that would otherwise be much more difficult.
![Log](../images/curriculum_progress.png)
## How-To
So how does it work? In order to define a curriculum, the first step is to decide which
parameters of the environment will vary. In the case of the Wall Area environment, what
varies is the height of the wall. We can define this as a reset parameter in the Academy
object of our scene, and by doing so it becomes adjustable via the Python API. Rather
than adjusting it by hand, we then create a simple JSON file which describes the
structure of the curriculum. Within it we can set at what points in the training process
our wall height will change, either based on the percentage of training steps which have
taken place, or on the average reward the agent has received in the recent past.
Once these are in place, we simply launch ppo.py using the `--curriculum-file` flag to
point to the JSON file, and PPO will train using Curriculum Learning. Of course we can
then keep track of the current lesson and progress via TensorBoard.
```json
{
"measure" : "reward",
"thresholds" : [0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5],
"min_lesson_length" : 2,
"signal_smoothing" : true,
"parameters" :
{
"min_wall_height" : [0.0, 0.5, 1.0, 1.5, 2.0, 2.5, 3.0, 3.5, 4.0, 4.5],
"max_wall_height" : [1.5, 2.0, 2.5, 3.0, 3.5, 4.0, 4.5, 5.0, 5.5, 6.0]
}
}
```
* `measure` - What to measure learning progress, and advancement in lessons, by.
  * `reward` - Uses a measure of the reward received.
  * `progress` - Uses the ratio of steps/max_steps.
* `thresholds` (float array) - Values of `measure` at which the lesson should be incremented.
* `min_lesson_length` (int) - How many times the progress measure should be reported before incrementing the lesson.
* `signal_smoothing` (true/false) - Whether to weight the current progress measure by previous values.
  * If `true`, the weighting will be 0.75 (new) and 0.25 (old); see the sketch after this list.
* `parameters` (dictionary of key:string, value:float array) - Corresponds to the academy reset parameters to control. The length of each array should be one greater than the number of thresholds.
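To make the smoothing and threshold rules concrete, here is a small sketch that drives the `Curriculum` class added in `python/unityagents/curriculum.py` by this change. The import path, JSON file path, reset-parameter names, and reward values are illustrative assumptions for the example, not files or values shipped with the repository.

```python
from unityagents.curriculum import Curriculum

# Placeholder reset parameters matching the JSON example above; the path is illustrative.
defaults = {"min_wall_height": 0.0, "max_wall_height": 1.5}
curriculum = Curriculum("curricula/wall.json", defaults)

# Report the progress measure (here, hypothetical mean rewards) once per summary
# period. get_lesson() applies the 0.75/0.25 smoothing when signal_smoothing is
# true, advances the lesson once the smoothed measure passes the current threshold
# and min_lesson_length reports have elapsed, and returns the reset parameters
# for the (possibly new) lesson.
for mean_reward in [0.2, 0.4, 0.7, 0.8]:
    reset_config = curriculum.get_lesson(mean_reward)
    print(curriculum.get_lesson_number(), reset_config)
```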

18
docs/monitor.md


# Using the Monitor
![Monitor](../images/monitor.png)
The monitor allows visualizing information related to the agents or training process within a Unity scene.
You can track many different things, both related and unrelated to the agents themselves. To use the Monitor, call the Log function anywhere in your code:
```csharp
Monitor.Log(key, value, displayType, target)
```
* *`key`* is the name of the information you want to display.
* *`value`* is the information you want to display.
* *`displayType`* is a MonitorType that can be either `text`, `slider`, `bar` or `hist`.
* `text` will convert `value` into a string and display it. It can be useful for displaying error messages!
* `slider` is used to display a single float between -1 and 1. Note that value must be a float if you want to use a slider. If the value is positive, the slider will be green, if the value is negative, the slider will be red.
* `hist` is used to display multiple floats. Note that value must be a list or array of floats. The Histogram will be a sequence of vertical sliders.
* `bar` is used to see the proportions. Note that value must be a list or array of positive floats. For each float in values, a rectangle of width of value divided by the sum of all values will be show. It is best for visualizing values that sum to 1.
* *`target`* is the transform to which you want to attach information. If the transform is `null` the information will be attached to the global monitor.

12
docs/broadcast.md


# Using the Broadcast Feature
The Player, Heuristic and Internal brains have been updated to support broadcast. The broadcast feature allows you to collect data from your agents in Python without controlling them.
## How to use : Unity
To turn it on in Unity, simply check the `Broadcast` box as shown below:
![Broadcast](../images/broadcast.png)
## How to use : Python
When you launch your Unity Environment from Python, you can see what the agents connected to non-external brains are doing. When calling `step` or `reset` on your environment, you retrieve a dictionary from brain names to `BrainInfo` objects. The dictionary contains a `BrainInfo` entry for each non-external brain set to broadcast.
Just like with an external brain, the `BrainInfo` object contains the fields for `observations`, `states`, `memories`,`rewards`, `local_done`, `agents` and `previous_actions`. Note that `previous_actions` corresponds to the actions that were taken by the agents at the previous step, not the current one.
Note that when you do a `step` on the environment, you cannot provide actions for non-external brains. If there are no external brains in the scene, simply call `step()` with no arguments.
You can use the broadcast feature to collect data generated by game sessions driven by Player, Heuristic or Internal brains. You can then use this data to train an agent in a supervised context.
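A minimal sketch of reading broadcast data from Python follows; the environment file name and brain name are placeholders, and the fields accessed are the `BrainInfo` fields listed above.

```python
from unityagents import UnityEnvironment

# "MyEnvironment" and "PlayerBrain" are placeholder names for this sketch.
env = UnityEnvironment(file_name="MyEnvironment")
info = env.reset(train_mode=False)       # dictionary: brain name -> BrainInfo

player_info = info["PlayerBrain"]        # a broadcasting, non-external brain
print(player_info.states)                # agent states collected this step
print(player_info.previous_actions)      # actions taken at the previous step

# With no external brains in the scene, step() is called with no arguments.
info = env.step()
env.close()
```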

9
unity-environment/Assets/ML-Agents/Examples/Reacher/Materials.meta


fileFormatVersion: 2
guid: 2cdd8b62c2af449a994ea35fb15ef060
folderAsset: yes
timeCreated: 1508602923
licenseType: Pro
DefaultImporter:
userData:
assetBundleName:
assetBundleVariant:

77
unity-environment/Assets/ML-Agents/Examples/Reacher/Materials/Goal.mat


%YAML 1.1
%TAG !u! tag:unity3d.com,2011:
--- !u!21 &2100000
Material:
serializedVersion: 6
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_Name: Goal
m_Shader: {fileID: 46, guid: 0000000000000000f000000000000000, type: 0}
m_ShaderKeywords: _ALPHAPREMULTIPLY_ON _EMISSION
m_LightmapFlags: 1
m_EnableInstancingVariants: 0
m_DoubleSidedGI: 0
m_CustomRenderQueue: 3000
stringTagMap:
RenderType: Transparent
disabledShaderPasses: []
m_SavedProperties:
serializedVersion: 3
m_TexEnvs:
- _BumpMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _DetailAlbedoMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _DetailMask:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _DetailNormalMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _EmissionMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _MainTex:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _MetallicGlossMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _OcclusionMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _ParallaxMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
m_Floats:
- _BumpScale: 1
- _Cutoff: 0.5
- _DetailNormalMapScale: 1
- _DstBlend: 10
- _GlossMapScale: 1
- _Glossiness: 0
- _GlossyReflections: 1
- _Metallic: 0
- _Mode: 3
- _OcclusionStrength: 1
- _Parallax: 0.02
- _SmoothnessTextureChannel: 0
- _SpecularHighlights: 1
- _SrcBlend: 1
- _UVSec: 0
- _ZWrite: 0
m_Colors:
- _Color: {r: 0, g: 0.8235294, b: 0.7553752, a: 0.478}
- _EmissionColor: {r: 0.13235295, g: 0.13235295, b: 0.13235295, a: 1}

9
unity-environment/Assets/ML-Agents/Examples/Reacher/Materials/Goal.mat.meta


fileFormatVersion: 2
guid: 3779d76a368ea4ebbaea1e027d5dfd98
timeCreated: 1508602972
licenseType: Pro
NativeFormatImporter:
mainObjectFileID: 2100000
userData:
assetBundleName:
assetBundleVariant:

84
unity-environment/Assets/ML-Agents/Examples/Reacher/Materials/Goal_on.mat


%YAML 1.1
%TAG !u! tag:unity3d.com,2011:
--- !u!21 &2100000
Material:
serializedVersion: 6
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_Name: Goal_on
m_Shader: {fileID: 46, guid: 0000000000000000f000000000000000, type: 0}
m_ShaderKeywords: _ALPHAPREMULTIPLY_ON _EMISSION
m_LightmapFlags: 1
m_EnableInstancingVariants: 0
m_DoubleSidedGI: 0
m_CustomRenderQueue: 3000
stringTagMap:
RenderType: Transparent
disabledShaderPasses: []
m_SavedProperties:
serializedVersion: 3
m_TexEnvs:
- _BumpMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _DetailAlbedoMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _DetailMask:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _DetailNormalMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _EmissionMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _Illum:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _MainTex:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _MetallicGlossMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _OcclusionMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _ParallaxMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
m_Floats:
- _BumpScale: 1
- _Cutoff: 0.5
- _DetailNormalMapScale: 1
- _DstBlend: 10
- _Emission: 1
- _GlossMapScale: 1
- _Glossiness: 0
- _GlossyReflections: 1
- _InvFade: 1
- _Metallic: 0
- _Mode: 3
- _OcclusionStrength: 1
- _Parallax: 0.02
- _SmoothnessTextureChannel: 0
- _SpecularHighlights: 1
- _SrcBlend: 1
- _UVSec: 0
- _ZWrite: 0
m_Colors:
- _Color: {r: 0, g: 1, b: 0.37931037, a: 0.497}
- _EmissionColor: {r: 0.13202855, g: 0.27205884, b: 0.16196606, a: 1}
- _TintColor: {r: 0.5, g: 0.5, b: 0.5, a: 0.5}

9
unity-environment/Assets/ML-Agents/Examples/Reacher/Materials/Goal_on.mat.meta


fileFormatVersion: 2
guid: 2baf65e1ee49345a7aa8ae8b1da3619b
timeCreated: 1508627473
licenseType: Pro
NativeFormatImporter:
mainObjectFileID: 0
userData:
assetBundleName:
assetBundleVariant:

76
unity-environment/Assets/ML-Agents/Examples/Reacher/Materials/Hand.mat


%YAML 1.1
%TAG !u! tag:unity3d.com,2011:
--- !u!21 &2100000
Material:
serializedVersion: 6
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_Name: Hand
m_Shader: {fileID: 46, guid: 0000000000000000f000000000000000, type: 0}
m_ShaderKeywords:
m_LightmapFlags: 4
m_EnableInstancingVariants: 0
m_DoubleSidedGI: 0
m_CustomRenderQueue: -1
stringTagMap: {}
disabledShaderPasses: []
m_SavedProperties:
serializedVersion: 3
m_TexEnvs:
- _BumpMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _DetailAlbedoMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _DetailMask:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _DetailNormalMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _EmissionMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _MainTex:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _MetallicGlossMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _OcclusionMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _ParallaxMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
m_Floats:
- _BumpScale: 1
- _Cutoff: 0.5
- _DetailNormalMapScale: 1
- _DstBlend: 0
- _GlossMapScale: 1
- _Glossiness: 0.5
- _GlossyReflections: 1
- _Metallic: 0
- _Mode: 0
- _OcclusionStrength: 1
- _Parallax: 0.02
- _SmoothnessTextureChannel: 0
- _SpecularHighlights: 1
- _SrcBlend: 1
- _UVSec: 0
- _ZWrite: 1
m_Colors:
- _Color: {r: 0.5691557, g: 0.33915442, b: 0.9044118, a: 1}
- _EmissionColor: {r: 0, g: 0, b: 0, a: 1}

9
unity-environment/Assets/ML-Agents/Examples/Reacher/Materials/Hand.mat.meta


fileFormatVersion: 2
guid: 739f205e426344cdda6eb7116a4726df
timeCreated: 1508602934
licenseType: Pro
NativeFormatImporter:
mainObjectFileID: 2100000
userData:
assetBundleName:
assetBundleVariant:

9
unity-environment/Assets/ML-Agents/Examples/Reacher/Materials/Materials.meta


fileFormatVersion: 2
guid: 5a0ca92b23a3f4614a5065ce0810c316
folderAsset: yes
timeCreated: 1508614299
licenseType: Pro
DefaultImporter:
userData:
assetBundleName:
assetBundleVariant:

76
unity-environment/Assets/ML-Agents/Examples/Reacher/Materials/Materials/checker 1.mat


%YAML 1.1
%TAG !u! tag:unity3d.com,2011:
--- !u!21 &2100000
Material:
serializedVersion: 6
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_Name: checker 1
m_Shader: {fileID: 46, guid: 0000000000000000f000000000000000, type: 0}
m_ShaderKeywords:
m_LightmapFlags: 4
m_EnableInstancingVariants: 0
m_DoubleSidedGI: 0
m_CustomRenderQueue: -1
stringTagMap: {}
disabledShaderPasses: []
m_SavedProperties:
serializedVersion: 3
m_TexEnvs:
- _BumpMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _DetailAlbedoMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _DetailMask:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _DetailNormalMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _EmissionMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 2}
m_Offset: {x: 0, y: 0}
- _MainTex:
m_Texture: {fileID: 2800000, guid: f9543e9c54b0842eda898856d1e86c12, type: 3}
m_Scale: {x: 1, y: 2}
m_Offset: {x: 0, y: 0}
- _MetallicGlossMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _OcclusionMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _ParallaxMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
m_Floats:
- _BumpScale: 1
- _Cutoff: 0.5
- _DetailNormalMapScale: 1
- _DstBlend: 0
- _GlossMapScale: 1
- _Glossiness: 0.081
- _GlossyReflections: 1
- _Metallic: 0
- _Mode: 0
- _OcclusionStrength: 1
- _Parallax: 0.02
- _SmoothnessTextureChannel: 0
- _SpecularHighlights: 1
- _SrcBlend: 1
- _UVSec: 0
- _ZWrite: 1
m_Colors:
- _Color: {r: 1, g: 1, b: 1, a: 1}
- _EmissionColor: {r: 0.5441177, g: 0.5441177, b: 0.5441177, a: 1}

9
unity-environment/Assets/ML-Agents/Examples/Reacher/Materials/Materials/checker 1.mat.meta


fileFormatVersion: 2
guid: 3736de91af62e4be7a3d8752592c6c61
timeCreated: 1508614636
licenseType: Pro
NativeFormatImporter:
mainObjectFileID: 2100000
userData:
assetBundleName:
assetBundleVariant:

76
unity-environment/Assets/ML-Agents/Examples/Reacher/Materials/Materials/checker.mat


%YAML 1.1
%TAG !u! tag:unity3d.com,2011:
--- !u!21 &2100000
Material:
serializedVersion: 6
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_Name: checker
m_Shader: {fileID: 46, guid: 0000000000000000f000000000000000, type: 0}
m_ShaderKeywords:
m_LightmapFlags: 4
m_EnableInstancingVariants: 0
m_DoubleSidedGI: 0
m_CustomRenderQueue: -1
stringTagMap: {}
disabledShaderPasses: []
m_SavedProperties:
serializedVersion: 3
m_TexEnvs:
- _BumpMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _DetailAlbedoMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _DetailMask:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _DetailNormalMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _EmissionMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 1, y: 1}
- _MainTex:
m_Texture: {fileID: 2800000, guid: 6f9d5afaf9e504fd88a7150b2541bb1c, type: 3}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 1, y: 1}
- _MetallicGlossMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _OcclusionMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _ParallaxMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
m_Floats:
- _BumpScale: 1
- _Cutoff: 0.5
- _DetailNormalMapScale: 1
- _DstBlend: 0
- _GlossMapScale: 1
- _Glossiness: 0.5
- _GlossyReflections: 1
- _Metallic: 0
- _Mode: 0
- _OcclusionStrength: 1
- _Parallax: 0.02
- _SmoothnessTextureChannel: 0
- _SpecularHighlights: 1
- _SrcBlend: 1
- _UVSec: 0
- _ZWrite: 1
m_Colors:
- _Color: {r: 1, g: 1, b: 1, a: 1}
- _EmissionColor: {r: 0, g: 0, b: 0, a: 1}

9
unity-environment/Assets/ML-Agents/Examples/Reacher/Materials/Materials/checker.mat.meta


fileFormatVersion: 2
guid: bfd3a3b322f7b4c4b9f1e07c525fd048
timeCreated: 1508614299
licenseType: Pro
NativeFormatImporter:
mainObjectFileID: 2100000
userData:
assetBundleName:
assetBundleVariant:

25
unity-environment/Assets/ML-Agents/Examples/Reacher/Materials/checker.jpg

Before | After
Width: 540  |  Height: 540  |  Size: 12 KiB

74
unity-environment/Assets/ML-Agents/Examples/Reacher/Materials/checker.jpg.meta


fileFormatVersion: 2
guid: f9543e9c54b0842eda898856d1e86c12
timeCreated: 1508614618
licenseType: Pro
TextureImporter:
fileIDToRecycleName: {}
serializedVersion: 4
mipmaps:
mipMapMode: 0
enableMipMap: 1
sRGBTexture: 1
linearTexture: 0
fadeOut: 0
borderMipMap: 0
mipMapsPreserveCoverage: 0
alphaTestReferenceValue: 0.5
mipMapFadeDistanceStart: 1
mipMapFadeDistanceEnd: 3
bumpmap:
convertToNormalMap: 0
externalNormalMap: 0
heightScale: 0.25
normalMapFilter: 0
isReadable: 0
grayScaleToAlpha: 0
generateCubemap: 6
cubemapConvolution: 0
seamlessCubemap: 0
textureFormat: 1
maxTextureSize: 2048
textureSettings:
serializedVersion: 2
filterMode: -1
aniso: -1
mipBias: -1
wrapU: -1
wrapV: -1
wrapW: -1
nPOTScale: 1
lightmap: 0
compressionQuality: 50
spriteMode: 0
spriteExtrude: 1
spriteMeshType: 1
alignment: 0
spritePivot: {x: 0.5, y: 0.5}
spriteBorder: {x: 0, y: 0, z: 0, w: 0}
spritePixelsToUnits: 100
alphaUsage: 1
alphaIsTransparency: 0
spriteTessellationDetail: -1
textureType: 0
textureShape: 1
maxTextureSizeSet: 0
compressionQualitySet: 0
textureFormatSet: 0
platformSettings:
- buildTarget: DefaultTexturePlatform
maxTextureSize: 2048
textureFormat: -1
textureCompression: 1
compressionQuality: 50
crunchedCompression: 0
allowsAlphaSplitting: 0
overridden: 0
spriteSheet:
serializedVersion: 2
sprites: []
outline: []
physicsShape: []
spritePackingTag:
userData:
assetBundleName:
assetBundleVariant:

9
unity-environment/Assets/ML-Agents/Examples/Reacher/Prefabs.meta


fileFormatVersion: 2
guid: ffae7416983f34bb884e4abe537d2a10
folderAsset: yes
timeCreated: 1508535289
licenseType: Pro
DefaultImporter:
userData:
assetBundleName:
assetBundleVariant:

9
unity-environment/Assets/ML-Agents/Examples/Reacher/Prefabs/Agent.prefab.meta


fileFormatVersion: 2
guid: 2f13abef2db804f96bdc7692a1dcf2b2
timeCreated: 1508535292
licenseType: Pro
NativeFormatImporter:
mainObjectFileID: 100100000
userData:
assetBundleName:
assetBundleVariant:

821
unity-environment/Assets/ML-Agents/Examples/Reacher/Prefabs/Agent.prefab


%YAML 1.1
%TAG !u! tag:unity3d.com,2011:
--- !u!1001 &100100000
Prefab:
m_ObjectHideFlags: 1
serializedVersion: 2
m_Modification:
m_TransformParent: {fileID: 0}
m_Modifications: []
m_RemovedComponents: []
m_ParentPrefab: {fileID: 0}
m_RootGameObject: {fileID: 1395682910799436}
m_IsPrefabParent: 1
--- !u!1 &1053261483945176
GameObject:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
serializedVersion: 5
m_Component:
- component: {fileID: 4340471134207970}
- component: {fileID: 33129520809121966}
- component: {fileID: 136195163595303310}
- component: {fileID: 23956479694601514}
- component: {fileID: 54174454281860166}
- component: {fileID: 153663134575695956}
m_Layer: 0
m_Name: Capsule (1)
m_TagString: Untagged
m_Icon: {fileID: 0}
m_NavMeshLayer: 0
m_StaticEditorFlags: 0
m_IsActive: 1
--- !u!1 &1065277484498824
GameObject:
m_ObjectHideFlags: 1
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
serializedVersion: 5
m_Component:
- component: {fileID: 4836354168995630}
- component: {fileID: 33827327769986516}
- component: {fileID: 23913365403597130}
m_Layer: 0
m_Name: GoalOn
m_TagString: Untagged
m_Icon: {fileID: 0}
m_NavMeshLayer: 0
m_StaticEditorFlags: 0
m_IsActive: 1
--- !u!1 &1157728520783578
GameObject:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
serializedVersion: 5
m_Component:
- component: {fileID: 4386034827704486}
- component: {fileID: 33524134957751370}
- component: {fileID: 135370188030477524}
- component: {fileID: 23420739137250176}
- component: {fileID: 54773405692192206}
m_Layer: 0
m_Name: Sphere
m_TagString: Untagged
m_Icon: {fileID: 0}
m_NavMeshLayer: 0
m_StaticEditorFlags: 0
m_IsActive: 1
--- !u!1 &1395682910799436
GameObject:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
serializedVersion: 5
m_Component:
- component: {fileID: 4067321601414524}
- component: {fileID: 114955921823023820}
m_Layer: 0
m_Name: Agent
m_TagString: Untagged
m_Icon: {fileID: 0}
m_NavMeshLayer: 0
m_StaticEditorFlags: 0
m_IsActive: 1
--- !u!1 &1644872085946016
GameObject:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
serializedVersion: 5
m_Component:
- component: {fileID: 4944997927059382}
- component: {fileID: 33736038482486542}
- component: {fileID: 136942565296027158}
- component: {fileID: 23266194367773042}
- component: {fileID: 54420197405084076}
- component: {fileID: 153054247742330296}
m_Layer: 0
m_Name: Capsule
m_TagString: Untagged
m_Icon: {fileID: 0}
m_NavMeshLayer: 0
m_StaticEditorFlags: 0
m_IsActive: 1
--- !u!1 &1654288206095398
GameObject:
m_ObjectHideFlags: 1
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
serializedVersion: 5
m_Component:
- component: {fileID: 4910003592613346}
- component: {fileID: 33164921905814718}
- component: {fileID: 135867312924178508}
- component: {fileID: 23588591193572352}
m_Layer: 0
m_Name: Hand
m_TagString: Untagged
m_Icon: {fileID: 0}
m_NavMeshLayer: 0
m_StaticEditorFlags: 0
m_IsActive: 1
--- !u!1 &1986879271678326
GameObject:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
serializedVersion: 5
m_Component:
- component: {fileID: 4984025405280428}
- component: {fileID: 33192810276213476}
- component: {fileID: 135746602902751552}
- component: {fileID: 23595512991530936}
- component: {fileID: 114928491800121992}
m_Layer: 0
m_Name: Goal
m_TagString: Untagged
m_Icon: {fileID: 0}
m_NavMeshLayer: 0
m_StaticEditorFlags: 0
m_IsActive: 1
--- !u!4 &4067321601414524
Transform:
m_ObjectHideFlags: 1
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
m_GameObject: {fileID: 1395682910799436}
m_LocalRotation: {x: 0, y: 0, z: 0, w: 1}
m_LocalPosition: {x: 10, y: 0, z: -10}
m_LocalScale: {x: 1, y: 1, z: 1}
m_Children:
- {fileID: 4984025405280428}
- {fileID: 4944997927059382}
- {fileID: 4340471134207970}
- {fileID: 4386034827704486}
m_Father: {fileID: 0}
m_RootOrder: 0
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
--- !u!4 &4340471134207970
Transform:
m_ObjectHideFlags: 1
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
m_GameObject: {fileID: 1053261483945176}
m_LocalRotation: {x: 1, y: -0, z: -0, w: 0}
m_LocalPosition: {x: 0, y: -10, z: 0}
m_LocalScale: {x: 1, y: 3, z: 1}
m_Children:
- {fileID: 4910003592613346}
m_Father: {fileID: 4067321601414524}
m_RootOrder: 2
m_LocalEulerAnglesHint: {x: 180, y: 0, z: 0}
--- !u!4 &4386034827704486
Transform:
m_ObjectHideFlags: 1
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
m_GameObject: {fileID: 1157728520783578}
m_LocalRotation: {x: -0, y: -0, z: -0, w: 1}
m_LocalPosition: {x: 0, y: 0, z: 0}
m_LocalScale: {x: 1, y: 1, z: 1}
m_Children: []
m_Father: {fileID: 4067321601414524}
m_RootOrder: 3
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
--- !u!4 &4836354168995630
Transform:
m_ObjectHideFlags: 1
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
m_GameObject: {fileID: 1065277484498824}
m_LocalRotation: {x: -0, y: -0, z: -0, w: 1}
m_LocalPosition: {x: 0, y: 0, z: 0}
m_LocalScale: {x: 0, y: 0, z: 0}
m_Children: []
m_Father: {fileID: 4984025405280428}
m_RootOrder: 0
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
--- !u!4 &4910003592613346
Transform:
m_ObjectHideFlags: 1
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
m_GameObject: {fileID: 1654288206095398}
m_LocalRotation: {x: 0, y: 0, z: 0, w: 1}
m_LocalPosition: {x: 0, y: 1, z: 0}
m_LocalScale: {x: 2, y: 0.66, z: 2}
m_Children: []
m_Father: {fileID: 4340471134207970}
m_RootOrder: 0
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
--- !u!4 &4944997927059382
Transform:
m_ObjectHideFlags: 1
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
m_GameObject: {fileID: 1644872085946016}
m_LocalRotation: {x: 1, y: -0, z: -0, w: 0}
m_LocalPosition: {x: 0, y: -4, z: 0}
m_LocalScale: {x: 1, y: 3, z: 1}
m_Children: []
m_Father: {fileID: 4067321601414524}
m_RootOrder: 1
m_LocalEulerAnglesHint: {x: 180, y: 0, z: 0}
--- !u!4 &4984025405280428
Transform:
m_ObjectHideFlags: 1
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
m_GameObject: {fileID: 1986879271678326}
m_LocalRotation: {x: -0, y: -0, z: -0, w: 1}
m_LocalPosition: {x: -2.41, y: -6.25, z: -9.8}
m_LocalScale: {x: 5, y: 5, z: 5}
m_Children:
- {fileID: 4836354168995630}
m_Father: {fileID: 4067321601414524}
m_RootOrder: 0
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
--- !u!23 &23266194367773042
MeshRenderer:
m_ObjectHideFlags: 1
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
m_GameObject: {fileID: 1644872085946016}
m_Enabled: 1
m_CastShadows: 1
m_ReceiveShadows: 1
m_DynamicOccludee: 1
m_MotionVectors: 1
m_LightProbeUsage: 1
m_ReflectionProbeUsage: 1
m_Materials:
- {fileID: 10303, guid: 0000000000000000f000000000000000, type: 0}
m_StaticBatchInfo:
firstSubMesh: 0
subMeshCount: 0
m_StaticBatchRoot: {fileID: 0}
m_ProbeAnchor: {fileID: 0}
m_LightProbeVolumeOverride: {fileID: 0}
m_ScaleInLightmap: 1
m_PreserveUVs: 1
m_IgnoreNormalsForChartDetection: 0
m_ImportantGI: 0
m_StitchLightmapSeams: 0
m_SelectedEditorRenderState: 3
m_MinimumChartSize: 4
m_AutoUVMaxDistance: 0.5
m_AutoUVMaxAngle: 89
m_LightmapParameters: {fileID: 0}
m_SortingLayerID: 0
m_SortingLayer: 0
m_SortingOrder: 0
--- !u!23 &23420739137250176
MeshRenderer:
m_ObjectHideFlags: 1
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
m_GameObject: {fileID: 1157728520783578}
m_Enabled: 1
m_CastShadows: 1
m_ReceiveShadows: 1
m_DynamicOccludee: 1
m_MotionVectors: 1
m_LightProbeUsage: 1
m_ReflectionProbeUsage: 1
m_Materials:
- {fileID: 10303, guid: 0000000000000000f000000000000000, type: 0}
m_StaticBatchInfo:
firstSubMesh: 0
subMeshCount: 0
m_StaticBatchRoot: {fileID: 0}
m_ProbeAnchor: {fileID: 0}
m_LightProbeVolumeOverride: {fileID: 0}
m_ScaleInLightmap: 1
m_PreserveUVs: 1
m_IgnoreNormalsForChartDetection: 0
m_ImportantGI: 0
m_StitchLightmapSeams: 0
m_SelectedEditorRenderState: 3
m_MinimumChartSize: 4
m_AutoUVMaxDistance: 0.5
m_AutoUVMaxAngle: 89
m_LightmapParameters: {fileID: 0}
m_SortingLayerID: 0
m_SortingLayer: 0
m_SortingOrder: 0
--- !u!23 &23588591193572352
MeshRenderer:
m_ObjectHideFlags: 1
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
m_GameObject: {fileID: 1654288206095398}
m_Enabled: 1
m_CastShadows: 1
m_ReceiveShadows: 1
m_DynamicOccludee: 1
m_MotionVectors: 1
m_LightProbeUsage: 1
m_ReflectionProbeUsage: 1
m_Materials:
- {fileID: 2100000, guid: 739f205e426344cdda6eb7116a4726df, type: 2}
m_StaticBatchInfo:
firstSubMesh: 0
subMeshCount: 0
m_StaticBatchRoot: {fileID: 0}
m_ProbeAnchor: {fileID: 0}
m_LightProbeVolumeOverride: {fileID: 0}
m_ScaleInLightmap: 1
m_PreserveUVs: 1
m_IgnoreNormalsForChartDetection: 0
m_ImportantGI: 0
m_StitchLightmapSeams: 0
m_SelectedEditorRenderState: 3
m_MinimumChartSize: 4
m_AutoUVMaxDistance: 0.5
m_AutoUVMaxAngle: 89
m_LightmapParameters: {fileID: 0}
m_SortingLayerID: 0
m_SortingLayer: 0
m_SortingOrder: 0
--- !u!23 &23595512991530936
MeshRenderer:
m_ObjectHideFlags: 1
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
m_GameObject: {fileID: 1986879271678326}
m_Enabled: 1
m_CastShadows: 1
m_ReceiveShadows: 1
m_DynamicOccludee: 1
m_MotionVectors: 1
m_LightProbeUsage: 1
m_ReflectionProbeUsage: 1
m_Materials:
- {fileID: 2100000, guid: 3779d76a368ea4ebbaea1e027d5dfd98, type: 2}
m_StaticBatchInfo:
firstSubMesh: 0
subMeshCount: 0
m_StaticBatchRoot: {fileID: 0}
m_ProbeAnchor: {fileID: 0}
m_LightProbeVolumeOverride: {fileID: 0}
m_ScaleInLightmap: 1
m_PreserveUVs: 1
m_IgnoreNormalsForChartDetection: 0
m_ImportantGI: 0
m_StitchLightmapSeams: 0
m_SelectedEditorRenderState: 3
m_MinimumChartSize: 4
m_AutoUVMaxDistance: 0.5
m_AutoUVMaxAngle: 89
m_LightmapParameters: {fileID: 0}
m_SortingLayerID: 0
m_SortingLayer: 0
m_SortingOrder: 0
--- !u!23 &23913365403597130
MeshRenderer:
m_ObjectHideFlags: 1
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
m_GameObject: {fileID: 1065277484498824}
m_Enabled: 1
m_CastShadows: 1
m_ReceiveShadows: 1
m_DynamicOccludee: 1
m_MotionVectors: 1
m_LightProbeUsage: 1
m_ReflectionProbeUsage: 1
m_Materials:
- {fileID: 2100000, guid: 2baf65e1ee49345a7aa8ae8b1da3619b, type: 2}
m_StaticBatchInfo:
firstSubMesh: 0
subMeshCount: 0
m_StaticBatchRoot: {fileID: 0}
m_ProbeAnchor: {fileID: 0}
m_LightProbeVolumeOverride: {fileID: 0}
m_ScaleInLightmap: 1
m_PreserveUVs: 1
m_IgnoreNormalsForChartDetection: 0
m_ImportantGI: 0
m_StitchLightmapSeams: 0
m_SelectedEditorRenderState: 3
m_MinimumChartSize: 4
m_AutoUVMaxDistance: 0.5
m_AutoUVMaxAngle: 89
m_LightmapParameters: {fileID: 0}
m_SortingLayerID: 0
m_SortingLayer: 0
m_SortingOrder: 0
--- !u!23 &23956479694601514
MeshRenderer:
m_ObjectHideFlags: 1
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
m_GameObject: {fileID: 1053261483945176}
m_Enabled: 1
m_CastShadows: 1
m_ReceiveShadows: 1
m_DynamicOccludee: 1
m_MotionVectors: 1
m_LightProbeUsage: 1
m_ReflectionProbeUsage: 1
m_Materials:
- {fileID: 10303, guid: 0000000000000000f000000000000000, type: 0}
m_StaticBatchInfo:
firstSubMesh: 0
subMeshCount: 0
m_StaticBatchRoot: {fileID: 0}
m_ProbeAnchor: {fileID: 0}
m_LightProbeVolumeOverride: {fileID: 0}
m_ScaleInLightmap: 1
m_PreserveUVs: 1
m_IgnoreNormalsForChartDetection: 0
m_ImportantGI: 0
m_StitchLightmapSeams: 0
m_SelectedEditorRenderState: 3
m_MinimumChartSize: 4
m_AutoUVMaxDistance: 0.5
m_AutoUVMaxAngle: 89
m_LightmapParameters: {fileID: 0}
m_SortingLayerID: 0
m_SortingLayer: 0
m_SortingOrder: 0
--- !u!33 &33129520809121966
MeshFilter:
m_ObjectHideFlags: 1
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
m_GameObject: {fileID: 1053261483945176}
m_Mesh: {fileID: 10208, guid: 0000000000000000e000000000000000, type: 0}
--- !u!33 &33164921905814718
MeshFilter:
m_ObjectHideFlags: 1
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
m_GameObject: {fileID: 1654288206095398}
m_Mesh: {fileID: 10207, guid: 0000000000000000e000000000000000, type: 0}
--- !u!33 &33192810276213476
MeshFilter:
m_ObjectHideFlags: 1
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
m_GameObject: {fileID: 1986879271678326}
m_Mesh: {fileID: 10207, guid: 0000000000000000e000000000000000, type: 0}
--- !u!33 &33524134957751370
MeshFilter:
m_ObjectHideFlags: 1
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
m_GameObject: {fileID: 1157728520783578}
m_Mesh: {fileID: 10207, guid: 0000000000000000e000000000000000, type: 0}
--- !u!33 &33736038482486542
MeshFilter:
m_ObjectHideFlags: 1
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
m_GameObject: {fileID: 1644872085946016}
m_Mesh: {fileID: 10208, guid: 0000000000000000e000000000000000, type: 0}
--- !u!33 &33827327769986516
MeshFilter:
m_ObjectHideFlags: 1
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
m_GameObject: {fileID: 1065277484498824}
m_Mesh: {fileID: 10207, guid: 0000000000000000e000000000000000, type: 0}
--- !u!54 &54174454281860166
Rigidbody:
m_ObjectHideFlags: 1
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
m_GameObject: {fileID: 1053261483945176}
serializedVersion: 2
m_Mass: 1
m_Drag: 0
m_AngularDrag: 0.05
m_UseGravity: 1
m_IsKinematic: 0
m_Interpolate: 0
m_Constraints: 0
m_CollisionDetection: 0
--- !u!54 &54420197405084076
Rigidbody:
m_ObjectHideFlags: 1
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
m_GameObject: {fileID: 1644872085946016}
serializedVersion: 2
m_Mass: 1
m_Drag: 0
m_AngularDrag: 0.05
m_UseGravity: 1
m_IsKinematic: 0
m_Interpolate: 0
m_Constraints: 0
m_CollisionDetection: 0
--- !u!54 &54773405692192206
Rigidbody:
m_ObjectHideFlags: 1
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
m_GameObject: {fileID: 1157728520783578}
serializedVersion: 2
m_Mass: 1
m_Drag: 0
m_AngularDrag: 0.05
m_UseGravity: 0
m_IsKinematic: 1
m_Interpolate: 0
m_Constraints: 0
m_CollisionDetection: 0
--- !u!114 &114928491800121992
MonoBehaviour:
m_ObjectHideFlags: 1
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
m_GameObject: {fileID: 1986879271678326}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: a8c5005c8e6b84f1089c132cb87b44c4, type: 3}
m_Name:
m_EditorClassIdentifier:
agent: {fileID: 1395682910799436}
hand: {fileID: 1654288206095398}
goalOn: {fileID: 1065277484498824}
--- !u!114 &114955921823023820
MonoBehaviour:
m_ObjectHideFlags: 1
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
m_GameObject: {fileID: 1395682910799436}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: 220b156e3b142406c8b76d4db981d044, type: 3}
m_Name:
m_EditorClassIdentifier:
brain: {fileID: 0}
observations: []
maxStep: 2000
resetOnDone: 1
reward: 0
done: 0
value: 0
CummulativeReward: 0
stepCounter: 0
agentStoredAction: []
memory: []
id: 0
pendulumA: {fileID: 1644872085946016}
pendulumB: {fileID: 1053261483945176}
hand: {fileID: 1654288206095398}
goal: {fileID: 1986879271678326}
--- !u!135 &135370188030477524
SphereCollider:
m_ObjectHideFlags: 1
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
m_GameObject: {fileID: 1157728520783578}
m_Material: {fileID: 0}
m_IsTrigger: 0
m_Enabled: 1
serializedVersion: 2
m_Radius: 0.5
m_Center: {x: 0, y: 0, z: 0}
--- !u!135 &135746602902751552
SphereCollider:
m_ObjectHideFlags: 1
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
m_GameObject: {fileID: 1986879271678326}
m_Material: {fileID: 0}
m_IsTrigger: 1
m_Enabled: 1
serializedVersion: 2
m_Radius: 0.5
m_Center: {x: 0, y: 0, z: 0}
--- !u!135 &135867312924178508
SphereCollider:
m_ObjectHideFlags: 1
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
m_GameObject: {fileID: 1654288206095398}
m_Material: {fileID: 0}
m_IsTrigger: 0
m_Enabled: 1
serializedVersion: 2
m_Radius: 0.5
m_Center: {x: 0, y: 0, z: 0}
--- !u!136 &136195163595303310
CapsuleCollider:
m_ObjectHideFlags: 1
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
m_GameObject: {fileID: 1053261483945176}
m_Material: {fileID: 0}
m_IsTrigger: 0
m_Enabled: 1
m_Radius: 0.5
m_Height: 2
m_Direction: 1
m_Center: {x: 0, y: 0, z: 0}
--- !u!136 &136942565296027158
CapsuleCollider:
m_ObjectHideFlags: 1
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
m_GameObject: {fileID: 1644872085946016}
m_Material: {fileID: 0}
m_IsTrigger: 0
m_Enabled: 1
m_Radius: 0.5
m_Height: 2
m_Direction: 1
m_Center: {x: 0, y: 0, z: 0}
--- !u!153 &153054247742330296
ConfigurableJoint:
m_ObjectHideFlags: 1
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
m_GameObject: {fileID: 1644872085946016}
m_ConnectedBody: {fileID: 54773405692192206}
m_Anchor: {x: 0, y: -1.34, z: 0}
m_Axis: {x: 1, y: 0, z: 0}
m_AutoConfigureConnectedAnchor: 1
m_ConnectedAnchor: {x: 0, y: 0.01999998, z: 0}
serializedVersion: 2
m_SecondaryAxis: {x: 0, y: 1, z: 0}
m_XMotion: 0
m_YMotion: 0
m_ZMotion: 0
m_AngularXMotion: 2
m_AngularYMotion: 0
m_AngularZMotion: 2
m_LinearLimitSpring:
spring: 0
damper: 0
m_LinearLimit:
limit: 0
bounciness: 0
contactDistance: 0
m_AngularXLimitSpring:
spring: 0
damper: 0
m_LowAngularXLimit:
limit: 0
bounciness: 0
contactDistance: 0
m_HighAngularXLimit:
limit: 0
bounciness: 0
contactDistance: 0
m_AngularYZLimitSpring:
spring: 0
damper: 0
m_AngularYLimit:
limit: 0
bounciness: 0
contactDistance: 0
m_AngularZLimit:
limit: 0
bounciness: 0
contactDistance: 0
m_TargetPosition: {x: 0, y: 0, z: 0}
m_TargetVelocity: {x: 0, y: 0, z: 0}
m_XDrive:
serializedVersion: 3
positionSpring: 0
positionDamper: 0
maximumForce: 3.4028233e+38
m_YDrive:
serializedVersion: 3
positionSpring: 0
positionDamper: 0
maximumForce: 3.4028233e+38
m_ZDrive:
serializedVersion: 3
positionSpring: 0
positionDamper: 0
maximumForce: 3.4028233e+38
m_TargetRotation: {x: 0, y: 0, z: 0, w: 1}
m_TargetAngularVelocity: {x: 0, y: 0, z: 0}
m_RotationDriveMode: 0
m_AngularXDrive:
serializedVersion: 3
positionSpring: 0
positionDamper: 0
maximumForce: 3.4028233e+38
m_AngularYZDrive:
serializedVersion: 3
positionSpring: 0
positionDamper: 0
maximumForce: 3.4028233e+38
m_SlerpDrive:
serializedVersion: 3
positionSpring: 0
positionDamper: 0
maximumForce: 3.4028233e+38
m_ProjectionMode: 0
m_ProjectionDistance: 0.1
m_ProjectionAngle: 180
m_ConfiguredInWorldSpace: 0
m_SwapBodies: 0
m_BreakForce: Infinity
m_BreakTorque: Infinity
m_EnableCollision: 0
m_EnablePreprocessing: 1
m_MassScale: 1
m_ConnectedMassScale: 1
--- !u!153 &153663134575695956
ConfigurableJoint:
m_ObjectHideFlags: 1
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
m_GameObject: {fileID: 1053261483945176}
m_ConnectedBody: {fileID: 54420197405084076}
m_Anchor: {x: 0, y: -1, z: 0}
m_Axis: {x: 1, y: 0, z: 0}
m_AutoConfigureConnectedAnchor: 1
m_ConnectedAnchor: {x: 0, y: 1, z: 0}
serializedVersion: 2
m_SecondaryAxis: {x: 0, y: 1, z: 0}
m_XMotion: 0
m_YMotion: 0
m_ZMotion: 0
m_AngularXMotion: 2
m_AngularYMotion: 0
m_AngularZMotion: 2
m_LinearLimitSpring:
spring: 0
damper: 0
m_LinearLimit:
limit: 0
bounciness: 0
contactDistance: 0
m_AngularXLimitSpring:
spring: 0
damper: 0
m_LowAngularXLimit:
limit: 0
bounciness: 0
contactDistance: 0
m_HighAngularXLimit:
limit: 0
bounciness: 0
contactDistance: 0
m_AngularYZLimitSpring:
spring: 0
damper: 0
m_AngularYLimit:
limit: 0
bounciness: 0
contactDistance: 0
m_AngularZLimit:
limit: 0
bounciness: 0
contactDistance: 0
m_TargetPosition: {x: 0, y: 0, z: 0}
m_TargetVelocity: {x: 0, y: 0, z: 0}
m_XDrive:
serializedVersion: 3
positionSpring: 0
positionDamper: 0
maximumForce: 3.4028233e+38
m_YDrive:
serializedVersion: 3
positionSpring: 0
positionDamper: 0
maximumForce: 3.4028233e+38
m_ZDrive:
serializedVersion: 3
positionSpring: 0
positionDamper: 0
maximumForce: 3.4028233e+38
m_TargetRotation: {x: 0, y: 0, z: 0, w: 1}
m_TargetAngularVelocity: {x: 0, y: 0, z: 0}
m_RotationDriveMode: 0
m_AngularXDrive:
serializedVersion: 3
positionSpring: 0
positionDamper: 0
maximumForce: 3.4028233e+38
m_AngularYZDrive:
serializedVersion: 3
positionSpring: 0
positionDamper: 0
maximumForce: 3.4028233e+38
m_SlerpDrive:
serializedVersion: 3
positionSpring: 0
positionDamper: 0
maximumForce: 3.4028233e+38
m_ProjectionMode: 0
m_ProjectionDistance: 0.1
m_ProjectionAngle: 180
m_ConfiguredInWorldSpace: 0
m_SwapBodies: 0
m_BreakForce: Infinity
m_BreakTorque: Infinity
m_EnableCollision: 0
m_EnablePreprocessing: 1
m_MassScale: 1
m_ConnectedMassScale: 1

1001
unity-environment/Assets/ML-Agents/Examples/Reacher/Scene.unity
File diff too large to display

8
unity-environment/Assets/ML-Agents/Examples/Reacher/Scene.unity.meta


fileFormatVersion: 2
guid: e58a3c10c43de4b6b91b7149838d1dfb
timeCreated: 1504127824
licenseType: Pro
DefaultImporter:
userData:
assetBundleName:
assetBundleVariant:

9
unity-environment/Assets/ML-Agents/Examples/Reacher/Scripts.meta


fileFormatVersion: 2
guid: 94ce022e6e3984390bf7dffa80eae215
folderAsset: yes
timeCreated: 1503355437
licenseType: Free
DefaultImporter:
userData:
assetBundleName:
assetBundleVariant:

9
unity-environment/Assets/ML-Agents/Examples/Reacher/TFModels.meta


fileFormatVersion: 2
guid: 1de426e37371f4489a43b8b9ad99a00c
folderAsset: yes
timeCreated: 1508690694
licenseType: Pro
DefaultImporter:
userData:
assetBundleName:
assetBundleVariant:

412
unity-environment/Assets/ML-Agents/Examples/Reacher/TFModels/Reacher.bytes


[Binary TensorFlow graph definition for the trained Reacher model — the raw protobuf bytes cannot be rendered as text. The legible op names describe: a `state` placeholder normalized against the `running_mean`/`running_variance` constants and the `global_step` counter into a clipped `normalized_state`; two tanh hidden layers producing the action mean; a `log_sigma_squared` constant and an `epsilon` placeholder used to sample the `action` output and to compute Gaussian `action_probs`; and a separate pair of tanh hidden layers producing `value_estimate`. The weight constants (`dense/kernel` through `dense_5/kernel`) are binary data and are omitted here.]
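The op names above are enough to reconstruct roughly what this graph computes at inference time. Below is a minimal NumPy sketch of that forward pass; the layer wiring and the ±5 clip are read off the node names, while the array shapes, the weight values, and the `weights` dictionary itself are stand-ins, so treat this as an illustration of the serialized model rather than a faithful re-implementation:

import numpy as np

def reacher_forward(state, weights, global_step, epsilon):
    # Observation normalization (the `normalized_state` node):
    # clip((state - mean) / sqrt(variance / (global_step + 1)), -5, 5).
    var = weights["running_variance"] / (float(global_step) + 1.0)
    norm = np.clip((state - weights["running_mean"]) / np.sqrt(var), -5.0, 5.0)

    # Policy branch: two tanh layers, then a linear map to the action mean.
    h = np.tanh(norm @ weights["dense/kernel"])
    h = np.tanh(h @ weights["dense_1/kernel"])
    mu = h @ weights["dense_4/kernel"]

    # Value branch: a separate pair of tanh layers on the same normalized state.
    v = np.tanh(norm @ weights["dense_2/kernel"])
    v = np.tanh(v @ weights["dense_3/kernel"])
    value_estimate = v @ weights["dense_5/kernel"]

    # Gaussian policy head: sample with the externally supplied noise `epsilon`,
    # then score the sample under N(mu, sigma^2).
    sigma_sq = np.exp(weights["log_sigma_squared"])
    action = mu + np.sqrt(sigma_sq) * epsilon
    action_probs = (np.exp(-(action - mu) ** 2 / (2.0 * sigma_sq))
                    / np.sqrt(2.0 * np.pi * sigma_sq))
    return action, action_probs, value_estimate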

8
unity-environment/Assets/ML-Agents/Examples/Reacher/TFModels/Reacher.bytes.meta


fileFormatVersion: 2
guid: 8db6173148a6f4e7fa654ed627c88d7a
timeCreated: 1508690694
licenseType: Pro
TextScriptImporter:
  userData:
  assetBundleName:
  assetBundleVariant:

12
unity-environment/Assets/ML-Agents/Examples/Reacher/Scripts/FlyCamera.cs.meta


fileFormatVersion: 2
guid: 83d7b886385fb48b999bc3fbc41bc181
timeCreated: 1508627099
licenseType: Pro
MonoImporter:
  serializedVersion: 2
  defaultReferences: []
  executionOrder: 0
  icon: {instanceID: 0}
  userData:
  assetBundleName:
  assetBundleVariant:

23
unity-environment/Assets/ML-Agents/Examples/Reacher/Scripts/ReacherAcademy.cs


using System.Collections;
using System.Collections.Generic;
using UnityEngine;
public class ReacherAcademy : Academy {

    public float goalSize;
    public float goalSpeed;

    // Read the goal parameters from the Academy's reset parameters
    // (set in the editor or overridden externally) on every reset.
    public override void AcademyReset()
    {
        goalSize = (float)resetParameters["goal_size"];
        goalSpeed = (float)resetParameters["goal_speed"];
    }

    public override void AcademyStep()
    {
    }
}
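The two keys read here, goal_size and goal_speed, are reset parameters that can be supplied from the Python side when the environment is reset (the curriculum files under python/curricula adjust the task the same way). A rough sketch, assuming the UnityEnvironment.reset call in this release accepts a config dictionary; the build name and the numeric values are placeholders:

from unityagents import UnityEnvironment

# Hypothetical override of the Reacher reset parameters from Python.
env = UnityEnvironment(file_name="Reacher")  # placeholder build name
info = env.reset(train_mode=True,
                 config={"goal_size": 5.0, "goal_speed": 1.0})
env.close()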

Some files were not shown because too many files changed in this diff
