
Merge branch 'master' into develop-shortenstrikervsgoalie

/develop/shortenstrikervsgoalie
Ervin Teng, 4 years ago
Current commit
d52443a5
141 files changed, with 7,458 additions and 6,895 deletions
  1. 18  .circleci/config.yml
  2. 6  .pre-commit-config.yaml
  3. 11  DevProject/Assets/ML-Agents/Scripts/Tests/Performance/SensorPerformanceTests.cs
  4. 2  Project/Assets/ML-Agents/Editor/DisableBurstFromMenu.cs
  5. 16  Project/Assets/ML-Agents/Examples/3DBall/Scripts/Ball3DAgent.cs
  6. 9  Project/Assets/ML-Agents/Examples/3DBall/Scripts/Ball3DHardAgent.cs
  7. 506  Project/Assets/ML-Agents/Examples/3DBall/TFModels/3DBall.nn
  8. 566  Project/Assets/ML-Agents/Examples/3DBall/TFModels/3DBallHard.nn
  9. 9  Project/Assets/ML-Agents/Examples/Basic/Scripts/BasicController.cs
  10. 19  Project/Assets/ML-Agents/Examples/Basic/TFModels/Basic.nn
  11. 30  Project/Assets/ML-Agents/Examples/Bouncer/Scripts/BouncerAgent.cs
  12. 156  Project/Assets/ML-Agents/Examples/Bouncer/TFModels/Bouncer.nn
  13. 55  Project/Assets/ML-Agents/Examples/Crawler/Scripts/CrawlerAgent.cs
  14. 1001  Project/Assets/ML-Agents/Examples/Crawler/TFModels/CrawlerDynamic.nn
  15. 1001  Project/Assets/ML-Agents/Examples/Crawler/TFModels/CrawlerStatic.nn
  16. 29  Project/Assets/ML-Agents/Examples/FoodCollector/Scripts/FoodCollectorAgent.cs
  17. 2  Project/Assets/ML-Agents/Examples/FoodCollector/Scripts/FoodCollectorSettings.cs
  18. 642  Project/Assets/ML-Agents/Examples/FoodCollector/TFModels/FoodCollector.nn
  19. 31  Project/Assets/ML-Agents/Examples/GridWorld/Scripts/GridAgent.cs
  20. 1000  Project/Assets/ML-Agents/Examples/GridWorld/TFModels/GridWorld.nn
  21. 23  Project/Assets/ML-Agents/Examples/Hallway/Scripts/HallwayAgent.cs
  22. 992  Project/Assets/ML-Agents/Examples/Hallway/TFModels/Hallway.nn
  23. 23  Project/Assets/ML-Agents/Examples/PushBlock/Scripts/PushAgentBasic.cs
  24. 1000  Project/Assets/ML-Agents/Examples/PushBlock/TFModels/PushBlock.nn
  25. 23  Project/Assets/ML-Agents/Examples/Pyramids/Scripts/PyramidAgent.cs
  26. 997  Project/Assets/ML-Agents/Examples/Pyramids/TFModels/Pyramids.nn
  27. 12  Project/Assets/ML-Agents/Examples/Reacher/Scripts/ReacherAgent.cs
  28. 549  Project/Assets/ML-Agents/Examples/Reacher/TFModels/Reacher.nn
  29. 2  Project/Assets/ML-Agents/Examples/SharedAssets/Scripts/AdjustTrainingTimescale.cs
  30. 4  Project/Assets/ML-Agents/Examples/SharedAssets/Scripts/DirectionIndicator.cs
  31. 14  Project/Assets/ML-Agents/Examples/SharedAssets/Scripts/JointDriveController.cs
  32. 20  Project/Assets/ML-Agents/Examples/SharedAssets/Scripts/ModelOverrider.cs
  33. 6  Project/Assets/ML-Agents/Examples/SharedAssets/Scripts/Monitor.cs
  34. 2  Project/Assets/ML-Agents/Examples/SharedAssets/Scripts/OrientationCubeController.cs
  35. 2  Project/Assets/ML-Agents/Examples/SharedAssets/Scripts/ProjectSettingsOverrides.cs
  36. 2  Project/Assets/ML-Agents/Examples/SharedAssets/Scripts/SensorBase.cs
  37. 8  Project/Assets/ML-Agents/Examples/SharedAssets/Scripts/TargetController.cs
  38. 31  Project/Assets/ML-Agents/Examples/Soccer/Scripts/AgentSoccer.cs
  39. 1001  Project/Assets/ML-Agents/Examples/Soccer/TFModels/SoccerTwos.nn
  40. 6  Project/Assets/ML-Agents/Examples/Startup/Scripts/Startup.cs
  41. 4  Project/Assets/ML-Agents/Examples/Template/Scripts/TemplateAgent.cs
  42. 14  Project/Assets/ML-Agents/Examples/Tennis/Scripts/HitWall.cs
  43. 20  Project/Assets/ML-Agents/Examples/Tennis/Scripts/TennisAgent.cs
  44. 2  Project/Assets/ML-Agents/Examples/Walker/Demos/ExpertWalkerDy.demo.meta
  45. 2  Project/Assets/ML-Agents/Examples/Walker/Demos/ExpertWalkerDyVS.demo.meta
  46. 2  Project/Assets/ML-Agents/Examples/Walker/Demos/ExpertWalkerStVS.demo.meta
  47. 2  Project/Assets/ML-Agents/Examples/Walker/Demos/ExpertWalkerSta.demo.meta
  48. 57  Project/Assets/ML-Agents/Examples/Walker/Scripts/WalkerAgent.cs
  49. 31  Project/Assets/ML-Agents/Examples/WallJump/Scripts/WallJumpAgent.cs
  50. 1001  Project/Assets/ML-Agents/Examples/WallJump/TFModels/BigWallJump.nn
  51. 1001  Project/Assets/ML-Agents/Examples/WallJump/TFModels/SmallWallJump.nn
  52. 46  Project/Assets/ML-Agents/Examples/Worm/Prefabs/PlatformWormDynamicTarget.prefab
  53. 29  Project/Assets/ML-Agents/Examples/Worm/Scripts/WormAgent.cs
  54. 1001  Project/Assets/ML-Agents/Examples/Worm/TFModels/WormDynamic.nn
  55. 1001  Project/Assets/ML-Agents/Examples/Worm/TFModels/WormStatic.nn
  56. 7  README.md
  57. 2  com.unity.ml-agents.extensions/Runtime/Sensors/ArticulationBodyJointExtractor.cs
  58. 2  com.unity.ml-agents.extensions/Runtime/Sensors/ArticulationBodyPoseExtractor.cs
  59. 2  com.unity.ml-agents.extensions/Runtime/Sensors/ArticulationBodySensorComponent.cs
  60. 4  com.unity.ml-agents.extensions/Runtime/Sensors/PhysicsBodySensor.cs
  61. 4  com.unity.ml-agents.extensions/Runtime/Sensors/PhysicsSensorSettings.cs
  62. 10  com.unity.ml-agents.extensions/Runtime/Sensors/PoseExtractor.cs
  63. 2  com.unity.ml-agents.extensions/Runtime/Sensors/RigidBodyJointExtractor.cs
  64. 4  com.unity.ml-agents.extensions/Runtime/Sensors/RigidBodyPoseExtractor.cs
  65. 4  com.unity.ml-agents.extensions/Runtime/Sensors/RigidBodySensorComponent.cs
  66. 2  com.unity.ml-agents.extensions/Tests/Editor/Sensors/ArticulationBodyPoseExtractorTests.cs
  67. 2  com.unity.ml-agents.extensions/Tests/Editor/Sensors/ArticulationBodySensorTests.cs
  68. 10  com.unity.ml-agents.extensions/Tests/Editor/Sensors/PoseExtractorTests.cs
  69. 2  com.unity.ml-agents.extensions/Tests/Editor/Sensors/RigidBodyPoseExtractorTests.cs
  70. 3  com.unity.ml-agents.extensions/Tests/Runtime/RuntimeExampleTest.cs
  71. 16  com.unity.ml-agents/CHANGELOG.md
  72. 7  com.unity.ml-agents/CONTRIBUTING.md
  73. 2  com.unity.ml-agents/Documentation~/com.unity.ml-agents.md
  74. 2  com.unity.ml-agents/Editor/DemonstrationImporter.cs
  75. 20  com.unity.ml-agents/Runtime/Academy.cs
  76. 10  com.unity.ml-agents/Runtime/Actuators/ActionSegment.cs
  77. 2  com.unity.ml-agents/Runtime/Actuators/ActionSpec.cs
  78. 17  com.unity.ml-agents/Runtime/Actuators/ActuatorManager.cs
  79. 6  com.unity.ml-agents/Runtime/Actuators/IActionReceiver.cs
  80. 2  com.unity.ml-agents/Runtime/Actuators/VectorActuator.cs
  81. 38  com.unity.ml-agents/Runtime/Agent.cs
  82. 4  com.unity.ml-agents/Runtime/Agent.deprecated.cs
  83. 6  com.unity.ml-agents/Runtime/Communicator/GrpcExtensions.cs
  84. 36  com.unity.ml-agents/Runtime/Communicator/RpcCommunicator.cs
  85. 2  com.unity.ml-agents/Runtime/Communicator/UnityRLCapabilities.cs
  86. 2  com.unity.ml-agents/Runtime/Demonstrations/DemonstrationRecorder.cs
  87. 2  com.unity.ml-agents/Runtime/DiscreteActionMasker.cs
  88. 20  com.unity.ml-agents/Runtime/Policies/BehaviorParameters.cs
  89. 2  com.unity.ml-agents/Runtime/Policies/BrainParameters.cs
  90. 2  com.unity.ml-agents/Runtime/Policies/HeuristicPolicy.cs
  91. 14  com.unity.ml-agents/Runtime/Sampler.cs
  92. 4  com.unity.ml-agents/Runtime/Sensors/CameraSensor.cs
  93. 14  com.unity.ml-agents/Runtime/Sensors/CameraSensorComponent.cs
  94. 2  com.unity.ml-agents/Runtime/Sensors/ObservationWriter.cs
  95. 4  com.unity.ml-agents/Runtime/Sensors/RayPerceptionSensor.cs
  96. 2  com.unity.ml-agents/Runtime/Sensors/RayPerceptionSensorComponentBase.cs
  97. 2  com.unity.ml-agents/Runtime/Sensors/Reflection/BoolReflectionSensor.cs
  98. 2  com.unity.ml-agents/Runtime/Sensors/Reflection/FloatReflectionSensor.cs
  99. 2  com.unity.ml-agents/Runtime/Sensors/Reflection/IntReflectionSensor.cs
  100. 10  com.unity.ml-agents/Runtime/Sensors/Reflection/ObservableAttribute.cs

18
.circleci/config.yml


# Need ruby for search-and-replace
sudo apt-get update
sudo apt-get install ruby-full
# install dotnet and the formatter - see https://docs.microsoft.com/en-us/dotnet/core/install/linux-debian#debian-9-
pushd ~
wget -O - https://packages.microsoft.com/keys/microsoft.asc | gpg --dearmor > microsoft.asc.gpg
sudo mv microsoft.asc.gpg /etc/apt/trusted.gpg.d/
wget https://packages.microsoft.com/config/debian/9/prod.list
sudo mv prod.list /etc/apt/sources.list.d/microsoft-prod.list
sudo chown root:root /etc/apt/trusted.gpg.d/microsoft.asc.gpg
sudo chown root:root /etc/apt/sources.list.d/microsoft-prod.list
popd
sudo apt-get install -y apt-transport-https && \
sudo apt-get update && \
sudo apt-get install -y dotnet-sdk-3.1 && \
dotnet tool install -g dotnet-format --version 4.1.131201
echo "Setting up venv"
python3 -m venv venv
. venv/bin/activate
pip install --upgrade pip

name: Check Code Style using pre-commit
command: |
. venv/bin/activate
export PATH="$PATH:~/.dotnet/tools"
pre-commit run --show-diff-on-failure --all-files
markdown_link_check:

6
.pre-commit-config.yaml


name: validate release links
language: script
entry: utils/validate_release_links.py
- id: dotnet-format
name: dotnet-format
language: script
entry: utils/run_dotnet_format.py
types: [c#]

11
DevProject/Assets/ML-Agents/Scripts/Tests/Performance/SensorPerformanceTests.cs


using NUnit.Framework;
using Unity.MLAgents;
using Unity.MLAgents.Actuators;
using Unity.MLAgents.Policies;
using Unity.MLAgents.Sensors;
using Unity.MLAgents.Sensors.Reflection;

{
}
public override void Heuristic(float[] actionsOut)
public override void Heuristic(in ActionBuffers actionsOut)
{
}
}

sensor.AddObservation(new Quaternion(1, 2, 3, 4));
}
public override void Heuristic(float[] actionsOut)
public override void Heuristic(in ActionBuffers actionsOut)
{
}
}

[Observable]
public Quaternion QuaternionField = new Quaternion(1, 2, 3, 4);
public override void Heuristic(float[] actionsOut)
public override void Heuristic(in ActionBuffers actionsOut)
{
}
}

get { return m_QuaternionField; }
}
public override void Heuristic(float[] actionsOut)
public override void Heuristic(in ActionBuffers actionsOut)
{
}
}

{
using (Measure.ProfilerMarkers(s_Markers))
{
for(var i=0; i<k_MarkerTestSteps; i++)
for (var i = 0; i < k_MarkerTestSteps; i++)
{
RunAgent<CollectObservationsAgent>(k_NumAgentSteps, 7, ObservableAttributeOptions.Ignore);
}
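
For reference, the migrated performance-test agents all follow the same shape; below is a minimal sketch (the class and member names are illustrative, not the exact ones in SensorPerformanceTests.cs), assuming the ActionBuffers-era Unity.MLAgents API and the reflection-based ObservableAttribute sensors:

using Unity.MLAgents;
using Unity.MLAgents.Actuators;
using Unity.MLAgents.Sensors;
using Unity.MLAgents.Sensors.Reflection;
using UnityEngine;

// Illustrative test agent: an [Observable] field is picked up by the reflection
// sensors, CollectObservations writes an explicit observation, and Heuristic now
// takes an ActionBuffers parameter instead of float[].
public class ObservableQuaternionAgent : Agent
{
    [Observable]
    public Quaternion QuaternionField = new Quaternion(1, 2, 3, 4);

    public override void CollectObservations(VectorSensor sensor)
    {
        sensor.AddObservation(new Quaternion(1, 2, 3, 4));
    }

    public override void Heuristic(in ActionBuffers actionsOut)
    {
        // Intentionally empty: the performance tests only measure observation writes.
    }
}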

2
Project/Assets/ML-Agents/Editor/DisableBurstFromMenu.cs


#if UNITY_CLOUD_BUILD
#if UNITY_CLOUD_BUILD
using UnityEditor;
public class DisableBurstFromMenu

16
Project/Assets/ML-Agents/Examples/3DBall/Scripts/Ball3DAgent.cs


using System;
using Unity.MLAgents.Actuators;
using Random = UnityEngine.Random;
public class Ball3DAgent : Agent
{

sensor.AddObservation(m_BallRb.velocity);
}
public override void OnActionReceived(float[] vectorAction)
public override void OnActionReceived(ActionBuffers actionBuffers)
var actionZ = 2f * Mathf.Clamp(vectorAction[0], -1f, 1f);
var actionX = 2f * Mathf.Clamp(vectorAction[1], -1f, 1f);
var actionZ = 2f * Mathf.Clamp(actionBuffers.ContinuousActions[0], -1f, 1f);
var actionX = 2f * Mathf.Clamp(actionBuffers.ContinuousActions[1], -1f, 1f);
if ((gameObject.transform.rotation.z < 0.25f && actionZ > 0f) ||
(gameObject.transform.rotation.z > -0.25f && actionZ < 0f))

SetResetParameters();
}
public override void Heuristic(float[] actionsOut)
public override void Heuristic(in ActionBuffers actionsOut)
actionsOut[0] = -Input.GetAxis("Horizontal");
actionsOut[1] = Input.GetAxis("Vertical");
var continuousActionsOut = actionsOut.ContinuousActions;
continuousActionsOut[0] = -Input.GetAxis("Horizontal");
continuousActionsOut[1] = Input.GetAxis("Vertical");
}
public void SetBall()
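
Put together, the Ball3DAgent migration from float[] to ActionBuffers looks roughly like this; a sketch assembled from the diff above (two continuous actions are assumed to be configured in the Behavior Parameters, and the torque-application body is elided):

public override void OnActionReceived(ActionBuffers actionBuffers)
{
    // Continuous actions now arrive as an ActionSegment<float> rather than a float[].
    var continuousActions = actionBuffers.ContinuousActions;
    var actionZ = 2f * Mathf.Clamp(continuousActions[0], -1f, 1f);
    var actionX = 2f * Mathf.Clamp(continuousActions[1], -1f, 1f);
    // ... rotate the platform and assign rewards exactly as before ...
}

public override void Heuristic(in ActionBuffers actionsOut)
{
    // The heuristic writes into the continuous segment instead of assigning to the array.
    var continuousActionsOut = actionsOut.ContinuousActions;
    continuousActionsOut[0] = -Input.GetAxis("Horizontal");
    continuousActionsOut[1] = Input.GetAxis("Vertical");
}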

9
Project/Assets/ML-Agents/Examples/3DBall/Scripts/Ball3DHardAgent.cs


using UnityEngine;
using Unity.MLAgents;
using Unity.MLAgents.Actuators;
using Unity.MLAgents.Sensors;
public class Ball3DHardAgent : Agent

sensor.AddObservation((ball.transform.position - gameObject.transform.position));
}
public override void OnActionReceived(float[] vectorAction)
public override void OnActionReceived(ActionBuffers actionBuffers)
var actionZ = 2f * Mathf.Clamp(vectorAction[0], -1f, 1f);
var actionX = 2f * Mathf.Clamp(vectorAction[1], -1f, 1f);
var continuousActions = actionBuffers.ContinuousActions;
var actionZ = 2f * Mathf.Clamp(continuousActions[0], -1f, 1f);
var actionX = 2f * Mathf.Clamp(continuousActions[1], -1f, 1f);
if ((gameObject.transform.rotation.z < 0.25f && actionZ > 0f) ||
(gameObject.transform.rotation.z > -0.25f && actionZ < 0f))

506
Project/Assets/ML-Agents/Examples/3DBall/TFModels/3DBall.nn
File diff suppressed because it is too large

566
Project/Assets/ML-Agents/Examples/3DBall/TFModels/3DBallHard.nn
File diff suppressed because it is too large

9
Project/Assets/ML-Agents/Examples/Basic/Scripts/BasicController.cs


using UnityEngine;
using UnityEngine.SceneManagement;
using Unity.MLAgents;
using Unity.MLAgents.Actuators;
using UnityEngine.Serialization;
/// <summary>

/// Controls the movement of the GameObject based on the actions received.
/// </summary>
/// <param name="vectorAction"></param>
public void ApplyAction(float[] vectorAction)
public void ApplyAction(ActionSegment<int> vectorAction)
var movement = (int)vectorAction[0];
var movement = vectorAction[0];
var direction = 0;

if (Academy.Instance.IsCommunicatorOn)
{
// Apply the previous step's actions
ApplyAction(m_Agent.GetAction());
ApplyAction(m_Agent.GetStoredActionBuffers().DiscreteActions);
m_Agent?.RequestDecision();
}
else

// Apply the previous step's actions
ApplyAction(m_Agent.GetAction());
ApplyAction(m_Agent.GetStoredActionBuffers().DiscreteActions);
m_TimeSinceDecision = 0f;
m_Agent?.RequestDecision();
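
The BasicController change swaps Agent.GetAction() for the stored ActionBuffers and treats the action as an int from the start; a minimal sketch of the two changed call sites (the wrapper method name below is illustrative, not from the file):

// Discrete actions arrive as an ActionSegment<int>, so the old (int) cast goes away.
public void ApplyAction(ActionSegment<int> vectorAction)
{
    var movement = vectorAction[0];
    var direction = 0;
    // ... map the branch value to -1/0/+1 and move the agent, as in the original ...
}

// Illustrative wrapper: re-apply the previous step's action, then ask for a new decision.
void ReapplyAndRequestDecision()
{
    ApplyAction(m_Agent.GetStoredActionBuffers().DiscreteActions);
    m_Agent?.RequestDecision();
}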

19
Project/Assets/ML-Agents/Examples/Basic/TFModels/Basic.nn


(Binary diff of the serialized Basic.nn Barracuda model; contents are not human-readable and are omitted here.)

30
Project/Assets/ML-Agents/Examples/Bouncer/Scripts/BouncerAgent.cs


using UnityEngine;
using Unity.MLAgents;
using Unity.MLAgents.Actuators;
using Unity.MLAgents.Sensors;
public class BouncerAgent : Agent

sensor.AddObservation(target.transform.localPosition);
}
public override void OnActionReceived(float[] vectorAction)
public override void OnActionReceived(ActionBuffers actionBuffers)
for (var i = 0; i < vectorAction.Length; i++)
var continuousActions = actionBuffers.ContinuousActions;
for (var i = 0; i < continuousActions.Length; i++)
vectorAction[i] = Mathf.Clamp(vectorAction[i], -1f, 1f);
continuousActions[i] = Mathf.Clamp(continuousActions[i], -1f, 1f);
var x = vectorAction[0];
var y = ScaleAction(vectorAction[1], 0, 1);
var z = vectorAction[2];
var x = continuousActions[0];
var y = ScaleAction(continuousActions[1], 0, 1);
var z = continuousActions[2];
vectorAction[0] * vectorAction[0] +
vectorAction[1] * vectorAction[1] +
vectorAction[2] * vectorAction[2]) / 3f);
continuousActions[0] * continuousActions[0] +
continuousActions[1] * continuousActions[1] +
continuousActions[2] * continuousActions[2]) / 3f);
m_LookDir = new Vector3(x, y, z);
}

}
}
public override void Heuristic(float[] actionsOut)
public override void Heuristic(in ActionBuffers actionsOut)
actionsOut[0] = Input.GetAxis("Horizontal");
actionsOut[1] = Input.GetKey(KeyCode.Space) ? 1.0f : 0.0f;
actionsOut[2] = Input.GetAxis("Vertical");
var continuousActionsOut = actionsOut.ContinuousActions;
continuousActionsOut[0] = Input.GetAxis("Horizontal");
continuousActionsOut[1] = Input.GetKey(KeyCode.Space) ? 1.0f : 0.0f;
continuousActionsOut[2] = Input.GetAxis("Vertical");
}
void Update()

156
Project/Assets/ML-Agents/Examples/Bouncer/TFModels/Bouncer.nn
File diff suppressed because it is too large

55
Project/Assets/ML-Agents/Examples/Crawler/Scripts/CrawlerAgent.cs


using System;
using UnityEngine;
using Unity.MLAgents;
using Unity.MLAgents.Actuators;
using Unity.MLAgentsExamples;
using Unity.MLAgents.Sensors;
using Random = UnityEngine.Random;

Vector3 m_WalkDir; //Direction to the target
Quaternion m_WalkDirLookRot; //Will hold the rotation to our target
[Header("Target To Walk Towards")] [Space(10)]
[Header("Target To Walk Towards")]
[Space(10)]
public TargetController target; //Target the agent will walk towards.
[Header("Body Parts")] [Space(10)] public Transform body;

public Transform leg3Lower;
[Header("Orientation")] [Space(10)]
[Header("Orientation")]
[Space(10)]
//This will be used as a stabilized model space reference point for observations
//Because ragdolls can move erratically during training, using a stabilized reference transform improves learning
public OrientationCubeController orientationCube;

[Header("Reward Functions To Use")] [Space(10)]
[Header("Reward Functions To Use")]
[Space(10)]
[Header("Foot Grounded Visualization")] [Space(10)]
[Header("Foot Grounded Visualization")]
[Space(10)]
public bool useFootGroundedVisualization;
public MeshRenderer foot0;

AddReward(1f);
}
public override void OnActionReceived(float[] vectorAction)
public override void OnActionReceived(ActionBuffers actionBuffers)
var continuousActions = actionBuffers.ContinuousActions;
bpDict[leg0Upper].SetJointTargetRotation(vectorAction[++i], vectorAction[++i], 0);
bpDict[leg1Upper].SetJointTargetRotation(vectorAction[++i], vectorAction[++i], 0);
bpDict[leg2Upper].SetJointTargetRotation(vectorAction[++i], vectorAction[++i], 0);
bpDict[leg3Upper].SetJointTargetRotation(vectorAction[++i], vectorAction[++i], 0);
bpDict[leg0Lower].SetJointTargetRotation(vectorAction[++i], 0, 0);
bpDict[leg1Lower].SetJointTargetRotation(vectorAction[++i], 0, 0);
bpDict[leg2Lower].SetJointTargetRotation(vectorAction[++i], 0, 0);
bpDict[leg3Lower].SetJointTargetRotation(vectorAction[++i], 0, 0);
bpDict[leg0Upper].SetJointTargetRotation(continuousActions[++i], continuousActions[++i], 0);
bpDict[leg1Upper].SetJointTargetRotation(continuousActions[++i], continuousActions[++i], 0);
bpDict[leg2Upper].SetJointTargetRotation(continuousActions[++i], continuousActions[++i], 0);
bpDict[leg3Upper].SetJointTargetRotation(continuousActions[++i], continuousActions[++i], 0);
bpDict[leg0Lower].SetJointTargetRotation(continuousActions[++i], 0, 0);
bpDict[leg1Lower].SetJointTargetRotation(continuousActions[++i], 0, 0);
bpDict[leg2Lower].SetJointTargetRotation(continuousActions[++i], 0, 0);
bpDict[leg3Lower].SetJointTargetRotation(continuousActions[++i], 0, 0);
bpDict[leg0Upper].SetJointStrength(vectorAction[++i]);
bpDict[leg1Upper].SetJointStrength(vectorAction[++i]);
bpDict[leg2Upper].SetJointStrength(vectorAction[++i]);
bpDict[leg3Upper].SetJointStrength(vectorAction[++i]);
bpDict[leg0Lower].SetJointStrength(vectorAction[++i]);
bpDict[leg1Lower].SetJointStrength(vectorAction[++i]);
bpDict[leg2Lower].SetJointStrength(vectorAction[++i]);
bpDict[leg3Lower].SetJointStrength(vectorAction[++i]);
bpDict[leg0Upper].SetJointStrength(continuousActions[++i]);
bpDict[leg1Upper].SetJointStrength(continuousActions[++i]);
bpDict[leg2Upper].SetJointStrength(continuousActions[++i]);
bpDict[leg3Upper].SetJointStrength(continuousActions[++i]);
bpDict[leg0Lower].SetJointStrength(continuousActions[++i]);
bpDict[leg1Lower].SetJointStrength(continuousActions[++i]);
bpDict[leg2Lower].SetJointStrength(continuousActions[++i]);
bpDict[leg3Lower].SetJointStrength(continuousActions[++i]);
}
void FixedUpdate()

{
throw new ArgumentException(
"NaN in movingTowardsDot.\n" +
$" orientationCube.transform.forward: {orientationCube.transform.forward}\n"+
$" body.velocity: {m_JdController.bodyPartsDict[body].rb.velocity}\n"+
$" orientationCube.transform.forward: {orientationCube.transform.forward}\n" +
$" body.velocity: {m_JdController.bodyPartsDict[body].rb.velocity}\n" +
$" maximumWalkingSpeed: {maximumWalkingSpeed}"
);
}

{
throw new ArgumentException(
"NaN in movingTowardsDot.\n" +
$" orientationCube.transform.forward: {orientationCube.transform.forward}\n"+
$" orientationCube.transform.forward: {orientationCube.transform.forward}\n" +
$" body.forward: {body.forward}"
);
}

1001
Project/Assets/ML-Agents/Examples/Crawler/TFModels/CrawlerDynamic.nn
File diff suppressed because it is too large

1001
Project/Assets/ML-Agents/Examples/Crawler/TFModels/CrawlerStatic.nn
File diff suppressed because it is too large

29
Project/Assets/ML-Agents/Examples/FoodCollector/Scripts/FoodCollectorAgent.cs


using System;
using Unity.MLAgents.Actuators;
using Random = UnityEngine.Random;
public class FoodCollectorAgent : Agent
{

return new Color32(r, g, b, 255);
}
public void MoveAgent(float[] act)
public void MoveAgent(ActionSegment<int> act)
{
m_Shoot = false;

gameObject.GetComponentInChildren<Renderer>().material = normalMaterial;
}
public override void OnActionReceived(float[] vectorAction)
public override void OnActionReceived(ActionBuffers actionBuffers)
MoveAgent(vectorAction);
MoveAgent(actionBuffers.DiscreteActions);
public override void Heuristic(float[] actionsOut)
public override void Heuristic(in ActionBuffers actionsOut)
actionsOut[0] = 0f;
actionsOut[1] = 0f;
actionsOut[2] = 0f;
var discreteActionsOut = actionsOut.DiscreteActions;
discreteActionsOut[0] = 0;
discreteActionsOut[1] = 0;
discreteActionsOut[2] = 0;
actionsOut[2] = 2f;
discreteActionsOut[2] = 2;
actionsOut[0] = 1f;
discreteActionsOut[0] = 1;
actionsOut[2] = 1f;
discreteActionsOut[2] = 1;
actionsOut[0] = 2f;
discreteActionsOut[0] = 2;
actionsOut[3] = Input.GetKey(KeyCode.Space) ? 1.0f : 0.0f;
discreteActionsOut[3] = Input.GetKey(KeyCode.Space) ? 1 : 0;
}
public override void OnEpisodeBegin()
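
The FoodCollector heuristic now writes integers into the discrete segment; a sketch of the whole override, with the key-to-branch mapping assumed (the diff only shows the assigned values; branch 3 and the space bar are shown explicitly):

public override void Heuristic(in ActionBuffers actionsOut)
{
    var discreteActionsOut = actionsOut.DiscreteActions;
    // Default every branch to "no-op".
    discreteActionsOut[0] = 0;
    discreteActionsOut[1] = 0;
    discreteActionsOut[2] = 0;
    if (Input.GetKey(KeyCode.D))        // assumed binding
    {
        discreteActionsOut[2] = 2;
    }
    if (Input.GetKey(KeyCode.W))        // assumed binding
    {
        discreteActionsOut[0] = 1;
    }
    if (Input.GetKey(KeyCode.A))        // assumed binding
    {
        discreteActionsOut[2] = 1;
    }
    if (Input.GetKey(KeyCode.S))        // assumed binding
    {
        discreteActionsOut[0] = 2;
    }
    // Branch 3 toggles the laser with the space bar (shown in the diff).
    discreteActionsOut[3] = Input.GetKey(KeyCode.Space) ? 1 : 0;
}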

2
Project/Assets/ML-Agents/Examples/FoodCollector/Scripts/FoodCollectorSettings.cs


// Send stats via SideChannel so that they'll appear in TensorBoard.
// These values get averaged every summary_frequency steps, so we don't
// need to send every Update() call.
if ((Time.frameCount % 100)== 0)
if ((Time.frameCount % 100) == 0)
{
m_Recorder.Add("TotalScore", totalScore);
}

642
Project/Assets/ML-Agents/Examples/FoodCollector/TFModels/FoodCollector.nn
File diff suppressed because it is too large

31
Project/Assets/ML-Agents/Examples/GridWorld/Scripts/GridAgent.cs


using UnityEngine;
using System.Linq;
using Unity.MLAgents;
using Unity.MLAgents.Actuators;
using UnityEngine.Serialization;
public class GridAgent : Agent

m_ResetParams = Academy.Instance.EnvironmentParameters;
}
public override void CollectDiscreteActionMasks(DiscreteActionMasker actionMasker)
public override void WriteDiscreteActionMask(IDiscreteActionMask actionMask)
// Prevents the agent from picking an action that would make it collide with a wall
// Prevents the agent from picking an action that would make it collide with a wall
var positionX = (int)transform.position.x;
var positionZ = (int)transform.position.z;
var maxPosition = (int)m_ResetParams.GetWithDefault("gridSize", 5f) - 1;

actionMasker.SetMask(0, new []{ k_Left});
actionMask.WriteMask(0, new[] { k_Left });
actionMasker.SetMask(0, new []{k_Right});
actionMask.WriteMask(0, new[] { k_Right });
actionMasker.SetMask(0, new []{k_Down});
actionMask.WriteMask(0, new[] { k_Down });
actionMasker.SetMask(0, new []{k_Up});
actionMask.WriteMask(0, new[] { k_Up });
public override void OnActionReceived(float[] vectorAction)
public override void OnActionReceived(ActionBuffers actionBuffers)
var action = Mathf.FloorToInt(vectorAction[0]);
var action = actionBuffers.DiscreteActions[0];
var targetPos = transform.position;
switch (action)

}
}
public override void Heuristic(float[] actionsOut)
public override void Heuristic(in ActionBuffers actionsOut)
actionsOut[0] = k_NoAction;
var discreteActionsOut = actionsOut.DiscreteActions;
discreteActionsOut[0] = k_NoAction;
actionsOut[0] = k_Right;
discreteActionsOut[0] = k_Right;
actionsOut[0] = k_Up;
discreteActionsOut[0] = k_Up;
actionsOut[0] = k_Left;
discreteActionsOut[0] = k_Left;
actionsOut[0] = k_Down;
discreteActionsOut[0] = k_Down;
}
}
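
The GridWorld agent moves from DiscreteActionMasker.SetMask to the IDiscreteActionMask interface; a sketch of the migrated override, assuming the k_Left/k_Right/k_Up/k_Down constants from the file and boundary checks that match the masking comment in the diff (the exact conditions in the file may differ):

public override void WriteDiscreteActionMask(IDiscreteActionMask actionMask)
{
    // Prevents the agent from picking an action that would make it collide with a wall.
    var positionX = (int)transform.position.x;
    var positionZ = (int)transform.position.z;
    var maxPosition = (int)m_ResetParams.GetWithDefault("gridSize", 5f) - 1;

    if (positionX == 0)
    {
        actionMask.WriteMask(0, new[] { k_Left });   // branch 0: disallow "left"
    }
    if (positionX == maxPosition)
    {
        actionMask.WriteMask(0, new[] { k_Right });
    }
    if (positionZ == 0)
    {
        actionMask.WriteMask(0, new[] { k_Down });
    }
    if (positionZ == maxPosition)
    {
        actionMask.WriteMask(0, new[] { k_Up });
    }
}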

1000
Project/Assets/ML-Agents/Examples/GridWorld/TFModels/GridWorld.nn
File diff suppressed because it is too large

23
Project/Assets/ML-Agents/Examples/Hallway/Scripts/HallwayAgent.cs


using System.Collections;
using UnityEngine;
using Unity.MLAgents;
using Unity.MLAgents.Actuators;
using Unity.MLAgents.Sensors;
public class HallwayAgent : Agent

m_GroundRenderer.material = m_GroundMaterial;
}
public void MoveAgent(float[] act)
public void MoveAgent(ActionSegment<int> act)
var action = Mathf.FloorToInt(act[0]);
var action = act[0];
switch (action)
{
case 1:

m_AgentRb.AddForce(dirToGo * m_HallwaySettings.agentRunSpeed, ForceMode.VelocityChange);
}
public override void OnActionReceived(float[] vectorAction)
public override void OnActionReceived(ActionBuffers actionBuffers)
MoveAgent(vectorAction);
MoveAgent(actionBuffers.DiscreteActions);
}
void OnCollisionEnter(Collision col)

}
}
public override void Heuristic(float[] actionsOut)
public override void Heuristic(in ActionBuffers actionsOut)
actionsOut[0] = 0;
var discreteActionsOut = actionsOut.DiscreteActions;
discreteActionsOut[0] = 0;
actionsOut[0] = 3;
discreteActionsOut[0] = 3;
actionsOut[0] = 1;
discreteActionsOut[0] = 1;
actionsOut[0] = 4;
discreteActionsOut[0] = 4;
actionsOut[0] = 2;
discreteActionsOut[0] = 2;
}
}

992
Project/Assets/ML-Agents/Examples/Hallway/TFModels/Hallway.nn
File diff suppressed because it is too large

23
Project/Assets/ML-Agents/Examples/PushBlock/Scripts/PushAgentBasic.cs


using System.Collections;
using UnityEngine;
using Unity.MLAgents;
using Unity.MLAgents.Actuators;
public class PushAgentBasic : Agent
{

/// <summary>
/// Moves the agent according to the selected action.
/// </summary>
public void MoveAgent(float[] act)
public void MoveAgent(ActionSegment<int> act)
var action = Mathf.FloorToInt(act[0]);
var action = act[0];
switch (action)
{

/// <summary>
/// Called every step of the engine. Here the agent takes an action.
/// </summary>
public override void OnActionReceived(float[] vectorAction)
public override void OnActionReceived(ActionBuffers actionBuffers)
MoveAgent(vectorAction);
MoveAgent(actionBuffers.DiscreteActions);
public override void Heuristic(float[] actionsOut)
public override void Heuristic(in ActionBuffers actionsOut)
actionsOut[0] = 0;
var discreteActionsOut = actionsOut.DiscreteActions;
discreteActionsOut[0] = 0;
actionsOut[0] = 3;
discreteActionsOut[0] = 3;
actionsOut[0] = 1;
discreteActionsOut[0] = 1;
actionsOut[0] = 4;
discreteActionsOut[0] = 4;
actionsOut[0] = 2;
discreteActionsOut[0] = 2;
}
}

1000
Project/Assets/ML-Agents/Examples/PushBlock/TFModels/PushBlock.nn
File diff suppressed because it is too large

23
Project/Assets/ML-Agents/Examples/Pyramids/Scripts/PyramidAgent.cs


using UnityEngine;
using Random = UnityEngine.Random;
using Unity.MLAgents;
using Unity.MLAgents.Actuators;
using Unity.MLAgents.Sensors;
public class PyramidAgent : Agent

}
}
public void MoveAgent(float[] act)
public void MoveAgent(ActionSegment<int> act)
var action = Mathf.FloorToInt(act[0]);
var action = act[0];
switch (action)
{
case 1:

m_AgentRb.AddForce(dirToGo * 2f, ForceMode.VelocityChange);
}
public override void OnActionReceived(float[] vectorAction)
public override void OnActionReceived(ActionBuffers actionBuffers)
MoveAgent(vectorAction);
MoveAgent(actionBuffers.DiscreteActions);
public override void Heuristic(float[] actionsOut)
public override void Heuristic(in ActionBuffers actionsOut)
actionsOut[0] = 0;
var discreteActionsOut = actionsOut.DiscreteActions;
discreteActionsOut[0] = 0;
actionsOut[0] = 3;
discreteActionsOut[0] = 3;
actionsOut[0] = 1;
discreteActionsOut[0] = 1;
actionsOut[0] = 4;
discreteActionsOut[0] = 4;
actionsOut[0] = 2;
discreteActionsOut[0] = 2;
}
}

997
Project/Assets/ML-Agents/Examples/Pyramids/TFModels/Pyramids.nn
File diff suppressed because it is too large

12
Project/Assets/ML-Agents/Examples/Reacher/Scripts/ReacherAgent.cs


using UnityEngine;
using Unity.MLAgents;
using Unity.MLAgents.Actuators;
using Unity.MLAgents.Sensors;
public class ReacherAgent : Agent

/// <summary>
/// The agent's four actions correspond to torques on each of the two joints.
/// </summary>
public override void OnActionReceived(float[] vectorAction)
public override void OnActionReceived(ActionBuffers actionBuffers)
var torqueX = Mathf.Clamp(vectorAction[0], -1f, 1f) * 150f;
var torqueZ = Mathf.Clamp(vectorAction[1], -1f, 1f) * 150f;
var torqueX = Mathf.Clamp(actionBuffers.ContinuousActions[0], -1f, 1f) * 150f;
var torqueZ = Mathf.Clamp(actionBuffers.ContinuousActions[1], -1f, 1f) * 150f;
torqueX = Mathf.Clamp(vectorAction[2], -1f, 1f) * 150f;
torqueZ = Mathf.Clamp(vectorAction[3], -1f, 1f) * 150f;
torqueX = Mathf.Clamp(actionBuffers.ContinuousActions[2], -1f, 1f) * 150f;
torqueZ = Mathf.Clamp(actionBuffers.ContinuousActions[3], -1f, 1f) * 150f;
m_RbB.AddTorque(new Vector3(torqueX, 0f, torqueZ));
}

549
Project/Assets/ML-Agents/Examples/Reacher/TFModels/Reacher.nn
File diff suppressed because it is too large

2
Project/Assets/ML-Agents/Examples/SharedAssets/Scripts/AdjustTrainingTimescale.cs


using UnityEngine;
namespace MLAgentsExamples
namespace MLAgentsExamples
{
public class AdjustTrainingTimescale : MonoBehaviour
{

4
Project/Assets/ML-Agents/Examples/SharedAssets/Scripts/DirectionIndicator.cs


using UnityEngine;
using UnityEngine;
public bool updatedByAgent; //should this be updated by the agent? If not, it will use local settings
public Transform transformToFollow; //ex: hips or body
public Transform targetToLookAt; //target in the scene the indicator will point to

14
Project/Assets/ML-Agents/Examples/SharedAssets/Scripts/JointDriveController.cs


[System.Serializable]
public class BodyPart
{
[Header("Body Part Info")][Space(10)] public ConfigurableJoint joint;
[Header("Body Part Info")] [Space(10)] public ConfigurableJoint joint;
[Header("Ground & Target Contact")][Space(10)]
[Header("Ground & Target Contact")]
[Space(10)]
public GroundContact groundContact;
public TargetContact targetContact;

[Header("Current Joint Settings")][Space(10)]
[Header("Current Joint Settings")]
[Space(10)]
public Vector3 currentEularJointRotation;
[HideInInspector] public float currentStrength;

[Header("Other Debug Info")][Space(10)]
[Header("Other Debug Info")]
[Space(10)]
public Vector3 currentJointForce;
public float currentJointForceSqrMag;

public class JointDriveController : MonoBehaviour
{
[Header("Joint Drive Settings")][Space(10)]
[Header("Joint Drive Settings")]
[Space(10)]
public float maxJointSpring;
public float jointDampen;

20
Project/Assets/ML-Agents/Examples/SharedAssets/Scripts/ModelOverrider.cs


/// </summary>
public class ModelOverrider : MonoBehaviour
{
HashSet<string> k_SupportedExtensions = new HashSet<string>{"nn", "onnx"};
HashSet<string> k_SupportedExtensions = new HashSet<string> { "nn", "onnx" };
const string k_CommandLineModelOverrideFlag = "--mlagents-override-model";
const string k_CommandLineModelOverrideDirectoryFlag = "--mlagents-override-model-directory";
const string k_CommandLineModelOverrideExtensionFlag = "--mlagents-override-model-extension";

int TotalCompletedEpisodes
{
get { return m_PreviousAgentCompletedEpisodes + (m_Agent == null ? 0 : m_Agent.CompletedEpisodes); }
get { return m_PreviousAgentCompletedEpisodes + (m_Agent == null ? 0 : m_Agent.CompletedEpisodes); }
}
int TotalNumSteps

public bool HasOverrides
{
get { return m_BehaviorNameOverrides.Count > 0 || !string.IsNullOrEmpty(m_BehaviorNameOverrideDirectory); }
get { return m_BehaviorNameOverrides.Count > 0 || !string.IsNullOrEmpty(m_BehaviorNameOverrideDirectory); }
}
public static string GetOverrideBehaviorName(string originalBehaviorName)

var args = commandLineArgsOverride ?? Environment.GetCommandLineArgs();
for (var i = 0; i < args.Length; i++)
{
if (args[i] == k_CommandLineModelOverrideFlag && i < args.Length-2)
if (args[i] == k_CommandLineModelOverrideFlag && i < args.Length - 2)
else if (args[i] == k_CommandLineModelOverrideDirectoryFlag && i < args.Length-1)
else if (args[i] == k_CommandLineModelOverrideDirectoryFlag && i < args.Length - 1)
else if (args[i] == k_CommandLineModelOverrideExtensionFlag && i < args.Length-1)
else if (args[i] == k_CommandLineModelOverrideExtensionFlag && i < args.Length - 1)
{
m_OverrideExtension = args[i + 1].Trim().ToLower();
var isKnownExtension = k_SupportedExtensions.Contains(m_OverrideExtension);

#endif
}
}
else if (args[i] == k_CommandLineQuitAfterEpisodesFlag && i < args.Length-1)
else if (args[i] == k_CommandLineQuitAfterEpisodesFlag && i < args.Length - 1)
{
Int32.TryParse(args[i + 1], out maxEpisodes);
}

{
assetPath = m_BehaviorNameOverrides[behaviorName];
}
else if(!string.IsNullOrEmpty(m_BehaviorNameOverrideDirectory))
else if (!string.IsNullOrEmpty(m_BehaviorNameOverrideDirectory))
{
assetPath = Path.Combine(m_BehaviorNameOverrideDirectory, $"{behaviorName}.{m_OverrideExtension}");
}

{
model = File.ReadAllBytes(assetPath);
}
catch(IOException)
catch (IOException)
{
Debug.Log($"Couldn't load file {assetPath} at full path {Path.GetFullPath(assetPath)}", this);
// Cache the null so we don't repeatedly try to load a missing file

if (!overrideOk && m_QuitOnLoadFailure)
{
if(!string.IsNullOrEmpty(overrideError))
if (!string.IsNullOrEmpty(overrideError))
{
Debug.LogWarning(overrideError);
}

6
Project/Assets/ML-Agents/Examples/SharedAssets/Scripts/Monitor.cs


using System.Collections.Generic;
using System.Collections.Generic;
using System.Linq;
using UnityEngine;

var displayValues = s_DisplayTransformValues[target];
var index = 0;
var orderedKeys = displayValues.Keys.OrderBy(x => - displayValues[x].time);
var orderedKeys = displayValues.Keys.OrderBy(x => -displayValues[x].time);
foreach (var key in orderedKeys)
{
s_KeyStyle.alignment = TextAnchor.MiddleRight;

s_RedStyle = s_ColorStyle[5];
}
}
}
}

2
Project/Assets/ML-Agents/Examples/SharedAssets/Scripts/OrientationCubeController.cs


using UnityEngine;
using UnityEngine;
namespace Unity.MLAgentsExamples
{

2
Project/Assets/ML-Agents/Examples/SharedAssets/Scripts/ProjectSettingsOverrides.cs


m_OriginalMaximumDeltaTime = Time.maximumDeltaTime;
m_OriginalSolverIterations = Physics.defaultSolverIterations;
m_OriginalSolverVelocityIterations = Physics.defaultSolverVelocityIterations;
m_OriginalReuseCollisionCallbacks = Physics.reuseCollisionCallbacks ;
m_OriginalReuseCollisionCallbacks = Physics.reuseCollisionCallbacks;
// Override
Physics.gravity *= gravityMultiplier;

2
Project/Assets/ML-Agents/Examples/SharedAssets/Scripts/SensorBase.cs


}
/// <inheritdoc/>
public void Update() {}
public void Update() { }
/// <inheritdoc/>
public void Reset() { }

8
Project/Assets/ML-Agents/Examples/SharedAssets/Scripts/TargetController.cs


using UnityEngine;
using UnityEngine;
using Random = UnityEngine.Random;
using Unity.MLAgents;
using UnityEngine.Events;

/// </summary>
public class TargetController : MonoBehaviour
{
[Header("Target Fell Protection")]
public bool respawnIfFallsOffPlatform = true; //If the target falls off the platform, reset the position.
public float fallDistance = 5; //distance below the starting height that will trigger a respawn

31
Project/Assets/ML-Agents/Examples/Soccer/Scripts/AgentSoccer.cs


using System;
using UnityEngine;
using Unity.MLAgents;
using Unity.MLAgents.Actuators;
using Unity.MLAgents.Policies;
public class AgentSoccer : Agent

m_ResetParams = Academy.Instance.EnvironmentParameters;
}
public void MoveAgent(float[] act)
public void MoveAgent(ActionSegment<int> act)
{
var dirToGo = Vector3.zero;
var rotateDir = Vector3.zero;

var forwardAxis = (int)act[0];
var rightAxis = (int)act[1];
var rotateAxis = (int)act[2];
var forwardAxis = act[0];
var rightAxis = act[1];
var rotateAxis = act[2];
switch (forwardAxis)
{

ForceMode.VelocityChange);
}
public override void OnActionReceived(float[] vectorAction)
public override void OnActionReceived(ActionBuffers actionBuffers)
{
if (position == Position.Goalie)

// Existential penalty cumulant for Generic
timePenalty -= m_Existential;
}
MoveAgent(vectorAction);
MoveAgent(actionBuffers.DiscreteActions);
public override void Heuristic(float[] actionsOut)
public override void Heuristic(in ActionBuffers actionsOut)
Array.Clear(actionsOut, 0, actionsOut.Length);
var discreteActionsOut = actionsOut.DiscreteActions;
discreteActionsOut.Clear();
actionsOut[0] = 1f;
discreteActionsOut[0] = 1;
actionsOut[0] = 2f;
discreteActionsOut[0] = 2;
actionsOut[2] = 1f;
discreteActionsOut[2] = 1;
actionsOut[2] = 2f;
discreteActionsOut[2] = 2;
actionsOut[1] = 1f;
discreteActionsOut[1] = 1;
actionsOut[1] = 2f;
discreteActionsOut[1] = 2;
}
}
/// <summary>
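
In AgentSoccer, clearing the heuristic output changes from Array.Clear (which needs a real array) to the segment's own Clear(); a consolidated sketch with the key bindings assumed and the branch indices taken from the diff:

public override void Heuristic(in ActionBuffers actionsOut)
{
    var discreteActionsOut = actionsOut.DiscreteActions;
    // ActionSegment<int>.Clear() replaces Array.Clear(actionsOut, 0, actionsOut.Length).
    discreteActionsOut.Clear();
    // Branch 0: forward/backward, branch 2: rotate, branch 1: strafe (keys assumed).
    if (Input.GetKey(KeyCode.W)) { discreteActionsOut[0] = 1; }
    if (Input.GetKey(KeyCode.S)) { discreteActionsOut[0] = 2; }
    if (Input.GetKey(KeyCode.A)) { discreteActionsOut[2] = 1; }
    if (Input.GetKey(KeyCode.D)) { discreteActionsOut[2] = 2; }
    if (Input.GetKey(KeyCode.E)) { discreteActionsOut[1] = 1; }
    if (Input.GetKey(KeyCode.Q)) { discreteActionsOut[1] = 2; }
}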

1001
Project/Assets/ML-Agents/Examples/Soccer/TFModels/SoccerTwos.nn
File diff suppressed because it is too large

6
Project/Assets/ML-Agents/Examples/Startup/Scripts/Startup.cs


// no scene environment variable is found.
var args = Environment.GetCommandLineArgs();
Console.WriteLine("Command line arguments passed: " + String.Join(" ", args));
for (int i = 0; i < args.Length; i++) {
if (args [i] == k_SceneCommandLineFlag && i < args.Length - 1) {
for (int i = 0; i < args.Length; i++)
{
if (args[i] == k_SceneCommandLineFlag && i < args.Length - 1)
{
sceneName = args[i + 1];
}
}

4
Project/Assets/ML-Agents/Examples/Template/Scripts/TemplateAgent.cs


using UnityEngine;
using Unity.MLAgents;
using Unity.MLAgents.Actuators;
using Unity.MLAgents.Sensors;
public class TemplateAgent : Agent

}
public override void OnActionReceived(float[] vectorAction)
public override void OnActionReceived(ActionBuffers actionBuffers)
{
}

14
Project/Assets/ML-Agents/Examples/Tennis/Scripts/HitWall.cs


public bool net;
public enum FloorHit
{
Service,
FloorHitUnset,
FloorAHit,
FloorBHit
}
{
Service,
FloorHitUnset,
FloorAHit,
FloorBHit
}
public FloorHit lastFloorHit;

lastFloorHit = FloorHit.Service;
net = false;
}
void AgentAWins()
{
m_AgentA.SetReward(1);

20
Project/Assets/ML-Agents/Examples/Tennis/Scripts/TennisAgent.cs


using UnityEngine;
using UnityEngine.UI;
using Unity.MLAgents;
using Unity.MLAgents.Actuators;
using Unity.MLAgents.Sensors;
public class TennisAgent : Agent

sensor.AddObservation(m_InvertMult * gameObject.transform.rotation.z);
}
public override void OnActionReceived(float[] vectorAction)
public override void OnActionReceived(ActionBuffers actionBuffers)
var moveX = Mathf.Clamp(vectorAction[0], -1f, 1f) * m_InvertMult;
var moveY = Mathf.Clamp(vectorAction[1], -1f, 1f);
var rotate = Mathf.Clamp(vectorAction[2], -1f, 1f) * m_InvertMult;
var continuousActions = actionBuffers.ContinuousActions;
var moveX = Mathf.Clamp(continuousActions[0], -1f, 1f) * m_InvertMult;
var moveY = Mathf.Clamp(continuousActions[1], -1f, 1f);
var rotate = Mathf.Clamp(continuousActions[2], -1f, 1f) * m_InvertMult;
if (moveY > 0.5 && transform.position.y - transform.parent.transform.position.y < -1.5f)
{

m_TextComponent.text = score.ToString();
}
public override void Heuristic(float[] actionsOut)
public override void Heuristic(in ActionBuffers actionsOut)
actionsOut[0] = Input.GetAxis("Horizontal"); // Racket Movement
actionsOut[1] = Input.GetKey(KeyCode.Space) ? 1f : 0f; // Racket Jumping
actionsOut[2] = Input.GetAxis("Vertical"); // Racket Rotation
var continuousActionsOut = actionsOut.ContinuousActions;
continuousActionsOut[0] = Input.GetAxis("Horizontal"); // Racket Movement
continuousActionsOut[1] = Input.GetKey(KeyCode.Space) ? 1f : 0f; // Racket Jumping
continuousActionsOut[2] = Input.GetAxis("Vertical"); // Racket Rotation
}
public override void OnEpisodeBegin()

2
Project/Assets/ML-Agents/Examples/Walker/Demos/ExpertWalkerDy.demo.meta


guid: 9f87b3070a0fd4a1e838131a91399c2f
ScriptedImporter:
fileIDToRecycleName:
11400000: Assets/Demonstrations/ExpertWalkerDy.demo
11400002: Assets/ML-Agents/Examples/Walker/Demos/ExpertWalkerDy.demo
externalObjects: {}
userData: ' (Unity.MLAgents.Demonstrations.DemonstrationSummary)'
assetBundleName:

2
Project/Assets/ML-Agents/Examples/Walker/Demos/ExpertWalkerDyVS.demo.meta


guid: a4b02e2c382c247919eb63ce72e90a3b
ScriptedImporter:
fileIDToRecycleName:
11400000: Assets/Demonstrations/ExpertWalkerDyVS.demo
11400002: Assets/ML-Agents/Examples/Walker/Demos/ExpertWalkerDyVS.demo
externalObjects: {}
userData: ' (Unity.MLAgents.Demonstrations.DemonstrationSummary)'
assetBundleName:

2
Project/Assets/ML-Agents/Examples/Walker/Demos/ExpertWalkerStVS.demo.meta


guid: edcbb505552464c5c829886a4a3817dd
ScriptedImporter:
fileIDToRecycleName:
11400000: Assets/Demonstrations/ExpertWalkerStVS.demo
11400002: Assets/ML-Agents/Examples/Walker/Demos/ExpertWalkerStVS.demo
externalObjects: {}
userData: ' (Unity.MLAgents.Demonstrations.DemonstrationSummary)'
assetBundleName:

2
Project/Assets/ML-Agents/Examples/Walker/Demos/ExpertWalkerSta.demo.meta


guid: 1f3a5d62e6aea4b5eb053ac33f11b06d
ScriptedImporter:
fileIDToRecycleName:
11400000: Assets/Demonstrations/ExpertWalkerSta.demo
11400002: Assets/ML-Agents/Examples/Walker/Demos/ExpertWalkerSta.demo
externalObjects: {}
userData: ' (Unity.MLAgents.Demonstrations.DemonstrationSummary)'
assetBundleName:

57
Project/Assets/ML-Agents/Examples/Walker/Scripts/WalkerAgent.cs


using System;
using UnityEngine;
using Unity.MLAgents;
using Unity.MLAgents.Actuators;
using Unity.MLAgentsExamples;
using Unity.MLAgents.Sensors;
using BodyPart = Unity.MLAgentsExamples.BodyPart;

}
}
public override void OnActionReceived(float[] vectorAction)
public override void OnActionReceived(ActionBuffers actionBuffers)
bpDict[chest].SetJointTargetRotation(vectorAction[++i], vectorAction[++i], vectorAction[++i]);
bpDict[spine].SetJointTargetRotation(vectorAction[++i], vectorAction[++i], vectorAction[++i]);
var continuousActions = actionBuffers.ContinuousActions;
bpDict[chest].SetJointTargetRotation(continuousActions[++i], continuousActions[++i], continuousActions[++i]);
bpDict[spine].SetJointTargetRotation(continuousActions[++i], continuousActions[++i], continuousActions[++i]);
bpDict[thighL].SetJointTargetRotation(vectorAction[++i], vectorAction[++i], 0);
bpDict[thighR].SetJointTargetRotation(vectorAction[++i], vectorAction[++i], 0);
bpDict[shinL].SetJointTargetRotation(vectorAction[++i], 0, 0);
bpDict[shinR].SetJointTargetRotation(vectorAction[++i], 0, 0);
bpDict[footR].SetJointTargetRotation(vectorAction[++i], vectorAction[++i], vectorAction[++i]);
bpDict[footL].SetJointTargetRotation(vectorAction[++i], vectorAction[++i], vectorAction[++i]);
bpDict[thighL].SetJointTargetRotation(continuousActions[++i], continuousActions[++i], 0);
bpDict[thighR].SetJointTargetRotation(continuousActions[++i], continuousActions[++i], 0);
bpDict[shinL].SetJointTargetRotation(continuousActions[++i], 0, 0);
bpDict[shinR].SetJointTargetRotation(continuousActions[++i], 0, 0);
bpDict[footR].SetJointTargetRotation(continuousActions[++i], continuousActions[++i], continuousActions[++i]);
bpDict[footL].SetJointTargetRotation(continuousActions[++i], continuousActions[++i], continuousActions[++i]);
bpDict[armL].SetJointTargetRotation(vectorAction[++i], vectorAction[++i], 0);
bpDict[armR].SetJointTargetRotation(vectorAction[++i], vectorAction[++i], 0);
bpDict[forearmL].SetJointTargetRotation(vectorAction[++i], 0, 0);
bpDict[forearmR].SetJointTargetRotation(vectorAction[++i], 0, 0);
bpDict[head].SetJointTargetRotation(vectorAction[++i], vectorAction[++i], 0);
bpDict[armL].SetJointTargetRotation(continuousActions[++i], continuousActions[++i], 0);
bpDict[armR].SetJointTargetRotation(continuousActions[++i], continuousActions[++i], 0);
bpDict[forearmL].SetJointTargetRotation(continuousActions[++i], 0, 0);
bpDict[forearmR].SetJointTargetRotation(continuousActions[++i], 0, 0);
bpDict[head].SetJointTargetRotation(continuousActions[++i], continuousActions[++i], 0);
bpDict[chest].SetJointStrength(vectorAction[++i]);
bpDict[spine].SetJointStrength(vectorAction[++i]);
bpDict[head].SetJointStrength(vectorAction[++i]);
bpDict[thighL].SetJointStrength(vectorAction[++i]);
bpDict[shinL].SetJointStrength(vectorAction[++i]);
bpDict[footL].SetJointStrength(vectorAction[++i]);
bpDict[thighR].SetJointStrength(vectorAction[++i]);
bpDict[shinR].SetJointStrength(vectorAction[++i]);
bpDict[footR].SetJointStrength(vectorAction[++i]);
bpDict[armL].SetJointStrength(vectorAction[++i]);
bpDict[forearmL].SetJointStrength(vectorAction[++i]);
bpDict[armR].SetJointStrength(vectorAction[++i]);
bpDict[forearmR].SetJointStrength(vectorAction[++i]);
bpDict[chest].SetJointStrength(continuousActions[++i]);
bpDict[spine].SetJointStrength(continuousActions[++i]);
bpDict[head].SetJointStrength(continuousActions[++i]);
bpDict[thighL].SetJointStrength(continuousActions[++i]);
bpDict[shinL].SetJointStrength(continuousActions[++i]);
bpDict[footL].SetJointStrength(continuousActions[++i]);
bpDict[thighR].SetJointStrength(continuousActions[++i]);
bpDict[shinR].SetJointStrength(continuousActions[++i]);
bpDict[footR].SetJointStrength(continuousActions[++i]);
bpDict[armL].SetJointStrength(continuousActions[++i]);
bpDict[forearmL].SetJointStrength(continuousActions[++i]);
bpDict[armR].SetJointStrength(continuousActions[++i]);
bpDict[forearmR].SetJointStrength(continuousActions[++i]);
}
//Update OrientationCube and DirectionIndicator
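
The Walker (and Crawler) migrations keep the running ++i indexing; only the container type changes, since ActionSegment<float> exposes the same indexer and C# evaluates call arguments left to right, so the increment order stays well defined. A condensed sketch of the pattern (the i = -1 initialization and the bodyPartsDict lookup are assumed from the surrounding code, and most joints are elided):

public override void OnActionReceived(ActionBuffers actionBuffers)
{
    var bpDict = m_JdController.bodyPartsDict;
    var continuousActions = actionBuffers.ContinuousActions;
    var i = -1;  // assumed starting index; each ++i hands out the next action value

    // Same joint wiring as before, just indexed through the ActionSegment.
    bpDict[chest].SetJointTargetRotation(continuousActions[++i], continuousActions[++i], continuousActions[++i]);
    bpDict[spine].SetJointTargetRotation(continuousActions[++i], continuousActions[++i], continuousActions[++i]);
    // ... remaining SetJointTargetRotation and SetJointStrength calls follow the same pattern ...
}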

31
Project/Assets/ML-Agents/Examples/WallJump/Scripts/WallJumpAgent.cs


using UnityEngine;
using Unity.MLAgents;
using Unity.Barracuda;
using Unity.MLAgents.Actuators;
using Unity.MLAgents.Sensors;
using Unity.MLAgentsExamples;