
Merge remote-tracking branch 'origin/develop' into try-tf2-support

/develop-gpu-test
Chris Elion, 5 years ago
Current commit: fca51de8
151 changed files with 7,812 additions and 3,424 deletions
.github/ISSUE_TEMPLATE/bug_report.md | 1
.github/stale.yml | 1
.pre-commit-config.yaml | 14
.yamato/csharp-tests.yml | 7
.yamato/standalone-build-test.yml | 7
UnitySDK/Assets/ML-Agents/Editor/Tests/DemonstrationTests.cs | 78
UnitySDK/Assets/ML-Agents/Editor/Tests/EditModeTestInternalBrainTensorGenerator.cs | 47
UnitySDK/Assets/ML-Agents/Editor/Tests/MLAgentsEditModeTest.cs | 13
UnitySDK/Assets/ML-Agents/Editor/Tests/RayPerceptionTests.cs | 1
UnitySDK/Assets/ML-Agents/Editor/Tests/StandaloneBuildTest.cs | 6
UnitySDK/Assets/ML-Agents/Examples/3DBall/Scripts/Ball3DAgent.cs | 2
UnitySDK/Assets/ML-Agents/Examples/3DBall/Scripts/Ball3DHardAgent.cs | 2
UnitySDK/Assets/ML-Agents/Examples/Basic/Scripts/BasicAgent.cs | 2
UnitySDK/Assets/ML-Agents/Examples/Bouncer/Scripts/BouncerAgent.cs | 2
UnitySDK/Assets/ML-Agents/Examples/Crawler/Scripts/CrawlerAgent.cs | 2
UnitySDK/Assets/ML-Agents/Examples/FoodCollector/Scripts/FoodCollectorAgent.cs | 2
UnitySDK/Assets/ML-Agents/Examples/GridWorld/Scenes/GridWorld.unity | 4
UnitySDK/Assets/ML-Agents/Examples/GridWorld/Scripts/GridAgent.cs | 2
UnitySDK/Assets/ML-Agents/Examples/Hallway/Scripts/HallwayAgent.cs | 2
UnitySDK/Assets/ML-Agents/Examples/PushBlock/Scripts/PushAgentBasic.cs | 2
UnitySDK/Assets/ML-Agents/Examples/Pyramids/Scripts/PyramidAgent.cs | 2
UnitySDK/Assets/ML-Agents/Examples/Reacher/Scripts/ReacherAgent.cs | 2
UnitySDK/Assets/ML-Agents/Examples/Soccer/Scripts/AgentSoccer.cs | 2
UnitySDK/Assets/ML-Agents/Examples/Template/Scripts/TemplateAgent.cs | 2
UnitySDK/Assets/ML-Agents/Examples/Tennis/Scripts/TennisAgent.cs | 2
UnitySDK/Assets/ML-Agents/Examples/Walker/Scripts/WalkerAgent.cs | 2
UnitySDK/Assets/ML-Agents/Examples/WallJump/Scripts/WallJumpAgent.cs | 2
UnitySDK/Assets/ML-Agents/Scripts/Academy.cs | 2
UnitySDK/Assets/ML-Agents/Scripts/Agent.cs | 234
UnitySDK/Assets/ML-Agents/Scripts/DemonstrationRecorder.cs | 25
UnitySDK/Assets/ML-Agents/Scripts/DemonstrationStore.cs | 15
UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/AgentAction.cs | 77
UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/AgentInfo.cs | 166
UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/BrainParameters.cs | 72
UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/Observation.cs.meta | 2
UnitySDK/Assets/ML-Agents/Scripts/Grpc/GrpcExtensions.cs | 51
UnitySDK/Assets/ML-Agents/Scripts/InferenceBrain/GeneratorImpl.cs | 40
UnitySDK/Assets/ML-Agents/Scripts/InferenceBrain/ModelRunner.cs | 2
UnitySDK/Assets/ML-Agents/Scripts/InferenceBrain/TensorGenerator.cs | 38
UnitySDK/Assets/ML-Agents/Scripts/Sensor/CameraSensor.cs | 8
UnitySDK/Assets/ML-Agents/Scripts/Sensor/ISensor.cs | 37
UnitySDK/Assets/ML-Agents/Scripts/Sensor/RenderTextureSensor.cs | 10
UnitySDK/Assets/ML-Agents/Scripts/Sensor/SensorBase.cs | 27
UnitySDK/Assets/ML-Agents/Scripts/Utilities.cs | 61
demos/Expert3DBall.demo | 1001
demos/Expert3DBallHard.demo | 1001
demos/ExpertBasic.demo | 294
demos/ExpertBouncer.demo | 403
demos/ExpertCrawlerDyn.demo | 993
demos/ExpertFood.demo | 1000
demos/ExpertGrid.demo | 485
demos/ExpertHallway.demo | 978
demos/ExpertPyramid.demo | 1001
demos/ExpertReacher.demo | 1001
demos/ExpertTennis.demo | 1001
docs/Glossary.md | 2
docs/Learning-Environment-Create-New.md | 2
docs/Migrating.md | 10
docs/Python-API.md | 8
docs/Readme.md | 2
gym-unity/gym_unity/envs/__init__.py | 12
gym-unity/gym_unity/tests/test_gym.py | 6
ml-agents-envs/mlagents/envs/action_info.py | 1
ml-agents-envs/mlagents/envs/base_unity_environment.py | 6
ml-agents-envs/mlagents/envs/brain.py | 140
ml-agents-envs/mlagents/envs/communicator_objects/agent_action_pb2.py | 27
ml-agents-envs/mlagents/envs/communicator_objects/agent_action_pb2.pyi | 17
ml-agents-envs/mlagents/envs/communicator_objects/agent_info_pb2.py | 56
ml-agents-envs/mlagents/envs/communicator_objects/agent_info_pb2.pyi | 29
ml-agents-envs/mlagents/envs/communicator_objects/brain_parameters_pb2.py | 28
ml-agents-envs/mlagents/envs/communicator_objects/brain_parameters_pb2.pyi | 8
ml-agents-envs/mlagents/envs/env_manager.py | 7
ml-agents-envs/mlagents/envs/environment.py | 115
ml-agents-envs/mlagents/envs/mock_communicator.py | 39
ml-agents-envs/mlagents/envs/rpc_communicator.py | 1
ml-agents-envs/mlagents/envs/simple_env_manager.py | 4
ml-agents-envs/mlagents/envs/subprocess_env_manager.py | 6
ml-agents-envs/mlagents/envs/tests/test_brain.py | 25
ml-agents-envs/mlagents/envs/tests/test_envs.py | 4
ml-agents-envs/mlagents/envs/timers.py | 13
ml-agents/mlagents/trainers/barracuda.py | 1
ml-agents/mlagents/trainers/bc/trainer.py | 12
ml-agents/mlagents/trainers/components/reward_signals/extrinsic/signal.py | 18
ml-agents/mlagents/trainers/components/reward_signals/gail/model.py | 6
ml-agents/mlagents/trainers/components/reward_signals/gail/signal.py | 22
ml-agents/mlagents/trainers/learn.py | 12
ml-agents/mlagents/trainers/models.py | 13
ml-agents/mlagents/trainers/ppo/models.py | 9
ml-agents/mlagents/trainers/ppo/multi_gpu_policy.py | 11
ml-agents/mlagents/trainers/ppo/trainer.py | 8
ml-agents/mlagents/trainers/rl_trainer.py | 12
ml-agents/mlagents/trainers/sac/models.py | 45
ml-agents/mlagents/trainers/sac/policy.py | 6
ml-agents/mlagents/trainers/sac/trainer.py | 14
ml-agents/mlagents/trainers/tensorflow_to_barracuda.py | 1
ml-agents/mlagents/trainers/tests/mock_brain.py | 10
ml-agents/mlagents/trainers/tests/test.demo | 232

.github/ISSUE_TEMPLATE/bug_report.md (1 line changed)


**Environment (please complete the following information):**
- OS + version: [e.g. Windows 10]
- _ML-Agents version_: (e.g. ML-Agents v0.8, or latest `develop` branch from source)
- _TensorFlow version_: (you can run `pip3 show tensorflow` to get this)
- _Environment_: (which example environment you used to reproduce the error)
**NOTE:** We are unable to help reproduce bugs with custom environments. Please attempt to reproduce your issue with one of the example environments, or provide a minimal patch to one of the environments needed to reproduce the issue.

.github/stale.yml (1 line changed)


- bug
- request
- help-wanted
- announcement

.pre-commit-config.yaml (14 lines changed)


hooks:
- id: python-check-mock-methods
- repo: https://github.com/pre-commit/mirrors-pylint
rev: v2.4.3
hooks:
- id: pylint
exclude: >
(?x)^(
.*_pb2.py|
.*_pb2_grpc.py|
.*/tests/.*
)$
require_serial: true
# "Local" hooks, see https://pre-commit.com/#repository-local-hooks
- repo: local
hooks:

.yamato/csharp-tests.yml (7 lines changed)


commands:
- ./run-tests-editmode-osx-editor.sh
triggers:
branches:
only:
- "/develop-.*/"
targets:
only:
- "develop"
- "develop"
- "master"
- "/release-.*/"
- "/hotfix-.*/"

.yamato/standalone-build-test.yml (7 lines changed)


commands:
- ./run-standalone-build-osx.sh
triggers:
branches:
only:
- "/develop-.*/"
targets:
only:
- "develop"
- "develop"
- "master"
- "/release-.*/"
- "/hotfix-.*/"

UnitySDK/Assets/ML-Agents/Editor/Tests/DemonstrationTests.cs (78 lines changed)


using NUnit.Framework;
using UnityEngine;
using System.IO.Abstractions.TestingHelpers;
using System.Reflection;
using MLAgents.CommunicatorObjects;
using Google.Protobuf;
namespace MLAgents.Tests
{

done = true,
id = 5,
maxStepReached = true,
stackedVectorObservation = new List<float>() { 1f, 1f, 1f },
storedTextActions = "TestAction",
textObservation = "TestAction",
}
public class ObservationAgent : TestAgent
{
public override void CollectObservations()
{
collectObservationsCalls += 1;
AddVectorObs(1f);
AddVectorObs(2f);
AddVectorObs(3f);
}
}
[Test]
public void TestAgentWrite()
{
var agentGo1 = new GameObject("TestAgent");
var bpA = agentGo1.AddComponent<BehaviorParameters>();
bpA.brainParameters.vectorObservationSize = 3;
bpA.brainParameters.numStackedVectorObservations = 1;
bpA.brainParameters.vectorActionDescriptions = new[] { "TestActionA", "TestActionB" };
bpA.brainParameters.vectorActionSize = new[] { 2, 2 };
bpA.brainParameters.vectorActionSpaceType = SpaceType.Discrete;
agentGo1.AddComponent<ObservationAgent>();
var agent1 = agentGo1.GetComponent<ObservationAgent>();
agentGo1.AddComponent<DemonstrationRecorder>();
var demoRecorder = agentGo1.GetComponent<DemonstrationRecorder>();
var fileSystem = new MockFileSystem();
demoRecorder.demonstrationName = "TestBrain";
demoRecorder.record = true;
demoRecorder.InitializeDemoStore(fileSystem);
var acaGo = new GameObject("TestAcademy");
acaGo.AddComponent<TestAcademy>();
var aca = acaGo.GetComponent<TestAcademy>();
aca.resetParameters = new ResetParameters();
var academyInitializeMethod = typeof(Academy).GetMethod("InitializeEnvironment",
BindingFlags.Instance | BindingFlags.NonPublic);
var agentEnableMethod = typeof(Agent).GetMethod("OnEnable",
BindingFlags.Instance | BindingFlags.NonPublic);
var agentSendInfo = typeof(Agent).GetMethod("SendInfo",
BindingFlags.Instance | BindingFlags.NonPublic);
agentEnableMethod?.Invoke(agent1, new object[] { });
academyInitializeMethod?.Invoke(aca, new object[] { });
// Step the agent
agent1.RequestDecision();
agentSendInfo?.Invoke(agent1, new object[] { });
demoRecorder.Close();
// Read back the demo file and make sure observations were written
var reader = fileSystem.File.OpenRead("Assets/Demonstrations/TestBrain.demo");
reader.Seek(DemonstrationStore.MetaDataBytes + 1, 0);
BrainParametersProto.Parser.ParseDelimitedFrom(reader);
var agentInfoProto = AgentInfoProto.Parser.ParseDelimitedFrom(reader);
var obs = agentInfoProto.Observations[2]; // skip dummy sensors
{
var vecObs = obs.FloatData.Data;
Assert.AreEqual(bpA.brainParameters.vectorObservationSize, vecObs.Count);
for (var i = 0; i < vecObs.Count; i++)
{
Assert.AreEqual((float) i+1, vecObs[i]);
}
}
}
}
}
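The read-back at the end of this test doubles as informal documentation of the .demo layout: a fixed-size metadata block, then a length-delimited BrainParametersProto, then length-delimited AgentInfoProto records. A hedged sketch of walking a whole file on that assumption (file name and loop condition are illustrative, not part of this change):

using (var reader = fileSystem.File.OpenRead("Assets/Demonstrations/TestBrain.demo"))
{
    // Skip the fixed-size metadata block, as the test above does.
    reader.Seek(DemonstrationStore.MetaDataBytes + 1, 0);
    var brainParams = BrainParametersProto.Parser.ParseDelimitedFrom(reader);
    while (reader.Position < reader.Length)
    {
        var info = AgentInfoProto.Parser.ParseDelimitedFrom(reader);
        // Consume info.Observations, info.Reward, info.Done, etc.
    }
}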

UnitySDK/Assets/ML-Agents/Editor/Tests/EditModeTestInternalBrainTensorGenerator.cs (47 lines changed)


using System.Collections.Generic;
using System.Linq;
using System.Reflection;
static IEnumerable<Agent> GetFakeAgentInfos()
static IEnumerable<Agent> GetFakeAgents()
var acaGo = new GameObject("TestAcademy");
acaGo.AddComponent<TestAcademy>();
var aca = acaGo.GetComponent<TestAcademy>();
aca.resetParameters = new ResetParameters();
var bpA = goA.AddComponent<BehaviorParameters>();
bpA.brainParameters.vectorObservationSize = 3;
bpA.brainParameters.numStackedVectorObservations = 1;
var goB = new GameObject("goB");
var bpB = goB.AddComponent<BehaviorParameters>();
bpB.brainParameters.vectorObservationSize = 3;
bpB.brainParameters.numStackedVectorObservations = 1;
var agentB = goB.AddComponent<TestAgent>();
var agents = new List<Agent> { agentA, agentB };
foreach (var agent in agents)
{
var agentEnableMethod = typeof(Agent).GetMethod("OnEnableHelper",
BindingFlags.Instance | BindingFlags.NonPublic);
agentEnableMethod?.Invoke(agent, new object[] { aca });
}
agentA.collectObservationsSensor.AddObservation(new Vector3(1, 2, 3));
agentB.collectObservationsSensor.AddObservation(new Vector3(4, 5, 6));
stackedVectorObservation = new[] { 1f, 2f, 3f }.ToList(),
var goB = new GameObject("goB");
var agentB = goB.AddComponent<TestAgent>();
stackedVectorObservation = new[] { 4f, 5f, 6f }.ToList(),
return new List<Agent> { agentA, agentB };
return agents;
}
[Test]

shape = new long[] { 2, 3 }
};
const int batchSize = 4;
var agentInfos = GetFakeAgentInfos();
var agentInfos = GetFakeAgents();
generator.AddSensorIndex(0);
generator.AddSensorIndex(1);
generator.AddSensorIndex(2);
generator.Generate(inputTensor, batchSize, agentInfos);
Assert.IsNotNull(inputTensor.data);
Assert.AreEqual(inputTensor.data[0, 0], 1);

valueType = TensorProxy.TensorType.Integer
};
const int batchSize = 4;
var agentInfos = GetFakeAgentInfos();
var agentInfos = GetFakeAgents();
var alloc = new TensorCachingAllocator();
var generator = new PreviousActionInputGenerator(alloc);

valueType = TensorProxy.TensorType.FloatingPoint
};
const int batchSize = 4;
var agentInfos = GetFakeAgentInfos();
var agentInfos = GetFakeAgents();
var alloc = new TensorCachingAllocator();
var generator = new ActionMaskInputGenerator(alloc);
generator.Generate(inputTensor, batchSize, agentInfos);

UnitySDK/Assets/ML-Agents/Editor/Tests/MLAgentsEditModeTest.cs (13 lines changed)


using NUnit.Framework;
using System.Reflection;
using MLAgents.Sensor;
using MLAgents.InferenceBrain;
namespace MLAgents.Tests
{

AddVectorObs(0f);
}
public override void AgentAction(float[] vectorAction, string textAction)
public override void AgentAction(float[] vectorAction)
{
agentActionCalls += 1;
AddReward(0.1f);

public int[] GetFloatObservationShape()
{
return new[] { 1 };
return new[] { 0 };
public void WriteToTensor(TensorProxy tensorProxy, int agentIndex) { }
public int Write(WriteAdapter adapter)
{
// No-op
return 0;
}
public byte[] GetCompressedObservation()
{

{
return sensorName;
}
public void Update() { }
}
public class EditModeTestGeneration
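The stub above shows the revised ISensor surface, with Write(WriteAdapter) replacing the old WriteToTensor. For orientation, a complete no-op sensor against that surface might look like the sketch below; GetCompressionType and the SensorCompressionType enum are inferred from the Agent.cs changes later in this diff, not from this hunk.

public class NoOpSensor : ISensor
{
    // Report a single float observation.
    public int[] GetFloatObservationShape() { return new[] { 1 }; }

    public int Write(WriteAdapter adapter)
    {
        // Write one float through the shared adapter and report
        // how many floats were written.
        adapter[0] = 0f;
        return 1;
    }

    public byte[] GetCompressedObservation() { return null; }
    public SensorCompressionType GetCompressionType() { return SensorCompressionType.None; }
    public string GetName() { return "NoOpSensor"; }
    public void Update() { }
}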

UnitySDK/Assets/ML-Agents/Editor/Tests/RayPerceptionTests.cs (1 line changed)


var go = new GameObject("MyGameObject");
var rayPer3D = go.AddComponent<RayPerception3D>();
var result = rayPer3D.Perceive(1f, angles, tags);
Debug.Log(result.Count);
Assert.IsTrue(result.Count == angles.Length * (tags.Length + 2));
}

UnitySDK/Assets/ML-Agents/Editor/Tests/StandaloneBuildTest.cs (6 lines changed)


string[] scenes = { "Assets/ML-Agents/Examples/3DBall/Scenes/3DBall.unity" };
var buildResult = BuildPipeline.BuildPlayer(scenes, "testPlayer", BuildTarget.StandaloneOSX, BuildOptions.None);
#if UNITY_2018_1_OR_NEWER
var isOK = buildResult.summary.result == BuildResult.Succeeded;
var isOk = buildResult.summary.result == BuildResult.Succeeded;
var error = "";
foreach (var stepInfo in buildResult.steps)
{

}
#else
var error = buildResult;
var isOK = string.IsNullOrEmpty(error);
var isOk = string.IsNullOrEmpty(error);
if (isOK)
if (isOk)
{
EditorApplication.Exit(0);
}

UnitySDK/Assets/ML-Agents/Examples/3DBall/Scripts/Ball3DAgent.cs (2 lines changed)


AddVectorObs(m_BallRb.velocity);
}
public override void AgentAction(float[] vectorAction, string textAction)
public override void AgentAction(float[] vectorAction)
{
var actionZ = 2f * Mathf.Clamp(vectorAction[0], -1f, 1f);
var actionX = 2f * Mathf.Clamp(vectorAction[1], -1f, 1f);

UnitySDK/Assets/ML-Agents/Examples/3DBall/Scripts/Ball3DHardAgent.cs (2 lines changed)


AddVectorObs((ball.transform.position - gameObject.transform.position));
}
public override void AgentAction(float[] vectorAction, string textAction)
public override void AgentAction(float[] vectorAction)
{
var actionZ = 2f * Mathf.Clamp(vectorAction[0], -1f, 1f);
var actionX = 2f * Mathf.Clamp(vectorAction[1], -1f, 1f);

UnitySDK/Assets/ML-Agents/Examples/Basic/Scripts/BasicAgent.cs (2 lines changed)


AddVectorObs(m_Position, 20);
}
public override void AgentAction(float[] vectorAction, string textAction)
public override void AgentAction(float[] vectorAction)
{
var movement = (int)vectorAction[0];

UnitySDK/Assets/ML-Agents/Examples/Bouncer/Scripts/BouncerAgent.cs (2 lines changed)


AddVectorObs(target.transform.localPosition);
}
public override void AgentAction(float[] vectorAction, string textAction)
public override void AgentAction(float[] vectorAction)
{
for (var i = 0; i < vectorAction.Length; i++)
{

UnitySDK/Assets/ML-Agents/Examples/Crawler/Scripts/CrawlerAgent.cs (2 lines changed)


target.position = newTargetPos + ground.position;
}
public override void AgentAction(float[] vectorAction, string textAction)
public override void AgentAction(float[] vectorAction)
{
if (detectTargets)
{

UnitySDK/Assets/ML-Agents/Examples/FoodCollector/Scripts/FoodCollectorAgent.cs (2 lines changed)


gameObject.GetComponentInChildren<Renderer>().material = normalMaterial;
}
public override void AgentAction(float[] vectorAction, string textAction)
public override void AgentAction(float[] vectorAction)
{
MoveAgent(vectorAction);
}

UnitySDK/Assets/ML-Agents/Examples/GridWorld/Scenes/GridWorld.unity (4 lines changed)


m_ReflectionIntensity: 1
m_CustomReflection: {fileID: 0}
m_Sun: {fileID: 0}
m_IndirectSpecularColor: {r: 0.44971162, g: 0.49977726, b: 0.5756362, a: 1}
m_IndirectSpecularColor: {r: 0.4497121, g: 0.49977785, b: 0.57563704, a: 1}
--- !u!157 &3
LightmapSettings:
m_ObjectHideFlags: 0

vectorActionSize: 05000000
vectorActionDescriptions: []
vectorActionSpaceType: 0
m_Model: {fileID: 11400000, guid: 07afbd1d35ed345eeb850fcbb59eae0b, type: 3}
m_Model: {fileID: 11400000, guid: a812f1ce7763a4a0c912717f3594fe20, type: 3}
m_InferenceDevice: 0
m_UseHeuristic: 0
m_BehaviorName: GridWorld

UnitySDK/Assets/ML-Agents/Examples/GridWorld/Scripts/GridAgent.cs (2 lines changed)


}
// to be implemented by the developer
public override void AgentAction(float[] vectorAction, string textAction)
public override void AgentAction(float[] vectorAction)
{
AddReward(-0.01f);
var action = Mathf.FloorToInt(vectorAction[0]);

UnitySDK/Assets/ML-Agents/Examples/Hallway/Scripts/HallwayAgent.cs (2 lines changed)


m_AgentRb.AddForce(dirToGo * m_Academy.agentRunSpeed, ForceMode.VelocityChange);
}
public override void AgentAction(float[] vectorAction, string textAction)
public override void AgentAction(float[] vectorAction)
{
AddReward(-1f / agentParameters.maxStep);
MoveAgent(vectorAction);

UnitySDK/Assets/ML-Agents/Examples/PushBlock/Scripts/PushAgentBasic.cs (2 lines changed)


/// <summary>
/// Called every step of the engine. Here the agent takes an action.
/// </summary>
public override void AgentAction(float[] vectorAction, string textAction)
public override void AgentAction(float[] vectorAction)
{
// Move the agent using the action.
MoveAgent(vectorAction);

UnitySDK/Assets/ML-Agents/Examples/Pyramids/Scripts/PyramidAgent.cs (2 lines changed)


m_AgentRb.AddForce(dirToGo * 2f, ForceMode.VelocityChange);
}
public override void AgentAction(float[] vectorAction, string textAction)
public override void AgentAction(float[] vectorAction)
{
AddReward(-1f / agentParameters.maxStep);
MoveAgent(vectorAction);

UnitySDK/Assets/ML-Agents/Examples/Reacher/Scripts/ReacherAgent.cs (2 lines changed)


/// <summary>
/// The agent's four actions correspond to torques on each of the two joints.
/// </summary>
public override void AgentAction(float[] vectorAction, string textAction)
public override void AgentAction(float[] vectorAction)
{
m_GoalDegree += m_GoalSpeed;
UpdateGoalPosition();

UnitySDK/Assets/ML-Agents/Examples/Soccer/Scripts/AgentSoccer.cs (2 lines changed)


ForceMode.VelocityChange);
}
public override void AgentAction(float[] vectorAction, string textAction)
public override void AgentAction(float[] vectorAction)
{
// Existential penalty for strikers.
if (agentRole == AgentRole.Striker)

UnitySDK/Assets/ML-Agents/Examples/Template/Scripts/TemplateAgent.cs (2 lines changed)


{
}
public override void AgentAction(float[] vectorAction, string textAction)
public override void AgentAction(float[] vectorAction)
{
}

UnitySDK/Assets/ML-Agents/Examples/Tennis/Scripts/TennisAgent.cs (2 lines changed)


AddVectorObs(m_BallRb.velocity.y);
}
public override void AgentAction(float[] vectorAction, string textAction)
public override void AgentAction(float[] vectorAction)
{
var moveX = Mathf.Clamp(vectorAction[0], -1f, 1f) * m_InvertMult;
var moveY = Mathf.Clamp(vectorAction[1], -1f, 1f);

UnitySDK/Assets/ML-Agents/Examples/Walker/Scripts/WalkerAgent.cs (2 lines changed)


}
}
public override void AgentAction(float[] vectorAction, string textAction)
public override void AgentAction(float[] vectorAction)
{
m_DirToTarget = target.position - m_JdController.bodyPartsDict[hips].rb.position;

UnitySDK/Assets/ML-Agents/Examples/WallJump/Scripts/WallJumpAgent.cs (2 lines changed)


jumpingTime -= Time.fixedDeltaTime;
}
public override void AgentAction(float[] vectorAction, string textAction)
public override void AgentAction(float[] vectorAction)
{
MoveAgent(vectorAction);
if ((!Physics.Raycast(m_AgentRb.position, Vector3.down, 20))

UnitySDK/Assets/ML-Agents/Scripts/Academy.cs (2 lines changed)


// Fields provided in the Inspector
[FormerlySerializedAs("maxSteps")]
[FormerlySerializedAs("trainingConfiguration")]
[SerializeField]
[Tooltip("The engine-level settings which correspond to rendering " +
"quality and engine speed during Training.")]

UnitySDK/Assets/ML-Agents/Scripts/Agent.cs (234 lines changed)


using System;
using System.Collections.Generic;
using UnityEngine;
using Barracuda;

public struct AgentInfo
{
/// <summary>
/// Most recent agent vector (i.e. numeric) observation.
/// </summary>
public List<float> vectorObservation;
/// <summary>
/// The previous agent vector observations, stacked. The length of the
/// history (i.e. number of vector observations to stack) is specified
/// in the Brain parameters.
/// </summary>
public List<float> stackedVectorObservation;
/// <summary>
/// Most recent compressed observations.
/// </summary>
public List<CompressedObservation> compressedObservations;
/// <summary>
/// Most recent text observation.
/// Most recent observations.
public string textObservation;
public List<Observation> observations;
/// <summary>
/// Keeps track of the last text action taken by the Brain.
/// </summary>
public string storedTextActions;
/// <summary>
/// For discrete control, specifies the actions that the agent cannot take. Is true if

/// to separate between different agents in the environment.
/// </summary>
public int id;
/// <summary>
/// User-customizable object for sending structured output from Unity to Python in response
/// to an action in addition to a scalar reward.
/// TODO(cgoy): All references to protobuf objects should be removed.
/// </summary>
public CommunicatorObjects.CustomObservationProto customObservation;
}
/// <summary>

public struct AgentAction
{
public float[] vectorActions;
public string textActions;
/// TODO(cgoy): All references to protobuf objects should be removed.
public CommunicatorObjects.CustomActionProto customAction;
}
/// <summary>

/// </summary>
DemonstrationRecorder m_Recorder;
/// <summary>
/// List of sensors used to generate observations.
/// Currently generated from attached SensorComponents, and a legacy VectorSensor
/// </summary>
/// <summary>
/// VectorSensor which is written to by AddVectorObs
/// </summary>
public VectorSensor collectObservationsSensor;
/// <summary>
/// Internal buffer used for generating float observations.
/// </summary>
float[] m_VectorSensorBuffer;
WriteAdapter m_WriteAdapter = new WriteAdapter();
/// MonoBehaviour function that is called when the attached GameObject
/// becomes enabled or active.

}
}
if (m_Info.textObservation == null)
m_Info.textObservation = "";
m_Action.textActions = "";
m_Info.vectorObservation =
new List<float>(param.vectorObservationSize);
m_Info.stackedVectorObservation =
new List<float>(param.vectorObservationSize
* param.numStackedVectorObservations);
m_Info.stackedVectorObservation.AddRange(
new float[param.vectorObservationSize
* param.numStackedVectorObservations]);
m_Info.compressedObservations = new List<CompressedObservation>();
m_Info.customObservation = null;
m_Info.observations = new List<Observation>();
}
/// <summary>

/// </summary>
public void InitializeSensors()
{
// Get all attached sensor components
var attachedSensorComponents = GetComponents<SensorComponent>();
sensors.Capacity += attachedSensorComponents.Length;
foreach (var component in attachedSensorComponents)

// Support legacy CollectObservations
var param = m_PolicyFactory.brainParameters;
if (param.vectorObservationSize > 0)
{
collectObservationsSensor = new VectorSensor(param.vectorObservationSize);
if (param.numStackedVectorObservations > 1)
{
var stackingSensor = new StackingSensor(collectObservationsSensor, param.numStackedVectorObservations);
sensors.Add(stackingSensor);
}
else
{
sensors.Add(collectObservationsSensor);
}
}
// Sort the Sensors by name to ensure determinism
sensors.Sort((x, y) => x.GetName().CompareTo(y.GetName()));

Debug.Assert(!sensors[i].GetName().Equals(sensors[i + 1].GetName()), "Sensor names must be unique.");
}
#endif
// Create a buffer for writing vector sensor data to
int numFloatObservations = 0;
for (var i = 0; i < sensors.Count; i++)
{
if (sensors[i].GetCompressionType() == SensorCompressionType.None)
{
numFloatObservations += sensors[i].ObservationSize();
}
}
m_VectorSensorBuffer = new float[numFloatObservations];
}
/// <summary>

}
m_Info.storedVectorActions = m_Action.vectorActions;
m_Info.storedTextActions = m_Action.textActions;
m_Info.vectorObservation.Clear();
m_Info.compressedObservations.Clear();
m_Info.observations.Clear();
UpdateSensors();
using (TimerStack.Instance.Scoped("CollectObservations"))
{
CollectObservations();

var param = m_PolicyFactory.brainParameters;
if (m_Info.vectorObservation.Count != param.vectorObservationSize)
{
throw new UnityAgentsException(string.Format(
"Vector Observation size mismatch in continuous " +
"agent {0}. " +
"Was Expecting {1} but received {2}. ",
gameObject.name,
param.vectorObservationSize,
m_Info.vectorObservation.Count));
}
Utilities.ShiftLeft(m_Info.stackedVectorObservation, param.vectorObservationSize);
Utilities.ReplaceRange(m_Info.stackedVectorObservation, m_Info.vectorObservation,
m_Info.stackedVectorObservation.Count - m_Info.vectorObservation.Count);
// var param = m_PolicyFactory.brainParameters; // look, no brain params!
m_Info.reward = m_Reward;
m_Info.done = m_Done;
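ShiftLeft and ReplaceRange above maintain the stacked observation as a sliding window: the oldest vector observation is dropped from the front and the newest is written into the freed tail. A standalone sketch of that behavior, with plain loops standing in for the Utilities helpers (whose exact signatures are not shown in this diff):

// Illustration only: `stacked` holds numStackedVectorObservations * obsSize floats.
static void UpdateStackedObs(List<float> stacked, List<float> latest)
{
    var obsSize = latest.Count;
    // Shift the window left by one observation's worth of floats.
    for (var i = 0; i < stacked.Count - obsSize; i++)
    {
        stacked[i] = stacked[i + obsSize];
    }
    // Copy the newest observation into the tail.
    for (var i = 0; i < obsSize; i++)
    {
        stacked[stacked.Count - obsSize + i] = latest[i];
    }
}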

if (m_Recorder != null && m_Recorder.record && Application.isEditor)
{
// This is a bit of a hack - if we're in inference mode, compressed observations won't be generated
// This is a bit of a hack - if we're in inference mode, observations won't be generated
if (m_Info.compressedObservations.Count == 0)
if (m_Info.observations.Count == 0)
{
GenerateSensorData();
}

m_Info.textObservation = "";
}
void UpdateSensors()
{
for (var i = 0; i < sensors.Count; i++)
{
sensors[i].Update();
}
}
/// <summary>

/// </summary>
public void GenerateSensorData()
{
int floatsWritten = 0;
// TODO add bool argument indicating when to compress? For now, we always will compress.
if (sensor.GetCompressionType() == SensorCompressionType.None)
{
// only handles 1D
// TODO handle in communicator code instead
m_WriteAdapter.SetTarget(m_VectorSensorBuffer, floatsWritten);
var numFloats = sensor.Write(m_WriteAdapter);
var floatObs = new Observation
{
FloatData = new ArraySegment<float>(m_VectorSensorBuffer, floatsWritten, numFloats),
Shape = sensor.GetFloatObservationShape(),
CompressionType = sensor.GetCompressionType()
};
m_Info.observations.Add(floatObs);
floatsWritten += numFloats;
}
else
var compressedObs = new CompressedObservation
{
Data = sensor.GetCompressedObservation(),
Shape = sensor.GetFloatObservationShape(),
CompressionType = sensor.GetCompressionType()
};
m_Info.compressedObservations.Add(compressedObs);
var compressedObs = new Observation
{
CompressedData = sensor.GetCompressedObservation(),
Shape = sensor.GetFloatObservationShape(),
CompressionType = sensor.GetCompressionType()
};
m_Info.observations.Add(compressedObs);
}
/// Collects the (vector, visual, text) observations of the agent.
/// Collects the (vector, visual) observations of the agent.
/// The agent observation describes the current environment from the
/// perspective of the agent.
/// </summary>

/// observation could include distances to friends or enemies, or the
/// current level of ammunition at its disposal.
/// Recall that an Agent may attach vector, visual or textual observations.
/// Recall that an Agent may attach vector or visual observations.
/// Vector observations are added by calling the provided helper methods:
/// - <see cref="AddVectorObs(int)"/>
/// - <see cref="AddVectorObs(float)"/>

/// needs to match the vectorObservationSize attribute of the linked Brain.
/// Visual observations are implicitly added from the cameras attached to
/// the Agent.
/// Lastly, textual observations are added using
/// <see cref="SetTextObs(string)"/>.
/// </remarks>
public virtual void CollectObservations()
{

/// <param name="observation">Observation.</param>
protected void AddVectorObs(float observation)
{
m_Info.vectorObservation.Add(observation);
collectObservationsSensor.AddObservation(observation);
}
/// <summary>

/// <param name="observation">Observation.</param>
protected void AddVectorObs(int observation)
{
m_Info.vectorObservation.Add(observation);
collectObservationsSensor.AddObservation(observation);
}
/// <summary>

/// <param name="observation">Observation.</param>
protected void AddVectorObs(Vector3 observation)
{
m_Info.vectorObservation.Add(observation.x);
m_Info.vectorObservation.Add(observation.y);
m_Info.vectorObservation.Add(observation.z);
collectObservationsSensor.AddObservation(observation);
}
/// <summary>

/// <param name="observation">Observation.</param>
protected void AddVectorObs(Vector2 observation)
{
m_Info.vectorObservation.Add(observation.x);
m_Info.vectorObservation.Add(observation.y);
collectObservationsSensor.AddObservation(observation);
}
/// <summary>

/// <param name="observation">Observation.</param>
protected void AddVectorObs(IEnumerable<float> observation)
{
m_Info.vectorObservation.AddRange(observation);
collectObservationsSensor.AddObservation(observation);
}
/// <summary>

/// <param name="observation">Observation.</param>
protected void AddVectorObs(Quaternion observation)
{
m_Info.vectorObservation.Add(observation.x);
m_Info.vectorObservation.Add(observation.y);
m_Info.vectorObservation.Add(observation.z);
m_Info.vectorObservation.Add(observation.w);
collectObservationsSensor.AddObservation(observation);
}
/// <summary>

/// <param name="observation"></param>
protected void AddVectorObs(bool observation)
{
m_Info.vectorObservation.Add(observation ? 1f : 0f);
collectObservationsSensor.AddObservation(observation);
}

var oneHotVector = new float[range];
oneHotVector[observation] = 1;
m_Info.vectorObservation.AddRange(oneHotVector);
collectObservationsSensor.AddOneHotObservation(observation, range);
}

/// <summary>
/// Sets the text observation.
/// </summary>
/// <param name="textObservation">The text observation.</param>
public void SetTextObs(string textObservation)
{
m_Info.textObservation = textObservation;
}
/// <summary>

/// Vector action. Note that for discrete actions, the provided array
/// will be of length 1.
/// </param>
/// <param name="textAction">Text action.</param>
public virtual void AgentAction(float[] vectorAction, string textAction)
public virtual void AgentAction(float[] vectorAction)
}
/// <summary>
/// Specifies the agent behavior at every step based on the provided
/// action.
/// </summary>
/// <param name="vectorAction">
/// Vector action. Note that for discrete actions, the provided array
/// will be of length 1.
/// </param>
/// <param name="textAction">Text action.</param>
/// <param name="customAction">
/// A custom action, defined by the user as custom protobuf message. Useful if the action is hard to encode
/// as either a flat vector or a single string.
/// </param>
public virtual void AgentAction(float[] vectorAction, string textAction, CommunicatorObjects.CustomActionProto customAction)
{
// We fall back to not using the custom action if the subclassed Agent doesn't override this method.
AgentAction(vectorAction, textAction);
}
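For user code the migration is mechanical: overrides of the removed signatures become overrides of the single-argument form, and the fallback above keeps old subclasses working in the meantime. A before/after sketch (MoveAgent stands in for whatever the agent actually does, as in the example agents):

// Old override (signature removed in this change):
public override void AgentAction(float[] vectorAction, string textAction)
{
    MoveAgent(vectorAction); // textAction was typically unused
}

// New override:
public override void AgentAction(float[] vectorAction)
{
    MoveAgent(vectorAction);
}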
/// <summary>

if ((m_RequestAction) && (m_Brain != null))
{
m_RequestAction = false;
AgentAction(m_Action.vectorActions, m_Action.textActions, m_Action.customAction);
AgentAction(m_Action.vectorActions);
}
if ((m_StepCount >= agentParameters.maxStep)

void DecideAction()
{
m_Brain?.DecideAction();
}
/// <summary>
/// Sets the custom observation for the agent for this episode.
/// </summary>
/// <param name="customObservation">New value of the agent's custom observation.</param>
public void SetCustomObservation(CommunicatorObjects.CustomObservationProto customObservation)
{
m_Info.customObservation = customObservation;
}
}
}
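Note that every AddVectorObs overload now forwards to collectObservationsSensor, so observation code in subclasses is unchanged by this refactor. A typical override modeled on the example agents in this change (the target and rigidbody fields are placeholders):

public override void CollectObservations()
{
    AddVectorObs(target.transform.localPosition); // Vector3, adds 3 floats
    AddVectorObs(m_AgentRb.velocity.x);           // float, adds 1 float
    AddVectorObs(m_AgentRb.velocity.y);           // float, adds 1 float
}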

UnitySDK/Assets/ML-Agents/Scripts/DemonstrationRecorder.cs (25 lines changed)


using System.IO.Abstractions;
using System.Text.RegularExpressions;
namespace MLAgents
{

/// <summary>
/// Creates demonstration store for use in recording.
/// </summary>
void InitializeDemoStore()
public void InitializeDemoStore(IFileSystem fileSystem = null)
m_DemoStore = new DemonstrationStore();
m_DemoStore = new DemonstrationStore(fileSystem);
var behaviorParams = GetComponent<BehaviorParameters>();
GetComponent<BehaviorParameters>().brainParameters,
GetComponent<BehaviorParameters>().behaviorName);
behaviorParams.brainParameters,
behaviorParams.behaviorName);
Monitor.Log("Recording Demonstration of Agent: ", m_RecordingAgent.name);
}

m_DemoStore.Record(info);
}
public void Close()
{
if (m_DemoStore != null)
{
m_DemoStore.Close();
m_DemoStore = null;
}
}
if (Application.isEditor && record && m_DemoStore != null)
if (Application.isEditor && record)
m_DemoStore.Close();
Close();
}
}
}

UnitySDK/Assets/ML-Agents/Scripts/DemonstrationStore.cs (15 lines changed)


using System.IO;
using System.IO.Abstractions;
using Google.Protobuf;
using UnityEngine;
namespace MLAgents
{

public DemonstrationStore(IFileSystem fileSystem)
{
m_FileSystem = fileSystem;
}
public DemonstrationStore()
{
m_FileSystem = new FileSystem();
if (fileSystem != null)
{
m_FileSystem = fileSystem;
}
else
{
m_FileSystem = new FileSystem();
}
}
/// <summary>
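With the two constructors collapsed into one that takes an IFileSystem, tests can inject an in-memory file system while production callers pass null and get the real one. Usage mirroring DemonstrationTests.cs earlier in this diff (MockFileSystem comes from System.IO.Abstractions.TestingHelpers):

// In tests: record against an in-memory file system.
var testStore = new DemonstrationStore(new MockFileSystem());

// In production: null falls back to the real FileSystem.
var realStore = new DemonstrationStore(null);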

UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/AgentAction.cs (77 lines changed)


byte[] descriptorData = global::System.Convert.FromBase64String(
string.Concat(
"CjVtbGFnZW50cy9lbnZzL2NvbW11bmljYXRvcl9vYmplY3RzL2FnZW50X2Fj",
"dGlvbi5wcm90bxIUY29tbXVuaWNhdG9yX29iamVjdHMaNm1sYWdlbnRzL2Vu",
"dnMvY29tbXVuaWNhdG9yX29iamVjdHMvY3VzdG9tX2FjdGlvbi5wcm90byKV",
"AQoQQWdlbnRBY3Rpb25Qcm90bxIWCg52ZWN0b3JfYWN0aW9ucxgBIAMoAhIU",
"Cgx0ZXh0X2FjdGlvbnMYAiABKAkSDQoFdmFsdWUYBCABKAISPgoNY3VzdG9t",
"X2FjdGlvbhgFIAEoCzInLmNvbW11bmljYXRvcl9vYmplY3RzLkN1c3RvbUFj",
"dGlvblByb3RvSgQIAxAEQh+qAhxNTEFnZW50cy5Db21tdW5pY2F0b3JPYmpl",
"Y3RzYgZwcm90bzM="));
"dGlvbi5wcm90bxIUY29tbXVuaWNhdG9yX29iamVjdHMiSwoQQWdlbnRBY3Rp",
"b25Qcm90bxIWCg52ZWN0b3JfYWN0aW9ucxgBIAMoAhINCgV2YWx1ZRgEIAEo",
"AkoECAIQA0oECAMQBEoECAUQBkIfqgIcTUxBZ2VudHMuQ29tbXVuaWNhdG9y",
"T2JqZWN0c2IGcHJvdG8z"));
new pbr::FileDescriptor[] { global::MLAgents.CommunicatorObjects.CustomActionReflection.Descriptor, },
new pbr::FileDescriptor[] { },
new pbr::GeneratedClrTypeInfo(typeof(global::MLAgents.CommunicatorObjects.AgentActionProto), global::MLAgents.CommunicatorObjects.AgentActionProto.Parser, new[]{ "VectorActions", "TextActions", "Value", "CustomAction" }, null, null, null)
new pbr::GeneratedClrTypeInfo(typeof(global::MLAgents.CommunicatorObjects.AgentActionProto), global::MLAgents.CommunicatorObjects.AgentActionProto.Parser, new[]{ "VectorActions", "Value" }, null, null, null)
}));
}
#endregion

[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public AgentActionProto(AgentActionProto other) : this() {
vectorActions_ = other.vectorActions_.Clone();
textActions_ = other.textActions_;
CustomAction = other.customAction_ != null ? other.CustomAction.Clone() : null;
_unknownFields = pb::UnknownFieldSet.Clone(other._unknownFields);
}

get { return vectorActions_; }
}
/// <summary>Field number for the "text_actions" field.</summary>
public const int TextActionsFieldNumber = 2;
private string textActions_ = "";
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public string TextActions {
get { return textActions_; }
set {
textActions_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
}
}
/// <summary>Field number for the "value" field.</summary>
public const int ValueFieldNumber = 4;
private float value_;

set {
value_ = value;
}
}
/// <summary>Field number for the "custom_action" field.</summary>
public const int CustomActionFieldNumber = 5;
private global::MLAgents.CommunicatorObjects.CustomActionProto customAction_;
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public global::MLAgents.CommunicatorObjects.CustomActionProto CustomAction {
get { return customAction_; }
set {
customAction_ = value;
}
}

return true;
}
if(!vectorActions_.Equals(other.vectorActions_)) return false;
if (TextActions != other.TextActions) return false;
if (!object.Equals(CustomAction, other.CustomAction)) return false;
return Equals(_unknownFields, other._unknownFields);
}

hash ^= vectorActions_.GetHashCode();
if (TextActions.Length != 0) hash ^= TextActions.GetHashCode();
if (customAction_ != null) hash ^= CustomAction.GetHashCode();
if (_unknownFields != null) {
hash ^= _unknownFields.GetHashCode();
}

[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void WriteTo(pb::CodedOutputStream output) {
vectorActions_.WriteTo(output, _repeated_vectorActions_codec);
if (TextActions.Length != 0) {
output.WriteRawTag(18);
output.WriteString(TextActions);
}
if (customAction_ != null) {
output.WriteRawTag(42);
output.WriteMessage(CustomAction);
}
if (_unknownFields != null) {
_unknownFields.WriteTo(output);
}

public int CalculateSize() {
int size = 0;
size += vectorActions_.CalculateSize(_repeated_vectorActions_codec);
if (TextActions.Length != 0) {
size += 1 + pb::CodedOutputStream.ComputeStringSize(TextActions);
}
if (customAction_ != null) {
size += 1 + pb::CodedOutputStream.ComputeMessageSize(CustomAction);
}
if (_unknownFields != null) {
size += _unknownFields.CalculateSize();
}

return;
}
vectorActions_.Add(other.vectorActions_);
if (other.TextActions.Length != 0) {
TextActions = other.TextActions;
}
}
if (other.customAction_ != null) {
if (customAction_ == null) {
customAction_ = new global::MLAgents.CommunicatorObjects.CustomActionProto();
}
CustomAction.MergeFrom(other.CustomAction);
}
_unknownFields = pb::UnknownFieldSet.MergeFrom(_unknownFields, other._unknownFields);
}

vectorActions_.AddEntriesFrom(input, _repeated_vectorActions_codec);
break;
}
case 18: {
TextActions = input.ReadString();
break;
}
break;
}
case 42: {
if (customAction_ == null) {
customAction_ = new global::MLAgents.CommunicatorObjects.CustomActionProto();
}
input.ReadMessage(customAction_);
break;
}
}
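After regeneration the message carries only vector_actions and value; the old text_actions and custom_action field numbers appear to be reserved. Constructing one from C# now looks like this sketch (the collection initializer works because VectorActions is a get-only repeated field):

var action = new AgentActionProto
{
    Value = 0.5f,                    // field 4
    VectorActions = { 0.1f, -0.2f }  // repeated field 1
};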

UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/AgentInfo.cs (166 lines changed)


byte[] descriptorData = global::System.Convert.FromBase64String(
string.Concat(
"CjNtbGFnZW50cy9lbnZzL2NvbW11bmljYXRvcl9vYmplY3RzL2FnZW50X2lu",
"Zm8ucHJvdG8SFGNvbW11bmljYXRvcl9vYmplY3RzGj9tbGFnZW50cy9lbnZz",
"L2NvbW11bmljYXRvcl9vYmplY3RzL2NvbXByZXNzZWRfb2JzZXJ2YXRpb24u",
"cHJvdG8aO21sYWdlbnRzL2VudnMvY29tbXVuaWNhdG9yX29iamVjdHMvY3Vz",
"dG9tX29ic2VydmF0aW9uLnByb3RvIowDCg5BZ2VudEluZm9Qcm90bxIiChpz",
"dGFja2VkX3ZlY3Rvcl9vYnNlcnZhdGlvbhgBIAMoAhIYChB0ZXh0X29ic2Vy",
"dmF0aW9uGAMgASgJEh0KFXN0b3JlZF92ZWN0b3JfYWN0aW9ucxgEIAMoAhIb",
"ChNzdG9yZWRfdGV4dF9hY3Rpb25zGAUgASgJEg4KBnJld2FyZBgHIAEoAhIM",
"CgRkb25lGAggASgIEhgKEG1heF9zdGVwX3JlYWNoZWQYCSABKAgSCgoCaWQY",
"CiABKAUSEwoLYWN0aW9uX21hc2sYCyADKAgSSAoSY3VzdG9tX29ic2VydmF0",
"aW9uGAwgASgLMiwuY29tbXVuaWNhdG9yX29iamVjdHMuQ3VzdG9tT2JzZXJ2",
"YXRpb25Qcm90bxJRChdjb21wcmVzc2VkX29ic2VydmF0aW9ucxgNIAMoCzIw",
"LmNvbW11bmljYXRvcl9vYmplY3RzLkNvbXByZXNzZWRPYnNlcnZhdGlvblBy",
"b3RvSgQIAhADSgQIBhAHQh+qAhxNTEFnZW50cy5Db21tdW5pY2F0b3JPYmpl",
"Y3RzYgZwcm90bzM="));
"Zm8ucHJvdG8SFGNvbW11bmljYXRvcl9vYmplY3RzGjRtbGFnZW50cy9lbnZz",
"L2NvbW11bmljYXRvcl9vYmplY3RzL29ic2VydmF0aW9uLnByb3RvIuoBCg5B",
"Z2VudEluZm9Qcm90bxIdChVzdG9yZWRfdmVjdG9yX2FjdGlvbnMYBCADKAIS",
"DgoGcmV3YXJkGAcgASgCEgwKBGRvbmUYCCABKAgSGAoQbWF4X3N0ZXBfcmVh",
"Y2hlZBgJIAEoCBIKCgJpZBgKIAEoBRITCgthY3Rpb25fbWFzaxgLIAMoCBI8",
"CgxvYnNlcnZhdGlvbnMYDSADKAsyJi5jb21tdW5pY2F0b3Jfb2JqZWN0cy5P",
"YnNlcnZhdGlvblByb3RvSgQIARACSgQIAhADSgQIAxAESgQIBRAGSgQIBhAH",
"SgQIDBANQh+qAhxNTEFnZW50cy5Db21tdW5pY2F0b3JPYmplY3RzYgZwcm90",
"bzM="));
new pbr::FileDescriptor[] { global::MLAgents.CommunicatorObjects.CompressedObservationReflection.Descriptor, global::MLAgents.CommunicatorObjects.CustomObservationReflection.Descriptor, },
new pbr::FileDescriptor[] { global::MLAgents.CommunicatorObjects.ObservationReflection.Descriptor, },
new pbr::GeneratedClrTypeInfo(typeof(global::MLAgents.CommunicatorObjects.AgentInfoProto), global::MLAgents.CommunicatorObjects.AgentInfoProto.Parser, new[]{ "StackedVectorObservation", "TextObservation", "StoredVectorActions", "StoredTextActions", "Reward", "Done", "MaxStepReached", "Id", "ActionMask", "CustomObservation", "CompressedObservations" }, null, null, null)
new pbr::GeneratedClrTypeInfo(typeof(global::MLAgents.CommunicatorObjects.AgentInfoProto), global::MLAgents.CommunicatorObjects.AgentInfoProto.Parser, new[]{ "StoredVectorActions", "Reward", "Done", "MaxStepReached", "Id", "ActionMask", "Observations" }, null, null, null)
}));
}
#endregion

[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public AgentInfoProto(AgentInfoProto other) : this() {
stackedVectorObservation_ = other.stackedVectorObservation_.Clone();
textObservation_ = other.textObservation_;
storedTextActions_ = other.storedTextActions_;
CustomObservation = other.customObservation_ != null ? other.CustomObservation.Clone() : null;
compressedObservations_ = other.compressedObservations_.Clone();
observations_ = other.observations_.Clone();
_unknownFields = pb::UnknownFieldSet.Clone(other._unknownFields);
}

}
/// <summary>Field number for the "stacked_vector_observation" field.</summary>
public const int StackedVectorObservationFieldNumber = 1;
private static readonly pb::FieldCodec<float> _repeated_stackedVectorObservation_codec
= pb::FieldCodec.ForFloat(10);
private readonly pbc::RepeatedField<float> stackedVectorObservation_ = new pbc::RepeatedField<float>();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public pbc::RepeatedField<float> StackedVectorObservation {
get { return stackedVectorObservation_; }
}
/// <summary>Field number for the "text_observation" field.</summary>
public const int TextObservationFieldNumber = 3;
private string textObservation_ = "";
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public string TextObservation {
get { return textObservation_; }
set {
textObservation_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
}
}
/// <summary>Field number for the "stored_vector_actions" field.</summary>
public const int StoredVectorActionsFieldNumber = 4;
private static readonly pb::FieldCodec<float> _repeated_storedVectorActions_codec

public pbc::RepeatedField<float> StoredVectorActions {
get { return storedVectorActions_; }
}
/// <summary>Field number for the "stored_text_actions" field.</summary>
public const int StoredTextActionsFieldNumber = 5;
private string storedTextActions_ = "";
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public string StoredTextActions {
get { return storedTextActions_; }
set {
storedTextActions_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
}
}
/// <summary>Field number for the "reward" field.</summary>

get { return actionMask_; }
}
/// <summary>Field number for the "custom_observation" field.</summary>
public const int CustomObservationFieldNumber = 12;
private global::MLAgents.CommunicatorObjects.CustomObservationProto customObservation_;
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public global::MLAgents.CommunicatorObjects.CustomObservationProto CustomObservation {
get { return customObservation_; }
set {
customObservation_ = value;
}
}
/// <summary>Field number for the "compressed_observations" field.</summary>
public const int CompressedObservationsFieldNumber = 13;
private static readonly pb::FieldCodec<global::MLAgents.CommunicatorObjects.CompressedObservationProto> _repeated_compressedObservations_codec
= pb::FieldCodec.ForMessage(106, global::MLAgents.CommunicatorObjects.CompressedObservationProto.Parser);
private readonly pbc::RepeatedField<global::MLAgents.CommunicatorObjects.CompressedObservationProto> compressedObservations_ = new pbc::RepeatedField<global::MLAgents.CommunicatorObjects.CompressedObservationProto>();
/// <summary>Field number for the "observations" field.</summary>
public const int ObservationsFieldNumber = 13;
private static readonly pb::FieldCodec<global::MLAgents.CommunicatorObjects.ObservationProto> _repeated_observations_codec
= pb::FieldCodec.ForMessage(106, global::MLAgents.CommunicatorObjects.ObservationProto.Parser);
private readonly pbc::RepeatedField<global::MLAgents.CommunicatorObjects.ObservationProto> observations_ = new pbc::RepeatedField<global::MLAgents.CommunicatorObjects.ObservationProto>();
public pbc::RepeatedField<global::MLAgents.CommunicatorObjects.CompressedObservationProto> CompressedObservations {
get { return compressedObservations_; }
public pbc::RepeatedField<global::MLAgents.CommunicatorObjects.ObservationProto> Observations {
get { return observations_; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]

if (ReferenceEquals(other, this)) {
return true;
}
if(!stackedVectorObservation_.Equals(other.stackedVectorObservation_)) return false;
if (TextObservation != other.TextObservation) return false;
if (StoredTextActions != other.StoredTextActions) return false;
if (!object.Equals(CustomObservation, other.CustomObservation)) return false;
if(!compressedObservations_.Equals(other.compressedObservations_)) return false;
if(!observations_.Equals(other.observations_)) return false;
return Equals(_unknownFields, other._unknownFields);
}

hash ^= stackedVectorObservation_.GetHashCode();
if (TextObservation.Length != 0) hash ^= TextObservation.GetHashCode();
if (StoredTextActions.Length != 0) hash ^= StoredTextActions.GetHashCode();
if (customObservation_ != null) hash ^= CustomObservation.GetHashCode();
hash ^= compressedObservations_.GetHashCode();
hash ^= observations_.GetHashCode();
if (_unknownFields != null) {
hash ^= _unknownFields.GetHashCode();
}

[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void WriteTo(pb::CodedOutputStream output) {
stackedVectorObservation_.WriteTo(output, _repeated_stackedVectorObservation_codec);
if (TextObservation.Length != 0) {
output.WriteRawTag(26);
output.WriteString(TextObservation);
}
if (StoredTextActions.Length != 0) {
output.WriteRawTag(42);
output.WriteString(StoredTextActions);
}
if (Reward != 0F) {