Browse code

replaced all tabs with 4 spaces in the project

/develop-generalizationTraining-TrainerController
Vincent Gao, 7 years ago
Current commit
38bd3e40
26 changed files, with 376 additions and 376 deletions
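
The commit itself only reflows whitespace; no conversion script is included in the diff. For reference, a whole-project conversion like this can be reproduced with a small standalone program. The sketch below is an illustration only (it is not part of this repository and is not necessarily the tool used for this commit): it rewrites the leading tabs of every .cs file under a given folder as four spaces.

using System.IO;
using System.Text;

// Illustrative sketch only: convert leading tabs to 4 spaces in all .cs files under a root folder.
class TabsToSpaces
{
    static void Main(string[] args)
    {
        string root = args.Length > 0 ? args[0] : ".";
        foreach (string path in Directory.GetFiles(root, "*.cs", SearchOption.AllDirectories))
        {
            string[] lines = File.ReadAllLines(path);
            for (int i = 0; i < lines.Length; i++)
            {
                // Count the leading tabs on this line.
                int tabs = 0;
                while (tabs < lines[i].Length && lines[i][tabs] == '\t')
                {
                    tabs++;
                }
                // Replace each leading tab with four spaces; leave the rest of the line untouched.
                lines[i] = new string(' ', 4 * tabs) + lines[i].Substring(tabs);
            }
            File.WriteAllLines(path, lines, Encoding.UTF8);
        }
    }
}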
  1. unity-environment/Assets/ML-Agents/Editor/AgentEditor.cs (22 changes)
  2. unity-environment/Assets/ML-Agents/Editor/BrainEditor.cs (50 changes)
  3. unity-environment/Assets/ML-Agents/Examples/3DBall/Scripts/Ball3DDecision.cs (2 changes)
  4. unity-environment/Assets/ML-Agents/Examples/Area/Scripts/Push/PushAcademy.cs (12 changes)
  5. unity-environment/Assets/ML-Agents/Examples/Area/Scripts/Push/PushAgent.cs (46 changes)
  6. unity-environment/Assets/ML-Agents/Examples/Area/Scripts/Push/PushArea.cs (24 changes)
  7. unity-environment/Assets/ML-Agents/Examples/Area/Scripts/Wall/WallAcademy.cs (10 changes)
  8. unity-environment/Assets/ML-Agents/Examples/Area/Scripts/Wall/WallAgent.cs (50 changes)
  9. unity-environment/Assets/ML-Agents/Examples/Area/Scripts/Wall/WallArea.cs (24 changes)
  10. unity-environment/Assets/ML-Agents/Examples/Banana/Scripts/BananaLogic.cs (18 changes)
  11. unity-environment/Assets/ML-Agents/Examples/Basic/Scripts/BasicAcademy.cs (12 changes)
  12. unity-environment/Assets/ML-Agents/Examples/Basic/Scripts/BasicAgent.cs (82 changes)
  13. unity-environment/Assets/ML-Agents/Examples/Bouncer/Scripts/BouncerAcademy.cs (12 changes)
  14. unity-environment/Assets/ML-Agents/Examples/Bouncer/Scripts/BouncerAgent.cs (24 changes)
  15. unity-environment/Assets/ML-Agents/Examples/Bouncer/Scripts/BouncerBanana.cs (8 changes)
  16. unity-environment/Assets/ML-Agents/Examples/Crawler/Scripts/CameraFollow.cs (20 changes)
  17. unity-environment/Assets/ML-Agents/Examples/Hallway/Scripts/HallwayAcademy.cs (10 changes)
  18. unity-environment/Assets/ML-Agents/Examples/Reacher/Scripts/ReacherAcademy.cs (12 changes)
  19. unity-environment/Assets/ML-Agents/Examples/Reacher/Scripts/ReacherAgent.cs (18 changes)
  20. unity-environment/Assets/ML-Agents/Examples/Reacher/Scripts/ReacherDecision.cs (12 changes)
  21. unity-environment/Assets/ML-Agents/Examples/Reacher/Scripts/ReacherGoal.cs (18 changes)
  22. unity-environment/Assets/ML-Agents/Examples/Tennis/Scripts/TennisArea.cs (16 changes)
  23. unity-environment/Assets/ML-Agents/Examples/Tennis/Scripts/hitWall.cs (216 changes)
  24. unity-environment/Assets/ML-Agents/Scripts/Agent.cs (10 changes)
  25. unity-environment/Assets/ML-Agents/Scripts/CoreBrainInternal.cs (22 changes)
  26. unity-environment/Assets/ML-Agents/Template/Scripts/TemplateDecision.cs (2 changes)

unity-environment/Assets/ML-Agents/Editor/AgentEditor.cs (22 changes)

EditorGUILayout.PropertyField(brain);
EditorGUILayout.LabelField("Agent Cameras");
for (int i = 0; i < cameras.arraySize; i++)
{
    EditorGUILayout.PropertyField(cameras.GetArrayElementAtIndex(i), new GUIContent("Camera " + (i + 1).ToString() + ": "));
}
if (GUILayout.Button("Remove Camera", EditorStyles.miniButton))
{
    cameras.arraySize--;
}
    "The per-agent maximum number of steps."));
EditorGUILayout.PropertyField(isResetOnDone, new GUIContent("Reset On Done",
    "If checked, the agent will reset on done. Else, AgentOnDone() will be called."));
EditorGUILayout.PropertyField(isEBS, new GUIContent("On Demand Decision", "If checked, you must manually request decisions."));

    actionsPerDecision.intValue = Mathf.Max(1, actionsPerDecision.intValue);
}
serializedAgent.ApplyModifiedProperties();
EditorGUILayout.LabelField("", GUI.skin.horizontalSlider);
base.OnInspectorGUI();

unity-environment/Assets/ML-Agents/Editor/BrainEditor.cs (50 changes)

[SerializeField]
bool _Foldout = true;

public override void OnInspectorGUI ()
{
    Brain myBrain = (Brain)target;
    SerializedObject serializedBrain = serializedObject;
    if (myBrain.transform.parent == null) {
        EditorGUILayout.HelpBox ("A Brain GameObject must be a child of an Academy GameObject!", MessageType.Error);
    } else if (myBrain.transform.parent.GetComponent<Academy> () == null) {
        EditorGUILayout.HelpBox ("The Parent of a Brain must have an Academy Component attached to it!", MessageType.Error);
    }
    BrainParameters parameters = myBrain.brainParameters;
    serializedBrain.Update();
    _Foldout = EditorGUILayout.Foldout(_Foldout, "Brain Parameters");
    int indentLevel = EditorGUI.indentLevel;

    }
    EditorGUI.indentLevel = indentLevel;
    SerializedProperty bt = serializedBrain.FindProperty("brainType");
    EditorGUILayout.PropertyField(bt);
    if (bt.enumValueIndex < 0) {
        bt.enumValueIndex = (int)BrainType.Player;
    }
    serializedBrain.ApplyModifiedProperties();
    myBrain.UpdateCoreBrains ();
    myBrain.coreBrain.OnInspector ();
#if !NET_4_6 && ENABLE_TENSORFLOW
    EditorGUILayout.HelpBox ("You cannot have ENABLE_TENSORFLOW without NET_4_6", MessageType.Error);
#endif
}
}

unity-environment/Assets/ML-Agents/Examples/3DBall/Scripts/Ball3DDecision.cs (2 changes)

public class Ball3DDecision : MonoBehaviour, Decision
{
    public float rotationSpeed = 2f;

    public float[] Decide(List<float> state, List<Texture2D> observation, float reward, bool done, List<float> memory)
    {
        if (gameObject.GetComponent<Brain>().brainParameters.vectorActionSpaceType == StateType.continuous)

unity-environment/Assets/ML-Agents/Examples/Area/Scripts/Push/PushAcademy.cs (12 changes)

public float blockSize;
public float xVariation;

public override void AcademyReset()
{
}

public override void AcademyStep()
{
}
}

unity-environment/Assets/ML-Agents/Examples/Area/Scripts/Push/PushAgent.cs (46 changes)

public class PushAgent : AreaAgent
{
    public GameObject goalHolder;
    public GameObject block;
    Rigidbody rb;

    public override void InitializeAgent()
    {
        base.InitializeAgent();

    public override void CollectObservations()
    {
        velocity = rb.velocity;
        blockVelocity = block.GetComponent<Rigidbody>().velocity;
        AddVectorObs((transform.position.x - area.transform.position.x));

        AddVectorObs((block.transform.position.y - area.transform.position.y));
        AddVectorObs((block.transform.position.z + 5 - area.transform.position.z));
        AddVectorObs(velocity.x);
        AddVectorObs(velocity.y);
        AddVectorObs(velocity.z);
        AddVectorObs(blockVelocity.x);
        AddVectorObs(blockVelocity.y);
        AddVectorObs(blockVelocity.z);
    }

    public override void AgentAction(float[] act)
    {
        {
        }
    }

    public override void AgentReset()
    {
        rb.velocity = new Vector3(0f, 0f, 0f);
    }

    public override void AgentOnDone()
    {
    }
}

unity-environment/Assets/ML-Agents/Examples/Area/Scripts/Push/PushArea.cs (24 changes)

public GameObject goalHolder;
public GameObject academy;

// Use this for initialization
void Start () {
    academy = GameObject.Find("Academy");
}

// Update is called once per frame
void Update () {
}

public override void ResetArea()
{
    float goalSize = academy.GetComponent<PushAcademy>().goalSize;
    float blockSize = academy.GetComponent<PushAcademy>().blockSize;
    float xVariation = academy.GetComponent<PushAcademy>().xVariation;

    blockSize = Random.Range(blockSize * 0.9f, blockSize * 1.1f);
    block.transform.localScale = new Vector3(blockSize, 1f, blockSize);
    goalHolder.transform.localScale = new Vector3(goalSize, 1f, goalSize);
}
}

unity-environment/Assets/ML-Agents/Examples/Area/Scripts/Wall/WallAcademy.cs (10 changes)

public float minWallHeight;
public float maxWallHeight;

public override void AcademyReset()
{
}

public override void AcademyStep()
{
}

unity-environment/Assets/ML-Agents/Examples/Area/Scripts/Wall/WallAgent.cs (50 changes)

public class WallAgent : AreaAgent
{
    public GameObject goalHolder;

    base.InitializeAgent();

    public override void CollectObservations()
    {
        Vector3 velocity = GetComponent<Rigidbody>().velocity;
        AddVectorObs((transform.position.x - area.transform.position.x));
        AddVectorObs((transform.position.y - area.transform.position.y));

        AddVectorObs((block.transform.position.y - area.transform.position.y));
        AddVectorObs((block.transform.position.z + 5 - area.transform.position.z));
        AddVectorObs(wall.transform.localScale.y);
        AddVectorObs(velocity.x);
        AddVectorObs(velocity.y);
        AddVectorObs(velocity.z);
        AddVectorObs(blockVelocity.x);
        AddVectorObs(blockVelocity.y);
        AddVectorObs(blockVelocity.z);
    }

    public override void AgentAction(float[] act)
    {
        AddReward(-0.005f);
        MoveAgent(act);

        {
        }
    }

    public override void AgentReset()
    {
        transform.position = new Vector3(Random.Range(-3.5f, 3.5f), 1.1f, -8f) + area.transform.position;
        GetComponent<Rigidbody>().velocity = new Vector3(0f, 0f, 0f);
    }

    public override void AgentOnDone()
    {
    }
}

unity-environment/Assets/ML-Agents/Examples/Area/Scripts/Wall/WallArea.cs (24 changes)

public GameObject block;
public GameObject goalHolder;

// Use this for initialization
void Start () {
    academy = GameObject.Find("Academy");
}

// Update is called once per frame
void Update () {
}

wall.transform.localScale = new Vector3(12f, Random.Range(wallHeightMin, wallHeightMax) - 0.1f, 1f);
goalHolder.transform.position = new Vector3(Random.Range(-3.5f, 3.5f), -0.1f, 0f) + gameObject.transform.position;
}
}

unity-environment/Assets/ML-Agents/Examples/Banana/Scripts/BananaLogic.cs (18 changes)

public bool respawn;
public BananaArea myArea;

// Use this for initialization
void Start () {
}

// Update is called once per frame
void Update () {
}

public void OnEaten() {
    if (respawn)

unity-environment/Assets/ML-Agents/Examples/Basic/Scripts/BasicAcademy.cs (12 changes)

public class BasicAcademy : Academy {

    public override void AcademyReset()
    {
    }

    public override void AcademyStep()
    {
    }
}

unity-environment/Assets/ML-Agents/Examples/Basic/Scripts/BasicAgent.cs (82 changes)

private BasicAcademy academy;
public float timeBetweenDecisionsAtInference;
private float timeSinceDecision;
public int position;
public int smallGoalPosition;
public int largeGoalPosition;
public GameObject largeGoal;
public GameObject smallGoal;
public int minPosition;
public int maxPosition;

public override void InitializeAgent()
{

public override void CollectObservations()
{
    AddVectorObs(position);
}

public override void AgentAction(float[] act)
{
    float movement = act[0];
    int direction = 0;
    if (movement == 0) { direction = -1; }
    if (movement == 1) { direction = 1; }
    position += direction;
    if (position < minPosition) { position = minPosition; }
    if (position > maxPosition) { position = maxPosition; }
    gameObject.transform.position = new Vector3(position, 0f, 0f);
    if (position == smallGoalPosition)
    {
    }
    if (position == largeGoalPosition)
    {
    }
}

public override void AgentReset()
{
    position = 0;
    minPosition = -10;
    maxPosition = 10;
    smallGoalPosition = -3;
    largeGoalPosition = 7;
    smallGoal.transform.position = new Vector3(smallGoalPosition, 0f, 0f);
    largeGoal.transform.position = new Vector3(largeGoalPosition, 0f, 0f);
}

public override void AgentOnDone()
{
}

public void FixedUpdate()
{

unity-environment/Assets/ML-Agents/Examples/Bouncer/Scripts/BouncerAcademy.cs (12 changes)

public class BouncerAcademy : Academy {

    public override void AcademyReset()
    {
    }

    public override void AcademyStep()
    {
    }
}

unity-environment/Assets/ML-Agents/Examples/Bouncer/Scripts/BouncerAgent.cs (24 changes)

    rb.velocity = rb.velocity.normalized * speed;
}

public override void CollectObservations()
{
}

public override void AgentAction(float[] act)
{
    float x = Mathf.Clamp(act[0], -1, 1);
    float z = Mathf.Clamp(act[1], -1, 1);
    rb.velocity = new Vector3(x, 0, z);

    {
        //AddReward(0.05f);
    }
}
}

public override void AgentReset()
{
}

public override void AgentOnDone()
{
}

private void OnTriggerEnter(Collider collision)
{

unity-environment/Assets/ML-Agents/Examples/Bouncer/Scripts/BouncerBanana.cs (8 changes)

void Start(){
}

// Update is called once per frame
void FixedUpdate () {
}

private void OnTriggerEnter(Collider collision)
{

unity-environment/Assets/ML-Agents/Examples/Crawler/Scripts/CameraFollow.cs (20 changes)

public class CameraFollow : MonoBehaviour {

    public Transform target;
    Vector3 offset;

    // Use this for initialization
    void Start () {
        offset = gameObject.transform.position - target.position;
    }

    // Update is called once per frame
    void Update () {
    }
}

unity-environment/Assets/ML-Agents/Examples/Hallway/Scripts/HallwayAcademy.cs (10 changes)

public class HallwayAcademy : Academy {

    public float agentRunSpeed;
    public float agentRotationSpeed;
    public float gravityMultiplier; // use ~3 to make things less floaty

    public override void InitializeAcademy()
    {

    public override void AcademyReset()
    {
    }
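
The excerpt above cuts off inside InitializeAcademy, so the diff does not show how gravityMultiplier is actually used. As an assumption (not taken from the diff), a typical way to act on the "use ~3 to make things less floaty" hint is to scale Unity's global gravity once at startup, roughly as in this sketch:

using UnityEngine;

// Hypothetical illustration, not from the diff: one way an Academy could apply gravityMultiplier.
public class GravityScalingAcademySketch : Academy
{
    public float gravityMultiplier = 3f; // ~3 makes falls feel less floaty

    public override void InitializeAcademy()
    {
        // Scale Unity's default gravity (0, -9.81, 0) by the multiplier, once at startup.
        Physics.gravity = new Vector3(0f, -9.81f * gravityMultiplier, 0f);
    }
}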

unity-environment/Assets/ML-Agents/Examples/Reacher/Scripts/ReacherAcademy.cs (12 changes)

public float goalSpeed;

public override void AcademyReset()
{
}

public override void AcademyStep()
{
}
}

unity-environment/Assets/ML-Agents/Examples/Reacher/Scripts/ReacherAgent.cs (18 changes)

    rbB = pendulumB.GetComponent<Rigidbody>();
}

public override void CollectObservations()
{
    AddVectorObs(pendulumA.transform.rotation.x);
    AddVectorObs(pendulumA.transform.rotation.y);
    AddVectorObs(pendulumA.transform.rotation.z);

    AddVectorObs(hand.transform.position.z - transform.position.z);
}

public override void AgentAction(float[] act)
{
    goalDegree += goalSpeed;
    UpdateGoalPosition();

    torque_z = Mathf.Clamp(act[3], -1, 1) * 100f;
    rbB.AddTorque(new Vector3(torque_x, 0f, torque_z));
}

void UpdateGoalPosition() {
    float radians = (goalDegree * Mathf.PI) / 180f;

    goal.transform.localScale = new Vector3(goalSize, goalSize, goalSize);
}

public override void AgentOnDone()
{
}
}

unity-environment/Assets/ML-Agents/Examples/Reacher/Scripts/ReacherDecision.cs (12 changes)

public class ReacherDecision : MonoBehaviour, Decision {

    public float[] Decide (List<float> state, List<Texture2D> observation, float reward, bool done, List<float> memory)
    {
        float[] action = new float[4];
        for (int i = 0; i < 4; i++) {
            action[i] = Random.Range(-1f, 1f);

        }
    }

    {
        return new List<float>();
    }
}

unity-environment/Assets/ML-Agents/Examples/Reacher/Scripts/ReacherGoal.cs (18 changes)

public GameObject hand;
public GameObject goalOn;

// Use this for initialization
void Start () {
}

// Update is called once per frame
void Update () {
}

private void OnTriggerEnter(Collider other)
{

unity-environment/Assets/ML-Agents/Examples/Tennis/Scripts/TennisArea.cs (16 changes)

public GameObject agentA;
public GameObject agentB;

// Use this for initialization
void Start () {
}

// Update is called once per frame
void Update () {
}

public void MatchReset() {
    float ballOut = Random.Range(6f, 8f);

unity-environment/Assets/ML-Agents/Examples/Tennis/Scripts/hitWall.cs (216 changes)

public class hitWall : MonoBehaviour
{
public GameObject areaObject;
public int lastAgentHit;
// Use this for initialization
void Start()
{
lastAgentHit = -1;
}
private void OnTriggerExit(Collider other)
{
TennisArea area = areaObject.GetComponent<TennisArea>();
TennisAgent agentA = area.agentA.GetComponent<TennisAgent>();
TennisAgent agentB = area.agentB.GetComponent<TennisAgent>();
if (other.name == "over")
{
if (lastAgentHit == 0)
{
}
else
{
}
lastAgentHit = 0;
}
}
private void OnCollisionEnter(Collision collision)
{
TennisArea area = areaObject.GetComponent<TennisArea>();
TennisAgent agentA = area.agentA.GetComponent<TennisAgent>();
TennisAgent agentB = area.agentB.GetComponent<TennisAgent>();
if (collision.gameObject.tag == "iWall")
{
if (collision.gameObject.name == "wallA")
{
if (lastAgentHit == 0)
{
agentB.score += 1;
}
else
{
agentA.SetReward(0);
agentB.score += 1;
}
else
{
agentA.SetReward(0);
agentA.score += 1;
}
}
else if (collision.gameObject.name == "wallB")
{
if (lastAgentHit == 0)
{
agentA.score += 1;
}
}
else if (collision.gameObject.name == "wallB")
{
if (lastAgentHit == 0)
{
agentB.score += 1;
}
else
{
agentA.SetReward(0);
agentB.score += 1;
}
else
{
agentA.SetReward(0);
agentA.score += 1;
}
}
else if (collision.gameObject.name == "floorA")
{
if (lastAgentHit == 0 || lastAgentHit == -1)
{
agentA.score += 1;
}
}
else if (collision.gameObject.name == "floorA")
{
if (lastAgentHit == 0 || lastAgentHit == -1)
{
agentB.SetReward(0);
agentB.score += 1;
}
else
{
agentB.SetReward(0);
agentB.score += 1;
}
else
{
agentB.SetReward(0);
agentB.score += 1;
agentB.SetReward(0);
agentB.score += 1;
}
}
else if (collision.gameObject.name == "floorB")
{
if (lastAgentHit == 1 || lastAgentHit == -1)
{
agentA.SetReward(0);
}
}
else if (collision.gameObject.name == "floorB")
{
if (lastAgentHit == 1 || lastAgentHit == -1)
{
agentA.SetReward(0);
agentA.score += 1;
}
else
{
agentA.SetReward(0);
agentA.score += 1;
}
else
{
agentA.SetReward(0);
agentA.score += 1;
}
}
else if (collision.gameObject.name == "net")
{
if (lastAgentHit == 0)
{
agentA.score += 1;
}
}
else if (collision.gameObject.name == "net")
{
if (lastAgentHit == 0)
{
agentB.SetReward(0);
agentB.score += 1;
}
else
{
agentA.SetReward(0);
agentB.SetReward(0);
agentB.score += 1;
}
else
{
agentA.SetReward(0);
agentA.score += 1;
}
}
agentA.score += 1;
}
}
agentB.Done();
area.MatchReset();
}
if (collision.gameObject.tag == "agent")
{
if (collision.gameObject.name == "AgentA")
{
lastAgentHit = 0;
}
else
{
lastAgentHit = 1;
}
}
}
}

unity-environment/Assets/ML-Agents/Scripts/Agent.cs (10 changes)

/// <summary>
/// The brain that will control this agent.
/// Use the inspector to drag the desired brain gameObject into
/// the Brain field.
/// </summary>
[HideInInspector]
public Brain brain;

/// When GiveBrain is called, the agent unsubscribes from its
/// previous brain and subscribes to the one passed as argument.
/// Use this method to provide a brain to the agent via script.
/// <param name="b">The Brain the agent will subscribe to.</param>
/// <summary>
public void GiveBrain(Brain b)
{

/// <summary>
/// Called when the agent must request the brain for a new decision.
/// </summary>
public void RequestDecision()
{
    requestDecision = true;
    RequestAction();

/// </summary>
public void RequestAction()
{
    requestAction = true;
}

/// <summary>
/// Initialize the agent with this method.
/// Must be implemented in an agent-specific child class.
/// This method is called only once when the agent is enabled.
/// </summary>
public virtual void InitializeAgent()
{
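
The comments above document GiveBrain, RequestDecision, and RequestAction. A minimal usage sketch (assuming an Agent subclass and a second Brain already configured in the scene; the driver class name here is hypothetical) could look like this:

using UnityEngine;

// Hypothetical driver script, not from the diff, exercising the calls documented above.
public class AgentDriverSketch : MonoBehaviour
{
    public Agent agent;      // any Agent subclass, assigned in the Inspector
    public Brain otherBrain; // the Brain component to switch to, assigned in the Inspector

    void Start()
    {
        // Unsubscribe from the current brain and subscribe to otherBrain via script.
        agent.GiveBrain(otherBrain);
    }

    void FixedUpdate()
    {
        // With "On Demand Decision" enabled, game code decides when to ask for a decision;
        // as documented above, RequestDecision() also calls RequestAction() internally.
        agent.RequestDecision();
    }
}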

unity-environment/Assets/ML-Agents/Scripts/CoreBrainInternal.cs (22 changes)

{
#if ENABLE_TENSORFLOW
#if UNITY_ANDROID
    // This needs to be called only once and will raise an exception if
    try{
        TensorFlowSharp.Android.NativeBinding.Init();
    }
    catch{
    }
#endif
    if ((communicator == null)
        || (!broadcast))

public void DecideAction(Dictionary<Agent, AgentInfo> agentInfo)
{
#if ENABLE_TENSORFLOW
    if (coord != null)
    {
        coord.GiveBrainInfo(brain, agentInfo);
    }
    int currentBatchSize = agentInfo.Count();
    List<Agent> agentList = agentInfo.Keys.ToList();
    if (currentBatchSize == 0)

unity-environment/Assets/ML-Agents/Template/Scripts/TemplateDecision.cs (2 changes)

public List<float> MakeMemory(List<float> state, List<Texture2D> observation, float reward, bool done, List<float> memory)
{
    return new List<float>();
}
}