The Unity Machine Learning Agents Toolkit (ML-Agents) is an open-source project that enables games and simulations to serve as environments for training intelligent agents.

// Put this script on your blue cube.
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using MLAgents;

public class PushAgentBasic : Agent
{
    /// <summary>
    /// The ground. The bounds are used to spawn the elements.
    /// </summary>
    public GameObject ground;

    public GameObject area;

    /// <summary>
    /// The area bounds.
    /// </summary>
    [HideInInspector]
    public Bounds areaBounds;

    PushBlockAcademy academy;

    /// <summary>
    /// The goal to push the block to.
    /// </summary>
    public GameObject goal;

    /// <summary>
    /// The block to be pushed to the goal.
    /// </summary>
    public GameObject block;

    /// <summary>
    /// Detects when the block touches the goal.
    /// </summary>
    [HideInInspector]
    public GoalDetect goalDetect;
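
    /// <summary>
    /// If true, ray-perception vector observations are added in CollectObservations().
    /// </summary>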
    public bool useVectorObs;

    Rigidbody blockRB;       // cached on initialization
    Rigidbody agentRB;       // cached on initialization
    Material groundMaterial; // cached on initialization (the regular ground material)
    RayPerception rayPer;

    /// <summary>
    /// We will be changing the ground material based on success/failure.
    /// </summary>
    Renderer groundRenderer;

    void Awake()
    {
        academy = FindObjectOfType<PushBlockAcademy>(); // cache the academy
    }
    public override void InitializeAgent()
    {
        base.InitializeAgent();
        goalDetect = block.GetComponent<GoalDetect>();
        goalDetect.agent = this;

        rayPer = GetComponent<RayPerception>();

        // Cache the agent rigidbody
        agentRB = GetComponent<Rigidbody>();
        // Cache the block rigidbody
        blockRB = block.GetComponent<Rigidbody>();
        // Get the ground's bounds
        areaBounds = ground.GetComponent<Collider>().bounds;
        // Get the ground renderer so we can change the material when a goal is scored
        groundRenderer = ground.GetComponent<Renderer>();
        // Starting material
        groundMaterial = groundRenderer.material;
    }

    public override void CollectObservations()
    {
        if (useVectorObs)
        {
            var rayDistance = 12f;
            float[] rayAngles = { 0f, 45f, 90f, 135f, 180f, 110f, 70f };
            var detectableObjects = new[] { "block", "goal", "wall" };
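            // Two perception passes with different vertical start offsets (0 and 1.5)
            // so objects at different heights are picked up.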
            AddVectorObs(rayPer.Perceive(rayDistance, rayAngles, detectableObjects, 0f, 0f));
            AddVectorObs(rayPer.Perceive(rayDistance, rayAngles, detectableObjects, 1.5f, 0f));
        }
    }

    /// <summary>
    /// Use the ground's bounds to pick a random spawn position.
    /// </summary>
    public Vector3 GetRandomSpawnPos()
    {
        bool foundNewSpawnLocation = false;
        Vector3 randomSpawnPos = Vector3.zero;
        while (foundNewSpawnLocation == false)
        {
            float randomPosX = Random.Range(-areaBounds.extents.x * academy.spawnAreaMarginMultiplier,
                areaBounds.extents.x * academy.spawnAreaMarginMultiplier);
            float randomPosZ = Random.Range(-areaBounds.extents.z * academy.spawnAreaMarginMultiplier,
                areaBounds.extents.z * academy.spawnAreaMarginMultiplier);
            randomSpawnPos = ground.transform.position + new Vector3(randomPosX, 1f, randomPosZ);
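            // Only accept the position if no collider already occupies that space.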
            if (Physics.CheckBox(randomSpawnPos, new Vector3(2.5f, 0.01f, 2.5f)) == false)
            {
                foundNewSpawnLocation = true;
            }
        }
        return randomSpawnPos;
    }

    /// <summary>
    /// Called when the agent moves the block into the goal.
    /// </summary>
    public void IScoredAGoal()
    {
        // We use a reward of 5.
        AddReward(5f);

        // By marking an agent as done AgentReset() will be called automatically.
        Done();

        // Swap ground material for a bit to indicate we scored.
        StartCoroutine(GoalScoredSwapGroundMaterial(academy.goalScoredMaterial, 0.5f));
    }

    /// <summary>
    /// Swap ground material, wait time seconds, then swap back to the regular material.
    /// </summary>
    IEnumerator GoalScoredSwapGroundMaterial(Material mat, float time)
    {
        groundRenderer.material = mat;
        yield return new WaitForSeconds(time); // Wait for the given number of seconds
        groundRenderer.material = groundMaterial;
    }
    /// <summary>
    /// Moves the agent according to the selected action.
    /// </summary>
    public void MoveAgent(float[] act)
    {
        Vector3 dirToGo = Vector3.zero;
        Vector3 rotateDir = Vector3.zero;

        int action = Mathf.FloorToInt(act[0]);

        // Discrete actions: 0 = no movement, 1-2 = move forward/backward,
        // 3-4 = rotate, 5-6 = strafe.
        switch (action)
        {
            case 1:
                dirToGo = transform.forward * 1f;
                break;
            case 2:
                dirToGo = transform.forward * -1f;
                break;
            case 3:
                rotateDir = transform.up * 1f;
                break;
            case 4:
                rotateDir = transform.up * -1f;
                break;
            case 5:
                dirToGo = transform.right * -0.75f;
                break;
            case 6:
                dirToGo = transform.right * 0.75f;
                break;
        }
        transform.Rotate(rotateDir, Time.fixedDeltaTime * 200f);
        agentRB.AddForce(dirToGo * academy.agentRunSpeed,
            ForceMode.VelocityChange);
    }
    /// <summary>
    /// Called every step of the engine. Here the agent takes an action.
    /// </summary>
    public override void AgentAction(float[] vectorAction, string textAction)
    {
        // Move the agent using the action.
        MoveAgent(vectorAction);

        // Penalty given each step to encourage agent to finish task quickly.
        AddReward(-1f / agentParameters.maxStep);
    }

    /// <summary>
    /// Resets the block position and velocities.
    /// </summary>
    void ResetBlock()
    {
        // Get a random position for the block.
        block.transform.position = GetRandomSpawnPos();

        // Reset block velocity back to zero.
        blockRB.velocity = Vector3.zero;

        // Reset block angularVelocity back to zero.
        blockRB.angularVelocity = Vector3.zero;
    }

    /// <summary>
    /// In the editor, if "Reset On Done" is checked then AgentReset() will be
    /// called automatically anytime we mark done = true in an agent script.
    /// </summary>
    public override void AgentReset()
    {
        int rotation = Random.Range(0, 4);
        float rotationAngle = rotation * 90f;
        area.transform.Rotate(new Vector3(0f, rotationAngle, 0f));

        ResetBlock();
        transform.position = GetRandomSpawnPos();
        agentRB.velocity = Vector3.zero;
        agentRB.angularVelocity = Vector3.zero;
    }
}
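
For context, IScoredAGoal() above is invoked from a separate GoalDetect component that the agent wires itself to in InitializeAgent() (goalDetect.agent = this). That companion script is not part of this file; the sketch below shows one minimal way it could work, assuming the goal object is tagged "goal" (as in the detectable objects above) and that the block touches it through a regular physics collision rather than a trigger.

// Sketch of a companion GoalDetect script; attach it to the block (assumed setup, not from this file).
using UnityEngine;

public class GoalDetect : MonoBehaviour
{
    /// <summary>
    /// The agent that owns this block. Set by PushAgentBasic in InitializeAgent().
    /// </summary>
    [HideInInspector]
    public PushAgentBasic agent;

    void OnCollisionEnter(Collision col)
    {
        // Reward the agent and end the episode when the block reaches the goal.
        if (col.gameObject.CompareTag("goal"))
        {
            agent.IScoredAGoal();
        }
    }
}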