The Unity Machine Learning Agents Toolkit (ML-Agents) is an open-source project that enables games and simulations to serve as environments for training intelligent agents.

using System.Collections.Generic;
using Unity.MLAgents;
using UnityEngine;
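
/// <summary>
/// Controls a single soccer environment instance: registers the agents of each
/// team with a SimpleMultiAgentGroup, assigns group rewards when a goal is
/// scored, and resets the ball and players between episodes.
/// </summary>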
public class SoccerEnvController : MonoBehaviour
{
    [System.Serializable]
    public class PlayerInfo
    {
        public AgentSoccer Agent;
        [HideInInspector]
        public Vector3 StartingPos;
        [HideInInspector]
        public Quaternion StartingRot;
        [HideInInspector]
        public Rigidbody Rb;
    }
    /// <summary>
    /// Max Academy steps before this platform resets.
    /// </summary>
    [Header("Max Environment Steps")] public int MaxEnvironmentSteps = 25000;

    /// <summary>
    /// The soccer ball used in this environment.
    /// </summary>
    public GameObject ball;
    [HideInInspector]
    public Rigidbody ballRb;
    Vector3 m_BallStartingPos;

    // List of agents on this platform
    public List<PlayerInfo> AgentsList = new List<PlayerInfo>();

    private SoccerSettings m_SoccerSettings;
    private SimpleMultiAgentGroup m_BlueAgentGroup;
    private SimpleMultiAgentGroup m_PurpleAgentGroup;
    private int m_ResetTimer;
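
    /// <summary>
    /// Cache references, register each agent with its team's multi-agent group,
    /// and perform the initial scene reset.
    /// </summary>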
    void Start()
    {
        m_SoccerSettings = FindObjectOfType<SoccerSettings>();

        // Initialize the team managers
        m_BlueAgentGroup = new SimpleMultiAgentGroup();
        m_PurpleAgentGroup = new SimpleMultiAgentGroup();

        ballRb = ball.GetComponent<Rigidbody>();
        m_BallStartingPos = ball.transform.position;

        foreach (var item in AgentsList)
        {
            item.StartingPos = item.Agent.transform.position;
            item.StartingRot = item.Agent.transform.rotation;
            item.Rb = item.Agent.GetComponent<Rigidbody>();
            if (item.Agent.team == Team.Blue)
            {
                m_BlueAgentGroup.RegisterAgent(item.Agent);
            }
            else
            {
                m_PurpleAgentGroup.RegisterAgent(item.Agent);
            }
        }
        ResetScene();
    }
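
    /// <summary>
    /// Interrupt the episode for both teams and reset the scene once the
    /// maximum number of environment steps has been reached.
    /// </summary>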
    void FixedUpdate()
    {
        m_ResetTimer += 1;
        if (m_ResetTimer >= MaxEnvironmentSteps && MaxEnvironmentSteps > 0)
        {
            m_BlueAgentGroup.GroupEpisodeInterrupted();
            m_PurpleAgentGroup.GroupEpisodeInterrupted();
            ResetScene();
        }
    }
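
    /// <summary>
    /// Move the ball back near its starting position (with a small random
    /// horizontal offset) and zero out its velocity.
    /// </summary>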
    public void ResetBall()
    {
        var randomPosX = Random.Range(-2.5f, 2.5f);
        var randomPosZ = Random.Range(-2.5f, 2.5f);

        ball.transform.position = m_BallStartingPos + new Vector3(randomPosX, 0f, randomPosZ);
        ballRb.velocity = Vector3.zero;
        ballRb.angularVelocity = Vector3.zero;
    }
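
    /// <summary>
    /// Reward the scoring team (scaled down the longer the episode has run),
    /// penalize the conceding team, end both group episodes, and reset the scene.
    /// </summary>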
    public void GoalTouched(Team scoredTeam)
    {
        // Cast to float to avoid integer division: the time bonus should decay
        // smoothly from 1 toward 0 over the course of the episode.
        if (scoredTeam == Team.Blue)
        {
            m_BlueAgentGroup.AddGroupReward(1 - (float)m_ResetTimer / MaxEnvironmentSteps);
            m_PurpleAgentGroup.AddGroupReward(-1);
        }
        else
        {
            m_PurpleAgentGroup.AddGroupReward(1 - (float)m_ResetTimer / MaxEnvironmentSteps);
            m_BlueAgentGroup.AddGroupReward(-1);
        }
        m_PurpleAgentGroup.EndGroupEpisode();
        m_BlueAgentGroup.EndGroupEpisode();
        ResetScene();
    }
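
    /// <summary>
    /// Reset the step timer, move every agent back to a randomized position and
    /// rotation around its starting pose, zero its velocity, and reset the ball.
    /// </summary>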
    public void ResetScene()
    {
        m_ResetTimer = 0;

        // Reset agents
        foreach (var item in AgentsList)
        {
            var randomPosX = Random.Range(-5f, 5f);
            var newStartPos = item.Agent.initialPos + new Vector3(randomPosX, 0f, 0f);
            var rot = item.Agent.rotSign * Random.Range(80.0f, 100.0f);
            var newRot = Quaternion.Euler(0, rot, 0);
            item.Agent.transform.SetPositionAndRotation(newStartPos, newRot);

            item.Rb.velocity = Vector3.zero;
            item.Rb.angularVelocity = Vector3.zero;
        }

        // Reset ball
        ResetBall();
    }
}