浏览代码

RayPerception sensor (#2874)

/develop-newnormalization
GitHub 5 年前
当前提交
2e6bab0d
共有 29 个文件被更改,包括 1569 次插入和 1392 次删除
  1. 7
      UnitySDK/Assets/ML-Agents/Editor/BehaviorParametersEditor.cs
  2. 2
      UnitySDK/Assets/ML-Agents/Editor/Tests/DemonstrationTests.cs
  3. 1
      UnitySDK/Assets/ML-Agents/Editor/Tests/Sensor/WriterAdapterTests.cs
  4. 6
      UnitySDK/Assets/ML-Agents/Examples/FoodCollector/Scripts/FoodCollectorAgent.cs
  5. 15
      UnitySDK/Assets/ML-Agents/Examples/SharedAssets/Scripts/RayPerception.cs
  6. 59
      UnitySDK/Assets/ML-Agents/Examples/SharedAssets/Scripts/RayPerception2D.cs
  7. 69
      UnitySDK/Assets/ML-Agents/Examples/SharedAssets/Scripts/RayPerception3D.cs
  8. 52
      UnitySDK/Assets/ML-Agents/Examples/WallJump/Prefabs/WallJumpArea.prefab
  9. 2
      UnitySDK/Assets/ML-Agents/Examples/WallJump/Scenes/WallJump.unity
  10. 11
      UnitySDK/Assets/ML-Agents/Examples/WallJump/Scripts/WallJumpAgent.cs
  11. 1001
      UnitySDK/Assets/ML-Agents/Examples/WallJump/TFModels/BigWallJump.nn
  12. 1001
      UnitySDK/Assets/ML-Agents/Examples/WallJump/TFModels/SmallWallJump.nn
  13. 4
      UnitySDK/Assets/ML-Agents/Scripts/Agent.cs
  14. 164
      UnitySDK/Assets/ML-Agents/Scripts/InferenceBrain/BarracudaModelParamLoader.cs
  15. 5
      UnitySDK/Assets/ML-Agents/Scripts/Sensor/CameraSensorComponent.cs
  16. 2
      UnitySDK/Assets/ML-Agents/Scripts/Sensor/ISensor.cs
  17. 5
      UnitySDK/Assets/ML-Agents/Scripts/Sensor/RenderTextureSensorComponent.cs
  18. 18
      UnitySDK/Assets/ML-Agents/Scripts/Sensor/SensorComponent.cs
  19. 1
      UnitySDK/UnitySDK.sln.DotSettings
  20. 21
      UnitySDK/Assets/ML-Agents/Editor/Tests/Sensor/RayPerceptionSensorTests.cs
  21. 3
      UnitySDK/Assets/ML-Agents/Editor/Tests/Sensor/RayPerceptionSensorTests.cs.meta
  22. 315
      UnitySDK/Assets/ML-Agents/Scripts/Sensor/RayPerceptionSensor.cs
  23. 3
      UnitySDK/Assets/ML-Agents/Scripts/Sensor/RayPerceptionSensor.cs.meta
  24. 10
      UnitySDK/Assets/ML-Agents/Scripts/Sensor/RayPerceptionSensorComponent2D.cs
  25. 3
      UnitySDK/Assets/ML-Agents/Scripts/Sensor/RayPerceptionSensorComponent2D.cs.meta
  26. 32
      UnitySDK/Assets/ML-Agents/Scripts/Sensor/RayPerceptionSensorComponent3D.cs
  27. 3
      UnitySDK/Assets/ML-Agents/Scripts/Sensor/RayPerceptionSensorComponent3D.cs.meta
  28. 143
      UnitySDK/Assets/ML-Agents/Scripts/Sensor/RayPerceptionSensorComponentBase.cs
  29. 3
      UnitySDK/Assets/ML-Agents/Scripts/Sensor/RayPerceptionSensorComponentBase.cs.meta

7
UnitySDK/Assets/ML-Agents/Editor/BehaviorParametersEditor.cs


using UnityEngine;
using UnityEditor;
using Barracuda;
using MLAgents.Sensor;
namespace MLAgents
{

D.logEnabled = false;
Model barracudaModel = null;
var model = (NNModel)serializedObject.FindProperty("m_Model").objectReferenceValue;
var brainParameters = ((BehaviorParameters)target).brainParameters;
var behaviorParameters = (BehaviorParameters)target;
var sensorComponents = behaviorParameters.GetComponents<SensorComponent>();
var brainParameters = behaviorParameters.brainParameters;
if (model != null)
{
barracudaModel = ModelLoader.Load(model.Value);

var failedChecks = InferenceBrain.BarracudaModelParamLoader.CheckModel(
barracudaModel, brainParameters);
barracudaModel, brainParameters, sensorComponents);
foreach (var check in failedChecks)
{
if (check != null)

2
UnitySDK/Assets/ML-Agents/Editor/Tests/DemonstrationTests.cs


using System.Collections.Generic;
using Google.Protobuf;
namespace MLAgents.Tests
{

1
UnitySDK/Assets/ML-Agents/Editor/Tests/Sensor/WriterAdapterTests.cs


using Barracuda;
using MLAgents.InferenceBrain;
using MLAgents.InferenceBrain.Utils;
namespace MLAgents.Tests

6
UnitySDK/Assets/ML-Agents/Examples/FoodCollector/Scripts/FoodCollectorAgent.cs


{
var myTransform = transform;
myLaser.transform.localScale = new Vector3(1f, 1f, m_LaserLength);
var position = myTransform.TransformDirection(RayPerception3D.PolarToCartesian(25f, 90f));
Debug.DrawRay(myTransform.position, position, Color.red, 0f, true);
var rayDir = 25.0f * myTransform.forward;
Debug.DrawRay(myTransform.position, rayDir, Color.red, 0f, true);
if (Physics.SphereCast(transform.position, 2f, position, out hit, 25f))
if (Physics.SphereCast(transform.position, 2f, rayDir, out hit, 25f))
{
if (hit.collider.gameObject.CompareTag("agent"))
{

15
UnitySDK/Assets/ML-Agents/Examples/SharedAssets/Scripts/RayPerception.cs


using System.Collections.Generic;
using System;
using System.Collections.Generic;
protected List<float> m_PerceptionBuffer = new List<float>();
protected float[] m_PerceptionBuffer;
abstract public List<float> Perceive(float rayDistance,
abstract public IList<float> Perceive(float rayDistance,
/// <summary>
/// Converts an angle expressed in degrees to the equivalent angle in radians.
/// </summary>
/// <param name="degree">Angle in degrees.</param>
/// <returns>The angle in radians.</returns>
public static float DegreeToRadian(float degree)
{
    // Same computation as before: scale by pi/180.
    return degree * Mathf.PI / 180f;
}
}

59
UnitySDK/Assets/ML-Agents/Examples/SharedAssets/Scripts/RayPerception2D.cs


using System.Collections.Generic;
using System;
using System.Collections.Generic;
using MLAgents.Sensor;
namespace MLAgents
{

/// </summary>
[Obsolete("The RayPerception MonoBehaviour is deprecated. Use the RayPerceptionSensorComponent instead")]
Vector2 m_EndPosition;
RaycastHit2D m_Hit;
/// <summary>

/// <param name="detectableObjects">List of tags which correspond to object types agent can see</param>
/// <param name="startOffset">Unused</param>
/// <param name="endOffset">Unused</param>
public override List<float> Perceive(float rayDistance,
public override IList<float> Perceive(float rayDistance,
m_PerceptionBuffer.Clear();
// For each ray sublist stores categorical information on detected object
// along with object distance.
foreach (var angle in rayAngles)
var perceptionSize = (detectableObjects.Length + 2) * rayAngles.Length;
if (m_PerceptionBuffer == null || m_PerceptionBuffer.Length != perceptionSize)
m_EndPosition = transform.TransformDirection(
PolarToCartesian(rayDistance, angle));
if (Application.isEditor)
{
Debug.DrawRay(transform.position,
m_EndPosition, Color.black, 0.01f, true);
}
m_PerceptionBuffer = new float[perceptionSize];
}
var subList = new float[detectableObjects.Length + 2];
m_Hit = Physics2D.CircleCast(transform.position, 0.5f, m_EndPosition, rayDistance);
if (m_Hit)
{
for (var i = 0; i < detectableObjects.Length; i++)
{
if (m_Hit.collider.gameObject.CompareTag(detectableObjects[i]))
{
subList[i] = 1;
subList[detectableObjects.Length + 1] = m_Hit.distance / rayDistance;
break;
}
}
}
else
{
subList[detectableObjects.Length] = 1f;
}
m_PerceptionBuffer.AddRange(subList);
}
const float castRadius = 0.5f;
const bool legacyHitFractionBehavior = true;
RayPerceptionSensor.PerceiveStatic(
rayDistance, rayAngles, detectableObjects, startOffset, endOffset, castRadius,
transform, RayPerceptionSensor.CastType.Cast3D, m_PerceptionBuffer, legacyHitFractionBehavior
);
/// <summary>
/// Converts a polar coordinate (radius, angle in degrees) to a 2D cartesian coordinate.
/// </summary>
/// <param name="radius">Distance from the origin.</param>
/// <param name="angle">Angle in degrees.</param>
/// <returns>The equivalent (x, y) point.</returns>
public static Vector2 PolarToCartesian(float radius, float angle)
{
    var radians = DegreeToRadian(angle);
    return new Vector2(radius * Mathf.Cos(radians), radius * Mathf.Sin(radians));
}
}
}

69
UnitySDK/Assets/ML-Agents/Examples/SharedAssets/Scripts/RayPerception3D.cs


using System;
using System.Collections.Generic;
using UnityEngine;
using MLAgents.Sensor;
namespace MLAgents
{

/// </summary>
[Obsolete("The RayPerception MonoBehaviour is deprecated. Use the RayPerceptionSensorComponent instead")]
RaycastHit m_Hit;
float[] m_SubList;
/// <summary>
/// Creates perception vector to be used as part of an observation of an agent.
/// Each ray in the rayAngles array adds a sublist of data to the observation.

/// <param name="detectableObjects">List of tags which correspond to object types agent can see</param>
/// <param name="startOffset">Starting height offset of ray from center of agent.</param>
/// <param name="endOffset">Ending height offset of ray from center of agent.</param>
public override List<float> Perceive(float rayDistance,
public override IList<float> Perceive(float rayDistance,
if (m_SubList == null || m_SubList.Length != detectableObjects.Length + 2)
m_SubList = new float[detectableObjects.Length + 2];
m_PerceptionBuffer.Clear();
m_PerceptionBuffer.Capacity = m_SubList.Length * rayAngles.Length;
// For each ray sublist stores categorical information on detected object
// along with object distance.
foreach (var angle in rayAngles)
var perceptionSize = (detectableObjects.Length + 2) * rayAngles.Length;
if (m_PerceptionBuffer == null || m_PerceptionBuffer.Length != perceptionSize)
Vector3 startPositionLocal = new Vector3(0, startOffset, 0);
Vector3 endPositionLocal = PolarToCartesian(rayDistance, angle);
endPositionLocal.y += endOffset;
m_PerceptionBuffer = new float[perceptionSize];
}
var startPositionWorld = transform.TransformPoint(startPositionLocal);
var endPositionWorld = transform.TransformPoint(endPositionLocal);
var rayDirection = endPositionWorld - startPositionWorld;
if (Application.isEditor)
{
Debug.DrawRay(startPositionWorld,rayDirection, Color.black, 0.01f, true);
}
Array.Clear(m_SubList, 0, m_SubList.Length);
if (Physics.SphereCast(startPositionWorld, 0.5f, rayDirection, out m_Hit, rayDistance))
{
for (var i = 0; i < detectableObjects.Length; i++)
{
if (m_Hit.collider.gameObject.CompareTag(detectableObjects[i]))
{
m_SubList[i] = 1;
m_SubList[detectableObjects.Length + 1] = m_Hit.distance / rayDistance;
break;
}
}
}
else
{
m_SubList[detectableObjects.Length] = 1f;
}
Utilities.AddRangeNoAlloc(m_PerceptionBuffer, m_SubList);
}
const float castRadius = 0.5f;
const bool legacyHitFractionBehavior = true;
RayPerceptionSensor.PerceiveStatic(
rayDistance, rayAngles, detectableObjects, startOffset, endOffset, castRadius,
transform, RayPerceptionSensor.CastType.Cast3D, m_PerceptionBuffer, legacyHitFractionBehavior
);
/// <summary>
/// Converts a polar coordinate (radius, angle in degrees) to a 3D cartesian coordinate
/// in the XZ plane (y is always 0).
/// </summary>
/// <param name="radius">Distance from the origin.</param>
/// <param name="angle">Angle in degrees.</param>
/// <returns>The equivalent (x, 0, z) point.</returns>
public static Vector3 PolarToCartesian(float radius, float angle)
{
    var radians = DegreeToRadian(angle);
    return new Vector3(radius * Mathf.Cos(radians), 0f, radius * Mathf.Sin(radians));
}
}
}

52
UnitySDK/Assets/ML-Agents/Examples/WallJump/Prefabs/WallJumpArea.prefab


- component: {fileID: 54678503543725326}
- component: {fileID: 114898893333200490}
- component: {fileID: 114925928594762506}
- component: {fileID: 114092229367912210}
- component: {fileID: 114458838850320084}
- component: {fileID: 114227939525648256}
m_Layer: 0
m_Name: Agent
m_TagString: agent

serializedVersion: 2
m_Size: {x: 1, y: 1, z: 1}
m_Center: {x: 0, y: 0, z: 0}
--- !u!114 &114092229367912210
--- !u!114 &114227939525648256
MonoBehaviour:
m_ObjectHideFlags: 1
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
m_GameObject: {fileID: 1195095783991828}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: 6bb6b867a41448888c1cd4f99643ad71, type: 3}
m_Name:
m_EditorClassIdentifier:
sensorName: OffsetRayPerceptionSensor
detectableTags:
- wall
- goal
- block
raysPerDirection: 3
maxRayDegrees: 90
startVerticalOffset: 2.5
endVerticalOffset: 5
sphereCastRadius: 0.5
rayLength: 20
observationStacks: 6
rayHitColor: {r: 1, g: 0, b: 0, a: 1}
rayMissColor: {r: 1, g: 1, b: 1, a: 1}
useWorldPositions: 1
--- !u!114 &114458838850320084
MonoBehaviour:
m_ObjectHideFlags: 1
m_PrefabParentObject: {fileID: 0}

m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: bb172294dbbcc408286b156a2c4b553c, type: 3}
m_Script: {fileID: 11500000, guid: 6bb6b867a41448888c1cd4f99643ad71, type: 3}
sensorName: RayPerceptionSensor
detectableTags:
- wall
- goal
- block
raysPerDirection: 3
maxRayDegrees: 90
startVerticalOffset: 0
endVerticalOffset: 0
sphereCastRadius: 0.5
rayLength: 20
observationStacks: 6
rayHitColor: {r: 1, g: 0, b: 0, a: 1}
rayMissColor: {r: 1, g: 1, b: 1, a: 1}
useWorldPositions: 1
--- !u!114 &114898893333200490
MonoBehaviour:
m_ObjectHideFlags: 1

m_Name:
m_EditorClassIdentifier:
m_BrainParameters:
vectorObservationSize: 74
vectorObservationSize: 4
m_Model: {fileID: 11400000, guid: fb2ce36eb40b6480e94ea0b5d7573e47, type: 3}
m_Model: {fileID: 11400000, guid: 0468bf44b1efd4992b6bf22cadb50d89, type: 3}
m_InferenceDevice: 0
m_UseHeuristic: 0
m_BehaviorName: SmallWallJump

2
UnitySDK/Assets/ML-Agents/Examples/WallJump/Scenes/WallJump.unity


m_ReflectionIntensity: 1
m_CustomReflection: {fileID: 0}
m_Sun: {fileID: 0}
m_IndirectSpecularColor: {r: 0.44971442, g: 0.499779, b: 0.5756377, a: 1}
m_IndirectSpecularColor: {r: 0.44971484, g: 0.49977958, b: 0.5756385, a: 1}
--- !u!157 &3
LightmapSettings:
m_ObjectHideFlags: 0

11
UnitySDK/Assets/ML-Agents/Examples/WallJump/Scripts/WallJumpAgent.cs


Material m_GroundMaterial;
Renderer m_GroundRenderer;
WallJumpAcademy m_Academy;
RayPerception m_RayPer;
public float jumpingTime;
public float jumpTime;

Vector3 m_JumpTargetPos;
Vector3 m_JumpStartingPos;
string[] m_DetectableObjects;
m_RayPer = GetComponent<RayPerception>();
m_DetectableObjects = new[] { "wall", "goal", "block" };
m_AgentRb = GetComponent<Rigidbody>();
m_ShortBlockRb = shortBlock.GetComponent<Rigidbody>();

public override void CollectObservations()
{
var rayDistance = 20f;
float[] rayAngles = { 0f, 45f, 90f, 135f, 180f, 110f, 70f };
AddVectorObs(m_RayPer.Perceive(
rayDistance, rayAngles, m_DetectableObjects));
AddVectorObs(m_RayPer.Perceive(
rayDistance, rayAngles, m_DetectableObjects, 2.5f, 5.0f));
var agentPos = m_AgentRb.position - ground.transform.position;
AddVectorObs(agentPos / 20f);

1001
UnitySDK/Assets/ML-Agents/Examples/WallJump/TFModels/BigWallJump.nn
文件差异内容过多而无法显示
查看文件

1001
UnitySDK/Assets/ML-Agents/Examples/WallJump/TFModels/SmallWallJump.nn
文件差异内容过多而无法显示
查看文件

4
UnitySDK/Assets/ML-Agents/Scripts/Agent.cs


/// Editor. This excludes the Brain linked to the Agent since it can be
/// modified programmatically.
/// </summary>
[System.Serializable]
[Serializable]
public class AgentParameters
{
/// <summary>

/// </remarks>
[HelpURL("https://github.com/Unity-Technologies/ml-agents/blob/master/" +
"docs/Learning-Environment-Design-Agents.md")]
[System.Serializable]
[Serializable]
[RequireComponent(typeof(BehaviorParameters))]
public abstract class Agent : MonoBehaviour
{

164
UnitySDK/Assets/ML-Agents/Scripts/InferenceBrain/BarracudaModelParamLoader.cs


using System.Collections.Generic;
using System.Linq;
using Barracuda;
using MLAgents.Sensor;
using UnityEngine;
namespace MLAgents.InferenceBrain
{

return tensors;
}
/// <summary>
/// Counts the visual observation inputs declared by the model: 4-dimensional inputs
/// whose name starts with the visual observation placeholder prefix.
/// </summary>
/// <param name="model">The Barracuda model to inspect; may be null.</param>
/// <returns>The number of visual inputs, or 0 if the model is null.</returns>
public static int GetNumVisualInputs(Model model)
{
    if (model == null)
    {
        return 0;
    }
    var numVisual = 0;
    foreach (var input in model.inputs)
    {
        // Visual observations are rank-4 (NHWC) tensors with the placeholder prefix.
        if (input.shape.Length == 4 &&
            input.name.StartsWith(TensorNames.VisualObservationPlaceholderPrefix))
        {
            numVisual++;
        }
    }
    return numVisual;
}
/// <summary>
/// Generates the Tensor outputs that are expected to be present in the Model.
/// </summary>

/// <param name="brainParameters">
/// The BrainParameters that are used verify the compatibility with the InferenceEngine
/// </param>
/// <param name="sensorComponents">Attached sensor components</param>
public static IEnumerable<string> CheckModel(Model model, BrainParameters brainParameters)
public static IEnumerable<string> CheckModel(Model model, BrainParameters brainParameters, SensorComponent[] sensorComponents)
{
List<string> failedModelChecks = new List<string>();
if (model == null)

})
);
failedModelChecks.AddRange(
CheckInputTensorPresence(model, brainParameters, memorySize, isContinuous)
CheckInputTensorPresence(model, brainParameters, memorySize, isContinuous, sensorComponents)
CheckInputTensorShape(model, brainParameters)
CheckInputTensorShape(model, brainParameters, sensorComponents)
);
failedModelChecks.AddRange(
CheckOutputTensorShape(model, brainParameters, isContinuous, actionSize)

/// <param name="isContinuous">
/// Whether the model is expecting continuous or discrete control.
/// </param>
/// <param name="sensorComponents">Array of attached sensor components</param>
/// <returns>
/// A IEnumerable of string corresponding to the failed input presence checks.
/// </returns>

int memory,
ModelActionType isContinuous)
ModelActionType isContinuous,
SensorComponent[] sensorComponents
)
{
var failedModelChecks = new List<string>();
var tensorsNames = GetInputTensors(model).Select(x => x.name).ToList();

"You must set the Vector Observation Space Size to 0.");
}
// TODO reenable checks there are enough Visual Observation Placeholder in the model.
// If there are not enough Visual Observation Input compared to what the
// sensors expect.
var visObsIndex = 0;
for (var sensorIndex = 0; sensorIndex < sensorComponents.Length; sensorIndex++)
{
var sensor = sensorComponents[sensorIndex];
if (!sensor.IsVisual())
{
continue;
}
if (!tensorsNames.Contains(
TensorNames.VisualObservationPlaceholderPrefix + visObsIndex))
{
failedModelChecks.Add(
"The model does not contain a Visual Observation Placeholder Input " +
$"for sensor component {visObsIndex} ({sensor.GetType().Name}).");
}
visObsIndex++;
}
var expectedVisualObs = GetNumVisualInputs(model);
// Check if there's not enough visual sensors (too many would be handled above)
if (expectedVisualObs > visObsIndex)
{
failedModelChecks.Add(
$"The model expects {expectedVisualObs} visual inputs," +
$" but only found {visObsIndex} visual sensors."
);
}
// If the model has a non-negative memory size but requires a recurrent input
if (memory > 0)

}
/// <summary>
/// Checks that the shape of the visual observation input placeholder is the same as the
/// shape produced by the corresponding sensor.
/// </summary>
/// <param name="tensorProxy">The tensor that is expected by the model</param>
/// <param name="sensorComponent">The sensor that produces the visual observation.</param>
/// <returns>
/// If the check failed, returns a string containing information about why the
/// check failed. If the check passed, returns null.
/// </returns>
static string CheckVisualObsShape(
    TensorProxy tensorProxy, SensorComponent sensorComponent)
{
    // Sensor shape convention is (width, height, channels).
    var shape = sensorComponent.GetObservationShape();
    var widthBp = shape[0];
    var heightBp = shape[1];
    var pixelBp = shape[2];
    // Model tensor is NHWC: batch, height, width, channels.
    var heightT = tensorProxy.shape[1];
    var widthT = tensorProxy.shape[2];
    var pixelT = tensorProxy.shape[3];
    if ((widthBp != widthT) || (heightBp != heightT) || (pixelBp != pixelT))
    {
        // Bug fix: the original message had the two shape groups swapped — it reported
        // the sensor's shape as the "Received TensorProxy" shape and vice versa.
        return "The visual Observation of the model does not match. " +
            $"Received TensorProxy of shape [?x{widthT}x{heightT}x{pixelT}] but " +
            $"was expecting [?x{widthBp}x{heightBp}x{pixelBp}].";
    }
    return null;
}
/// <summary>
/// Generates failed checks that correspond to inputs shapes incompatibilities between
/// the model and the BrainParameters.
/// </summary>

/// <param name="brainParameters">
/// The BrainParameters that are used verify the compatibility with the InferenceEngine
/// </param>
/// <param name="sensorComponents">Attached sensors</param>
Model model, BrainParameters brainParameters)
Model model, BrainParameters brainParameters, SensorComponent[] sensorComponents)
new Dictionary<string, Func<BrainParameters, TensorProxy, string>>()
new Dictionary<string, Func<BrainParameters, TensorProxy, SensorComponent[], string>>()
{TensorNames.RandomNormalEpsilonPlaceholder, ((bp, tensor) => null)},
{TensorNames.ActionMaskPlaceholder, ((bp, tensor) => null)},
{TensorNames.SequenceLengthPlaceholder, ((bp, tensor) => null)},
{TensorNames.RecurrentInPlaceholder, ((bp, tensor) => null)},
{TensorNames.RandomNormalEpsilonPlaceholder, ((bp, tensor, scs) => null)},
{TensorNames.ActionMaskPlaceholder, ((bp, tensor, scs) => null)},
{TensorNames.SequenceLengthPlaceholder, ((bp, tensor, scs) => null)},
{TensorNames.RecurrentInPlaceholder, ((bp, tensor, scs) => null)},
tensorTester[mem.input] = ((bp, tensor) => null);
tensorTester[mem.input] = ((bp, tensor, scs) => null);
// TODO reenable checks on visual observation shapes.
var visObsIndex = 0;
for (var sensorIndex = 0; sensorIndex < sensorComponents.Length; sensorIndex++)
{
var sensorComponent = sensorComponents[sensorIndex];
if (!sensorComponent.IsVisual())
{
continue;
}
tensorTester[TensorNames.VisualObservationPlaceholderPrefix + visObsIndex] =
(bp, tensor, scs) => CheckVisualObsShape(tensor, sensorComponent);
visObsIndex++;
}
// If the model expects an input but it is not in this list
foreach (var tensor in GetInputTensors(model))

else
{
var tester = tensorTester[tensor.name];
var error = tester.Invoke(brainParameters, tensor);
var error = tester.Invoke(brainParameters, tensor, sensorComponents);
if (error != null)
{
failedModelChecks.Add(error);

/// The BrainParameters that are used verify the compatibility with the InferenceEngine
/// </param>
/// <param name="tensorProxy">The tensor that is expected by the model</param>
/// <param name="sensorComponents">Array of attached sensor components</param>
BrainParameters brainParameters, TensorProxy tensorProxy)
BrainParameters brainParameters, TensorProxy tensorProxy, SensorComponent[] sensorComponents)
if (vecObsSizeBp * numStackedVector != totalVecObsSizeT)
var totalVectorSensorSize = 0;
foreach (var sensorComp in sensorComponents)
{
if (sensorComp.IsVector())
{
totalVectorSensorSize += sensorComp.GetObservationShape()[0];
}
}
if (vecObsSizeBp * numStackedVector + totalVectorSensorSize != totalVecObsSizeT)
return "Vector Observation Size of the model does not match. Received " +
$"{vecObsSizeBp} x {numStackedVector} but was expecting {totalVecObsSizeT}.";
var sensorSizes = "";
foreach (var sensorComp in sensorComponents)
{
if (sensorComp.IsVector())
{
var vecSize = sensorComp.GetObservationShape()[0];
if (sensorSizes.Length == 0)
{
sensorSizes = $"[{vecSize}";
}
else
{
sensorSizes += $", {vecSize}";
}
}
}
sensorSizes += "]";
return $"Vector Observation Size of the model does not match. Was expecting {totalVecObsSizeT} " +
$"but received {vecObsSizeBp} x {numStackedVector} vector observations and " +
$"SensorComponent sizes: {sensorSizes}.";
}
return null;
}

/// The BrainParameters that are used verify the compatibility with the InferenceEngine
/// </param>
/// <param name="tensorProxy"> The tensor that is expected by the model</param>
/// <param name="sensorComponents">Array of attached sensor components</param>
BrainParameters brainParameters, TensorProxy tensorProxy)
BrainParameters brainParameters, TensorProxy tensorProxy, SensorComponent[] sensorComponents)
{
var numberActionsBp = brainParameters.vectorActionSize.Length;
var numberActionsT = tensorProxy.shape[tensorProxy.shape.Length - 1];

5
UnitySDK/Assets/ML-Agents/Scripts/Sensor/CameraSensorComponent.cs


{
return new CameraSensor(camera, width, height, grayscale, sensorName);
}
/// <summary>
/// Shape of the camera observation: width, height, and channel count
/// (1 when grayscale, 3 otherwise).
/// </summary>
public override int[] GetObservationShape()
{
    var channels = grayscale ? 1 : 3;
    return new[] { width, height, channels };
}
}
}

2
UnitySDK/Assets/ML-Agents/Scripts/Sensor/ISensor.cs


string GetName();
}
public static class ISensorExtensions
public static class SensorExtensions
{
/// <summary>
/// Get the total number of elements in the ISensor's observation (i.e. the product of the shape elements).

5
UnitySDK/Assets/ML-Agents/Scripts/Sensor/RenderTextureSensorComponent.cs


{
return new RenderTextureSensor(renderTexture, width, height, grayscale, sensorName);
}
/// <summary>
/// Shape of the render texture observation: width, height, and channel count
/// (1 when grayscale, 3 otherwise).
/// </summary>
public override int[] GetObservationShape()
{
    var channels = grayscale ? 1 : 3;
    return new[] { width, height, channels };
}
}
}

18
UnitySDK/Assets/ML-Agents/Scripts/Sensor/SensorComponent.cs


/// </summary>
/// <returns></returns>
public abstract ISensor CreateSensor();
/// <summary>
/// Returns the shape of the sensor observations that will be created.
/// </summary>
/// <returns></returns>
public abstract int[] GetObservationShape();
/// <summary>
/// Whether the sensor produces a visual observation, i.e. its observation shape
/// has rank 3.
/// </summary>
public virtual bool IsVisual()
{
    return GetObservationShape().Length == 3;
}
/// <summary>
/// Whether the sensor produces a vector observation, i.e. its observation shape
/// has rank 1.
/// </summary>
public virtual bool IsVector()
{
    return GetObservationShape().Length == 1;
}
}
}

1
UnitySDK/UnitySDK.sln.DotSettings


<s:String x:Key="/Default/CodeStyle/Naming/CSharpNaming/Abbreviations/=PNG/@EntryIndexedValue">PNG</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/CSharpNaming/Abbreviations/=RL/@EntryIndexedValue">RL</s:String>
<s:Boolean x:Key="/Default/UserDictionary/Words/=BLAS/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/UserDictionary/Words/=Gizmos/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/UserDictionary/Words/=Logits/@EntryIndexedValue">True</s:Boolean>

21
UnitySDK/Assets/ML-Agents/Editor/Tests/Sensor/RayPerceptionSensorTests.cs


using NUnit.Framework;
using UnityEngine;
using MLAgents.Sensor;
namespace MLAgents.Tests
{
public class RayPerceptionSensorTests
{
[Test]
public void TestGetRayAngles()
{
var angles = RayPerceptionSensorComponentBase.GetRayAngles(3, 90f);
var expectedAngles = new [] { 90f, 60f, 120f, 30f, 150f, 0f, 180f };
Assert.AreEqual(expectedAngles.Length, angles.Length);
for (var i = 0; i < angles.Length; i++)
{
Assert.AreEqual(expectedAngles[i], angles[i], .01);
}
}
}
}

3
UnitySDK/Assets/ML-Agents/Editor/Tests/Sensor/RayPerceptionSensorTests.cs.meta


fileFormatVersion: 2
guid: d2983e2bca9a40398f287727dc0472a5
timeCreated: 1573242741

315
UnitySDK/Assets/ML-Agents/Scripts/Sensor/RayPerceptionSensor.cs


using System;
using System.Collections.Generic;
using UnityEngine;
namespace MLAgents.Sensor
{
public class RayPerceptionSensor : ISensor
{
public enum CastType
{
Cast2D,
Cast3D,
}
float[] m_Observations;
int[] m_Shape;
string m_Name;
float m_RayDistance;
List<string> m_DetectableObjects;
float[] m_Angles;
float m_StartOffset;
float m_EndOffset;
float m_CastRadius;
CastType m_CastType;
Transform m_Transform;
/// <summary>
/// Debug information for the raycast hits. This is used by the RayPerceptionSensorComponent.
/// </summary>
public class DebugDisplayInfo
{
// Per-ray geometry and hit result, captured so the component can draw the rays.
public struct RayInfo
{
public Vector3 localStart;
public Vector3 localEnd;
public Vector3 worldStart;
public Vector3 worldEnd;
public bool castHit;
public float hitFraction;
}
/// <summary>
/// Marks the stored results as produced on the current frame (resets the age).
/// </summary>
public void Reset()
{
m_Frame = Time.frameCount;
}
/// <summary>
/// "Age" of the results in number of frames. This is used to adjust the alpha when drawing.
/// </summary>
public int age
{
get { return Time.frameCount - m_Frame; }
}
// One entry per ray; reallocated when the number of rays changes.
public RayInfo[] rayInfos;
// Frame number at which the results were last written.
int m_Frame;
}
DebugDisplayInfo m_DebugDisplayInfo;
public DebugDisplayInfo debugDisplayInfo
{
get { return m_DebugDisplayInfo; }
}
/// <summary>
/// Creates a RayPerceptionSensor.
/// </summary>
/// <param name="name">Name of the sensor (returned by GetName).</param>
/// <param name="rayDistance">Maximum length of each ray.</param>
/// <param name="detectableObjects">Tags that the rays can detect.</param>
/// <param name="angles">Angles (in degrees) defining the rays.</param>
/// <param name="transform">Transform the casts are performed relative to.</param>
/// <param name="startOffset">Starting height offset of each ray (used for 3D casts).</param>
/// <param name="endOffset">Ending height offset of each ray (used for 3D casts).</param>
/// <param name="castRadius">Sphere/circle cast radius; 0 or less means plain raycasts.</param>
/// <param name="castType">Whether to cast in 2D or 3D.</param>
public RayPerceptionSensor(string name, float rayDistance, List<string> detectableObjects, float[] angles,
Transform transform, float startOffset, float endOffset, float castRadius, CastType castType)
{
// Each ray contributes (numTags + 2) floats: one-hot tag encoding, a "missed" flag,
// and the normalized hit fraction.
var numObservations = (detectableObjects.Count + 2) * angles.Length;
m_Shape = new[] { numObservations };
m_Name = name;
m_Observations = new float[numObservations];
m_RayDistance = rayDistance;
m_DetectableObjects = detectableObjects;
// TODO - preprocess angles, save ray directions instead?
m_Angles = angles;
m_Transform = transform;
m_StartOffset = startOffset;
m_EndOffset = endOffset;
m_CastRadius = castRadius;
m_CastType = castType;
// Debug drawing information is only collected in the Editor.
if (Application.isEditor)
{
m_DebugDisplayInfo = new DebugDisplayInfo();
}
}
/// <summary>
/// Performs the raycasts and writes the resulting observations to the adapter.
/// </summary>
/// <param name="adapter">Destination for the observation floats.</param>
/// <returns>The number of floats written.</returns>
public int Write(WriteAdapter adapter)
{
// Recompute the perception each write; results are buffered in m_Observations.
PerceiveStatic(
m_RayDistance, m_Angles, m_DetectableObjects, m_StartOffset, m_EndOffset,
m_CastRadius, m_Transform, m_CastType, m_Observations, false, m_DebugDisplayInfo
);
adapter.AddRange(m_Observations);
return m_Observations.Length;
}
/// <summary>
/// No-op; observations are computed on demand in Write().
/// </summary>
public void Update()
{
}
/// <summary>
/// Returns the observation shape: a single dimension of size
/// (num detectable tags + 2) * (num rays), as computed in the constructor.
/// </summary>
public int[] GetFloatObservationShape()
{
return m_Shape;
}
/// <summary>
/// Returns the name given to the sensor at construction time.
/// </summary>
public string GetName()
{
return m_Name;
}
/// <summary>
/// Ray perception observations are not compressed; always returns null.
/// </summary>
public virtual byte[] GetCompressedObservation()
{
return null;
}
/// <summary>
/// Ray perception observations are always sent uncompressed.
/// </summary>
public virtual SensorCompressionType GetCompressionType()
{
return SensorCompressionType.None;
}
/// <summary>
/// Evaluates a perception vector to be used as part of an observation of an agent.
/// Each element in the rayAngles array determines a sublist of data to the observation.
/// The sublist contains the observation data for a single cast. The list is composed of the following:
/// 1. A one-hot encoding for detectable objects. For example, if detectableObjects.Length = n, the
/// first n elements of the sublist will be a one-hot encoding of the detectableObject that was hit, or
/// all zeroes otherwise.
/// 2. The 'length' element of the sublist will be 1 if the ray missed everything, or 0 if it hit
/// something (detectable or not).
/// 3. The 'length+1' element of the sublist will contain the normalised distance to the object hit, or 1 if
/// nothing was hit.
///
/// The legacyHitFractionBehavior changes the behavior to be backwards compatible but has some
/// counter-intuitive behavior:
/// * if the cast hits a object that's not in the detectableObjects list, all results are 0
/// * if the cast doesn't hit, the hit fraction field is 0
/// </summary>
/// <param name="rayLength">Maximum length of each cast.</param>
/// <param name="rayAngles">List of angles (in degrees) used to define the rays. 90 degrees is considered
/// "forward" relative to the game object</param>
/// <param name="detectableObjects">List of tags which correspond to object types agent can see</param>
/// <param name="startOffset">Starting height offset of ray from center of agent.</param>
/// <param name="endOffset">Ending height offset of ray from center of agent.</param>
/// <param name="castRadius">Radius of the sphere to use for spherecasting. If 0 or less, rays are used
/// instead - this may be faster, especially for complex environments.</param>
/// <param name="transform">Transform of the GameObject</param>
/// <param name="castType">Whether to perform the casts in 2D or 3D.</param>
/// <param name="perceptionBuffer">Output array of floats. Must be (num rays) * (num tags + 2) in size.</param>
/// <param name="legacyHitFractionBehavior">Whether to use the legacy behavior for hit fractions.</param>
/// <param name="debugInfo">Optional debug information output, only used by RayPerceptionSensor.</param>
public static void PerceiveStatic(float rayLength,
    IReadOnlyList<float> rayAngles, IReadOnlyList<string> detectableObjects,
    float startOffset, float endOffset, float castRadius,
    Transform transform, CastType castType, float[] perceptionBuffer,
    bool legacyHitFractionBehavior = false,
    DebugDisplayInfo debugInfo = null)
{
    Array.Clear(perceptionBuffer, 0, perceptionBuffer.Length);
    if (debugInfo != null)
    {
        debugInfo.Reset();
        if (debugInfo.rayInfos == null || debugInfo.rayInfos.Length != rayAngles.Count)
        {
            debugInfo.rayInfos = new DebugDisplayInfo.RayInfo[rayAngles.Count];
        }
    }

    // For each ray, the sublist stores categorical information on the detected object
    // along with the object distance.
    int bufferOffset = 0;
    for (var rayIndex = 0; rayIndex < rayAngles.Count; rayIndex++)
    {
        var angle = rayAngles[rayIndex];
        Vector3 startPositionLocal, endPositionLocal;
        if (castType == CastType.Cast3D)
        {
            startPositionLocal = new Vector3(0, startOffset, 0);
            endPositionLocal = PolarToCartesian3D(rayLength, angle);
            endPositionLocal.y += endOffset;
        }
        else
        {
            // Vector2s here get converted to Vector3s (and back to Vector2s for casting)
            startPositionLocal = new Vector2();
            endPositionLocal = PolarToCartesian2D(rayLength, angle);
        }

        var startPositionWorld = transform.TransformPoint(startPositionLocal);
        var endPositionWorld = transform.TransformPoint(endPositionLocal);
        var rayDirection = endPositionWorld - startPositionWorld;

        // Do the cast and assign the hit information for each detectable object.
        //     sublist[0           ] <- did hit detectableObjects[0]
        //     ...
        //     sublist[numObjects-1] <- did hit detectableObjects[numObjects-1]
        //     sublist[numObjects  ] <- 1 if missed else 0
        //     sublist[numObjects+1] <- hit fraction (or 1 if no hit)
        bool castHit;
        float hitFraction;
        GameObject hitObject;
        if (castType == CastType.Cast3D)
        {
            RaycastHit rayHit;
            if (castRadius > 0f)
            {
                castHit = Physics.SphereCast(startPositionWorld, castRadius, rayDirection, out rayHit, rayLength);
            }
            else
            {
                castHit = Physics.Raycast(startPositionWorld, rayDirection, out rayHit, rayLength);
            }
            hitFraction = castHit ? rayHit.distance / rayLength : 1.0f;
            hitObject = castHit ? rayHit.collider.gameObject : null;
        }
        else
        {
            RaycastHit2D rayHit;
            if (castRadius > 0f)
            {
                rayHit = Physics2D.CircleCast(startPositionWorld, castRadius, rayDirection, rayLength);
            }
            else
            {
                rayHit = Physics2D.Raycast(startPositionWorld, rayDirection, rayLength);
            }
            castHit = rayHit;
            // 2D hits report the fraction of the cast length directly.
            hitFraction = castHit ? rayHit.fraction : 1.0f;
            hitObject = castHit ? rayHit.collider.gameObject : null;
        }

        if (debugInfo != null)
        {
            debugInfo.rayInfos[rayIndex].localStart = startPositionLocal;
            debugInfo.rayInfos[rayIndex].localEnd = endPositionLocal;
            debugInfo.rayInfos[rayIndex].worldStart = startPositionWorld;
            debugInfo.rayInfos[rayIndex].worldEnd = endPositionWorld;
            debugInfo.rayInfos[rayIndex].castHit = castHit;
            debugInfo.rayInfos[rayIndex].hitFraction = hitFraction;
        }
        else if (Application.isEditor)
        {
            // Legacy drawing
            Debug.DrawRay(startPositionWorld, rayDirection, Color.black, 0.01f, true);
        }

        if (castHit)
        {
            var hitTaggedObject = false;
            for (var i = 0; i < detectableObjects.Count; i++)
            {
                if (hitObject.CompareTag(detectableObjects[i]))
                {
                    perceptionBuffer[bufferOffset + i] = 1;
                    perceptionBuffer[bufferOffset + detectableObjects.Count + 1] = hitFraction;
                    hitTaggedObject = true;
                    break;
                }
            }
            // Bug fix: this write was previously nested inside the tag loop, so it ran
            // redundantly per non-matching tag and never ran when detectableObjects was
            // empty (leaving the hit fraction 0 as if legacy behavior were on).
            if (!hitTaggedObject && !legacyHitFractionBehavior)
            {
                // Something was hit but not on the list. Still set the hit fraction.
                perceptionBuffer[bufferOffset + detectableObjects.Count + 1] = hitFraction;
            }
        }
        else
        {
            perceptionBuffer[bufferOffset + detectableObjects.Count] = 1f;
            if (!legacyHitFractionBehavior)
            {
                // Nothing was hit, so there's full clearance in front of the agent.
                perceptionBuffer[bufferOffset + detectableObjects.Count + 1] = 1.0f;
            }
        }

        bufferOffset += detectableObjects.Count + 2;
    }
}
/// <summary>
/// Converts a polar coordinate (radius, angle in degrees) to a cartesian
/// point in the X-Z plane (y is always 0).
/// </summary>
static Vector3 PolarToCartesian3D(float radius, float angleDegrees)
{
    var angleRadians = Mathf.Deg2Rad * angleDegrees;
    return new Vector3(radius * Mathf.Cos(angleRadians), 0f, radius * Mathf.Sin(angleRadians));
}
/// <summary>
/// Converts a polar coordinate (radius, angle in degrees) to a cartesian
/// point in the X-Y plane.
/// </summary>
static Vector2 PolarToCartesian2D(float radius, float angleDegrees)
{
    var angleRadians = Mathf.Deg2Rad * angleDegrees;
    return new Vector2(radius * Mathf.Cos(angleRadians), radius * Mathf.Sin(angleRadians));
}
}
}

3
UnitySDK/Assets/ML-Agents/Scripts/Sensor/RayPerceptionSensor.cs.meta


fileFormatVersion: 2
guid: 71417cdf8dd542e19ec22822b001b884
timeCreated: 1573089052

10
UnitySDK/Assets/ML-Agents/Scripts/Sensor/RayPerceptionSensorComponent2D.cs


namespace MLAgents.Sensor
{
    /// <summary>
    /// Ray perception sensor component that performs its casts with 2D physics.
    /// </summary>
    public class RayPerceptionSensorComponent2D : RayPerceptionSensorComponentBase
    {
        /// <summary>
        /// Selects 2D casting for the underlying <see cref="RayPerceptionSensor"/>.
        /// </summary>
        public override RayPerceptionSensor.CastType GetCastType() => RayPerceptionSensor.CastType.Cast2D;
    }
}

3
UnitySDK/Assets/ML-Agents/Scripts/Sensor/RayPerceptionSensorComponent2D.cs.meta


fileFormatVersion: 2
guid: f67c7e722ba14acd9153bb4488bff6e4
timeCreated: 1573769662

32
UnitySDK/Assets/ML-Agents/Scripts/Sensor/RayPerceptionSensorComponent3D.cs


using System;
using UnityEngine;
namespace MLAgents.Sensor
{
    /// <summary>
    /// Ray perception sensor component that performs its casts with 3D physics,
    /// with configurable vertical offsets for the ray start and end points.
    /// </summary>
    public class RayPerceptionSensorComponent3D : RayPerceptionSensorComponentBase
    {
        [Header("3D Properties", order = 100)]
        [Range(-10f, 10f)]
        [Tooltip("Ray start is offset up or down by this amount.")]
        public float startVerticalOffset;

        [Range(-10f, 10f)]
        [Tooltip("Ray end is offset up or down by this amount.")]
        public float endVerticalOffset;

        /// <summary>
        /// Selects 3D casting for the underlying <see cref="RayPerceptionSensor"/>.
        /// </summary>
        public override RayPerceptionSensor.CastType GetCastType() => RayPerceptionSensor.CastType.Cast3D;

        /// <summary>
        /// Vertical offset applied to the start of each ray.
        /// </summary>
        public override float GetStartVerticalOffset() => startVerticalOffset;

        /// <summary>
        /// Vertical offset applied to the end of each ray.
        /// </summary>
        public override float GetEndVerticalOffset() => endVerticalOffset;
    }
}

3
UnitySDK/Assets/ML-Agents/Scripts/Sensor/RayPerceptionSensorComponent3D.cs.meta


fileFormatVersion: 2
guid: 6bb6b867a41448888c1cd4f99643ad71
timeCreated: 1573764567

143
UnitySDK/Assets/ML-Agents/Scripts/Sensor/RayPerceptionSensorComponentBase.cs


using System;
using System.Collections.Generic;
using UnityEngine;
namespace MLAgents.Sensor
{
    /// <summary>
    /// Base class for ray perception sensor components. Holds the shared configuration
    /// (ray count, spread, tags, cast radius, length, stacking) and draws debug gizmos;
    /// subclasses choose the cast type (2D or 3D) and optional vertical offsets.
    /// </summary>
    public abstract class RayPerceptionSensorComponentBase : SensorComponent
    {
        public string sensorName = "RayPerceptionSensor";

        [Tooltip("List of tags in the scene to compare against.")]
        public List<string> detectableTags;

        [Range(0, 50)]
        [Tooltip("Number of rays to the left and right of center.")]
        public int raysPerDirection = 3;

        [Range(0, 180)]
        [Tooltip("Cone size for rays. Using 90 degrees will cast rays to the left and right. Greater than 90 degrees will go backwards.")]
        public float maxRayDegrees = 70;

        [Range(0f, 10f)]
        [Tooltip("Radius of sphere to cast. Set to zero for raycasts.")]
        public float sphereCastRadius = 0.5f;

        [Range(1, 1000)]
        [Tooltip("Length of the rays to cast.")]
        public float rayLength = 20f;

        [Range(1, 50)]
        [Tooltip("Whether to stack previous observations. Using 1 means no previous observations.")]
        public int observationStacks = 1;

        [Header("Debug Gizmos", order = 999)]
        public Color rayHitColor = Color.red;
        public Color rayMissColor = Color.white;

        [Tooltip("Whether to draw the raycasts in the world space of when they happened, or using the Agent's current transform")]
        public bool useWorldPositions = true;

        // Kept so OnDrawGizmos can read the debug info of the sensor created by CreateSensor.
        [NonSerialized]
        RayPerceptionSensor m_RaySensor;

        /// <summary>
        /// Whether the rays are cast with 2D or 3D physics.
        /// </summary>
        public abstract RayPerceptionSensor.CastType GetCastType();

        /// <summary>
        /// Vertical offset applied to the start of each ray (3D only; defaults to 0).
        /// </summary>
        public virtual float GetStartVerticalOffset()
        {
            return 0f;
        }

        /// <summary>
        /// Vertical offset applied to the end of each ray (3D only; defaults to 0).
        /// </summary>
        public virtual float GetEndVerticalOffset()
        {
            return 0f;
        }

        /// <summary>
        /// Creates the RayPerceptionSensor from the current settings, optionally wrapped
        /// in a StackingSensor when more than one observation stack is requested.
        /// </summary>
        public override ISensor CreateSensor()
        {
            var rayAngles = GetRayAngles(raysPerDirection, maxRayDegrees);
            m_RaySensor = new RayPerceptionSensor(sensorName, rayLength, detectableTags, rayAngles,
                transform, GetStartVerticalOffset(), GetEndVerticalOffset(), sphereCastRadius, GetCastType()
            );

            // Use "> 1" (not "!= 1") so this agrees with GetObservationShape for
            // out-of-range values set from script; only genuinely stacked sensors get wrapped.
            if (observationStacks > 1)
            {
                var stackingSensor = new StackingSensor(m_RaySensor, observationStacks);
                return stackingSensor;
            }

            return m_RaySensor;
        }

        /// <summary>
        /// Computes the ray angles: the center ray at 90 degrees, then pairs fanning out
        /// left and right in equal steps up to maxRayDegrees.
        /// </summary>
        public static float[] GetRayAngles(int raysPerDirection, float maxRayDegrees)
        {
            // Example:
            // { 90, 90 - delta, 90 + delta, 90 - 2*delta, 90 + 2*delta }
            var anglesOut = new float[2 * raysPerDirection + 1];
            var delta = maxRayDegrees / raysPerDirection;
            anglesOut[0] = 90f;
            for (var i = 0; i < raysPerDirection; i++)
            {
                anglesOut[2 * i + 1] = 90 - (i + 1) * delta;
                anglesOut[2 * i + 2] = 90 + (i + 1) * delta;
            }

            return anglesOut;
        }

        /// <summary>
        /// Observation shape: one (numTags + 2)-wide sublist per ray, times the stack count.
        /// The +2 covers the "missed" flag and the hit-fraction slot.
        /// </summary>
        public override int[] GetObservationShape()
        {
            var numRays = 2 * raysPerDirection + 1;
            var numTags = detectableTags == null ? 0 : detectableTags.Count;
            var obsSize = (numTags + 2) * numRays;
            var stacks = observationStacks > 1 ? observationStacks : 1;
            return new[] { obsSize * stacks };
        }

        /// <summary>
        /// Draw the debug information from the sensor (if available).
        /// </summary>
        public void OnDrawGizmos()
        {
            if (m_RaySensor?.debugDisplayInfo?.rayInfos == null)
            {
                return;
            }
            var debugInfo = m_RaySensor.debugDisplayInfo;

            // Draw "old" observations in a lighter color.
            // Since the agent may not step every frame, this helps de-emphasize "stale" hit information.
            var alpha = Mathf.Pow(.5f, debugInfo.age);

            foreach (var rayInfo in debugInfo.rayInfos)
            {
                // Either use the original world-space coordinates of the raycast, or transform the agent-local
                // coordinates of the rays to the current transform of the agent. If the agent acts every frame,
                // these should be the same.
                var startPositionWorld = rayInfo.worldStart;
                var endPositionWorld = rayInfo.worldEnd;
                if (!useWorldPositions)
                {
                    startPositionWorld = transform.TransformPoint(rayInfo.localStart);
                    endPositionWorld = transform.TransformPoint(rayInfo.localEnd);
                }
                var rayDirection = endPositionWorld - startPositionWorld;
                // Scale the drawn ray down to where it actually hit (hitFraction is 1 on a miss).
                rayDirection *= rayInfo.hitFraction;

                // hit fraction ^2 will shift "far" hits closer to the hit color
                var lerpT = rayInfo.hitFraction * rayInfo.hitFraction;
                var color = Color.Lerp(rayHitColor, rayMissColor, lerpT);
                color.a = alpha;
                Gizmos.color = color;
                Gizmos.DrawRay(startPositionWorld, rayDirection);

                // Draw the hit point as a sphere. If using rays to cast (0 radius), use a small sphere.
                if (rayInfo.castHit)
                {
                    var hitRadius = Mathf.Max(sphereCastRadius, .05f);
                    Gizmos.DrawWireSphere(startPositionWorld + rayDirection, hitRadius);
                }
            }
        }
    }
}

3
UnitySDK/Assets/ML-Agents/Scripts/Sensor/RayPerceptionSensorComponentBase.cs.meta


fileFormatVersion: 2
guid: 45243967d8c0419b953c02bccb7c2768
timeCreated: 1573087062
正在加载...
取消
保存