浏览代码

Made ModelParamLoader static and made a modelRunner for batching at inference (#2718)

* Created the model runner and uses a shared interface with the communicator.

Fixing bugs with deallocation

Removing unnecessary code

Added code comments

Renaming

* Addressing comments

* Modified the constructor of ModelRunner

* Addressing comments

* renaming the _verbose variable

* Addressing comments : Removed the Verbose check in the LearningBrainEditor
/develop-gpu-test
GitHub 5 年前
当前提交
7d8651ac
共有 8 个文件被更改,包括 417 次插入326 次删除
  1. 12
      UnitySDK/Assets/ML-Agents/Editor/LearningBrainEditor.cs
  2. 44
      UnitySDK/Assets/ML-Agents/Scripts/Academy.cs
  3. 41
      UnitySDK/Assets/ML-Agents/Scripts/Grpc/RpcCommunicator.cs
  4. 22
      UnitySDK/Assets/ML-Agents/Scripts/ICommunicator.cs
  5. 314
      UnitySDK/Assets/ML-Agents/Scripts/InferenceBrain/BarracudaModelParamLoader.cs
  6. 153
      UnitySDK/Assets/ML-Agents/Scripts/LearningBrain.cs
  7. 146
      UnitySDK/Assets/ML-Agents/Scripts/InferenceBrain/ModelRunner.cs
  8. 11
      UnitySDK/Assets/ML-Agents/Scripts/InferenceBrain/ModelRunner.cs.meta

12
UnitySDK/Assets/ML-Agents/Editor/LearningBrainEditor.cs


using UnityEngine;
using UnityEditor;
using Barracuda;
namespace MLAgents
{

}
if (m_RequireReload && m_TimeSinceModelReload > k_TimeBetweenModelReloads)
{
brain.ReloadModel();
var failedChecks = brain.GetModelFailedChecks();
D.logEnabled = false;
Barracuda.Model barracudaModel = null;
if (brain.model != null)
{
barracudaModel = ModelLoader.Load(brain.model.Value);
}
var failedChecks = InferenceBrain.BarracudaModelParamLoader.CheckModel(
barracudaModel, brain.brainParameters);
foreach (var check in failedChecks)
{
if (check != null)

44
UnitySDK/Assets/ML-Agents/Scripts/Academy.cs


using UnityEngine;
using System.Linq;
using System.Collections.Generic;
using MLAgents.InferenceBrain;
using Barracuda;
/**
* Welcome to Unity Machine Learning Agents (ML-Agents).

/// Pointer to the communicator currently in use by the Academy.
public ICommunicator Communicator;
private List<ModelRunner> m_ModelRunners = new List<ModelRunner>();
// Flag used to keep track of the first time the Academy is reset.
bool m_FirstAcademyReset;

Communicator = null;
}
if (Communicator != null){
if (Communicator != null)
{
Communicator.QuitCommandReceived += OnQuitCommandReceived;
Communicator.ResetCommandReceived += OnResetCommand;
Communicator.RLInputReceived += OnRLInputReceived;

SetIsInference(!IsCommunicatorOn);
BrainDecideAction += () => {};
DestroyAction += () => {};
AgentSetStatus += i => {};
AgentResetIfDone += () => {};
AgentSendState += () => {};
AgentAct += () => {};
AgentForceReset += () => {};
BrainDecideAction += () => { };
DestroyAction += () => { };
AgentSetStatus += i => { };
AgentResetIfDone += () => { };
AgentSendState += () => { };
AgentAct += () => { };
AgentForceReset += () => { };
ConfigureEnvironment();
}

void FixedUpdate()
{
EnvironmentStep();
}
/// <summary>
/// Returns the ModelRunner associated with the given NNModel and InferenceDevice,
/// creating and registering a new one when no match exists yet.
/// NOTE(review): the lookup key is (model, inferenceDevice) only — brainParameters
/// is used solely when constructing a new runner; confirm callers never reuse one
/// NNModel with differing BrainParameters.
/// </summary>
/// <param name="model"> The NNModel the ModelRunner must use </param>
/// <param name="brainParameters"> The brainParameters used to create the ModelRunner </param>
/// <param name="inferenceDevice"> The inference device (CPU or GPU) the ModelRunner will use </param>
/// <returns> The ModelRunner compatible with the input settings</returns>
public ModelRunner GetOrCreateModelRunner(NNModel model, BrainParameters brainParameters, InferenceDevice inferenceDevice)
{
var existing = m_ModelRunners.Find(r => r.HasModel(model, inferenceDevice));
if (existing != null)
{
return existing;
}
var created = new ModelRunner(model, brainParameters, inferenceDevice);
m_ModelRunners.Add(created);
return created;
}
/// <summary>

41
UnitySDK/Assets/ML-Agents/Scripts/Grpc/RpcCommunicator.cs


m_CurrentUnityRlOutput.AgentInfos.Add(
brainKey,
new CommunicatorObjects.UnityRLOutputProto.Types.ListAgentInfoProto());
if (m_CurrentUnityRlInitializationOutput == null){
if (m_CurrentUnityRlInitializationOutput == null)
{
m_CurrentUnityRlInitializationOutput = new CommunicatorObjects.UnityRLInitializationOutputProto();
}
m_CurrentUnityRlInitializationOutput.BrainParameters.Add(brainParameters.ToProto(brainKey, true));

/// <summary>
/// Close the communicator gracefully on both sides of the communication.
/// </summary>
public void Close()
public void Dispose()
{
# if UNITY_EDITOR || UNITY_STANDALONE_WIN || UNITY_STANDALONE_OSX || UNITY_STANDALONE_LINUX
if (!m_IsOpen)

switch (command)
{
case CommandProto.Quit:
{
QuitCommandReceived?.Invoke();
return;
}
{
QuitCommandReceived?.Invoke();
return;
}
{
ResetCommandReceived?.Invoke(environmentParametersProto.ToEnvironmentResetParameters());
return;
}
{
ResetCommandReceived?.Invoke(environmentParametersProto.ToEnvironmentResetParameters());
return;
}
{
return;
}
{
return;
}
}
}

#region Sending and retreiving data
public void PutObservations(
string brainKey, IEnumerable<Agent> agents)
/// <summary>
/// Sends the observations. If at least one brain has an agent in need of
/// a decision or if the academy is done, the data is sent via
/// Communicator. Else, a new step is realized. The data can only be
/// sent once all the brains that were part of initialization have tried
/// to send information.
/// </summary>
/// <param name="key">Batch Key.</param>
/// <param name="agents">Agent info.</param>
public void PutObservations(string brainKey, ICollection<Agent> agents)
{
// The brain tried called GiveBrainInfo, update m_hasQueried
m_HasQueried[brainKey] = true;

// This method is run whenever the playmode state is changed.
if (state == PlayModeStateChange.ExitingPlayMode)
{
Close();
Dispose();
}
}

22
UnitySDK/Assets/ML-Agents/Scripts/ICommunicator.cs


UnityOutput and UnityInput can be extended to provide functionalities beyond RL
UnityRLOutput and UnityRLInput can be extended to provide new RL functionalities
*/
public interface ICommunicator
public interface ICommunicator : IBatchedDecisionMaker
{
/// <summary>
/// Quit was received by the communicator.

void SubscribeBrain(string name, BrainParameters brainParameters);
/// <summary>
/// Sends the observations. If at least one brain has an agent in need of
/// a decision or if the academy is done, the data is sent via
/// Communicator. Else, a new step is realized. The data can only be
/// sent once all the brains that were part of initialization have tried
/// to send information.
/// </summary>
/// <param name="key">Batch Key.</param>
/// <param name="agents">Agent info.</param>
void PutObservations(string key, IEnumerable<Agent> agents);
/// <summary>
/// Gets the AgentActions based on the batching key.
/// </summary>
/// <param name="key">A key to identify which actions to get</param>

/// <summary>
/// Close the communicator gracefully on both sides of the communication.
/// </summary>
void Close();
}
/// <summary>
/// Contract for a consumer of batched agent observations (implemented by both the
/// communicator and the inference ModelRunner, per this file's ICommunicator declaration).
/// Extends IDisposable so owners can release the underlying resources.
/// </summary>
public interface IBatchedDecisionMaker : IDisposable
{
/// <summary>
/// Submits a batch of agents for decision making.
/// </summary>
/// <param name="key">Batch key identifying which batch the agents belong to.</param>
/// <param name="agents">The agents whose observations are being submitted.</param>
void PutObservations(string key, ICollection<Agent> agents);
}
}

314
UnitySDK/Assets/ML-Agents/Scripts/InferenceBrain/BarracudaModelParamLoader.cs


/// </summary>
public class BarracudaModelParamLoader
{
private enum ModelActionType
{
Unknown,

private const long k_ApiVersion = 2;
private readonly IWorker m_Engine;
private readonly Model m_Model;
private readonly BrainParameters m_BrainParameters;
private readonly List<string> m_FailedModelChecks = new List<string>();
/// Factory for the ModelParamLoader : Creates a ModelParamLoader and runs the checks
/// on it.
/// Generates the Tensor inputs that are expected to be present in the Model.
/// <param name="engine">
/// The Barracuda engine worker we get the parameters and the checks from
/// </param>
/// <param name="brainParameters">
/// The BrainParameters that are used verify the compatibility with the InferenceEngine
/// </param>
/// <returns></returns>
public static BarracudaModelParamLoader GetLoaderAndCheck(
IWorker engine, Model model, BrainParameters brainParameters)
{
var modelParamLoader = new BarracudaModelParamLoader(engine, model, brainParameters);
modelParamLoader.GenerateChecks();
return modelParamLoader;
}
private BarracudaModelParamLoader(
IWorker engine, Model model, BrainParameters brainParameters)
{
m_Engine = engine;
m_Model = model;
m_BrainParameters = brainParameters;
}
/// <summary>
/// Generates the Tensor inputs that are expected to be present in the Model.
/// </summary>
public IReadOnlyList<TensorProxy> GetInputTensors()
public static IReadOnlyList<TensorProxy> GetInputTensors(Model model)
if (m_Model == null)
if (model == null)
foreach (var input in m_Model.inputs)
foreach (var input in model.inputs)
{
tensors.Add(new TensorProxy
{

});
}
foreach (var mem in m_Model.memories)
foreach (var mem in model.memories)
//Debug.Log($"{mem.input}: {mem.shape} -> {BarracudaUtils.TensorShapeFromBarracuda(mem.shape).Length}");
tensors.Add(new TensorProxy
{
name = mem.input,

/// <summary>
/// Generates the Tensor outputs that are expected to be present in the Model.
/// </summary>
/// <param name="model">
/// The Barracuda engine model for loading static parameters
/// </param>
public string[] GetOutputNames()
public static string[] GetOutputNames(Model model)
if (m_Model == null)
if (model == null)
{
return names.ToArray();
}

var memory = GetIntScalar(TensorNames.MemorySize);
var memory = (int)model.GetTensorByName(TensorNames.MemorySize)[0];
foreach (var mem in m_Model.memories)
foreach (var mem in model.memories)
{
names.Add(mem.output);
}

}
/// <summary>
/// Queries the InferenceEngine for the value of a variable in the graph given its name.
/// Only works with int32 Tensors with zero dimensions containing a unique element.
/// If the node was not found or could not be retrieved, the value -1 will be returned.
/// Factory for the ModelParamLoader : Creates a ModelParamLoader and runs the checks
/// on it.
/// <param name="name">The name of the Tensor variable</param>
/// <returns>The value of the scalar variable in the model. (-1 if not found)</returns>
private int GetIntScalar(string name)
/// <param name="model">
/// The Barracuda engine model for loading static parameters
/// </param>
/// <param name="brainParameters">
/// The BrainParameters that are used verify the compatibility with the InferenceEngine
/// </param>
/// <returns>The list the error messages of the checks that failed</returns>
public static IEnumerable<string> CheckModel(Model model, BrainParameters brainParameters)
return (int)m_Model.GetTensorByName(name)[0];
}
/// <summary>
/// Retrieves an IEnumerable of string corresponding to the failed compatibility checks
/// between the InferenceEngine and the BrainParameters.
/// </summary>
public IEnumerable<string> GetChecks()
{
return m_FailedModelChecks;
}
/// <summary>
/// Generates the list of failed checks that failed when comparing the data from the Model
/// and from the BrainParameters
/// </summary>
private void GenerateChecks()
{
m_FailedModelChecks.Clear();
if (m_Engine == null)
List<string> failedModelChecks = new List<string>();
if (model == null)
m_FailedModelChecks.Add(
failedModelChecks.Add(
return;
return failedModelChecks;
var modelApiVersion = GetIntScalar(TensorNames.VersionNumber);
var memorySize = GetIntScalar(TensorNames.MemorySize);
var isContinuousInt = GetIntScalar(TensorNames.IsContinuousControl);
var modelApiVersion = (int)model.GetTensorByName(TensorNames.VersionNumber)[0];
var memorySize = (int)model.GetTensorByName(TensorNames.MemorySize)[0];
var isContinuousInt = (int)model.GetTensorByName(TensorNames.IsContinuousControl)[0];
var actionSize = GetIntScalar(TensorNames.ActionOutputShape);
var actionSize = (int)model.GetTensorByName(TensorNames.ActionOutputShape)[0];
m_FailedModelChecks.Add(
failedModelChecks.Add(
return;
return failedModelChecks;
m_FailedModelChecks.Add(
failedModelChecks.Add(
return;
return failedModelChecks;
CheckIntScalarPresenceHelper(new Dictionary<string, int>()
{
{TensorNames.MemorySize, memorySize},
{TensorNames.IsContinuousControl, isContinuousInt},
{TensorNames.ActionOutputShape, actionSize}
});
CheckInputTensorPresence(memorySize, isContinuous);
CheckOutputTensorPresence(memorySize);
CheckInputTensorShape();
CheckOutputTensorShape(isContinuous, actionSize);
failedModelChecks.AddRange(
CheckIntScalarPresenceHelper(new Dictionary<string, int>()
{
{TensorNames.MemorySize, memorySize},
{TensorNames.IsContinuousControl, isContinuousInt},
{TensorNames.ActionOutputShape, actionSize}
})
);
failedModelChecks.AddRange(
CheckInputTensorPresence(model, brainParameters, memorySize, isContinuous)
);
failedModelChecks.AddRange(
CheckOutputTensorPresence(model, memorySize))
;
failedModelChecks.AddRange(
CheckInputTensorShape(model, brainParameters)
);
failedModelChecks.AddRange(
CheckOutputTensorShape(model, brainParameters, isContinuous, actionSize)
);
return failedModelChecks;
}
/// <summary>

/// invalid value of -1.
/// </summary>
/// <param name="requiredScalarFields"> Mapping from node names to int values</param>
private void CheckIntScalarPresenceHelper(Dictionary<string, int> requiredScalarFields)
/// <returns>The list the error messages of the checks that failed</returns>
private static IEnumerable<string> CheckIntScalarPresenceHelper(
Dictionary<string, int> requiredScalarFields)
var failedModelChecks = new List<string>();
m_FailedModelChecks.Add($"Missing node in the model provided : {field.Key}");
failedModelChecks.Add($"Missing node in the model provided : {field.Key}");
return failedModelChecks;
}
/// <summary>

/// <param name="model">
/// The Barracuda engine model for loading static parameters
/// </param>
/// <param name="brainParameters">
/// The BrainParameters that are used verify the compatibility with the InferenceEngine
/// </param>
/// <param name="memory">
/// The memory size that the model is expecting.
/// </param>

/// <returns>
/// A IEnumerable of string corresponding to the failed input presence checks.
/// </returns>
private void CheckInputTensorPresence(int memory, ModelActionType isContinuous)
private static IEnumerable<string> CheckInputTensorPresence(
Model model,
BrainParameters brainParameters,
int memory,
ModelActionType isContinuous)
var tensorsNames = GetInputTensors().Select(x => x.name).ToList();
var failedModelChecks = new List<string>();
var tensorsNames = GetInputTensors(model).Select(x => x.name).ToList();
if ((m_BrainParameters.vectorObservationSize != 0) &&
if ((brainParameters.vectorObservationSize != 0) &&
m_FailedModelChecks.Add(
failedModelChecks.Add(
"The model does not contain a Vector Observation Placeholder Input. " +
"You must set the Vector Observation Space Size to 0.");
}

visObsIndex < m_BrainParameters.cameraResolutions.Length;
visObsIndex < brainParameters.cameraResolutions.Length;
m_FailedModelChecks.Add(
failedModelChecks.Add(
"The model does not contain a Visual Observation Placeholder Input " +
"for visual observation " + visObsIndex + ".");
}

if (!tensorsNames.Any(x => x.EndsWith("_h")) ||
!tensorsNames.Any(x => x.EndsWith("_c")))
{
m_FailedModelChecks.Add(
failedModelChecks.Add(
"The model does not contain a Recurrent Input Node but has memory_size.");
}
}

{
if (!tensorsNames.Contains(TensorNames.ActionMaskPlaceholder))
{
m_FailedModelChecks.Add(
failedModelChecks.Add(
return failedModelChecks;
}
/// <summary>

/// <param name="model">
/// The Barracuda engine model for loading static parameters
/// </param>
private void CheckOutputTensorPresence(int memory)
private static IEnumerable<string> CheckOutputTensorPresence(Model model, int memory)
var failedModelChecks = new List<string>();
if (!m_Model.outputs.Contains(TensorNames.ActionOutput))
if (!model.outputs.Contains(TensorNames.ActionOutput))
m_FailedModelChecks.Add("The model does not contain an Action Output Node.");
failedModelChecks.Add("The model does not contain an Action Output Node.");
var memOutputs = m_Model.memories.Select(x => x.output).ToList();
var memOutputs = model.memories.Select(x => x.output).ToList();
m_FailedModelChecks.Add(
failedModelChecks.Add(
return failedModelChecks;
}
/// <summary>

private void CheckInputTensorShape()
/// <param name="model">
/// The Barracuda engine model for loading static parameters
/// </param>
/// <param name="brainParameters">
/// The BrainParameters that are used verify the compatibility with the InferenceEngine
/// </param>
/// <returns>The list the error messages of the checks that failed</returns>
private static IEnumerable<string> CheckInputTensorShape(
Model model, BrainParameters brainParameters)
var failedModelChecks = new List<string>();
new Dictionary<string, Func<TensorProxy, string>>()
new Dictionary<string, Func<BrainParameters, TensorProxy, string>>()
{TensorNames.RandomNormalEpsilonPlaceholder, ((tensor) => null)},
{TensorNames.ActionMaskPlaceholder, ((tensor) => null)},
{TensorNames.SequenceLengthPlaceholder, ((tensor) => null)},
{TensorNames.RecurrentInPlaceholder, ((tensor) => null)},
{TensorNames.RandomNormalEpsilonPlaceholder, ((bp, tensor) => null)},
{TensorNames.ActionMaskPlaceholder, ((bp, tensor) => null)},
{TensorNames.SequenceLengthPlaceholder, ((bp, tensor) => null)},
{TensorNames.RecurrentInPlaceholder, ((bp, tensor) => null)},
foreach (var mem in m_Model.memories)
foreach (var mem in model.memories)
tensorTester[mem.input] = ((tensor) => null);
tensorTester[mem.input] = ((bp, tensor) => null);
for (var obsIndex = 0; obsIndex < m_BrainParameters.cameraResolutions.Length; obsIndex++)
for (var obsIndex = 0; obsIndex < brainParameters.cameraResolutions.Length; obsIndex++)
(tensor) => CheckVisualObsShape(tensor, index);
(bp, tensor) => CheckVisualObsShape(bp, tensor, index);
foreach (var tensor in GetInputTensors())
foreach (var tensor in GetInputTensors(model))
m_FailedModelChecks.Add(
failedModelChecks.Add(
var error = tester.Invoke(tensor);
var error = tester.Invoke(brainParameters, tensor);
m_FailedModelChecks.Add(error);
failedModelChecks.Add(error);
return failedModelChecks;
}
/// <summary>

/// <param name="brainParameters">
/// The BrainParameters that are used verify the compatibility with the InferenceEngine
/// </param>
private string CheckVectorObsShape(TensorProxy tensorProxy)
private static string CheckVectorObsShape(
BrainParameters brainParameters, TensorProxy tensorProxy)
var vecObsSizeBp = m_BrainParameters.vectorObservationSize;
var numStackedVector = m_BrainParameters.numStackedVectorObservations;
var vecObsSizeBp = brainParameters.vectorObservationSize;
var numStackedVector = brainParameters.numStackedVectorObservations;
var totalVecObsSizeT = tensorProxy.shape[tensorProxy.shape.Length - 1];
if (vecObsSizeBp * numStackedVector != totalVecObsSizeT)
{

/// Checks that the shape of the Previous Vector Action input placeholder is the same in the
/// model and in the Brain Parameters.
/// </summary>
/// <param name="brainParameters">
/// The BrainParameters that are used verify the compatibility with the InferenceEngine
/// </param>
private string CheckPreviousActionShape(TensorProxy tensorProxy)
private static string CheckPreviousActionShape(
BrainParameters brainParameters, TensorProxy tensorProxy)
var numberActionsBp = m_BrainParameters.vectorActionSize.Length;
var numberActionsBp = brainParameters.vectorActionSize.Length;
var numberActionsT = tensorProxy.shape[tensorProxy.shape.Length - 1];
if (numberActionsBp != numberActionsT)
{

/// Checks that the shape of the visual observation input placeholder is the same in the
/// model and in the Brain Parameters.
/// </summary>
/// <param name="brainParameters">
/// The BrainParameters that are used verify the compatibility with the InferenceEngine
/// </param>
/// <param name="tensorProxy">The tensor that is expected by the model</param>
/// <param name="visObsIndex">The index of the visual observation.</param>
/// <returns>

private string CheckVisualObsShape(TensorProxy tensorProxy, int visObsIndex)
private static string CheckVisualObsShape(
BrainParameters brainParameters, TensorProxy tensorProxy, int visObsIndex)
var resolutionBp = m_BrainParameters.cameraResolutions[visObsIndex];
var resolutionBp = brainParameters.cameraResolutions[visObsIndex];
var widthBp = resolutionBp.width;
var heightBp = resolutionBp.height;
var pixelBp = resolutionBp.blackAndWhite ? 1 : 3;

/// Generates failed checks that correspond to output shapes incompatibilities between
/// the model and the BrainParameters.
/// </summary>
/// <param name="model">
/// The Barracuda engine model for loading static parameters
/// </param>
/// <param name="brainParameters">
/// The BrainParameters that are used verify the compatibility with the InferenceEngine
/// </param>
/// <param name="isContinuous">
/// Whether the model is expecting continuous or discrete control.
/// </param>

/// A IEnumerable of string corresponding to the incompatible shapes between model
/// and BrainParameters.
/// </returns>
private void CheckOutputTensorShape(ModelActionType isContinuous, int modelActionSize)
private static IEnumerable<string> CheckOutputTensorShape(
Model model,
BrainParameters brainParameters,
ModelActionType isContinuous,
int modelActionSize)
var failedModelChecks = new List<string>();
m_FailedModelChecks.Add("Cannot infer type of Control from the provided model.");
return;
failedModelChecks.Add("Cannot infer type of Control from the provided model.");
return failedModelChecks;
m_BrainParameters.vectorActionSpaceType != SpaceType.Continuous)
brainParameters.vectorActionSpaceType != SpaceType.Continuous)
m_FailedModelChecks.Add(
failedModelChecks.Add(
return;
return failedModelChecks;
m_BrainParameters.vectorActionSpaceType != SpaceType.Discrete)
brainParameters.vectorActionSpaceType != SpaceType.Discrete)
m_FailedModelChecks.Add(
failedModelChecks.Add(
return;
return failedModelChecks;
var tensorTester = new Dictionary<string, Func<TensorShape, int, string>>();
if (m_BrainParameters.vectorActionSpaceType == SpaceType.Continuous)
var tensorTester = new Dictionary<string, Func<BrainParameters, TensorShape, int, string>>();
if (brainParameters.vectorActionSpaceType == SpaceType.Continuous)
{
tensorTester[TensorNames.ActionOutput] = CheckContinuousActionOutputShape;
}

}
// If the model expects an output but it is not in this list
foreach (var name in m_Model.outputs)
foreach (var name in model.outputs)
var error = tester.Invoke(m_Model.GetShapeByName(name), modelActionSize);
var error = tester.Invoke(brainParameters, model.GetShapeByName(name), modelActionSize);
m_FailedModelChecks.Add(error);
failedModelChecks.Add(error);
return failedModelChecks;
}
/// <summary>

/// <param name="brainParameters">
/// The BrainParameters that are used verify the compatibility with the InferenceEngine
/// </param>
/// <param name="shape"> The tensor shape that is expected by the model</param>
/// <param name="modelActionSize">
/// The size of the action output that is expected by the model.

/// check failed. If the check passed, returns null.
/// </returns>
private string CheckDiscreteActionOutputShape(TensorShape shape, int modelActionSize)
private static string CheckDiscreteActionOutputShape(
BrainParameters brainParameters, TensorShape shape, int modelActionSize)
var bpActionSize = m_BrainParameters.vectorActionSize.Sum();
var bpActionSize = brainParameters.vectorActionSize.Sum();
if (modelActionSize != bpActionSize)
{
return "Action Size of the model does not match. The BrainParameters expect " +

/// Checks that the shape of the continuous action output is the same in the
/// model and in the Brain Parameters.
/// </summary>
/// <param name="brainParameters">
/// The BrainParameters that are used verify the compatibility with the InferenceEngine
/// </param>
/// <param name="shape"> The tensor shape that is expected by the model</param>
/// <param name="modelActionSize">
/// The size of the action output that is expected by the model.

private string CheckContinuousActionOutputShape(TensorShape shape, int modelActionSize)
private static string CheckContinuousActionOutputShape(
BrainParameters brainParameters, TensorShape shape, int modelActionSize)
var bpActionSize = m_BrainParameters.vectorActionSize[0];
var bpActionSize = brainParameters.vectorActionSize[0];
if (modelActionSize != bpActionSize)
{
return "Action Size of the model does not match. The BrainParameters expect " +

153
UnitySDK/Assets/ML-Agents/Scripts/LearningBrain.cs


using System;
using System.Collections.Generic;
using System.Linq;
using UnityEngine.Profiling;
namespace MLAgents
{

[CreateAssetMenu(fileName = "NewLearningBrain", menuName = "ML-Agents/Learning Brain")]
public class LearningBrain : Brain
{
private ITensorAllocator m_TensorAllocator;
private TensorGenerator m_TensorGenerator;
private TensorApplier m_TensorApplier;
private Model m_BarracudaModel;
private IWorker m_Engine;
private bool m_Verbose = false;
private BarracudaModelParamLoader m_ModelParamLoader;
private string[] m_OutputNames;
private IReadOnlyList<TensorProxy> m_InferenceInputs;
private IReadOnlyList<TensorProxy> m_InferenceOutputs;
protected ICommunicator m_Communicator;
protected IBatchedDecisionMaker m_BatchedDecisionMaker;
/// <summary>
/// Sets the ICommunicator of the Brain. The brain will call the communicator at every step and give

private void SetCommunicator(ICommunicator communicator)
{
m_Communicator = communicator;
m_Communicator?.SubscribeBrain(name, brainParameters);
m_BatchedDecisionMaker = communicator;
communicator?.SubscribeBrain(name, brainParameters);
LazyInitialize();
}

{
ReloadModel();
var comm = FindObjectOfType<Academy>()?.Communicator;
var aca = FindObjectOfType<Academy>();
var comm = aca?.Communicator;
}
/// <summary>
/// Initializes the Brain with the Model that it will use when selecting actions for
/// the agents
/// </summary>
/// <param name="seed"> The seed that will be used to initialize the RandomNormal
/// and Multinomial obsjects used when running inference.</param>
/// <exception cref="UnityAgentsException">Throws an error when the model is null
/// </exception>
public void ReloadModel(int seed = 0)
{
if (m_TensorAllocator == null)
m_TensorAllocator = new TensorCachingAllocator();
if (model != null)
if (aca == null || comm != null)
#if BARRACUDA_VERBOSE
_verbose = true;
#endif
D.logEnabled = m_Verbose;
// Cleanup previous instance
if (m_Engine != null)
m_Engine.Dispose();
m_BarracudaModel = ModelLoader.Load(model.Value);
var executionDevice = inferenceDevice == InferenceDevice.GPU
? BarracudaWorkerFactory.Type.ComputePrecompiled
: BarracudaWorkerFactory.Type.CSharp;
m_Engine = BarracudaWorkerFactory.CreateWorker(executionDevice, m_BarracudaModel, m_Verbose);
}
else
{
m_BarracudaModel = null;
m_Engine = null;
return;
m_ModelParamLoader = BarracudaModelParamLoader.GetLoaderAndCheck(m_Engine, m_BarracudaModel, brainParameters);
m_InferenceInputs = m_ModelParamLoader.GetInputTensors();
m_OutputNames = m_ModelParamLoader.GetOutputNames();
m_TensorGenerator = new TensorGenerator(brainParameters, seed, m_TensorAllocator, m_BarracudaModel);
m_TensorApplier = new TensorApplier(brainParameters, seed, m_TensorAllocator, m_BarracudaModel);
}
/// <summary>
/// Return a list of failed checks corresponding to the failed compatibility checks
/// between the Model and the BrainParameters. Note : This does not reload the model.
/// If changes have been made to the BrainParameters or the Model, the model must be
/// reloaded using GiveModel before trying to get the compatibility checks.
/// </summary>
/// <returns> The list of the failed compatibility checks between the Model and the
/// Brain Parameters</returns>
public IEnumerable<string> GetModelFailedChecks()
{
return (m_ModelParamLoader != null) ? m_ModelParamLoader.GetChecks() : new List<string>();
var modelRunner = aca.GetOrCreateModelRunner(model, brainParameters, inferenceDevice);
m_BatchedDecisionMaker = modelRunner;
if (m_Communicator != null)
{
m_Communicator?.PutObservations(name, m_Agents);
return;
}
var currentBatchSize = m_Agents.Count;
if (currentBatchSize == 0)
{
return;
}
Profiler.BeginSample("LearningBrain.DecideAction");
if (m_Engine == null)
if (m_BatchedDecisionMaker != null)
Debug.LogError($"No model was present for the Brain {name}.");
m_BatchedDecisionMaker?.PutObservations(name, m_Agents);
Profiler.BeginSample($"MLAgents.{name}.GenerateTensors");
// Prepare the input tensors to be feed into the engine
m_TensorGenerator.GenerateTensors(m_InferenceInputs, currentBatchSize, m_Agents);
Profiler.EndSample();
Profiler.BeginSample($"MLAgents.{name}.PrepareBarracudaInputs");
var inputs = PrepareBarracudaInputs(m_InferenceInputs);
Profiler.EndSample();
// Execute the Model
Profiler.BeginSample($"MLAgents.{name}.ExecuteGraph");
m_Engine.Execute(inputs);
Profiler.EndSample();
Profiler.BeginSample($"MLAgents.{name}.FetchBarracudaOutputs");
m_InferenceOutputs = FetchBarracudaOutputs(m_OutputNames);
Profiler.EndSample();
Profiler.BeginSample($"MLAgents.{name}.ApplyTensors");
// Update the outputs
m_TensorApplier.ApplyTensors(m_InferenceOutputs, m_Agents);
Profiler.EndSample();
Profiler.EndSample();
}
/// <summary>
/// Builds the name-to-Tensor dictionary that is fed to the Barracuda engine.
/// Fix: the original iterated the field m_InferenceInputs and silently ignored
/// the <paramref name="infInputs"/> parameter; behavior at the existing call site
/// is unchanged (it passes m_InferenceInputs), but the method now honors its input.
/// </summary>
/// <param name="infInputs">The tensor proxies to expose as engine inputs.</param>
/// <returns>Dictionary mapping each proxy's name to its Tensor data.</returns>
protected Dictionary<string, Tensor> PrepareBarracudaInputs(IEnumerable<TensorProxy> infInputs)
{
var inputs = new Dictionary<string, Tensor>();
foreach (var inp in infInputs)
{
inputs[inp.name] = inp.data;
}
return inputs;
}
protected List<TensorProxy> FetchBarracudaOutputs(string[] names)
{
var outputs = new List<TensorProxy>();
foreach (var n in names)
{
var output = m_Engine.Peek(n);
outputs.Add(TensorUtils.TensorProxyFromBarracuda(output, n));
}
return outputs;
m_Engine?.Dispose();
m_TensorAllocator?.Reset(false);
m_BatchedDecisionMaker?.Dispose();
}
}
}

146
UnitySDK/Assets/ML-Agents/Scripts/InferenceBrain/ModelRunner.cs


using System.Collections.Generic;
using UnityEngine;
using Barracuda;
using UnityEngine.Profiling;
namespace MLAgents.InferenceBrain
{
/// <summary>
/// Runs inference for a batch of agents sharing the same NNModel and InferenceDevice.
/// Implements IBatchedDecisionMaker so it can be used interchangeably with the communicator.
/// </summary>
public class ModelRunner : IBatchedDecisionMaker
{
private ITensorAllocator m_TensorAllocator;
private TensorGenerator m_TensorGenerator;
private TensorApplier m_TensorApplier;
private NNModel m_Model;
private InferenceDevice m_InferenceDevice;
private Model m_BarracudaModel;
private IWorker m_Engine;
private bool m_Verbose = false;
private string[] m_OutputNames;
private IReadOnlyList<TensorProxy> m_InferenceInputs;
private IReadOnlyList<TensorProxy> m_InferenceOutputs;
/// <summary>
/// Initializes the Brain with the Model that it will use when selecting actions for
/// the agents
/// </summary>
/// <param name="model"> The Barracuda model to load </param>
/// <param name="brainParameters"> The parameters of the Brain used to generate the
/// placeholder tensors </param>
/// <param name="inferenceDevice"> Inference execution device. CPU is the fastest
/// option for most of ML Agents models. </param>
/// <param name="seed"> The seed that will be used to initialize the RandomNormal
/// and Multinomial objects used when running inference.</param>
/// <exception cref="UnityAgentsException">Throws an error when the model is null
/// </exception>
public ModelRunner(
NNModel model,
BrainParameters brainParameters,
InferenceDevice inferenceDevice = InferenceDevice.CPU,
int seed = 0)
{
m_Model = model;
m_InferenceDevice = inferenceDevice;
m_TensorAllocator = new TensorCachingAllocator();
if (model != null)
{
#if BARRACUDA_VERBOSE
m_Verbose = true;
#endif
D.logEnabled = m_Verbose;
m_BarracudaModel = ModelLoader.Load(model.Value);
// GPU maps to the precompiled compute worker; everything else runs on the C# backend.
var executionDevice = inferenceDevice == InferenceDevice.GPU
? BarracudaWorkerFactory.Type.ComputePrecompiled
: BarracudaWorkerFactory.Type.CSharp;
m_Engine = BarracudaWorkerFactory.CreateWorker(executionDevice, m_BarracudaModel, m_Verbose);
}
else
{
m_BarracudaModel = null;
m_Engine = null;
}
// BarracudaModelParamLoader handles a null model by reporting defaults.
m_InferenceInputs = BarracudaModelParamLoader.GetInputTensors(m_BarracudaModel);
m_OutputNames = BarracudaModelParamLoader.GetOutputNames(m_BarracudaModel);
m_TensorGenerator = new TensorGenerator(brainParameters, seed, m_TensorAllocator, m_BarracudaModel);
m_TensorApplier = new TensorApplier(brainParameters, seed, m_TensorAllocator, m_BarracudaModel);
}
/// <summary>
/// Builds the name-to-Tensor dictionary fed to the Barracuda engine.
/// Fix: the original iterated the field m_InferenceInputs and ignored the
/// <paramref name="infInputs"/> parameter; the only call site passes
/// m_InferenceInputs, so behavior is unchanged.
/// </summary>
/// <param name="infInputs">The tensor proxies to expose as engine inputs.</param>
/// <returns>Dictionary mapping each proxy's name to its Tensor data.</returns>
private Dictionary<string, Tensor> PrepareBarracudaInputs(IEnumerable<TensorProxy> infInputs)
{
var inputs = new Dictionary<string, Tensor>();
foreach (var inp in infInputs)
{
inputs[inp.name] = inp.data;
}
return inputs;
}
/// <summary>
/// Releases the Barracuda worker and resets the tensor allocator.
/// </summary>
public void Dispose()
{
m_Engine?.Dispose();
m_TensorAllocator?.Reset(false);
}
/// <summary>
/// Peeks each named output from the engine and wraps it in a TensorProxy.
/// </summary>
/// <param name="names">The model output names to fetch.</param>
/// <returns>The fetched outputs as TensorProxy instances.</returns>
private List<TensorProxy> FetchBarracudaOutputs(string[] names)
{
var outputs = new List<TensorProxy>();
foreach (var n in names)
{
var output = m_Engine.Peek(n);
outputs.Add(TensorUtils.TensorProxyFromBarracuda(output, n));
}
return outputs;
}
/// <summary>
/// Generates input tensors for the batch, executes the model, and applies the
/// resulting outputs back onto the agents. No-op for an empty batch; logs an
/// error when no engine is available.
/// </summary>
/// <param name="key">Batch key (unused here; required by IBatchedDecisionMaker).</param>
/// <param name="agents">The agents to run inference for.</param>
public void PutObservations(string key, ICollection<Agent> agents)
{
var currentBatchSize = agents.Count;
if (currentBatchSize == 0)
{
return;
}
Profiler.BeginSample("LearningBrain.DecideAction");
if (m_Engine == null)
{
Debug.LogError($"No model was present for the Brain {m_Model.name}.");
return;
}
Profiler.BeginSample($"MLAgents.{m_Model.name}.GenerateTensors");
// Prepare the input tensors to be feed into the engine
m_TensorGenerator.GenerateTensors(m_InferenceInputs, currentBatchSize, agents);
Profiler.EndSample();
Profiler.BeginSample($"MLAgents.{m_Model.name}.PrepareBarracudaInputs");
var inputs = PrepareBarracudaInputs(m_InferenceInputs);
Profiler.EndSample();
// Execute the Model
Profiler.BeginSample($"MLAgents.{m_Model.name}.ExecuteGraph");
m_Engine.Execute(inputs);
Profiler.EndSample();
Profiler.BeginSample($"MLAgents.{m_Model.name}.FetchBarracudaOutputs");
m_InferenceOutputs = FetchBarracudaOutputs(m_OutputNames);
Profiler.EndSample();
Profiler.BeginSample($"MLAgents.{m_Model.name}.ApplyTensors");
// Update the outputs
m_TensorApplier.ApplyTensors(m_InferenceOutputs, agents);
Profiler.EndSample();
Profiler.EndSample();
}
/// <summary>
/// Returns true when this runner was built for the given model/device pair,
/// allowing the Academy to reuse it for batching.
/// </summary>
/// <param name="other">The model to compare against.</param>
/// <param name="otherInferenceDevice">The inference device to compare against.</param>
/// <returns>Whether both the model and device match.</returns>
public bool HasModel(NNModel other, InferenceDevice otherInferenceDevice)
{
return m_Model == other && m_InferenceDevice == otherInferenceDevice;
}
}
}

11
UnitySDK/Assets/ML-Agents/Scripts/InferenceBrain/ModelRunner.cs.meta


fileFormatVersion: 2
guid: 8f3f4b630ca3f4a4ba74922ec8249046
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:
正在加载...
取消
保存