using System;
using UnityEngine;
using System.Linq;
namespace MLAgents
{
/// <summary>
/// The type of an action space: Discrete (a fixed set of integer choices)
/// or Continuous (a vector of floats).
/// Note: the underlying values (Discrete = 0, Continuous = 1) must stay in
/// this order — BrainParameters casts the protobuf enum value directly.
/// </summary>
public enum SpaceType
{
Discrete,
Continuous
};
///
/// The resolution of a camera used by an agent.
/// The width defines the number of pixels on the horizontal axis.
/// The height defines the number of pixels on the verical axis.
/// blackAndWhite defines whether or not the image is grayscale.
///
[Serializable]
/// <summary>
/// The resolution of a camera used by an agent.
/// The width defines the number of pixels on the horizontal axis.
/// The height defines the number of pixels on the vertical axis.
/// blackAndWhite defines whether or not the image is grayscale.
/// </summary>
[Serializable]
public struct Resolution
{
/// <summary>The width of the observation in pixels.</summary>
public int width;
/// <summary>The height of the observation in pixels.</summary>
public int height;
/// <summary>
/// If true, the image will be in black and white (grayscale).
/// If false, it will be in colors RGB.
/// </summary>
public bool blackAndWhite;
}
///
/// Holds information about the Brain. It defines what are the inputs and outputs of the
/// decision process.
///
[Serializable]
[Serializable]
public class BrainParameters
{
    /// <summary>
    /// If continuous: the length of the float vector that represents the state.
    /// If discrete: the number of possible values the state can take.
    /// </summary>
    public int vectorObservationSize = 1;

    /// <summary>
    /// Number of consecutive vector observations that are stacked together
    /// and fed to the decision process (clamped to 1-50 in the inspector).
    /// </summary>
    [Range(1, 50)] public int numStackedVectorObservations = 1;

    /// <summary>
    /// If continuous: a single-element array holding the length of the float
    /// vector that represents the action.
    /// If discrete: one entry per action branch, giving the number of possible
    /// values that branch can take.
    /// </summary>
    public int[] vectorActionSize = new[] {1};

    /// <summary>The list of observation resolutions for the brain.</summary>
    public Resolution[] cameraResolutions;

    /// <summary>The list of strings describing what the actions correspond to.</summary>
    public string[] vectorActionDescriptions;

    /// <summary>Defines if the action is discrete or continuous.</summary>
    public SpaceType vectorActionSpaceType = SpaceType.Discrete;

    /// <summary>
    /// Default constructor. Required so Unity serialization (and <see cref="Clone"/>)
    /// can create instances; array fields are left null until populated.
    /// </summary>
    public BrainParameters()
    {
    }

    /// <summary>
    /// Converts a Resolution protobuf array to a C# <see cref="Resolution"/> array.
    /// </summary>
    /// <param name="resolutionProtos">Protobuf resolutions to convert.</param>
    /// <returns>A native Resolution array of the same length.</returns>
    private static Resolution[] ResolutionProtoToNative(
        CommunicatorObjects.ResolutionProto[] resolutionProtos)
    {
        var localCameraResolutions = new Resolution[resolutionProtos.Length];
        for (var i = 0; i < resolutionProtos.Length; i++)
        {
            localCameraResolutions[i] = new Resolution
            {
                height = resolutionProtos[i].Height,
                width = resolutionProtos[i].Width,
                blackAndWhite = resolutionProtos[i].GrayScale
            };
        }
        return localCameraResolutions;
    }

    /// <summary>
    /// Constructs a BrainParameters instance from the corresponding protobuf message.
    /// </summary>
    /// <param name="brainParametersProto">The protobuf message to copy values from.</param>
    public BrainParameters(CommunicatorObjects.BrainParametersProto brainParametersProto)
    {
        vectorObservationSize = brainParametersProto.VectorObservationSize;
        cameraResolutions = ResolutionProtoToNative(
            brainParametersProto.CameraResolutions.ToArray()
        );
        numStackedVectorObservations = brainParametersProto.NumStackedVectorObservations;
        vectorActionSize = brainParametersProto.VectorActionSize.ToArray();
        vectorActionDescriptions = brainParametersProto.VectorActionDescriptions.ToArray();
        vectorActionSpaceType = (SpaceType)brainParametersProto.VectorActionSpaceType;
    }

    /// <summary>
    /// Deep clones the BrainParameters object.
    /// </summary>
    /// <returns>A new BrainParameters object with the same values as the original.</returns>
    public BrainParameters Clone()
    {
        // The array fields may be null when this instance was created with the
        // default constructor outside of Unity serialization; the null-conditional
        // Clone() calls avoid a NullReferenceException in that case (a null field
        // simply stays null in the copy). Array.Clone() is a shallow copy, which
        // is a deep copy here because the element types (int, Resolution, string)
        // are value types or immutable.
        return new BrainParameters()
        {
            vectorObservationSize = vectorObservationSize,
            numStackedVectorObservations = numStackedVectorObservations,
            vectorActionSize = (int[])vectorActionSize?.Clone(),
            cameraResolutions = (Resolution[])cameraResolutions?.Clone(),
            vectorActionDescriptions = (string[])vectorActionDescriptions?.Clone(),
            vectorActionSpaceType = vectorActionSpaceType
        };
    }
}
}