using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using Unity.Collections;
using Unity.Collections.LowLevel.Unsafe;
using Unity.Profiling;
using Unity.Simulation;
using UnityEngine;
using UnityEngine.Experimental.Rendering;
using UnityEngine.Profiling;
using UnityEngine.Rendering;
using UnityEngine.UI;
#if HDRP_PRESENT
using UnityEngine.Rendering.HighDefinition;
#endif
#if URP_PRESENT
using UnityEngine.Rendering.Universal;
#endif
namespace UnityEngine.Perception.GroundTruth
{
/// <summary>
/// Captures ground truth from the associated Camera.
/// </summary>
[RequireComponent(typeof(Camera))]
public partial class PerceptionCamera : MonoBehaviour
{
//TODO: Remove the Guid path when we have proper dataset merging in USim/Thea
internal static string RgbDirectory { get; } = $"RGB{Guid.NewGuid()}";
static string s_RgbFilePrefix = "rgb_";
/// <summary>
/// A human-readable description of the camera.
/// </summary>
public string description;
/// <summary>
/// The period in seconds at which the Camera should render.
/// </summary>
public float period = .0166f;
/// <summary>
/// The start time in seconds of the first frame in the simulation.
/// </summary>
public float startTime;
/// <summary>
/// Whether camera output should be captured to disk.
/// </summary>
public bool captureRgbImages = true;
/// <summary>
/// Event invoked after the camera finishes rendering during a frame.
/// </summary>
[SerializeReference]
List<CameraLabeler> m_Labelers = new List<CameraLabeler>();
Dictionary<string, object> m_PersistentSensorData = new Dictionary<string, object>();
bool m_CapturedLastFrame;
Ego m_EgoMarker;
//only used to confirm that GroundTruthRendererFeature is present in URP
bool m_GroundTruthRendererFeatureRun;
static PerceptionCamera s_VisualizedPerceptionCamera;
static GameObject s_VisualizationCamera;
static GameObject s_VisualizationCanvas;
/// <summary>
/// Whether labeler visualization is blocked for this camera. This is only set to true when
/// there is more than one PerceptionCamera in the scene and this one is not the camera
/// being visualized.
/// </summary>
bool m_VisualizationBlocked = false;
[SerializeField]
bool m_VisualizationEnabled = true;
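/// <summary>
/// Whether labeler visualizations should be drawn for this camera. The value always reads as
/// false, and assignments are ignored, while another PerceptionCamera in the scene is being
/// visualized.
/// </summary>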
public bool visualizationEnabled
{
get
{
return !m_VisualizationBlocked && m_VisualizationEnabled;
}
set
{
if (m_VisualizationBlocked)
return;
m_VisualizationEnabled = value;
}
}
/// <summary>
/// The <see cref="SensorHandle"/> associated with this camera. Use this to report additional annotations and metrics at runtime.
/// </summary>
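/// <example>
/// <code>
/// // Minimal sketch, not the definitive API: assumes a PerceptionCamera reference named
/// // perceptionCamera and that this reporting pattern matches your package version.
/// // Verify the DatasetCapture/SensorHandle signatures before using.
/// var metricDefinition = DatasetCapture.RegisterMetricDefinition(
///     "example_metric", "An illustrative per-frame metric", Guid.NewGuid());
/// perceptionCamera.SensorHandle.ReportMetric(metricDefinition, new[] { "example-value" });
/// </code>
/// </example>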
public SensorHandle SensorHandle { get; private set; }
static ProfilerMarker s_WriteFrame = new ProfilerMarker("Write Frame (PerceptionCamera)");
static ProfilerMarker s_EncodeAndSave = new ProfilerMarker("Encode and save (PerceptionCamera)");
#if URP_PRESENT
internal List<ScriptableRenderPass> passes = new List<ScriptableRenderPass>();
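/// <summary>
/// Registers a ScriptableRenderPass to be enqueued for this camera when rendering with the
/// Universal Render Pipeline.
/// </summary>
/// <param name="pass">The render pass to add.</param>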
public void AddScriptableRenderPass(ScriptableRenderPass pass)
{
passes.Add(pass);
}
#endif
/// <summary>
/// Add a data object which will be added to the dataset with each capture. Overrides existing sensor data associated with the given key.
/// </summary>
/// <param name="key">The key to associate with the data.</param>
/// <param name="data">An object containing the data. Will be serialized into json.</param>
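/// <example>
/// <code>
/// // Minimal usage sketch (assumes a PerceptionCamera reference named perceptionCamera;
/// // the "weather" key and its value are illustrative):
/// perceptionCamera.SetPersistentSensorData("weather", "rain");
/// </code>
/// </example>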
public void SetPersistentSensorData(string key, object data)
{
m_PersistentSensorData[key] = data;
}
/// <summary>
/// Removes a persistent sensor data object.
/// </summary>
/// <param name="key">The key of the object to remove.</param>
/// <returns>True if a data object was removed. False if it was not set.</returns>
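/// <example>
/// <code>
/// // Continuing the sketch above: stop attaching the "weather" entry to future captures.
/// bool removed = perceptionCamera.RemovePersistentSensorData("weather");
/// </code>
/// </example>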
public bool RemovePersistentSensorData(string key)
{
return m_PersistentSensorData.Remove(key);
}
void Awake()
{
m_EgoMarker = GetComponentInParent<Ego>();
var ego = m_EgoMarker == null ? DatasetCapture.RegisterEgo("") : m_EgoMarker.EgoHandle;
SensorHandle = DatasetCapture.RegisterSensor(ego, "camera", description, period, startTime);
SetupInstanceSegmentation();
var cam = GetComponent<Camera>();
SetupVisualizationCamera(cam);
DatasetCapture.SimulationEnding += OnSimulationEnding;
}
void OnEnable()
{
RenderPipelineManager.beginCameraRendering += OnBeginCameraRendering;
RenderPipelineManager.endCameraRendering += CheckForRendererFeature;
}
void Start()
{
// Rendering is driven by the capture schedule, so the camera starts disabled.
var cam = GetComponent<Camera>();
cam.enabled = false;
}
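// Creates the camera and UI canvas used to display labeler visualizations for this
// PerceptionCamera. Returns false when another PerceptionCamera is already being visualized.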
bool SetupVisualizationCamera(Camera cam)
{
if (s_VisualizedPerceptionCamera != null)
{
Debug.LogWarning($"Currently only one PerceptionCamera may be visualized at a time. Disabling visualization on {gameObject.name}.");
m_VisualizationBlocked = true;
m_VisualizationEnabled = false;
return false;
}
s_VisualizedPerceptionCamera = this;
// set up to render to a render texture instead of the screen
var visualizationRenderTexture = new RenderTexture(new RenderTextureDescriptor(cam.pixelWidth, cam.pixelHeight, UnityEngine.Experimental.Rendering.GraphicsFormat.R8G8B8A8_UNorm, 8));
visualizationRenderTexture.name = cam.name + "_visualization_texture";
cam.targetTexture = visualizationRenderTexture;
s_VisualizationCamera = new GameObject(cam.name + "_VisualizationCamera");
var visualizationCameraComponent = s_VisualizationCamera.AddComponent<Camera>();
int layerMask = 1 << LayerMask.NameToLayer("UI");
visualizationCameraComponent.cullingMask = layerMask;
s_VisualizationCanvas = new GameObject(cam.name + "_VisualizationCanvas");
var canvas = s_VisualizationCanvas.AddComponent