using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using Unity.Collections;
using Unity.Collections.LowLevel.Unsafe;
using Unity.Profiling;
using Unity.Simulation;
using UnityEngine;
using UnityEngine.Experimental.Rendering;
using UnityEngine.Profiling;
using UnityEngine.Rendering;
#if HDRP_PRESENT
using UnityEngine.Rendering.HighDefinition;
#endif
#if URP_PRESENT
using UnityEngine.Rendering.Universal;
#endif
namespace UnityEngine.Perception.GroundTruth
{
/// <summary>
/// Captures ground truth from the associated Camera.
/// </summary>
[RequireComponent(typeof(Camera))]
public partial class PerceptionCamera : MonoBehaviour
{
//TODO: Remove the Guid path when we have proper dataset merging in USim/Thea
internal static string RgbDirectory { get; } = $"RGB{Guid.NewGuid()}";
static string s_RgbFilePrefix = "rgb_";
/// <summary>
/// A human-readable description of the camera.
/// </summary>
public string description;
/// <summary>
/// The period in seconds at which the Camera should render and capture.
/// </summary>
public float period = .0166f;
/// <summary>
/// The start time in seconds of the first frame in the simulation.
/// </summary>
public float startTime;
/// <summary>
/// Whether camera output should be captured to disk.
/// </summary>
public bool captureRgbImages = true;
[SerializeReference]
List<CameraLabeler> m_Labelers = new List<CameraLabeler>();
Dictionary<string, object> m_PersistentSensorData = new Dictionary<string, object>();
#if URP_PRESENT
internal List<ScriptableRenderPass> passes = new List<ScriptableRenderPass>();
public void AddScriptableRenderPass(ScriptableRenderPass pass)
{
passes.Add(pass);
}
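// Illustrative usage (URP only; MyLabelerPass is hypothetical): a labeler can queue a custom pass
// once during setup so the GroundTruthRendererFeature can add it to the renderer for this camera.
//   perceptionCamera.AddScriptableRenderPass(new MyLabelerPass());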
#endif
bool m_CapturedLastFrame;
Ego m_EgoMarker;
//only used to confirm that GroundTruthRendererFeature is present in URP
bool m_GroundTruthRendererFeatureRun;
/// <summary>
/// The <see cref="SensorHandle"/> associated with this camera. Use this to report additional annotations and metrics at runtime.
/// </summary>
public SensorHandle SensorHandle { get; private set; }
static ProfilerMarker s_WriteFrame = new ProfilerMarker("Write Frame (PerceptionCamera)");
static ProfilerMarker s_FlipY = new ProfilerMarker("Flip Y (PerceptionCamera)");
static ProfilerMarker s_EncodeAndSave = new ProfilerMarker("Encode and save (PerceptionCamera)");
/// <summary>
/// Add a data object which will be added to the dataset with each capture. Overrides existing sensor data associated with the given key.
/// </summary>
/// <param name="key">The key to associate with the data.</param>
/// <param name="data">An object containing the data. Will be serialized into json.</param>
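/// <example>
/// A minimal usage sketch (the "weather" key and its value are illustrative, not part of the API):
/// <code>
/// perceptionCamera.SetPersistentSensorData("weather", "rain");
/// </code>
/// </example>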
public void SetPersistentSensorData(string key, object data)
{
m_PersistentSensorData[key] = data;
}
/// <summary>
/// Removes a persistent sensor data object.
/// </summary>
/// <param name="key">The key of the object to remove.</param>
/// <returns>True if a data object was removed. False if it was not set.</returns>
public bool RemovePersistentSensorData(string key)
{
return m_PersistentSensorData.Remove(key);
}
// Awake is called when the script instance is being loaded
void Awake()
{
m_EgoMarker = this.GetComponentInParent<Ego>();
var ego = m_EgoMarker == null ? DatasetCapture.RegisterEgo("") : m_EgoMarker.EgoHandle;
SensorHandle = DatasetCapture.RegisterSensor(ego, "camera", description, period, startTime);
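// Registering the sensor schedules captures every 'period' seconds starting at 'startTime';
// SensorHandle.ShouldCaptureThisFrame reflects that schedule in Update() and OnBeginCameraRendering().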
SetupInstanceSegmentation();
RenderPipelineManager.beginCameraRendering += OnBeginCameraRendering;
RenderPipelineManager.endCameraRendering += CheckForRendererFeature;
DatasetCapture.SimulationEnding += OnSimulationEnding;
}
void CheckForRendererFeature(ScriptableRenderContext context, Camera camera)
{
if (camera == GetComponent<Camera>())
{
#if URP_PRESENT
if (!m_GroundTruthRendererFeatureRun)
{
Debug.LogError("GroundTruthRendererFeature must be present on the ScriptableRenderer associated with the camera. The ScriptableRenderer can be accessed through Edit -> Project Settings... -> Graphics -> Scriptable Render Pipeline Settings -> Renderer List.");
enabled = false;
}
#endif
RenderPipelineManager.endCameraRendering -= CheckForRendererFeature;
}
}
// Update is called once per frame
void Update()
{
if (!SensorHandle.IsValid)
return;
var cam = GetComponent<Camera>();
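// Only enable the camera on frames this sensor is scheduled to capture.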
cam.enabled = SensorHandle.ShouldCaptureThisFrame;
foreach (var labeler in m_Labelers)
{
if (!labeler.enabled)
continue;
if (!labeler.isInitialized)
labeler.Init(this);
labeler.InternalOnUpdate();
}
}
void OnValidate()
{
if (m_Labelers == null)
m_Labelers = new List<CameraLabeler>();
}
void CaptureRgbData(Camera cam)
{
if (!captureRgbImages)
return;
Profiler.BeginSample("CaptureDataFromLastFrame");
var captureFilename = Path.Combine(Manager.Instance.GetDirectoryFor(RgbDirectory), $"{s_RgbFilePrefix}{Time.frameCount}.png");
var dxRootPath = Path.Combine(RgbDirectory, $"{s_RgbFilePrefix}{Time.frameCount}.png");
SensorHandle.ReportCapture(dxRootPath, SensorSpatialData.FromGameObjects(m_EgoMarker == null ? null : m_EgoMarker.gameObject, gameObject), m_PersistentSensorData.Select(kvp => (kvp.Key, kvp.Value)).ToArray());
Func<AsyncRequest<CaptureCamera.CaptureState>, AsyncRequest.Result> colorFunctor;
var width = cam.pixelWidth;
var height = cam.pixelHeight;
var flipY = ShouldFlipY(cam);
colorFunctor = r =>
{
using (s_WriteFrame.Auto())
{
var dataColorBuffer = (byte[])r.data.colorBuffer;
byte[] encodedData;
using (s_EncodeAndSave.Auto())
{
encodedData = ImageConversion.EncodeArrayToPNG(dataColorBuffer, GraphicsFormat.R8G8B8A8_UNorm, (uint)width, (uint)height);
}
return !FileProducer.Write(captureFilename, encodedData) ? AsyncRequest.Result.Error : AsyncRequest.Result.Completed;
}
};
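// CaptureCamera.Capture (Unity.Simulation) reads the camera's color buffer back asynchronously
// and invokes colorFunctor once the data is available.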
CaptureCamera.Capture(cam, colorFunctor, flipY: flipY);
Profiler.EndSample();
}
// ReSharper disable once ParameterHidesMember
bool ShouldFlipY(Camera camera)
{
#if HDRP_PRESENT
var hdAdditionalCameraData = GetComponent<HDAdditionalCameraData>();
//Based on logic in HDRenderPipeline.PrepareFinalBlitParameters
return camera.targetTexture != null || hdAdditionalCameraData.flipYMode == HDAdditionalCameraData.FlipYMode.ForceFlipY || camera.cameraType == CameraType.Game;
#elif URP_PRESENT
return (SystemInfo.graphicsDeviceType == GraphicsDeviceType.Direct3D11 || SystemInfo.graphicsDeviceType == GraphicsDeviceType.Metal) &&
(camera.targetTexture != null || camera.cameraType == CameraType.Game);
#else
return false;
#endif
}
void OnSimulationEnding()
{
CleanUpInstanceSegmentation();
foreach (var labeler in m_Labelers)
{
if (labeler.isInitialized)
labeler.InternalCleanup();
}
}
void OnBeginCameraRendering(ScriptableRenderContext _, Camera cam)
{
if (cam != GetComponent<Camera>())
return;
if (!SensorHandle.ShouldCaptureThisFrame)
return;
#if UNITY_EDITOR
if (UnityEditor.EditorApplication.isPaused)
return;
#endif
CaptureRgbData(cam);
foreach (var labeler in m_Labelers)
{
if (!labeler.enabled)
continue;
if (!labeler.isInitialized)
labeler.Init(this);
labeler.InternalOnBeginRendering();
}
}
void OnDisable()
{
DatasetCapture.SimulationEnding -= OnSimulationEnding;
RenderPipelineManager.beginCameraRendering -= OnBeginCameraRendering;
OnSimulationEnding();
if (SensorHandle.IsValid)
SensorHandle.Dispose();
SensorHandle = default;
}
/// <summary>
/// The <see cref="CameraLabeler"/> instances which will be run for this PerceptionCamera.
/// </summary>
public IReadOnlyList<CameraLabeler> labelers => m_Labelers;
/// <summary>
/// Add the given <see cref="CameraLabeler"/> to the PerceptionCamera. It will be set up and executed by this
/// PerceptionCamera each frame it captures data.
/// </summary>
/// <param name="cameraLabeler">The labeler to add to this PerceptionCamera.</param>
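/// <example>
/// A sketch of adding a labeler at runtime (assumes an existing IdLabelConfig reference named idLabelConfig):
/// <code>
/// perceptionCamera.AddLabeler(new BoundingBox2DLabeler(idLabelConfig));
/// </code>
/// </example>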
public void AddLabeler(CameraLabeler cameraLabeler) => m_Labelers.Add(cameraLabeler);
/// <summary>
/// Removes the given <see cref="CameraLabeler"/> from the list of labelers under this PerceptionCamera, if it
/// is in the list. The labeler is cleaned up in the process. Labelers removed from a PerceptionCamera should
/// not be used again.
/// </summary>
/// <param name="cameraLabeler">The labeler to remove.</param>
/// <returns>True if the labeler was removed. False if it was not in the list.</returns>
public bool RemoveLabeler(CameraLabeler cameraLabeler)
{
if (m_Labelers.Remove(cameraLabeler))
{
if (cameraLabeler.isInitialized)
cameraLabeler.InternalCleanup();
return true;
}
return false;
}
internal void OnGroundTruthRendererFeatureRun()
{
//only used to confirm that GroundTruthRendererFeature is present in URP
m_GroundTruthRendererFeatureRun = true;
}
}
}