using System;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
using System.IO;
using System.Linq;
using JetBrains.Annotations;
using Unity.Collections;
using Unity.Simulation;
using UnityEngine.Experimental.Rendering;
using UnityEngine.Profiling;
using UnityEngine.Rendering;
#if HDRP_PRESENT
using UnityEngine.Rendering.HighDefinition;
#endif

namespace UnityEngine.Perception.GroundTruth
{
    /// <summary>
    /// Labeler which generates a semantic segmentation image each frame. Each object is rendered to the semantic segmentation
    /// image using the color associated with it based on the given <see cref="SemanticSegmentationLabelConfig"/>.
    /// Semantic segmentation images are saved to the dataset in PNG format.
    /// Only one SemanticSegmentationLabeler can render at once across all cameras.
    /// </summary>
    [Serializable]
    public sealed class SemanticSegmentationLabeler : CameraLabeler, IOverlayPanelProvider
    {
        /// <inheritdoc/>
        public override string description
        {
            get => "Generates a semantic segmentation image for each captured frame. Each object is rendered to the semantic segmentation image using the color associated with it based on this labeler's associated semantic segmentation label configuration. " +
                "Semantic segmentation images are saved to the dataset in PNG format. " +
                "Please note that only one " + GetType().Name + " can render at once across all cameras.";
            protected set {}
        }

        const string k_SemanticSegmentationDirectory = "SemanticSegmentation";
        const string k_SegmentationFilePrefix = "segmentation_";

        // Output directory name; suffixed with a fresh Guid in Setup() so multiple labelers don't collide.
        internal string semanticSegmentationDirectory;

        /// <summary>
        /// The id to associate with semantic segmentation annotations in the dataset.
        /// </summary>
        [Tooltip("The id to associate with semantic segmentation annotations in the dataset.")]
        public string annotationId = "12f94d8d-5425-4deb-9b21-5e53ad957d66";

        /// <summary>
        /// The SemanticSegmentationLabelConfig which maps labels to pixel values.
        /// </summary>
        public SemanticSegmentationLabelConfig labelConfig;

        /// <summary>
        /// Event information for <see cref="SemanticSegmentationLabeler.imageReadback"/>
        /// </summary>
        public struct ImageReadbackEventArgs
        {
            /// <summary>
            /// The <see cref="Time.frameCount"/> on which the image was rendered. This may be multiple frames in the past.
            /// </summary>
            public int frameCount;

            /// <summary>
            /// Color pixel data.
            /// </summary>
            public NativeArray<Color32> data;

            /// <summary>
            /// The source image texture.
            /// </summary>
            public RenderTexture sourceTexture;
        }

        /// <summary>
        /// Event which is called each frame a semantic segmentation image is read back from the GPU.
        /// </summary>
        public event Action<ImageReadbackEventArgs> imageReadback;

        /// <summary>
        /// The RenderTexture on which semantic segmentation images are drawn. Will be resized on startup to match
        /// the camera resolution.
        /// </summary>
        public RenderTexture targetTexture => m_TargetTextureOverride;

        /// <inheritdoc cref="IOverlayPanelProvider"/>
        public Texture overlayImage => targetTexture;

        /// <inheritdoc cref="IOverlayPanelProvider"/>
        public string label => "SemanticSegmentation";

        [Tooltip("(Optional) The RenderTexture on which semantic segmentation images will be drawn. Will be reformatted on startup.")]
        [SerializeField]
        RenderTexture m_TargetTextureOverride;

        AnnotationDefinition m_SemanticSegmentationAnnotationDefinition;
        RenderTextureReader<Color32> m_SemanticSegmentationTextureReader;

#if HDRP_PRESENT
        SemanticSegmentationPass m_SemanticSegmentationPass;
        LensDistortionPass m_LensDistortionPass;
#elif URP_PRESENT
        SemanticSegmentationUrpPass m_SemanticSegmentationPass;
        LensDistortionUrpPass m_LensDistortionPass;
#endif

        // Pending annotations keyed by the Time.frameCount at which capture was requested;
        // resolved when the GPU readback for that frame arrives.
        Dictionary<int, AsyncAnnotation> m_AsyncAnnotations;

        /// <summary>
        /// Creates a new SemanticSegmentationLabeler. Be sure to assign <see cref="labelConfig"/> before adding to a <see cref="PerceptionCamera"/>.
        /// </summary>
        public SemanticSegmentationLabeler() { }

        /// <summary>
        /// Creates a new SemanticSegmentationLabeler with the given <see cref="SemanticSegmentationLabelConfig"/>.
        /// </summary>
        /// <param name="labelConfig">The label config associating labels with colors.</param>
        /// <param name="targetTextureOverride">Override the target texture of the labeler. Will be reformatted on startup.</param>
        public SemanticSegmentationLabeler(SemanticSegmentationLabelConfig labelConfig, RenderTexture targetTextureOverride = null)
        {
            this.labelConfig = labelConfig;
            m_TargetTextureOverride = targetTextureOverride;
        }

        // Serialized into the annotation definition; snake_case names are part of the dataset schema.
        [SuppressMessage("ReSharper", "InconsistentNaming")]
        struct SemanticSegmentationSpec
        {
            [UsedImplicitly]
            public string label_name;
            [UsedImplicitly]
            public Color pixel_value;
        }

        // Payload handed to the async PNG-encode/write request; owns a persistent copy of the pixel data.
        struct AsyncSemanticSegmentationWrite
        {
            public NativeArray<Color32> data;
            public int width;
            public int height;
            public string path;
        }

        /// <inheritdoc/>
        protected override bool supportsVisualization => true;

        /// <inheritdoc/>
        protected override void Setup()
        {
            var myCamera = perceptionCamera.GetComponent<Camera>();
            var camWidth = myCamera.pixelWidth;
            var camHeight = myCamera.pixelHeight;

            if (labelConfig == null)
            {
                throw new InvalidOperationException(
                    "SemanticSegmentationLabeler's LabelConfig must be assigned");
            }

            m_AsyncAnnotations = new Dictionary<int, AsyncAnnotation>();

            if (targetTexture != null)
            {
                if (targetTexture.sRGB)
                {
                    Debug.LogError("targetTexture supplied to SemanticSegmentationLabeler must be in Linear mode. Disabling labeler.");
                    enabled = false;
                }
                var renderTextureDescriptor = new RenderTextureDescriptor(camWidth, camHeight, GraphicsFormat.R8G8B8A8_UNorm, 8);
                targetTexture.descriptor = renderTextureDescriptor;
            }
            else
                m_TargetTextureOverride = new RenderTexture(camWidth, camHeight, 8, RenderTextureFormat.ARGB32, RenderTextureReadWrite.Linear);

            targetTexture.Create();
            targetTexture.name = "Labeling";
            // Unique per-instance directory so simultaneous labelers/cameras don't overwrite each other's PNGs.
            semanticSegmentationDirectory = k_SemanticSegmentationDirectory + Guid.NewGuid();

#if HDRP_PRESENT
            var gameObject = perceptionCamera.gameObject;
            var customPassVolume = gameObject.GetComponent<CustomPassVolume>() ?? gameObject.AddComponent<CustomPassVolume>();
            customPassVolume.injectionPoint = CustomPassInjectionPoint.BeforeRendering;
            customPassVolume.isGlobal = true;
            m_SemanticSegmentationPass = new SemanticSegmentationPass(myCamera, targetTexture, labelConfig)
            {
                name = "Labeling Pass"
            };
            customPassVolume.customPasses.Add(m_SemanticSegmentationPass);

            m_LensDistortionPass = new LensDistortionPass(myCamera, targetTexture)
            {
                name = "Lens Distortion Pass"
            };
            customPassVolume.customPasses.Add(m_LensDistortionPass);
#elif URP_PRESENT
            // Semantic Segmentation
            m_SemanticSegmentationPass = new SemanticSegmentationUrpPass(myCamera, targetTexture, labelConfig);
            perceptionCamera.AddScriptableRenderPass(m_SemanticSegmentationPass);

            // Lens Distortion
            m_LensDistortionPass = new LensDistortionUrpPass(myCamera, targetTexture);
            perceptionCamera.AddScriptableRenderPass(m_LensDistortionPass);
#endif

            var specs = labelConfig.labelEntries.Select((l) => new SemanticSegmentationSpec()
            {
                label_name = l.label,
                pixel_value = l.color
            });

            // A sky color of Color.black means "no sky entry"; anything else is reported as an extra spec.
            if (labelConfig.skyColor != Color.black)
            {
                specs = specs.Append(new SemanticSegmentationSpec()
                {
                    label_name = "sky",
                    pixel_value = labelConfig.skyColor
                });
            }

            m_SemanticSegmentationAnnotationDefinition = DatasetCapture.RegisterAnnotationDefinition(
                "semantic segmentation",
                specs.ToArray(),
                "pixel-wise semantic segmentation label",
                "PNG",
                id: Guid.Parse(annotationId));

            m_SemanticSegmentationTextureReader = new RenderTextureReader<Color32>(targetTexture);
            visualizationEnabled = supportsVisualization;
        }

        // GPU readback callback: reports the file to the pending annotation for frameCount,
        // notifies imageReadback subscribers, then hands a persistent copy of the pixels to an
        // async request that encodes and writes the PNG off the main thread.
        void OnSemanticSegmentationImageRead(int frameCount, NativeArray<Color32> data)
        {
            if (!m_AsyncAnnotations.TryGetValue(frameCount, out var annotation))
                return;

            var datasetRelativePath = $"{semanticSegmentationDirectory}/{k_SegmentationFilePrefix}{frameCount}.png";
            var localPath = $"{Manager.Instance.GetDirectoryFor(semanticSegmentationDirectory)}/{k_SegmentationFilePrefix}{frameCount}.png";

            annotation.ReportFile(datasetRelativePath);

            var asyncRequest = Manager.Instance.CreateRequest<AsyncRequest<AsyncSemanticSegmentationWrite>>();

            imageReadback?.Invoke(new ImageReadbackEventArgs
            {
                data = data,
                frameCount = frameCount,
                sourceTexture = targetTexture
            });
            // Copy with Allocator.Persistent: the readback buffer is not valid after this callback returns.
            asyncRequest.data = new AsyncSemanticSegmentationWrite
            {
                data = new NativeArray<Color32>(data, Allocator.Persistent),
                width = targetTexture.width,
                height = targetTexture.height,
                path = localPath
            };
            asyncRequest.Enqueue((r) =>
            {
                Profiler.BeginSample("Encode");
                var pngBytes = ImageConversion.EncodeArrayToPNG(r.data.data.ToArray(), GraphicsFormat.R8G8B8A8_UNorm, (uint)r.data.width, (uint)r.data.height);
                Profiler.EndSample();
                Profiler.BeginSample("WritePng");
                File.WriteAllBytes(r.data.path, pngBytes);
                Manager.Instance.ConsumerFileProduced(r.data.path);
                Profiler.EndSample();
                r.data.data.Dispose();
                return AsyncRequest.Result.Completed;
            });
            asyncRequest.Execute();
        }

        /// <inheritdoc/>
        protected override void OnEndRendering(ScriptableRenderContext scriptableRenderContext)
        {
            m_AsyncAnnotations[Time.frameCount] = perceptionCamera.SensorHandle.ReportAnnotationAsync(m_SemanticSegmentationAnnotationDefinition);
            m_SemanticSegmentationTextureReader.Capture(scriptableRenderContext,
                (frameCount, data, renderTexture) => OnSemanticSegmentationImageRead(frameCount, data));
        }

        /// <inheritdoc/>
        protected override void Cleanup()
        {
            // Drain in-flight GPU readbacks before disposing the reader so no callback fires on a dead texture.
            m_SemanticSegmentationTextureReader?.WaitForAllImages();
            m_SemanticSegmentationTextureReader?.Dispose();
            m_SemanticSegmentationTextureReader = null;

            if (m_TargetTextureOverride != null)
                m_TargetTextureOverride.Release();

            m_TargetTextureOverride = null;
        }
    }
}