using System;
using System.Text;
using Unity.Collections.LowLevel.Unsafe;
using UnityEngine;
using UnityEngine.UI;
using UnityEngine.XR.ARFoundation;
using UnityEngine.XR.ARSubsystems;

/// <summary>
/// This component tests getting the latest human segmentation textures
/// (stencil and depth) from the ARHumanBodyManager. If successful,
/// it displays the stencil texture on the screen as a RawImage
/// and also displays information about both textures.
///
/// This is useful for computer vision applications where
/// you need to access the raw pixels of these images on the CPU.
///
/// This is different from the ARCameraBackground component, which
/// efficiently displays the camera image on the screen. If you
/// just want to blit the camera texture to the screen, use
/// the ARCameraBackground, or use Graphics.Blit to create
/// a GPU-friendly RenderTexture.
///
/// In this example, we display the texture on the screen
/// as a RawImage to demonstrate it is working.
/// This is done as an example; do not use this technique simply
/// to render the camera image on screen.
/// </summary>
public class TestDepthImage : MonoBehaviour
{
    [SerializeField]
    [Tooltip("The ARHumanBodyManager which will produce frame events.")]
    ARHumanBodyManager m_HumanBodyManager;

    /// <summary>
    /// Get or set the ARHumanBodyManager.
    /// </summary>
    public ARHumanBodyManager humanBodyManager
    {
        get { return m_HumanBodyManager; }
        set { m_HumanBodyManager = value; }
    }

    [SerializeField]
    RawImage m_RawImage;

    /// <summary>
    /// The UI RawImage used to display the image on screen.
    /// </summary>
    public RawImage rawImage
    {
        get { return m_RawImage; }
        set { m_RawImage = value; }
    }

    [SerializeField]
    Text m_ImageInfo;

    /// <summary>
    /// The UI Text used to display information about the image on screen.
    /// </summary>
    public Text imageInfo
    {
        get { return m_ImageInfo; }
        set { m_ImageInfo = value; }
    }

    // Appends the format, size, and mipmap count of the given texture to the
    // string builder, or a placeholder line if the texture is null.
    void LogTextureInfo(StringBuilder stringBuilder, string textureName, Texture2D texture)
    {
        stringBuilder.AppendFormat("texture : {0}\n", textureName);
        if (texture == null)
        {
            stringBuilder.AppendFormat("   <null>\n");
        }
        else
        {
            stringBuilder.AppendFormat("   format : {0}\n", texture.format.ToString());
            stringBuilder.AppendFormat("   width  : {0}\n", texture.width);
            stringBuilder.AppendFormat("   height : {0}\n", texture.height);
            stringBuilder.AppendFormat("   mipmap : {0}\n", texture.mipmapCount);
        }
    }

    void Update()
    {
        var subsystem = m_HumanBodyManager.subsystem;
        if (subsystem == null)
        {
            if (m_ImageInfo != null)
            {
                m_ImageInfo.text = "Human Segmentation not supported.";
            }
            return;
        }

        StringBuilder sb = new StringBuilder();
        Texture2D humanStencil = m_HumanBodyManager.humanStencilTexture;
        Texture2D humanDepth = m_HumanBodyManager.humanDepthTexture;
        LogTextureInfo(sb, "stencil", humanStencil);
        LogTextureInfo(sb, "depth", humanDepth);

        if (m_ImageInfo != null)
        {
            m_ImageInfo.text = sb.ToString();
        }
        else
        {
            Debug.Log(sb.ToString());
        }

        // To use the stencil, be sure the HumanSegmentationStencilMode property on the
        // ARHumanBodyManager is set to a non-disabled value.
        m_RawImage.texture = humanStencil;

        // To use the depth, be sure the HumanSegmentationDepthMode property on the
        // ARHumanBodyManager is set to a non-disabled value.
        // m_RawImage.texture = humanDepth;
    }
}
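
// The comments in Update above assume the human segmentation stencil and depth modes
// on the ARHumanBodyManager are set to non-disabled values (typically done in the
// Inspector). The minimal sketch below shows how that could also be done from code.
// It is hedged: the class name EnableHumanSegmentationModes is hypothetical, and the
// property names (humanSegmentationStencilMode, humanSegmentationDepthMode) and the
// Fastest enum values are assumed from the AR Foundation 3.x API that the comments
// above refer to; newer AR Foundation versions expose these modes on the
// AROcclusionManager instead. Verify the names against your package version.
public class EnableHumanSegmentationModes : MonoBehaviour
{
    [SerializeField]
    [Tooltip("The ARHumanBodyManager whose segmentation modes will be enabled.")]
    ARHumanBodyManager m_HumanBodyManager;

    void Start()
    {
        if (m_HumanBodyManager == null)
            return;

        // Request the lowest-cost non-disabled quality level for both textures so that
        // humanStencilTexture and humanDepthTexture are produced.
        m_HumanBodyManager.humanSegmentationStencilMode = HumanSegmentationStencilMode.Fastest;
        m_HumanBodyManager.humanSegmentationDepthMode = HumanSegmentationDepthMode.Fastest;
    }
}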