using System.Text;
using UnityEngine.UI;
using UnityEngine.XR.ARFoundation;
using UnityEngine.XR.ARSubsystems;
namespace UnityEngine.XR.ARFoundation.Samples
{
/// <summary>
/// This component displays a picture-in-picture view of the human depth texture or the human stencil texture.
/// </summary>
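/// <remarks>
/// The manager, UI, and material references are serialized fields and are expected to be assigned in the
/// Inspector; the camera manager, occlusion manager, and raw image are asserted at runtime.
/// </remarks>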
public class DisplayDepthImage : MonoBehaviour
{
/// <summary>
/// The display mode for the texture widget. Values must match the UI dropdown.
/// </summary>
enum DisplayMode
{
HumanDepth = 0,
HumanStencil = 1,
}
/// <summary>
/// Name of the max distance property in the shader.
/// </summary>
const string k_MaxDistanceName = "_MaxDistance";
/// <summary>
/// Name of the display rotation matrix in the shader.
/// </summary>
const string k_DisplayRotationPerFrameName = "_DisplayRotationPerFrame";
/// <summary>
/// The default texture aspect ratio.
/// </summary>
const float k_DefaultTextureAspectRatio = 1.0f;
/// <summary>
/// ID of the max distance property in the shader.
/// </summary>
static readonly int k_MaxDistanceId = Shader.PropertyToID(k_MaxDistanceName);
/// <summary>
/// ID of the display rotation matrix in the shader.
/// </summary>
static readonly int k_DisplayRotationPerFrameId = Shader.PropertyToID(k_DisplayRotationPerFrameName);
/// <summary>
/// A string builder for constructing the texture information text.
/// </summary>
readonly StringBuilder m_StringBuilder = new StringBuilder();
/// <summary>
/// The current screen orientation, remembered so that we only update the raw image layout when it changes.
/// </summary>
ScreenOrientation m_CurrentScreenOrientation;
/// <summary>
/// The current texture aspect ratio, remembered so that we can resize the raw image layout when it changes.
/// </summary>
float m_TextureAspectRatio = k_DefaultTextureAspectRatio;
/// <summary>
/// The mode indicating which texture to display.
/// </summary>
DisplayMode m_DisplayMode = DisplayMode.HumanDepth;
/// <summary>
/// The display rotation matrix for the shader.
/// </summary>
Matrix4x4 m_DisplayRotationMatrix = Matrix4x4.identity;
#if UNITY_ANDROID
/// <summary>
/// A matrix to flip the Y coordinate for the Android platform.
/// </summary>
Matrix4x4 k_AndroidFlipYMatrix = Matrix4x4.identity;
#endif // UNITY_ANDROID
/// <summary>
/// Get or set the AROcclusionManager.
/// </summary>
public AROcclusionManager occlusionManager
{
get => m_OcclusionManager;
set => m_OcclusionManager = value;
}
[SerializeField]
[Tooltip("The AROcclusionManager which will produce depth textures.")]
AROcclusionManager m_OcclusionManager;
/// <summary>
/// Get or set the ARCameraManager.
/// </summary>
public ARCameraManager cameraManager
{
get => m_CameraManager;
set => m_CameraManager = value;
}
[SerializeField]
[Tooltip("The ARCameraManager which will produce camera frame events.")]
ARCameraManager m_CameraManager;
/// <summary>
/// The UI RawImage used to display the image on screen.
/// </summary>
public RawImage rawImage
{
get => m_RawImage;
set => m_RawImage = value;
}
[SerializeField]
RawImage m_RawImage;
/// <summary>
/// The UI Text used to display information about the image on screen.
/// </summary>
public Text imageInfo
{
get => m_ImageInfo;
set => m_ImageInfo = value;
}
[SerializeField]
Text m_ImageInfo;
/// <summary>
/// The depth material for rendering depth textures.
/// </summary>
public Material depthMaterial
{
get => m_DepthMaterial;
set => m_DepthMaterial = value;
}
[SerializeField]
Material m_DepthMaterial;
/// <summary>
/// The stencil material for rendering stencil textures.
/// </summary>
public Material stencilMaterial
{
get => m_StencilMaterial;
set => m_StencilMaterial = value;
}
[SerializeField]
Material m_StencilMaterial;
/// <summary>
/// The max distance value for the shader when showing a human depth texture.
/// </summary>
public float maxHumanDistance
{
get => m_MaxHumanDistance;
set => m_MaxHumanDistance = value;
}
[SerializeField]
float m_MaxHumanDistance = 3.0f;
void Awake()
{
#if UNITY_ANDROID
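// Build a matrix that mirrors the V texture coordinate (v' = 1 - v): scale Y by -1 and translate by +1
// so the sampled UVs stay within [0, 1]. It is applied only on Android (see OnCameraFrameEventReceived).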
k_AndroidFlipYMatrix[1,1] = -1.0f;
k_AndroidFlipYMatrix[2,1] = 1.0f;
#endif // UNITY_ANDROID
}
void OnEnable()
{
// Subscribe to the camera frame received event, and initialize the display rotation matrix.
Debug.Assert(m_CameraManager != null, "no camera manager");
m_CameraManager.frameReceived += OnCameraFrameEventReceived;
m_DisplayRotationMatrix = Matrix4x4.identity;
// When enabled, get the current screen orientation, and update the raw image UI.
m_CurrentScreenOrientation = Screen.orientation;
UpdateRawImage();
}
void OnDisable()
{
// Unsubscribe from the camera frame received event, and reset the display rotation matrix.
Debug.Assert(m_CameraManager != null, "no camera manager");
m_CameraManager.frameReceived -= OnCameraFrameEventReceived;
m_DisplayRotationMatrix = Matrix4x4.identity;
}
void Update()
{
// If the device supports neither the human stencil image nor the human depth image, display a message
// about unsupported functionality and return.
Debug.Assert(m_OcclusionManager != null, "no occlusion manager");
if ((m_OcclusionManager.descriptor?.supportsHumanSegmentationStencilImage == false)
&& (m_OcclusionManager.descriptor?.supportsHumanSegmentationDepthImage == false))
{
LogText("Human segmentation is not supported on this device.");
m_RawImage.texture = null;
if (!Mathf.Approximately(m_TextureAspectRatio, k_DefaultTextureAspectRatio))
{
m_TextureAspectRatio = k_DefaultTextureAspectRatio;
UpdateRawImage();
}
return;
}
// Get all of the occlusion textures.
Texture2D humanStencil = m_OcclusionManager.humanStencilTexture;
Texture2D humanDepth = m_OcclusionManager.humanDepthTexture;
// Display some text information about each of the textures.
m_StringBuilder.Clear();
BuildTextureInfo(m_StringBuilder, "stencil", humanStencil);
BuildTextureInfo(m_StringBuilder, "depth", humanDepth);
LogText(m_StringBuilder.ToString());
// Decide which to display based on the current mode.
Texture2D displayTexture;
switch (m_DisplayMode)
{
case DisplayMode.HumanStencil:
displayTexture = humanStencil;
break;
case DisplayMode.HumanDepth:
default:
displayTexture = humanDepth;
break;
}
// Assign the texture to display to the raw image.
Debug.Assert(m_RawImage != null, "no raw image");
m_RawImage.texture = displayTexture;
// Get the aspect ratio for the current texture.
float textureAspectRatio = (displayTexture == null) ? 1.0f : ((float)displayTexture.width / (float)displayTexture.height);
// If the raw image needs to be updated because of a device orientation change or because of a texture
// aspect ratio difference, then update the raw image with the new values.
if ((m_CurrentScreenOrientation != Screen.orientation)
|| !Mathf.Approximately(m_TextureAspectRatio, textureAspectRatio))
{
m_CurrentScreenOrientation = Screen.orientation;
m_TextureAspectRatio = textureAspectRatio;
UpdateRawImage();
}
}
/// <summary>
/// When the camera frame event is raised, capture the display rotation matrix.
/// </summary>
/// <param name="cameraFrameEventArgs">The arguments when a camera frame event is raised.</param>
void OnCameraFrameEventReceived(ARCameraFrameEventArgs cameraFrameEventArgs)
{
Debug.Assert(m_RawImage != null, "no raw image");
if (m_RawImage.material != null)
{
// Copy the display rotation matrix from the camera.
Matrix4x4 cameraMatrix = cameraFrameEventArgs.displayMatrix ?? Matrix4x4.identity;
Vector2 affineBasisX = new Vector2(1.0f, 0.0f);
Vector2 affineBasisY = new Vector2(0.0f, 1.0f);
Vector2 affineTranslation = new Vector2(0.0f, 0.0f);
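// ARKit and ARCore pack the 2D affine display transform into the 4x4 display matrix with different
// layouts, so the basis vectors and translation are extracted per platform below.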
#if UNITY_IOS
affineBasisX = new Vector2(cameraMatrix[0, 0], cameraMatrix[1, 0]);
affineBasisY = new Vector2(cameraMatrix[0, 1], cameraMatrix[1, 1]);
affineTranslation = new Vector2(cameraMatrix[2, 0], cameraMatrix[2, 1]);
#endif // UNITY_IOS
#if UNITY_ANDROID
affineBasisX = new Vector2(cameraMatrix[0, 0], cameraMatrix[0, 1]);
affineBasisY = new Vector2(cameraMatrix[1, 0], cameraMatrix[1, 1]);
affineTranslation = new Vector2(cameraMatrix[0, 2], cameraMatrix[1, 2]);
#endif // UNITY_ANDROID
// The camera display matrix includes scaling and offsets to fit the aspect ratio of the device. In most
// cases, the camera display matrix should be used directly without modification when applying depth to
// the scene because that will line up the depth image with the camera image. However, for this demo,
// we want to show the full depth image as a picture-in-picture, so we remove these scaling and offset
// factors while preserving the orientation.
affineBasisX = affineBasisX.normalized;
affineBasisY = affineBasisY.normalized;
m_DisplayRotationMatrix = Matrix4x4.identity;
m_DisplayRotationMatrix[0,0] = affineBasisX.x;
m_DisplayRotationMatrix[0,1] = affineBasisY.x;
m_DisplayRotationMatrix[1,0] = affineBasisX.y;
m_DisplayRotationMatrix[1,1] = affineBasisY.y;
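// Rounding the translation snaps the aspect-fit offsets to 0 or 1, keeping only the translation implied
// by the screen rotation.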
m_DisplayRotationMatrix[2,0] = Mathf.Round(affineTranslation.x);
m_DisplayRotationMatrix[2,1] = Mathf.Round(affineTranslation.y);
#if UNITY_ANDROID
m_DisplayRotationMatrix = k_AndroidFlipYMatrix * m_DisplayRotationMatrix;
#endif // UNITY_ANDROID
// Set the matrix to the raw image material.
m_RawImage.material.SetMatrix(k_DisplayRotationPerFrameId, m_DisplayRotationMatrix);
}
}
/// <summary>
/// Create log information about the given texture.
/// </summary>
/// <param name="stringBuilder">The string builder to which to append the texture information.</param>
/// <param name="textureName">The semantic name of the texture for logging purposes.</param>
/// <param name="texture">The texture for which to log information.</param>
void BuildTextureInfo(StringBuilder stringBuilder, string textureName, Texture2D texture)
{
stringBuilder.AppendLine($"texture : {textureName}");
if (texture == null)
{
stringBuilder.AppendLine(" ");
}
else
{
stringBuilder.AppendLine($" format : {texture.format}");
stringBuilder.AppendLine($" width : {texture.width}");
stringBuilder.AppendLine($" height : {texture.height}");
stringBuilder.AppendLine($" mipmap : {texture.mipmapCount}");
}
}
/// <summary>
/// Log the given text to the screen if the image info UI is set. Otherwise, log the string to debug.
/// </summary>
/// <param name="text">The text string to log.</param>
void LogText(string text)
{
if (m_ImageInfo != null)
{
m_ImageInfo.text = text;
}
else
{
Debug.Log(text);
}
}
/// <summary>
/// Update the raw image with the current configurations.
/// </summary>
void UpdateRawImage()
{
Debug.Assert(m_RawImage != null, "no raw image");
// Determine the raw image rectSize preserving the texture aspect ratio, matching the screen orientation,
// and keeping a minimum dimension size.
float minDimension = 480.0f;
float maxDimension = Mathf.Round(minDimension * m_TextureAspectRatio);
Vector2 rectSize;
switch (m_CurrentScreenOrientation)
{
case ScreenOrientation.LandscapeRight:
case ScreenOrientation.LandscapeLeft:
rectSize = new Vector2(maxDimension, minDimension);
break;
case ScreenOrientation.PortraitUpsideDown:
case ScreenOrientation.Portrait:
default:
rectSize = new Vector2(minDimension, maxDimension);
break;
}
// Determine the raw image material and maxDistance material parameter based on the display mode.
float maxDistance;
Material material;
switch (m_DisplayMode)
{
case DisplayMode.HumanStencil:
material = m_StencilMaterial;
maxDistance = m_MaxHumanDistance;
break;
case DisplayMode.HumanDepth:
default:
material = m_DepthMaterial;
maxDistance = m_MaxHumanDistance;
break;
}
// Update the raw image dimensions and the raw image material parameters.
m_RawImage.rectTransform.sizeDelta = rectSize;
material.SetFloat(k_MaxDistanceId, maxDistance);
material.SetMatrix(k_DisplayRotationPerFrameId, m_DisplayRotationMatrix);
m_RawImage.material = material;
}
/// <summary>
/// Callback when the depth mode dropdown UI has a value change.
/// </summary>
/// <param name="dropdown">The dropdown UI that changed.</param>
public void OnDepthModeDropdownValueChanged(Dropdown dropdown)
{
// Update the display mode from the dropdown value.
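// The cast assumes the dropdown options are ordered to match the DisplayMode enum values
// (HumanDepth = 0, HumanStencil = 1).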
m_DisplayMode = (DisplayMode)dropdown.value;
// Update the raw image following the mode change.
UpdateRawImage();
}
}
}