
Merge branch 'master' into menufixes

4.0
Alexandra Serralta, 5 years ago
Current commit: 2dda4dcd
54 changed files with 4356 additions and 4198 deletions
  1. Assets/Scenes/ARCollaborationData/AnchorInfoManager.cs (139 changes)
  2. Assets/Scenes/ARCollaborationData/CollaborationNetworkingIndicator.cs (95 changes)
  3. Assets/Scenes/ARCollaborationData/CollaborativeSession.cs (199 changes)
  4. Assets/Scenes/ARCollaborationData/DisplayTrackingState.cs (57 changes)
  5. Assets/Scenes/ARKitCoachingOverlay/ARKitCoachingOverlay.cs (243 changes)
  6. Assets/Scenes/CameraGrain/CameraGrain.cs (53 changes)
  7. Assets/Scenes/FaceTracking/DisplayFaceInfo.cs (235 changes)
  8. Assets/Scenes/FaceTracking/EyeLasers.unity (2 changes)
  9. Assets/Scenes/FaceTracking/ToggleCameraFacingDirection.cs (57 changes)
  10. Assets/Scenes/ImageTracking/DynamicLibrary.cs (301 changes)
  11. Assets/Scenes/ImageTracking/TrackedImageInfoManager.cs (167 changes)
  12. Assets/Scenes/LightEstimation/PlatformSelector.cs (75 changes)
  13. Assets/Scenes/LightEstimation/Rotator.cs (17 changes)
  14. Assets/Scenes/Meshing/Scripts/MeshClassificationFracking.cs (487 changes)
  15. Assets/Scenes/Meshing/Scripts/ProjectileLauncher.cs (69 changes)
  16. Assets/Scenes/Meshing/Scripts/ToggleMeshClassification.cs (89 changes)
  17. Assets/Scenes/Plane Detection/PlaneClassificationLabeler.cs (147 changes)
  18. Assets/Scenes/SimpleAR/SessionReloader.cs (59 changes)
  19. Assets/Scripts/ARCoreFaceRegionManager.cs (137 changes)
  20. Assets/Scripts/ARFeatheredPlaneMeshVisualizer.cs (167 changes)
  21. Assets/Scripts/ARKitBlendShapeVisualizer.cs (309 changes)
  22. Assets/Scripts/ARWorldMapController.cs (497 changes)
  23. Assets/Scripts/AnchorCreator.cs (80 changes)
  24. Assets/Scripts/BoneController.cs (319 changes)
  25. Assets/Scripts/CameraConfigController.cs (167 changes)
  26. Assets/Scripts/CpuImageSample.cs (395 changes)
  27. Assets/Scripts/DisableVerticalPlanes.cs (79 changes)
  28. Assets/Scripts/EnvironmentProbeVisualizer.cs (45 changes)
  29. Assets/Scripts/EyePoseVisualizer.cs (122 changes)
  30. Assets/Scripts/EyeTrackingUI.cs (41 changes)
  31. Assets/Scripts/FaceMaterialSwitcher.cs (57 changes)
  32. Assets/Scripts/FixationPoint2DVisualizer.cs (136 changes)
  33. Assets/Scripts/FixationPoint3DVisualizer.cs (120 changes)
  34. Assets/Scripts/HumanBodyTracker.cs (121 changes)
  35. Assets/Scripts/LightEstimation.cs (265 changes)
  36. Assets/Scripts/LightEstimationUI.cs (227 changes)
  37. Assets/Scripts/Logger.cs (99 changes)
  38. Assets/Scripts/MakeAppearOnPlane.cs (109 changes)
  39. Assets/Scripts/PlaceMultipleObjectsOnPlane.cs (81 changes)
  40. Assets/Scripts/PlaceOnPlane.cs (125 changes)
  41. Assets/Scripts/PlaneDetectionController.cs (99 changes)
  42. Assets/Scripts/RotationController.cs (165 changes)
  43. Assets/Scripts/ScaleController.cs (163 changes)
  44. Assets/Scripts/ScreenSpaceJointVisualizer.cs (241 changes)
  45. Assets/Scripts/SupportChecker.cs (197 changes)
  46. Assets/Scripts/TestBodyAnchorScale.cs (93 changes)
  47. Assets/Scripts/TestDepthImage.cs (189 changes)
  48. Assets/Scripts/UX/ARSceneSelectUI.cs (371 changes)
  49. Assets/Scripts/UX/ActiveMenu.cs (24 changes)
  50. Assets/Scripts/UX/BackButton.cs (41 changes)
  51. Assets/Scripts/UX/CheckAvailableFeatures.cs (481 changes)
  52. Assets/Scripts/UX/FadePlaneOnBoundaryChange.cs (77 changes)
  53. Assets/Scripts/UX/Tooltip.cs (61 changes)
  54. Assets/Scripts/UX/UIManager.cs (163 changes)

Assets/Scenes/ARCollaborationData/AnchorInfoManager.cs (139 changes)


using UnityEngine.UI;
using UnityEngine.XR.ARFoundation;

namespace UnityEngine.XR.ARFoundation.Samples
{
    /// <summary>
    /// Displays information about each reference point, including
    /// whether the reference point is local or remote.
    /// The reference point prefab is assumed to include a GameObject
    /// which can be colored to indicate which session created it.
    /// </summary>
    [RequireComponent(typeof(ARSessionOrigin))]
    [RequireComponent(typeof(ARAnchorManager))]
    public class AnchorInfoManager : MonoBehaviour
    {
        [SerializeField]
        ARSession m_Session;

        public ARSession session
        {
            get { return m_Session; }
            set { m_Session = value; }
        }

        void OnEnable()
        {
            GetComponent<ARAnchorManager>().anchorsChanged += OnAnchorsChanged;
        }

        void OnDisable()
        {
            GetComponent<ARAnchorManager>().anchorsChanged -= OnAnchorsChanged;
        }

        void OnAnchorsChanged(ARAnchorsChangedEventArgs eventArgs)
        {
            foreach (var referencePoint in eventArgs.added)
            {
                UpdateAnchor(referencePoint);
            }

            foreach (var referencePoint in eventArgs.updated)
            {
                UpdateAnchor(referencePoint);
            }
        }

        unsafe struct byte128
        {
            public fixed byte data[16];
        }

        void UpdateAnchor(ARAnchor referencePoint)
        {
            var canvas = referencePoint.GetComponentInChildren<Canvas>();
            if (canvas == null)
                return;

            canvas.worldCamera = GetComponent<ARSessionOrigin>().camera;

            var text = canvas.GetComponentInChildren<Text>();
            if (text == null)
                return;

            var sessionId = referencePoint.sessionId;
            if (sessionId.Equals(session.subsystem.sessionId))
            {
                text.text = "Local";
            }
            else
            {
                text.text = "Remote";
            }

            var cube = referencePoint.transform.Find("Scale/SessionId Indicator");
            if (cube != null)
            {
                var renderer = cube.GetComponent<Renderer>();

                // Generate a color from the sessionId
                Color color;
                unsafe
                {
                    var bytes = *(byte128*)&sessionId;
                    color = new Color(
                        bytes.data[0] / 255f,
                        bytes.data[4] / 255f,
                        bytes.data[8] / 255f,
                        bytes.data[12] / 255f);
                }
                renderer.material.color = color;
            }
        }
    }
}
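Aside: the unsafe byte128 cast above works because a session ID is a 128-bit value. A managed-safe sketch of the same idea, assuming the ID is available as a System.Guid (SessionColor and FromGuid are illustrative names, not part of the sample):

using System;
using UnityEngine;

static class SessionColor
{
    // Sample four of the sixteen GUID bytes as RGBA channels to get a
    // stable per-session color.
    public static Color FromGuid(Guid sessionId)
    {
        var bytes = sessionId.ToByteArray(); // a GUID is always 16 bytes
        return new Color(
            bytes[0] / 255f,
            bytes[4] / 255f,
            bytes[8] / 255f,
            bytes[12] / 255f);
    }
}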

Assets/Scenes/ARCollaborationData/CollaborationNetworkingIndicator.cs (95 changes)


using UnityEngine;
using UnityEngine.UI;

namespace UnityEngine.XR.ARFoundation.Samples
{
    public class CollaborationNetworkingIndicator : MonoBehaviour
    {
        [SerializeField]
        Image m_IncomingDataImage;

        public Image incomingDataImage
        {
            get { return m_IncomingDataImage; }
            set { m_IncomingDataImage = value; }
        }

        [SerializeField]
        Image m_OutgoingDataImage;

        public Image outgoingDataImage
        {
            get { return m_OutgoingDataImage; }
            set { m_OutgoingDataImage = value; }
        }

        [SerializeField]
        Image m_HasCollaborationDataImage;

        public Image hasCollaborationDataImage
        {
            get { return m_HasCollaborationDataImage; }
            set { m_HasCollaborationDataImage = value; }
        }

        static bool s_IncomingDataReceived;
        static bool s_OutgoingDataSent;
        static bool s_HasCollaborationData;

        void Update()
        {
            m_IncomingDataImage.color = s_IncomingDataReceived ? Color.green : Color.red;
            m_OutgoingDataImage.color = s_OutgoingDataSent ? Color.green : Color.red;
            m_HasCollaborationDataImage.color = s_HasCollaborationData ? Color.green : Color.red;

            s_IncomingDataReceived = false;
            s_OutgoingDataSent = false;
            s_HasCollaborationData = false;
        }

        public static void NotifyIncomingDataReceived()
        {
            s_IncomingDataReceived = true;
        }

        public static void NotifyOutgoingDataSent()
        {
            s_OutgoingDataSent = true;
        }

        public static void NotifyHasCollaborationData()
        {
            s_HasCollaborationData = true;
        }
    }
}
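Note the pattern here: the static flags are cleared at the end of every Update, so an indicator stays green only while Notify calls keep arriving each frame. A minimal sketch of a call site (PacketHandler and OnPacketReceived are illustrative names, not part of the sample):

using UnityEngine.XR.ARFoundation.Samples;

class PacketHandler
{
    // Flag incoming traffic as it is handled so the indicator
    // lights up for this frame.
    public void OnPacketReceived(byte[] payload)
    {
        CollaborationNetworkingIndicator.NotifyIncomingDataReceived();
        // ... hand the payload off for processing ...
    }
}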

Assets/Scenes/ARCollaborationData/CollaborativeSession.cs (199 changes)


#if UNITY_IOS && !UNITY_EDITOR
using UnityEngine.XR.ARKit;
#endif

namespace UnityEngine.XR.ARFoundation.Samples
{
    [RequireComponent(typeof(ARSession))]
    public class CollaborativeSession : MonoBehaviour
    {
        [SerializeField]
        [Tooltip("The name for this network service. It should be 15 characters or less and can contain ASCII lowercase letters, numbers, and hyphens.")]
        string m_ServiceType;

        /// <summary>
        /// The name for this network service.
        /// See <a href="https://developer.apple.com/documentation/multipeerconnectivity/mcnearbyserviceadvertiser">MCNearbyServiceAdvertiser</a>
        /// for the purpose of and restrictions on this name.
        /// </summary>
        public string serviceType
        {
            get { return m_ServiceType; }
            set { m_ServiceType = value; }
        }

        ARSession m_ARSession;

        void DisableNotSupported(string reason)
        {
            enabled = false;
            Logger.Log(reason);
        }

        void OnEnable()
        {
#if UNITY_IOS && !UNITY_EDITOR
            var subsystem = GetSubsystem();
            if (!ARKitSessionSubsystem.supportsCollaboration || subsystem == null)
            {
                DisableNotSupported("Collaborative sessions require iOS 13.");
                return;
            }

            subsystem.collaborationRequested = true;
            m_MCSession.Enabled = true;
#else
            DisableNotSupported("Collaborative sessions are an ARKit 3 feature; this platform does not support them.");
#endif
        }

#if UNITY_IOS && !UNITY_EDITOR
        MCSession m_MCSession;

        ARKitSessionSubsystem GetSubsystem()
        {
            if (m_ARSession == null)
                return null;

            return m_ARSession.subsystem as ARKitSessionSubsystem;
        }

        void Awake()
        {
            m_ARSession = GetComponent<ARSession>();
            m_MCSession = new MCSession(SystemInfo.deviceName, m_ServiceType);
        }

        void OnDisable()
        {
            m_MCSession.Enabled = false;

            var subsystem = GetSubsystem();
            if (subsystem != null)
                subsystem.collaborationRequested = false;
        }

        void Update()
        {
            var subsystem = GetSubsystem();
            if (subsystem == null)
                return;

            // Check for new collaboration data
            while (subsystem.collaborationDataCount > 0)
            {
                using (var collaborationData = subsystem.DequeueCollaborationData())
                {
                    CollaborationNetworkingIndicator.NotifyHasCollaborationData();

                    if (m_MCSession.ConnectedPeerCount == 0)
                        continue;

                    using (var serializedData = collaborationData.ToSerialized())
                    using (var data = NSData.CreateWithBytesNoCopy(serializedData.bytes))
                    {
                        m_MCSession.SendToAllPeers(data, collaborationData.priority == ARCollaborationDataPriority.Critical
                            ? MCSessionSendDataMode.Reliable
                            : MCSessionSendDataMode.Unreliable);

                        CollaborationNetworkingIndicator.NotifyOutgoingDataSent();

                        // Only log 'critical' data, as 'optional' data tends to come every frame
                        if (collaborationData.priority == ARCollaborationDataPriority.Critical)
                        {
                            Logger.Log($"Sent {data.Length} bytes of collaboration data.");
                        }
                    }
                }
            }

            // Check for incoming data
            while (m_MCSession.ReceivedDataQueueSize > 0)
            {
                CollaborationNetworkingIndicator.NotifyIncomingDataReceived();

                using (var data = m_MCSession.DequeueReceivedData())
                using (var collaborationData = new ARCollaborationData(data.Bytes))
                {
                    if (collaborationData.valid)
                    {
                        subsystem.UpdateWithCollaborationData(collaborationData);
                        if (collaborationData.priority == ARCollaborationDataPriority.Critical)
                        {
                            Logger.Log($"Received {data.Bytes.Length} bytes of collaboration data.");
                        }
                    }
                    else
                    {
                        Logger.Log($"Received {data.Bytes.Length} bytes from remote, but the collaboration data was not valid.");
                    }
                }
            }
        }

        void OnDestroy()
        {
            m_MCSession.Dispose();
        }
#endif
    }
}

Assets/Scenes/ARCollaborationData/DisplayTrackingState.cs (57 changes)


using UnityEngine.UI;
using UnityEngine.XR.ARFoundation;

namespace UnityEngine.XR.ARFoundation.Samples
{
    /// <summary>
    /// When relocalizing with ARCollaborationData or ARWorldMaps, the tracking state
    /// should change to TrackingState.Limited until the device has successfully
    /// relocalized to the new data. If it remains TrackingState.Tracking, then
    /// it is not working.
    /// </summary>
    [RequireComponent(typeof(ARSession))]
    public class DisplayTrackingState : MonoBehaviour
    {
        [SerializeField]
        Text m_Text;

        public Text text
        {
            get { return m_Text; }
            set { m_Text = value; }
        }

        ARSession m_Session;

        void Start()
        {
            m_Session = GetComponent<ARSession>();
        }

        void Update()
        {
            if (text != null)
            {
                text.text = $"Session ID = {m_Session.subsystem.sessionId}\n" +
                    $"Session state = {ARSession.state}\n" +
                    $"Tracking state = {m_Session.subsystem.trackingState}";
            }
        }
    }
}

Assets/Scenes/ARKitCoachingOverlay/ARKitCoachingOverlay.cs (243 changes)


using System;
#if UNITY_IOS
using UnityEngine.XR.ARKit;
#endif

namespace UnityEngine.XR.ARFoundation.Samples
{
    /// <summary>
    /// This example shows how to activate the [ARCoachingOverlayView](https://developer.apple.com/documentation/arkit/arcoachingoverlayview)
    /// </summary>
    [RequireComponent(typeof(ARSession))]
    public class ARKitCoachingOverlay : MonoBehaviour
    {
        // Duplicate the ARCoachingGoal enum so that we can use it on a serialized field
        enum CoachingGoal
        {
            Tracking,
            HorizontalPlane,
            VerticalPlane,
            AnyPlane
        }

        [SerializeField]
        [Tooltip("The coaching goal associated with the coaching overlay.")]
#if !UNITY_IOS
#pragma warning disable CS0414
#endif
        CoachingGoal m_Goal = CoachingGoal.Tracking;
#if !UNITY_IOS
#pragma warning restore CS0414
#endif

#if UNITY_IOS
        /// <summary>
        /// The [ARCoachingGoal](https://developer.apple.com/documentation/arkit/arcoachinggoal) associated with the coaching overlay
        /// </summary>
        public ARCoachingGoal goal
        {
            get
            {
                if (GetComponent<ARSession>().subsystem is ARKitSessionSubsystem sessionSubsystem)
                {
                    return sessionSubsystem.requestedCoachingGoal;
                }
                else
                {
                    return (ARCoachingGoal)m_Goal;
                }
            }
            set
            {
                m_Goal = (CoachingGoal)value;
                if (supported && GetComponent<ARSession>().subsystem is ARKitSessionSubsystem sessionSubsystem)
                {
                    sessionSubsystem.requestedCoachingGoal = value;
                }
            }
        }
#endif

        [SerializeField]
        [Tooltip("Whether the coaching overlay activates automatically.")]
        bool m_ActivatesAutomatically = true;

        /// <summary>
        /// Whether the coaching overlay activates automatically
        /// </summary>
        public bool activatesAutomatically
        {
            get
            {
#if UNITY_IOS
                if (supported && GetComponent<ARSession>().subsystem is ARKitSessionSubsystem sessionSubsystem)
                {
                    return sessionSubsystem.coachingActivatesAutomatically;
                }
#endif
                return m_ActivatesAutomatically;
            }
            set
            {
                m_ActivatesAutomatically = value;
#if UNITY_IOS
                if (supported && GetComponent<ARSession>().subsystem is ARKitSessionSubsystem sessionSubsystem)
                {
                    sessionSubsystem.coachingActivatesAutomatically = value;
                }
#endif
            }
        }

        /// <summary>
        /// Whether the [ARCoachingGoal](https://developer.apple.com/documentation/arkit/arcoachinggoal) is supported.
        /// </summary>
        public bool supported
        {
            get
            {
#if UNITY_IOS
                return ARKitSessionSubsystem.coachingOverlaySupported;
#else
                return false;
#endif
            }
        }

        void OnEnable()
        {
#if UNITY_IOS
            if (supported && GetComponent<ARSession>().subsystem is ARKitSessionSubsystem sessionSubsystem)
            {
                sessionSubsystem.requestedCoachingGoal = (ARCoachingGoal)m_Goal;
                sessionSubsystem.coachingActivatesAutomatically = m_ActivatesAutomatically;
            }
            else
#endif
            {
                Debug.LogError("ARCoachingOverlayView is not supported by this device.");
            }
        }

        /// <summary>
        /// Activates the [ARCoachingGoal](https://developer.apple.com/documentation/arkit/arcoachinggoal)
        /// </summary>
        /// <param name="animated">If <c>true</c>, the coaching overlay is animated, e.g. fades in. If <c>false</c>, the coaching overlay appears instantly, without any transition.</param>
        public void ActivateCoaching(bool animated)
        {
#if UNITY_IOS
            if (supported && GetComponent<ARSession>().subsystem is ARKitSessionSubsystem sessionSubsystem)
            {
                sessionSubsystem.SetCoachingActive(true, animated ? ARCoachingOverlayTransition.Animated : ARCoachingOverlayTransition.Instant);
            }
            else
#endif
            {
                throw new NotSupportedException("ARCoachingOverlay is not supported");
            }
        }

        /// <summary>
        /// Disables the [ARCoachingGoal](https://developer.apple.com/documentation/arkit/arcoachinggoal)
        /// </summary>
        /// <param name="animated">If <c>true</c>, the coaching overlay is animated, e.g. fades out. If <c>false</c>, the coaching overlay disappears instantly, without any transition.</param>
        public void DisableCoaching(bool animated)
        {
#if UNITY_IOS
            if (supported && GetComponent<ARSession>().subsystem is ARKitSessionSubsystem sessionSubsystem)
            {
                sessionSubsystem.SetCoachingActive(false, animated ? ARCoachingOverlayTransition.Animated : ARCoachingOverlayTransition.Instant);
            }
            else
#endif
            {
                throw new NotSupportedException("ARCoachingOverlay is not supported");
            }
        }
    }
}
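A minimal usage sketch, assuming this component sits on the ARSession object; CoachingButton, m_Overlay, and ShowCoaching are illustrative names, not part of the sample:

using UnityEngine;
using UnityEngine.XR.ARFoundation.Samples;

public class CoachingButton : MonoBehaviour
{
    [SerializeField]
    ARKitCoachingOverlay m_Overlay;

    // Wire this to a UI Button's OnClick to re-show the overlay on demand,
    // e.g. when asking the user to relocalize.
    public void ShowCoaching()
    {
        if (m_Overlay.supported)
            m_Overlay.ActivateCoaching(animated: true);
    }
}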

Assets/Scenes/CameraGrain/CameraGrain.cs (53 changes)


using UnityEngine;
using UnityEngine.XR.ARFoundation;

namespace UnityEngine.XR.ARFoundation.Samples
{
    public class CameraGrain : MonoBehaviour
    {
        [SerializeField]
        ARCameraManager m_CameraManager;

        public ARCameraManager cameraManager
        {
            get { return m_CameraManager; }
            set { m_CameraManager = value; }
        }

        Renderer m_Renderer;

        void Start()
        {
            if (m_CameraManager == null)
            {
                m_CameraManager = FindObjectOfType(typeof(ARCameraManager)) as ARCameraManager;
            }

            m_Renderer = GetComponent<Renderer>();
            m_CameraManager.frameReceived += OnReceivedFrame;
        }

        void OnDisable()
        {
            m_CameraManager.frameReceived -= OnReceivedFrame;
        }

        void OnReceivedFrame(ARCameraFrameEventArgs eventArgs)
        {
            if (m_Renderer != null && eventArgs.cameraGrainTexture != null)
            {
                m_Renderer.material.SetTexture("_NoiseTex", eventArgs.cameraGrainTexture);
                m_Renderer.material.SetFloat("_NoiseIntensity", eventArgs.noiseIntensity);
            }
        }
    }
}

Assets/Scenes/FaceTracking/DisplayFaceInfo.cs (235 changes)


using System.Text;
using Unity.Collections;
using UnityEngine.UI;
using UnityEngine.XR.ARFoundation;
using UnityEngine.XR.ARSubsystems;

namespace UnityEngine.XR.ARFoundation.Samples
{
    [RequireComponent(typeof(ARSession))]
    [RequireComponent(typeof(ARFaceManager))]
    [RequireComponent(typeof(ARSessionOrigin))]
    public class DisplayFaceInfo : MonoBehaviour
    {
        [SerializeField]
        Text m_FaceInfoText;

        public Text faceInfoText
        {
            get => m_FaceInfoText;
            set => m_FaceInfoText = value;
        }

        [SerializeField]
        Text m_InstructionsText;

        public Text instructionsText
        {
            get => m_InstructionsText;
            set => m_InstructionsText = value;
        }

        [SerializeField]
        GameObject m_NotSupportedElement;

        public GameObject notSupportedElement
        {
            get => m_NotSupportedElement;
            set => m_NotSupportedElement = value;
        }

        [SerializeField]
        [Tooltip("An object whose rotation will be set according to the tracked face.")]
        Transform m_FaceControlledObject;

        public Transform faceControlledObject
        {
            get => m_FaceControlledObject;
            set => m_FaceControlledObject = value;
        }

        ARSession m_Session;
        ARFaceManager m_FaceManager;
        ARCameraManager m_CameraManager;

        StringBuilder m_Info = new StringBuilder();

        bool m_FaceTrackingSupported;
        bool m_FaceTrackingWithWorldCameraSupported;

        void Awake()
        {
            m_FaceManager = GetComponent<ARFaceManager>();
            m_Session = GetComponent<ARSession>();
            m_CameraManager = GetComponent<ARSessionOrigin>().camera?.GetComponent<ARCameraManager>();
        }

        void OnEnable()
        {
            Application.onBeforeRender += OnBeforeRender;

            // Detect face tracking with world-facing camera support
            var subsystem = m_Session?.subsystem;
            if (subsystem != null)
            {
                var configs = subsystem.GetConfigurationDescriptors(Allocator.Temp);
                if (configs.IsCreated)
                {
                    using (configs)
                    {
                        foreach (var config in configs)
                        {
                            if (config.capabilities.All(Feature.FaceTracking))
                            {
                                m_FaceTrackingSupported = true;
                            }

                            if (config.capabilities.All(Feature.WorldFacingCamera | Feature.FaceTracking))
                            {
                                m_FaceTrackingWithWorldCameraSupported = true;
                            }
                        }
                    }
                }
            }
        }

        void OnDisable()
        {
            Application.onBeforeRender -= OnBeforeRender;
        }

        void OnBeforeRender()
        {
            if (m_FaceControlledObject == null)
                return;

            foreach (var face in m_FaceManager.trackables)
            {
                if (face.trackingState == TrackingState.Tracking)
                {
                    m_FaceControlledObject.transform.rotation = face.transform.rotation;
                    var camera = m_CameraManager.GetComponent<Camera>();
                    m_FaceControlledObject.transform.position = camera.transform.position + camera.transform.forward * 0.5f;
                }
            }
        }

        void Update()
        {
            m_Info.Clear();

            if (m_FaceManager.subsystem != null)
            {
                m_Info.Append($"Supported number of tracked faces: {m_FaceManager.supportedFaceCount}\n");
                m_Info.Append($"Max number of faces to track: {m_FaceManager.currentMaximumFaceCount}\n");
                m_Info.Append($"Number of tracked faces: {m_FaceManager.trackables.count}\n");
            }

            if (m_CameraManager)
            {
                m_Info.Append($"Requested camera facing direction: {m_CameraManager.requestedFacingDirection}\n");
                m_Info.Append($"Current camera facing direction: {m_CameraManager.currentFacingDirection}\n");
            }

            m_Info.Append($"Requested tracking mode: {m_Session.requestedTrackingMode}\n");
            m_Info.Append($"Current tracking mode: {m_Session.currentTrackingMode}\n");

            if (!m_FaceTrackingSupported)
            {
                if (m_InstructionsText)
                {
                    m_InstructionsText.text = "Face tracking is not supported.\n";
                }
                else
                {
                    m_Info.Append("Face tracking is not supported.\n");
                }
            }
            else if (m_CameraManager.requestedFacingDirection == CameraFacingDirection.World && !m_FaceTrackingWithWorldCameraSupported)
            {
                m_Info.Append("Face tracking in world facing camera mode is not supported.\n");
            }

            if (m_FaceControlledObject)
            {
                m_FaceControlledObject.gameObject.SetActive(m_CameraManager.currentFacingDirection == CameraFacingDirection.World);
            }

            if (m_NotSupportedElement)
            {
                m_NotSupportedElement.SetActive(m_CameraManager.requestedFacingDirection == CameraFacingDirection.World && !m_FaceTrackingWithWorldCameraSupported);
            }

            if (m_FaceInfoText)
            {
                m_FaceInfoText.text = m_Info.ToString();
            }
        }
    }
}
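The capability checks above rely on Feature being a flags enum: capabilities.All(x) succeeds only when every bit of x is present. A standalone sketch of the same test (FeatureChecks and the method name are illustrative):

using UnityEngine.XR.ARSubsystems;

static class FeatureChecks
{
    // True when the configuration supports face tracking while the
    // world-facing camera is active, i.e. both bits are present.
    public static bool SupportsWorldFacingFaceTracking(Feature capabilities)
    {
        var required = Feature.WorldFacingCamera | Feature.FaceTracking;
        return (capabilities & required) == required;
    }
}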

Assets/Scenes/FaceTracking/EyeLasers.unity (2 changes)


m_LightEstimationMode: -1
m_AutoFocus: 1
m_LightEstimation: 0
m_FacingDirection: 1 (before)
m_FacingDirection: 2 (after)
--- !u!1 &393820500
GameObject:
  m_ObjectHideFlags: 0

Assets/Scenes/FaceTracking/ToggleCameraFacingDirection.cs (57 changes)


using UnityEngine;
using UnityEngine.XR.ARFoundation;

namespace UnityEngine.XR.ARFoundation.Samples
{
    public class ToggleCameraFacingDirection : MonoBehaviour
    {
        [SerializeField]
        ARCameraManager m_CameraManager;

        public ARCameraManager cameraManager
        {
            get => m_CameraManager;
            set => m_CameraManager = value;
        }

        [SerializeField]
        ARSession m_Session;

        public ARSession session
        {
            get => m_Session;
            set => m_Session = value;
        }

        void Update()
        {
            if (m_CameraManager == null || m_Session == null)
                return;

            if (Input.touchCount > 0 && Input.touches[0].phase == TouchPhase.Began)
            {
                if (m_CameraManager.requestedFacingDirection == CameraFacingDirection.User)
                {
                    m_CameraManager.requestedFacingDirection = CameraFacingDirection.World;
                }
                else
                {
                    m_CameraManager.requestedFacingDirection = CameraFacingDirection.User;
                }
            }
        }
    }
}

Assets/Scenes/ImageTracking/DynamicLibrary.cs (301 changes)


using System;
using System.Text;
using Unity.Jobs;
using UnityEngine.XR.ARFoundation;
using UnityEngine.XR.ARSubsystems;

namespace UnityEngine.XR.ARFoundation.Samples
{
    /// <summary>
    /// Adds images to the reference library at runtime.
    /// </summary>
    [RequireComponent(typeof(ARTrackedImageManager))]
    public class DynamicLibrary : MonoBehaviour
    {
        [Serializable]
        public class ImageData
        {
            [SerializeField, Tooltip("The source texture for the image. Must be marked as readable.")]
            Texture2D m_Texture;

            public Texture2D texture
            {
                get => m_Texture;
                set => m_Texture = value;
            }

            [SerializeField, Tooltip("The name for this image.")]
            string m_Name;

            public string name
            {
                get => m_Name;
                set => m_Name = value;
            }

            [SerializeField, Tooltip("The width, in meters, of the image in the real world.")]
            float m_Width;

            public float width
            {
                get => m_Width;
                set => m_Width = value;
            }

            public JobHandle jobHandle { get; set; }
        }

        [SerializeField, Tooltip("The set of images to add to the image library at runtime")]
        ImageData[] m_Images;

        /// <summary>
        /// The set of images to add to the image library at runtime
        /// </summary>
        public ImageData[] images
        {
            get => m_Images;
            set => m_Images = value;
        }

        enum State
        {
            NoImagesAdded,
            AddImagesRequested,
            AddingImages,
            Done,
            Error
        }

        State m_State;

        string m_ErrorMessage = "";

        StringBuilder m_StringBuilder = new StringBuilder();

        void OnGUI()
        {
            var fontSize = 50;
            GUI.skin.button.fontSize = fontSize;
            GUI.skin.label.fontSize = fontSize;

            float margin = 50;

            GUILayout.BeginArea(new Rect(margin, margin, Screen.width - margin * 2, Screen.height - margin * 2));

            switch (m_State)
            {
                case State.NoImagesAdded:
                {
                    if (GUILayout.Button("Add images"))
                    {
                        m_State = State.AddImagesRequested;
                    }

                    break;
                }
                case State.AddingImages:
                {
                    m_StringBuilder.Clear();
                    m_StringBuilder.AppendLine("Add image status:");
                    foreach (var image in m_Images)
                    {
                        m_StringBuilder.AppendLine($"\t{image.name}: {(image.jobHandle.IsCompleted ? "done" : "pending")}");
                    }
                    GUILayout.Label(m_StringBuilder.ToString());
                    break;
                }
                case State.Done:
                {
                    GUILayout.Label("All images added");
                    break;
                }
                case State.Error:
                {
                    GUILayout.Label(m_ErrorMessage);
                    break;
                }
            }

            GUILayout.EndArea();
        }

        void SetError(string errorMessage)
        {
            m_State = State.Error;
            m_ErrorMessage = $"Error: {errorMessage}";
        }

        void Update()
        {
            switch (m_State)
            {
                case State.AddImagesRequested:
                {
                    if (m_Images == null)
                    {
                        SetError("No images to add.");
                        break;
                    }

                    var manager = GetComponent<ARTrackedImageManager>();
                    if (manager == null)
                    {
                        SetError($"No {nameof(ARTrackedImageManager)} available.");
                        break;
                    }

                    // You can either add raw image bytes or use the extension method (used below) which accepts
                    // a texture. To use a texture, however, its import settings must have enabled read/write
                    // access to the texture.
                    foreach (var image in m_Images)
                    {
                        if (!image.texture.isReadable)
                        {
                            SetError($"Image {image.name} must be readable to be added to the image library.");
                            break;
                        }
                    }

                    if (manager.referenceLibrary is MutableRuntimeReferenceImageLibrary mutableLibrary)
                    {
                        try
                        {
                            foreach (var image in m_Images)
                            {
                                // Note: You do not need to do anything with the returned JobHandle, but it can be
                                // useful if you want to know when the image has been added to the library since it may
                                // take several frames.
                                image.jobHandle = mutableLibrary.ScheduleAddImageJob(image.texture, image.name, image.width);
                            }

                            m_State = State.AddingImages;
                        }
                        catch (InvalidOperationException e)
                        {
                            SetError($"ScheduleAddImageJob threw exception: {e.Message}");
                        }
                    }
                    else
                    {
                        SetError($"The reference image library is not mutable.");
                    }

                    break;
                }
                case State.AddingImages:
                {
                    // Check for completion
                    var done = true;
                    foreach (var image in m_Images)
                    {
                        if (!image.jobHandle.IsCompleted)
                        {
                            done = false;
                            break;
                        }
                    }

                    if (done)
                    {
                        m_State = State.Done;
                    }

                    break;
                }
            }
        }
    }
}
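Since ScheduleAddImageJob returns a JobHandle, completion can also be awaited from a coroutine instead of polling in Update. A minimal sketch, assuming the public ImageData type above (ImageAddWatcher and WaitForImage are illustrative names):

using System.Collections;
using UnityEngine;
using UnityEngine.XR.ARFoundation.Samples;

class ImageAddWatcher : MonoBehaviour
{
    // Yield until the add-image job completes, then report it.
    public IEnumerator WaitForImage(DynamicLibrary.ImageData image)
    {
        while (!image.jobHandle.IsCompleted)
            yield return null;

        Debug.Log($"{image.name} added to the reference image library.");
    }
}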

Assets/Scenes/ImageTracking/TrackedImageInfoManager.cs (167 changes)


using UnityEngine.UI;
using UnityEngine.XR.ARSubsystems;
using UnityEngine.XR.ARFoundation;

namespace UnityEngine.XR.ARFoundation.Samples
{
    /// <summary>
    /// This component listens for images detected by the <c>XRImageTrackingSubsystem</c>
    /// and overlays some information as well as the source Texture2D on top of the
    /// detected image.
    /// </summary>
    [RequireComponent(typeof(ARTrackedImageManager))]
    public class TrackedImageInfoManager : MonoBehaviour
    {
        [SerializeField]
        [Tooltip("The camera to set on the world space UI canvas for each instantiated image info.")]
        Camera m_WorldSpaceCanvasCamera;

        /// <summary>
        /// The prefab has a world space UI canvas,
        /// which requires a camera to function properly.
        /// </summary>
        public Camera worldSpaceCanvasCamera
        {
            get { return m_WorldSpaceCanvasCamera; }
            set { m_WorldSpaceCanvasCamera = value; }
        }

        [SerializeField]
        [Tooltip("If an image is detected but no source texture can be found, this texture is used instead.")]
        Texture2D m_DefaultTexture;

        /// <summary>
        /// If an image is detected but no source texture can be found,
        /// this texture is used instead.
        /// </summary>
        public Texture2D defaultTexture
        {
            get { return m_DefaultTexture; }
            set { m_DefaultTexture = value; }
        }

        ARTrackedImageManager m_TrackedImageManager;

        void Awake()
        {
            m_TrackedImageManager = GetComponent<ARTrackedImageManager>();
        }

        void OnEnable()
        {
            m_TrackedImageManager.trackedImagesChanged += OnTrackedImagesChanged;
        }

        void OnDisable()
        {
            m_TrackedImageManager.trackedImagesChanged -= OnTrackedImagesChanged;
        }

        void UpdateInfo(ARTrackedImage trackedImage)
        {
            // Set canvas camera
            var canvas = trackedImage.GetComponentInChildren<Canvas>();
            canvas.worldCamera = worldSpaceCanvasCamera;

            // Update information about the tracked image
            var text = canvas.GetComponentInChildren<Text>();
            text.text = string.Format(
                "{0}\ntrackingState: {1}\nGUID: {2}\nReference size: {3} cm\nDetected size: {4} cm",
                trackedImage.referenceImage.name,
                trackedImage.trackingState,
                trackedImage.referenceImage.guid,
                trackedImage.referenceImage.size * 100f,
                trackedImage.size * 100f);

            var planeParentGo = trackedImage.transform.GetChild(0).gameObject;
            var planeGo = planeParentGo.transform.GetChild(0).gameObject;

            // Disable the visual plane if it is not being tracked
            if (trackedImage.trackingState != TrackingState.None)
            {
                planeGo.SetActive(true);

                // The image extents are only valid when the image is being tracked
                trackedImage.transform.localScale = new Vector3(trackedImage.size.x, 1f, trackedImage.size.y);

                // Set the texture
                var material = planeGo.GetComponentInChildren<MeshRenderer>().material;
                material.mainTexture = (trackedImage.referenceImage.texture == null) ? defaultTexture : trackedImage.referenceImage.texture;
            }
            else
            {
                planeGo.SetActive(false);
            }
        }

        void OnTrackedImagesChanged(ARTrackedImagesChangedEventArgs eventArgs)
        {
            foreach (var trackedImage in eventArgs.added)
            {
                // Give the initial image a reasonable default scale
                trackedImage.transform.localScale = new Vector3(0.01f, 1f, 0.01f);

                UpdateInfo(trackedImage);
            }

            foreach (var trackedImage in eventArgs.updated)
                UpdateInfo(trackedImage);
        }
    }
}

Assets/Scenes/LightEstimation/PlatformSelector.cs (75 changes)


using UnityEngine;
using UnityEngine.XR.ARFoundation;

namespace UnityEngine.XR.ARFoundation.Samples
{
    /// <summary>
    /// On ARKit, HDR light estimation only works in face tracking mode.
    /// On ARCore, HDR light estimation only works when NOT in face tracking mode.
    /// This script enables face tracking on iOS and disables it otherwise.
    /// </summary>
    [RequireComponent(typeof(ARSessionOrigin))]
    [RequireComponent(typeof(ARFaceManager))]
    public class PlatformSelector : MonoBehaviour
    {
        [SerializeField]
        GameObject m_WorldSpaceObject;

        public GameObject worldSpaceObject
        {
            get => m_WorldSpaceObject;
            set => m_WorldSpaceObject = value;
        }

        void OnEnable()
        {
#if UNITY_IOS
            GetComponent<ARFaceManager>().enabled = true;
#else
            GetComponent<ARFaceManager>().enabled = false;
            worldSpaceObject?.SetActive(true);
            Application.onBeforeRender += OnBeforeRender;
#endif
        }

        void OnDisable()
        {
            GetComponent<ARFaceManager>().enabled = false;
            Application.onBeforeRender -= OnBeforeRender;
        }

        void OnBeforeRender()
        {
            var camera = GetComponent<ARSessionOrigin>().camera;
            if (camera && worldSpaceObject)
            {
                worldSpaceObject.transform.position = camera.transform.position + camera.transform.forward;
            }
        }
    }
}

Assets/Scenes/LightEstimation/Rotator.cs (17 changes)


using System.Collections.Generic;
using UnityEngine;

namespace UnityEngine.XR.ARFoundation.Samples
{
    public class Rotator : MonoBehaviour
    {
        float m_Angle;

        void Update()
        {
            m_Angle += Time.deltaTime * 10f;
            transform.rotation = Quaternion.Euler(m_Angle, m_Angle, m_Angle);
        }
    }
}

Assets/Scenes/Meshing/Scripts/MeshClassificationFracking.cs (487 changes)


using System;
using System.Collections.Generic;
using Unity.Collections;
using UnityEngine.XR.ARSubsystems;
#if UNITY_IOS && !UNITY_EDITOR
using UnityEngine.XR.ARKit;
#endif
using Object = UnityEngine.Object;

namespace UnityEngine.XR.ARFoundation.Samples
{
    public class MeshClassificationFracking : MonoBehaviour
    {
        /// <summary>
        /// The number of mesh classifications detected.
        /// </summary>
        const int k_NumClassifications = 8;

        /// <summary>
        /// The mesh manager for the scene.
        /// </summary>
        public ARMeshManager m_MeshManager;

        /// <summary>
        /// The mesh prefab for the None classification.
        /// </summary>
        public MeshFilter m_NoneMeshPrefab;

        /// <summary>
        /// The mesh prefab for the Wall classification.
        /// </summary>
        public MeshFilter m_WallMeshPrefab;

        /// <summary>
        /// The mesh prefab for the Floor classification.
        /// </summary>
        public MeshFilter m_FloorMeshPrefab;

        /// <summary>
        /// The mesh prefab for the Ceiling classification.
        /// </summary>
        public MeshFilter m_CeilingMeshPrefab;

        /// <summary>
        /// The mesh prefab for the Table classification.
        /// </summary>
        public MeshFilter m_TableMeshPrefab;

        /// <summary>
        /// The mesh prefab for the Seat classification.
        /// </summary>
        public MeshFilter m_SeatMeshPrefab;

        /// <summary>
        /// The mesh prefab for the Window classification.
        /// </summary>
        public MeshFilter m_WindowMeshPrefab;

        /// <summary>
        /// The mesh prefab for the Door classification.
        /// </summary>
        public MeshFilter m_DoorMeshPrefab;

#if UNITY_IOS && !UNITY_EDITOR
        /// <summary>
        /// A mapping from tracking ID to instantiated mesh filters.
        /// </summary>
        readonly Dictionary<TrackableId, MeshFilter[]> m_MeshFrackingMap = new Dictionary<TrackableId, MeshFilter[]>();

        /// <summary>
        /// The delegate to call to break up a mesh.
        /// </summary>
        Action<MeshFilter> m_BreakupMeshAction;

        /// <summary>
        /// The delegate to call to update a mesh.
        /// </summary>
        Action<MeshFilter> m_UpdateMeshAction;

        /// <summary>
        /// The delegate to call to remove a mesh.
        /// </summary>
        Action<MeshFilter> m_RemoveMeshAction;

        /// <summary>
        /// An array to store the triangle vertices of the base mesh.
        /// </summary>
        readonly List<int> m_BaseTriangles = new List<int>();

        /// <summary>
        /// An array to store the triangle vertices of the classified mesh.
        /// </summary>
        readonly List<int> m_ClassifiedTriangles = new List<int>();

        /// <summary>
        /// On awake, set up the mesh filter delegates.
        /// </summary>
        void Awake()
        {
            m_BreakupMeshAction = new Action<MeshFilter>(BreakupMesh);
            m_UpdateMeshAction = new Action<MeshFilter>(UpdateMesh);
            m_RemoveMeshAction = new Action<MeshFilter>(RemoveMesh);
        }

        /// <summary>
        /// On enable, subscribe to the meshes changed event.
        /// </summary>
        void OnEnable()
        {
            Debug.Assert(m_MeshManager != null, "mesh manager cannot be null");
            m_MeshManager.meshesChanged += OnMeshesChanged;
        }

        /// <summary>
        /// On disable, unsubscribe from the meshes changed event.
        /// </summary>
        void OnDisable()
        {
            Debug.Assert(m_MeshManager != null, "mesh manager cannot be null");
            m_MeshManager.meshesChanged -= OnMeshesChanged;
        }

        /// <summary>
        /// When the meshes change, update the scene meshes.
        /// </summary>
        void OnMeshesChanged(ARMeshesChangedEventArgs args)
        {
            if (args.added != null)
            {
                args.added.ForEach(m_BreakupMeshAction);
            }

            if (args.updated != null)
            {
                args.updated.ForEach(m_UpdateMeshAction);
            }

            if (args.removed != null)
            {
                args.removed.ForEach(m_RemoveMeshAction);
            }
        }

        /// <summary>
        /// Parse the trackable ID from the mesh filter name.
        /// </summary>
        /// <param name="meshFilterName">The mesh filter name containing the trackable ID.</param>
        /// <returns>
        /// The trackable ID parsed from the string.
        /// </returns>
        TrackableId ExtractTrackableId(string meshFilterName)
        {
            string[] nameSplit = meshFilterName.Split(' ');
            return new TrackableId(nameSplit[1]);
        }

        /// <summary>
        /// Given a base mesh, the face classifications for all faces in the mesh, and a single face classification to
        /// extract, extract into a new mesh only the faces that have the selected face classification.
        /// </summary>
        /// <param name="baseMesh">The original base mesh.</param>
        /// <param name="faceClassifications">The array of face classifications for each triangle in the
        /// <paramref name="baseMesh"/></param>
        /// <param name="selectedMeshClassification">A single classification to extract the faces from the
        /// <paramref name="baseMesh"/> into the <paramref name="classifiedMesh"/></param>
        /// <param name="classifiedMesh">The output mesh to be updated with the extracted mesh.</param>
        void ExtractClassifiedMesh(Mesh baseMesh, NativeArray<ARMeshClassification> faceClassifications, ARMeshClassification selectedMeshClassification, Mesh classifiedMesh)
        {
            // Count the number of faces matching the selected classification.
            int classifiedFaceCount = 0;
            for (int i = 0; i < faceClassifications.Length; ++i)
            {
                if (faceClassifications[i] == selectedMeshClassification)
                {
                    ++classifiedFaceCount;
                }
            }

            // Clear the existing mesh.
classifiedMesh.Clear();
/// <summary>
/// Parse the trackable ID from the mesh filter name.
/// </summary>
/// <param name="meshFilterName">The mesh filter name containing the trackable ID.</param>
/// <returns>
/// The trackable ID parsed from the string.
/// </returns>
TrackableId ExtractTrackableId(string meshFilterName)
{
string[] nameSplit = meshFilterName.Split(' ');
return new TrackableId(nameSplit[1]);
}
// If there were matching face classifications, build a new mesh from the base mesh.
if (classifiedFaceCount > 0)
/// <summary>
/// Given a base mesh, the face classifications for all faces in the mesh, and a single face classification to
/// extract, extract into a new mesh only the faces that have the selected face classification.
/// </summary>
/// <param name="baseMesh">The original base mesh.</param>
/// <param name="faceClassifications">The array of face classifications for each triangle in the
/// <paramref name="baseMesh"/></param>
/// <param name="selectedMeshClassification">A single classification to extract the faces from the
/// <paramref="baseMesh"/>into the <paramref name="classifiedMesh"/></param>
/// <param name="classifiedMesh">The output mesh to be updated with the extracted mesh.</param>
void ExtractClassifiedMesh(Mesh baseMesh, NativeArray<ARMeshClassification> faceClassifications, ARMeshClassification selectedMeshClassification, Mesh classifiedMesh)
baseMesh.GetTriangles(m_BaseTriangles, 0);
Debug.Assert(m_BaseTriangles.Count == (faceClassifications.Length * 3),
"unexpected mismatch between triangle count and face classification count");
m_ClassifiedTriangles.Clear();
m_ClassifiedTriangles.Capacity = classifiedFaceCount * 3;
// Count the number of faces matching the selected classification.
int classifiedFaceCount = 0;
int baseTriangleIndex = i * 3;
m_ClassifiedTriangles.Add(m_BaseTriangles[baseTriangleIndex + 0]);
m_ClassifiedTriangles.Add(m_BaseTriangles[baseTriangleIndex + 1]);
m_ClassifiedTriangles.Add(m_BaseTriangles[baseTriangleIndex + 2]);
++classifiedFaceCount;
classifiedMesh.vertices = baseMesh.vertices;
classifiedMesh.normals = baseMesh.normals;
classifiedMesh.SetTriangles(m_ClassifiedTriangles, 0);
}
// Clear the existing mesh.
classifiedMesh.Clear();
}
// If there were matching face classifications, build a new mesh from the base mesh.
if (classifiedFaceCount > 0)
{
baseMesh.GetTriangles(m_BaseTriangles, 0);
Debug.Assert(m_BaseTriangles.Count == (faceClassifications.Length * 3),
"unexpected mismatch between triangle count and face classification count");
/// <summary>
/// Break up a single mesh with multiple face classifications into submeshes, each with an unique and uniform mesh
/// classification.
/// </summary>
/// <param name="meshFilter">The mesh filter for the base mesh with multiple face classifications.</param>
void BreakupMesh(MeshFilter meshFilter)
{
XRMeshSubsystem meshSubsystem = m_MeshManager.subsystem as XRMeshSubsystem;
if (meshSubsystem == null)
{
return;
}
m_ClassifiedTriangles.Clear();
m_ClassifiedTriangles.Capacity = classifiedFaceCount * 3;
var meshId = ExtractTrackableId(meshFilter.name);
var faceClassifications = meshSubsystem.GetFaceClassifications(meshId, Allocator.Persistent);
for (int i = 0; i < faceClassifications.Length; ++i)
{
if (faceClassifications[i] == selectedMeshClassification)
{
int baseTriangleIndex = i * 3;
m_ClassifiedTriangles.Add(m_BaseTriangles[baseTriangleIndex + 0]);
m_ClassifiedTriangles.Add(m_BaseTriangles[baseTriangleIndex + 1]);
m_ClassifiedTriangles.Add(m_BaseTriangles[baseTriangleIndex + 2]);
}
}
classifiedMesh.vertices = baseMesh.vertices;
classifiedMesh.normals = baseMesh.normals;
classifiedMesh.SetTriangles(m_ClassifiedTriangles, 0);
}
if (!faceClassifications.IsCreated)
{
return;
using (faceClassifications)
/// <summary>
/// Break up a single mesh with multiple face classifications into submeshes, each with an unique and uniform mesh
/// classification.
/// </summary>
/// <param name="meshFilter">The mesh filter for the base mesh with multiple face classifications.</param>
void BreakupMesh(MeshFilter meshFilter)
if (faceClassifications.Length <= 0)
XRMeshSubsystem meshSubsystem = m_MeshManager.subsystem as XRMeshSubsystem;
if (meshSubsystem == null)
var parent = meshFilter.transform.parent;
var meshId = ExtractTrackableId(meshFilter.name);
var faceClassifications = meshSubsystem.GetFaceClassifications(meshId, Allocator.Persistent);
MeshFilter[] meshFilters = new MeshFilter[k_NumClassifications];
if (!faceClassifications.IsCreated)
{
return;
}
meshFilters[(int)ARMeshClassification.None] = (m_NoneMeshPrefab == null) ? null : Instantiate(m_NoneMeshPrefab, parent);
meshFilters[(int)ARMeshClassification.Wall] = (m_WallMeshPrefab == null) ? null : Instantiate(m_WallMeshPrefab, parent);
meshFilters[(int)ARMeshClassification.Floor] = (m_FloorMeshPrefab == null) ? null : Instantiate(m_FloorMeshPrefab, parent);
meshFilters[(int)ARMeshClassification.Ceiling] = (m_CeilingMeshPrefab == null) ? null : Instantiate(m_CeilingMeshPrefab, parent);
meshFilters[(int)ARMeshClassification.Table] = (m_TableMeshPrefab == null) ? null : Instantiate(m_TableMeshPrefab, parent);
meshFilters[(int)ARMeshClassification.Seat] = (m_SeatMeshPrefab == null) ? null : Instantiate(m_SeatMeshPrefab, parent);
meshFilters[(int)ARMeshClassification.Window] = (m_WindowMeshPrefab == null) ? null : Instantiate(m_WindowMeshPrefab, parent);
meshFilters[(int)ARMeshClassification.Door] = (m_DoorMeshPrefab == null) ? null : Instantiate(m_DoorMeshPrefab, parent);
using (faceClassifications)
{
if (faceClassifications.Length <= 0)
{
return;
}
var parent = meshFilter.transform.parent;
MeshFilter[] meshFilters = new MeshFilter[k_NumClassifications];
meshFilters[(int)ARMeshClassification.None] = (m_NoneMeshPrefab == null) ? null : Instantiate(m_NoneMeshPrefab, parent);
meshFilters[(int)ARMeshClassification.Wall] = (m_WallMeshPrefab == null) ? null : Instantiate(m_WallMeshPrefab, parent);
meshFilters[(int)ARMeshClassification.Floor] = (m_FloorMeshPrefab == null) ? null : Instantiate(m_FloorMeshPrefab, parent);
meshFilters[(int)ARMeshClassification.Ceiling] = (m_CeilingMeshPrefab == null) ? null : Instantiate(m_CeilingMeshPrefab, parent);
meshFilters[(int)ARMeshClassification.Table] = (m_TableMeshPrefab == null) ? null : Instantiate(m_TableMeshPrefab, parent);
meshFilters[(int)ARMeshClassification.Seat] = (m_SeatMeshPrefab == null) ? null : Instantiate(m_SeatMeshPrefab, parent);
meshFilters[(int)ARMeshClassification.Window] = (m_WindowMeshPrefab == null) ? null : Instantiate(m_WindowMeshPrefab, parent);
meshFilters[(int)ARMeshClassification.Door] = (m_DoorMeshPrefab == null) ? null : Instantiate(m_DoorMeshPrefab, parent);
m_MeshFrackingMap[meshId] = meshFilters;
m_MeshFrackingMap[meshId] = meshFilters;
var baseMesh = meshFilter.sharedMesh;
for (int i = 0; i < k_NumClassifications; ++i)
{
var classifiedMeshFilter = meshFilters[i];
if (classifiedMeshFilter != null)
var baseMesh = meshFilter.sharedMesh;
for (int i = 0; i < k_NumClassifications; ++i)
var classifiedMesh = classifiedMeshFilter.mesh;
ExtractClassifiedMesh(baseMesh, faceClassifications, (ARMeshClassification)i, classifiedMesh);
meshFilters[i].mesh = classifiedMesh;
var classifiedMeshFilter = meshFilters[i];
if (classifiedMeshFilter != null)
{
var classifiedMesh = classifiedMeshFilter.mesh;
ExtractClassifiedMesh(baseMesh, faceClassifications, (ARMeshClassification)i, classifiedMesh);
meshFilters[i].mesh = classifiedMesh;
}
}
/// <summary>
/// Update the submeshes corresponding to the single mesh with multiple face classifications into submeshes.
/// </summary>
/// <param name="meshFilter">The mesh filter for the base mesh with multiple face classifications.</param>
void UpdateMesh(MeshFilter meshFilter)
{
XRMeshSubsystem meshSubsystem = m_MeshManager.subsystem as XRMeshSubsystem;
if (meshSubsystem == null)
/// <summary>
/// Update the submeshes corresponding to the single mesh with multiple face classifications into submeshes.
/// </summary>
/// <param name="meshFilter">The mesh filter for the base mesh with multiple face classifications.</param>
void UpdateMesh(MeshFilter meshFilter)
return;
}
XRMeshSubsystem meshSubsystem = m_MeshManager.subsystem as XRMeshSubsystem;
if (meshSubsystem == null)
{
return;
}
var meshId = ExtractTrackableId(meshFilter.name);
var faceClassifications = meshSubsystem.GetFaceClassifications(meshId, Allocator.Persistent);
var meshId = ExtractTrackableId(meshFilter.name);
var faceClassifications = meshSubsystem.GetFaceClassifications(meshId, Allocator.Persistent);
if (!faceClassifications.IsCreated)
{
return;
}
if (!faceClassifications.IsCreated)
{
return;
}
using (faceClassifications)
{
if (faceClassifications.Length <= 0)
using (faceClassifications)
return;
if (faceClassifications.Length <= 0)
{
return;
}
var meshFilters = m_MeshFrackingMap[meshId];
var baseMesh = meshFilter.sharedMesh;
for (int i = 0; i < k_NumClassifications; ++i)
{
var classifiedMeshFilter = meshFilters[i];
if (classifiedMeshFilter != null)
{
var classifiedMesh = classifiedMeshFilter.mesh;
ExtractClassifiedMesh(baseMesh, faceClassifications, (ARMeshClassification)i, classifiedMesh);
meshFilters[i].mesh = classifiedMesh;
}
}
}
/// <summary>
/// Remove the submeshes corresponding to the single mesh.
/// </summary>
/// <param name="meshFilter">The mesh filter for the base mesh with multiple face classifications.</param>
void RemoveMesh(MeshFilter meshFilter)
{
var meshId = ExtractTrackableId(meshFilter.name);
var baseMesh = meshFilter.sharedMesh;
var classifiedMesh = classifiedMeshFilter.mesh;
ExtractClassifiedMesh(baseMesh, faceClassifications, (ARMeshClassification)i, classifiedMesh);
meshFilters[i].mesh = classifiedMesh;
Object.Destroy(classifiedMeshFilter);
}
}
/// <summary>
/// Remove the submeshes corresponding to the single mesh.
/// </summary>
/// <param name="meshFilter">The mesh filter for the base mesh with multiple face classifications.</param>
void RemoveMesh(MeshFilter meshFilter)
{
var meshId = ExtractTrackableId(meshFilter.name);
var meshFilters = m_MeshFrackingMap[meshId];
for (int i = 0; i < k_NumClassifications; ++i)
{
var classifiedMeshFilter = meshFilters[i];
if (classifiedMeshFilter != null)
{
Object.Destroy(classifiedMeshFilter);
}
m_MeshFrackingMap.Remove(meshId);
m_MeshFrackingMap.Remove(meshId);
#endif // UNITY_IOS && !UNITY_EDITOR
#endif // UNITY_IOS && !UNITY_EDITOR
}
}
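A note on the trackable-ID parsing above: ExtractTrackableId assumes each mesh filter's GameObject is named "Mesh <trackableId>", the convention ARMeshManager uses for the meshes it instantiates. A more defensive variant, shown here as a sketch (TryExtractTrackableId is a hypothetical helper, not part of the sample; it assumes the same usings as the file above):

// Hypothetical defensive variant of ExtractTrackableId. Relies on the same
// "Mesh <trackableId>" naming convention as the sample.
static bool TryExtractTrackableId(string meshFilterName, out TrackableId id)
{
    id = TrackableId.invalidId;
    if (string.IsNullOrEmpty(meshFilterName))
        return false;

    var parts = meshFilterName.Split(' ');
    if (parts.Length < 2)
        return false;

    // TrackableId(string) parses the "xxxxxxxxxxxxxxxx-xxxxxxxxxxxxxxxx" form;
    // it may still throw on a malformed token, so a try/catch could be added.
    id = new TrackableId(parts[1]);
    return id != TrackableId.invalidId;
}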

69
Assets/Scenes/Meshing/Scripts/ProjectileLauncher.cs


using UnityEngine;

namespace UnityEngine.XR.ARFoundation.Samples
{
    /// <summary>
    /// Launches projectiles from a touch point with the specified <see cref="initialSpeed"/>.
    /// </summary>
    [RequireComponent(typeof(Camera))]
    public class ProjectileLauncher : MonoBehaviour
    {
        [SerializeField]
        Rigidbody m_ProjectilePrefab;

        public Rigidbody projectilePrefab
        {
            get => m_ProjectilePrefab;
            set => m_ProjectilePrefab = value;
        }

        [SerializeField]
        float m_InitialSpeed = 25;

        public float initialSpeed
        {
            get => m_InitialSpeed;
            set => m_InitialSpeed = value;
        }

        void Update()
        {
            if (m_ProjectilePrefab == null)
                return;

            if (Input.touchCount == 0)
                return;

            var touch = Input.touches[0];
            if (touch.phase == TouchPhase.Began)
            {
                var ray = GetComponent<Camera>().ScreenPointToRay(touch.position);
                var projectile = Instantiate(m_ProjectilePrefab, ray.origin, Quaternion.identity);
                var rigidbody = projectile.GetComponent<Rigidbody>();
                rigidbody.velocity = ray.direction * m_InitialSpeed;
            }
        }
    }
}
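The launcher polls the legacy Input manager each frame, so it only fires on device touches. A minimal wiring sketch follows; the LauncherBootstrap class and its serialized fields are illustrative, not part of the sample:

using UnityEngine;
using UnityEngine.XR.ARFoundation.Samples;

// Illustrative bootstrap: attaches a ProjectileLauncher to the AR camera at
// startup and configures it from inspector-assigned references.
public class LauncherBootstrap : MonoBehaviour
{
    [SerializeField] Camera m_ArCamera;
    [SerializeField] Rigidbody m_ProjectilePrefab;

    void Start()
    {
        var launcher = m_ArCamera.gameObject.AddComponent<ProjectileLauncher>();
        launcher.projectilePrefab = m_ProjectilePrefab;
        launcher.initialSpeed = 10f; // a slower lob than the default 25
    }
}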

89
Assets/Scenes/Meshing/Scripts/ToggleMeshClassification.cs


#if UNITY_IOS && !UNITY_EDITOR
using UnityEngine.XR.ARKit;
#endif // UNITY_IOS && !UNITY_EDITOR

namespace UnityEngine.XR.ARFoundation.Samples
{
    public class ToggleMeshClassification : MonoBehaviour
    {
        /// <summary>
        /// The mesh manager for the scene.
        /// </summary>
        [SerializeField]
        ARMeshManager m_MeshManager;

        /// <summary>
        /// Whether mesh classification should be enabled.
        /// </summary>
        [SerializeField]
        bool m_ClassificationEnabled = false;

        /// <summary>
        /// The mesh manager for the scene.
        /// </summary>
        public ARMeshManager meshManager { get => m_MeshManager; set => m_MeshManager = value; }

        /// <summary>
        /// Whether mesh classification should be enabled.
        /// </summary>
        public bool classificationEnabled
        {
            get => m_ClassificationEnabled;
            set
            {
                m_ClassificationEnabled = value;
                UpdateMeshSubsystem();
            }
        }

        /// <summary>
        /// On enable, update the mesh subsystem with the classification enabled setting.
        /// </summary>
        void OnEnable()
        {
            UpdateMeshSubsystem();
        }

        /// <summary>
        /// Update the mesh subsystem with the classification enabled setting.
        /// </summary>
        void UpdateMeshSubsystem()
        {
#if UNITY_IOS && !UNITY_EDITOR
            Debug.Assert(m_MeshManager != null, "mesh manager cannot be null");
            if ((m_MeshManager != null) && (m_MeshManager.subsystem is XRMeshSubsystem meshSubsystem))
            {
                meshSubsystem.SetClassificationEnabled(m_ClassificationEnabled);
            }
#endif // UNITY_IOS && !UNITY_EDITOR
        }
    }
}
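Because the classificationEnabled setter calls UpdateMeshSubsystem(), the component can be driven directly from UI. A sketch of hooking it to a uGUI Toggle; the class and field names here are illustrative:

using UnityEngine;
using UnityEngine.UI;
using UnityEngine.XR.ARFoundation.Samples;

// Illustrative glue between a uGUI Toggle and ToggleMeshClassification.
public class ClassificationToggleUI : MonoBehaviour
{
    [SerializeField] Toggle m_Toggle;
    [SerializeField] ToggleMeshClassification m_Classification;

    void OnEnable() => m_Toggle.onValueChanged.AddListener(OnToggleChanged);

    void OnDisable() => m_Toggle.onValueChanged.RemoveListener(OnToggleChanged);

    // The property setter pushes the new value to the mesh subsystem immediately.
    void OnToggleChanged(bool value) => m_Classification.classificationEnabled = value;
}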

147
Assets/Scenes/Plane Detection/PlaneClassificationLabeler.cs


using UnityEngine.XR.ARFoundation;
using UnityEngine.XR.ARSubsystems;

namespace UnityEngine.XR.ARFoundation.Samples
{
    /// <summary>
    /// Manages the label and plane material color for each recognized plane based on
    /// the PlaneClassification enumeration defined in ARSubsystems.
    /// </summary>
    [RequireComponent(typeof(ARPlane))]
    [RequireComponent(typeof(MeshRenderer))]
    public class PlaneClassificationLabeler : MonoBehaviour
    {
        ARPlane m_ARPlane;
        MeshRenderer m_PlaneMeshRenderer;
        TextMesh m_TextMesh;
        GameObject m_TextObj;
        Vector3 m_TextFlipVec = new Vector3(0, 180, 0);

        void Awake()
        {
            m_ARPlane = GetComponent<ARPlane>();
            m_PlaneMeshRenderer = GetComponent<MeshRenderer>();

            // Setup label
            m_TextObj = new GameObject();
            m_TextMesh = m_TextObj.AddComponent<TextMesh>();
            m_TextMesh.characterSize = 0.05f;
            m_TextMesh.color = Color.black;
        }

        void Update()
        {
            UpdateLabel();
            UpdatePlaneColor();
        }

        void UpdateLabel()
        {
            // Update text
            m_TextMesh.text = m_ARPlane.classification.ToString();

            // Update pose
            m_TextObj.transform.position = m_ARPlane.center;
            m_TextObj.transform.LookAt(Camera.main.transform);
            m_TextObj.transform.Rotate(m_TextFlipVec);
        }

        void UpdatePlaneColor()
        {
            Color planeMatColor = Color.cyan;

            switch (m_ARPlane.classification)
            {
                case PlaneClassification.None:
                    planeMatColor = Color.cyan;
                    break;
                case PlaneClassification.Wall:
                    planeMatColor = Color.white;
                    break;
                case PlaneClassification.Floor:
                    planeMatColor = Color.green;
                    break;
                case PlaneClassification.Ceiling:
                    planeMatColor = Color.blue;
                    break;
                case PlaneClassification.Table:
                    planeMatColor = Color.yellow;
                    break;
                case PlaneClassification.Seat:
                    planeMatColor = Color.magenta;
                    break;
                case PlaneClassification.Door:
                    planeMatColor = Color.red;
                    break;
                case PlaneClassification.Window:
                    planeMatColor = Color.clear;
                    break;
            }

            planeMatColor.a = 0.33f;
            m_PlaneMeshRenderer.material.color = planeMatColor;
        }

        void OnDestroy()
        {
            Destroy(m_TextObj);
        }
    }
}
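The switch above is a straight classification-to-color mapping; the same data can also be expressed as a lookup table, which keeps any future PlaneClassification additions in one place. A behavior-equivalent sketch, assuming System.Collections.Generic is imported alongside the usings above:

// Equivalent table-driven lookup for the switch in UpdatePlaneColor().
static readonly Dictionary<PlaneClassification, Color> s_ClassificationColors =
    new Dictionary<PlaneClassification, Color>
    {
        { PlaneClassification.None,    Color.cyan },
        { PlaneClassification.Wall,    Color.white },
        { PlaneClassification.Floor,   Color.green },
        { PlaneClassification.Ceiling, Color.blue },
        { PlaneClassification.Table,   Color.yellow },
        { PlaneClassification.Seat,    Color.magenta },
        { PlaneClassification.Door,    Color.red },
        { PlaneClassification.Window,  Color.clear },
    };

// Usage inside UpdatePlaneColor():
//     if (!s_ClassificationColors.TryGetValue(m_ARPlane.classification, out var planeMatColor))
//         planeMatColor = Color.cyan;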

59
Assets/Scenes/SimpleAR/SessionReloader.cs


using System.Collections;
using UnityEngine.UI;
using UnityEngine.XR.ARFoundation;

namespace UnityEngine.XR.ARFoundation.Samples
{
    /// <summary>
    /// Reloads the ARSession by first destroying the ARSession's GameObject
    /// and then instantiating a new ARSession from a Prefab.
    /// </summary>
    public class SessionReloader : MonoBehaviour
    {
        public ARSession session;
        public GameObject sessionPrefab;
        public Button pauseButton;
        public Button resumeButton;
        public Button resetButton;

        public void ReloadSession()
        {
            if (session != null)
            {
                StartCoroutine(DoReload());
            }
        }

        IEnumerator DoReload()
        {
            Destroy(session.gameObject);
            yield return null;

            if (sessionPrefab != null)
            {
                session = Instantiate(sessionPrefab).GetComponent<ARSession>();

                // Hook the buttons back up
                resetButton.onClick.AddListener(session.Reset);
                pauseButton.onClick.AddListener(() => { session.enabled = false; });
                resumeButton.onClick.AddListener(() => { session.enabled = true; });
            }
        }
    }
}
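Two details worth calling out: the yield return null gives Unity a frame to actually destroy the old session's GameObject before the new one is instantiated, and the buttons' previous listeners still capture the destroyed session. A defensive wiring sketch for the body of DoReload (an assumption about desired behavior, not what the sample ships):

// Clear stale listeners that still reference the destroyed ARSession before
// re-adding handlers for the freshly instantiated one.
resetButton.onClick.RemoveAllListeners();
pauseButton.onClick.RemoveAllListeners();
resumeButton.onClick.RemoveAllListeners();

resetButton.onClick.AddListener(session.Reset);
pauseButton.onClick.AddListener(() => { session.enabled = false; });
resumeButton.onClick.AddListener(() => { session.enabled = true; });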

137
Assets/Scripts/ARCoreFaceRegionManager.cs


using System.Collections.Generic;
using Unity.Collections;
using UnityEngine.XR.ARFoundation;
using UnityEngine.XR.ARSubsystems;
#if UNITY_ANDROID && !UNITY_EDITOR
using UnityEngine.XR.ARCore;
#endif

namespace UnityEngine.XR.ARFoundation.Samples
{
    /// <summary>
    /// This component uses the ARCoreFaceSubsystem to query for face regions, special
    /// regions detected within a face, such as the nose tip. Each region has a pose
    /// associated with it. This component instantiates <see cref="regionPrefab"/>
    /// at each detected region.
    /// </summary>
    [RequireComponent(typeof(ARFaceManager))]
    [RequireComponent(typeof(ARSessionOrigin))]
    public class ARCoreFaceRegionManager : MonoBehaviour
    {
        [SerializeField]
        GameObject m_RegionPrefab;

        /// <summary>
        /// Get or set the prefab which will be instantiated at each detected face region.
        /// </summary>
        public GameObject regionPrefab
        {
            get { return m_RegionPrefab; }
            set { m_RegionPrefab = value; }
        }

        ARFaceManager m_FaceManager;

        ARSessionOrigin m_SessionOrigin;

#if UNITY_ANDROID && !UNITY_EDITOR
        NativeArray<ARCoreFaceRegionData> m_FaceRegions;

        Dictionary<TrackableId, Dictionary<ARCoreFaceRegion, GameObject>> m_InstantiatedPrefabs;
#endif

        // Start is called before the first frame update
        void Start()
        {
            m_FaceManager = GetComponent<ARFaceManager>();
            m_SessionOrigin = GetComponent<ARSessionOrigin>();
#if UNITY_ANDROID && !UNITY_EDITOR
            m_InstantiatedPrefabs = new Dictionary<TrackableId, Dictionary<ARCoreFaceRegion, GameObject>>();
#endif
        }

        // Update is called once per frame
        void Update()
        {
#if UNITY_ANDROID && !UNITY_EDITOR
            var subsystem = (ARCoreFaceSubsystem)m_FaceManager.subsystem;
            if (subsystem == null)
                return;

            foreach (var face in m_FaceManager.trackables)
            {
                Dictionary<ARCoreFaceRegion, GameObject> regionGos;
                if (!m_InstantiatedPrefabs.TryGetValue(face.trackableId, out regionGos))
                {
                    regionGos = new Dictionary<ARCoreFaceRegion, GameObject>();
                    m_InstantiatedPrefabs.Add(face.trackableId, regionGos);
                }

                subsystem.GetRegionPoses(face.trackableId, Allocator.Persistent, ref m_FaceRegions);
                for (int i = 0; i < m_FaceRegions.Length; ++i)
                {
                    var regionType = m_FaceRegions[i].region;

                    GameObject go;
                    if (!regionGos.TryGetValue(regionType, out go))
                    {
                        go = Instantiate(m_RegionPrefab, m_SessionOrigin.trackablesParent);
                        regionGos.Add(regionType, go);
                    }

                    go.transform.localPosition = m_FaceRegions[i].pose.position;
                    go.transform.localRotation = m_FaceRegions[i].pose.rotation;
                }
            }
#endif
        }

        void OnDestroy()
        {
#if UNITY_ANDROID && !UNITY_EDITOR
            if (m_FaceRegions.IsCreated)
                m_FaceRegions.Dispose();
#endif
        }
    }
}
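For reference, a sketch of querying a single region rather than instantiating prefabs for all of them. This runs on an Android device only, assumes subsystem and face as in Update() above, and assumes ARCoreFaceRegion.NoseTip is among the regions ARCore reports:

#if UNITY_ANDROID && !UNITY_EDITOR
// Let GetRegionPoses allocate/resize the array; dispose it when done.
NativeArray<ARCoreFaceRegionData> regions = default;
subsystem.GetRegionPoses(face.trackableId, Allocator.Temp, ref regions);
using (regions)
{
    foreach (var region in regions)
    {
        if (region.region == ARCoreFaceRegion.NoseTip)
            Debug.Log($"Nose tip pose (session space): {region.pose.position}");
    }
}
#endif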

167
Assets/Scripts/ARFeatheredPlaneMeshVisualizer.cs


using System.Collections.Generic;
using UnityEngine;
using UnityEngine.XR.ARFoundation;

namespace UnityEngine.XR.ARFoundation.Samples
{
    /// <summary>
    /// This plane visualizer demonstrates the use of a feathering effect
    /// at the edge of the detected plane, which reduces the visual impression
    /// of a hard edge.
    /// </summary>
    [RequireComponent(typeof(ARPlaneMeshVisualizer), typeof(MeshRenderer), typeof(ARPlane))]
    public class ARFeatheredPlaneMeshVisualizer : MonoBehaviour
    {
        [Tooltip("The width of the texture feathering (in world units).")]
        [SerializeField]
        float m_FeatheringWidth = 0.2f;

        /// <summary>
        /// The width of the texture feathering (in world units).
        /// </summary>
        public float featheringWidth
        {
            get { return m_FeatheringWidth; }
            set { m_FeatheringWidth = value; }
        }

        void Awake()
        {
            m_PlaneMeshVisualizer = GetComponent<ARPlaneMeshVisualizer>();
            m_FeatheredPlaneMaterial = GetComponent<MeshRenderer>().material;
            m_Plane = GetComponent<ARPlane>();
        }

        void OnEnable()
        {
            m_Plane.boundaryChanged += ARPlane_boundaryUpdated;
        }

        void OnDisable()
        {
            m_Plane.boundaryChanged -= ARPlane_boundaryUpdated;
        }

        void ARPlane_boundaryUpdated(ARPlaneBoundaryChangedEventArgs eventArgs)
        {
            GenerateBoundaryUVs(m_PlaneMeshVisualizer.mesh);
        }

        /// <summary>
        /// Generate UV2s to mark the boundary vertices and feathering UV coords.
        /// </summary>
        /// <remarks>
        /// The <c>ARPlaneMeshVisualizer</c> has a <c>meshUpdated</c> event that can be used to modify the generated
        /// mesh. In this case we'll add UV2s to mark the boundary vertices.
        /// This technique avoids having to generate extra vertices for the boundary. It works best when the plane
        /// is fairly uniform.
        /// </remarks>
        /// <param name="mesh">The <c>Mesh</c> generated by <c>ARPlaneMeshVisualizer</c></param>
        void GenerateBoundaryUVs(Mesh mesh)
        {
            int vertexCount = mesh.vertexCount;

            // Reuse the list of UVs
            s_FeatheringUVs.Clear();
            if (s_FeatheringUVs.Capacity < vertexCount) { s_FeatheringUVs.Capacity = vertexCount; }

            mesh.GetVertices(s_Vertices);

            Vector3 centerInPlaneSpace = s_Vertices[s_Vertices.Count - 1];
            Vector3 uv = new Vector3(0, 0, 0);
            float shortestUVMapping = float.MaxValue;

            // Assume the last vertex is the center vertex.
            for (int i = 0; i < vertexCount - 1; i++)
            {
                float vertexDist = Vector3.Distance(s_Vertices[i], centerInPlaneSpace);

                // Remap the UV so that a UV of "1" marks the feathering boundary.
                // The ratio of featherBoundaryDistance/edgeDistance is the same as featherUV/edgeUV.
                // Rearrange to get the edge UV.
                float uvMapping = vertexDist / Mathf.Max(vertexDist - featheringWidth, 0.001f);
                uv.x = uvMapping;

                // All the UV mappings will be different. In the shader we need to know the UV value we need to fade out by.
                // Choose the shortest UV to guarantee we fade out before the border.
                // This means the feathering widths will be slightly different; we again rely on a fairly uniform plane.
                if (shortestUVMapping > uvMapping) { shortestUVMapping = uvMapping; }

                s_FeatheringUVs.Add(uv);
            }

            m_FeatheredPlaneMaterial.SetFloat("_ShortestUVMapping", shortestUVMapping);

            // Add the center vertex UV
            uv.Set(0, 0, 0);
            s_FeatheringUVs.Add(uv);

            mesh.SetUVs(1, s_FeatheringUVs);
            mesh.UploadMeshData(false);
        }

        static List<Vector3> s_FeatheringUVs = new List<Vector3>();

        static List<Vector3> s_Vertices = new List<Vector3>();

        ARPlaneMeshVisualizer m_PlaneMeshVisualizer;

        ARPlane m_Plane;

        Material m_FeatheredPlaneMaterial;
    }
}
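To make the UV remap concrete, here is the arithmetic for a single vertex with the default settings:

// Worked example of the boundary-UV remap in GenerateBoundaryUVs().
float featheringWidth = 0.2f; // default m_FeatheringWidth
float vertexDist = 0.5f;      // vertex is half a meter from the plane center
float uvMapping = vertexDist / Mathf.Max(vertexDist - featheringWidth, 0.001f);
// uvMapping = 0.5 / 0.3 ≈ 1.667: the shader fades from UV 1.0 outward, so the
// fade begins featheringWidth (0.2 m) inside the plane edge for this vertex,
// while the edge vertex itself sits at UV ≈ 1.667.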

309
Assets/Scripts/ARKitBlendShapeVisualizer.cs


using System.Collections.Generic;
using Unity.Collections;
using UnityEngine.XR.ARSubsystems;
#if UNITY_IOS && !UNITY_EDITOR
using UnityEngine.XR.ARKit;
#endif

namespace UnityEngine.XR.ARFoundation.Samples
{
    /// <summary>
    /// Populates the action unit coefficients for an <see cref="ARFace"/>.
    /// </summary>
    /// <remarks>
    /// If this <c>GameObject</c> has a <c>SkinnedMeshRenderer</c>,
    /// this component will generate the blend shape coefficients from the underlying <c>ARFace</c>.
    /// </remarks>
    [RequireComponent(typeof(ARFace))]
    public class ARKitBlendShapeVisualizer : MonoBehaviour
    {
        [SerializeField]
        float m_CoefficientScale = 100.0f;

        public float coefficientScale
        {
            get { return m_CoefficientScale; }
            set { m_CoefficientScale = value; }
        }

        [SerializeField]
        SkinnedMeshRenderer m_SkinnedMeshRenderer;

        public SkinnedMeshRenderer skinnedMeshRenderer
        {
            get
            {
                return m_SkinnedMeshRenderer;
            }
            set
            {
                m_SkinnedMeshRenderer = value;
                CreateFeatureBlendMapping();
            }
        }

#if UNITY_IOS && !UNITY_EDITOR
        ARKitFaceSubsystem m_ARKitFaceSubsystem;

        Dictionary<ARKitBlendShapeLocation, int> m_FaceArkitBlendShapeIndexMap;
#endif

        ARFace m_Face;

        void Awake()
        {
            m_Face = GetComponent<ARFace>();
            CreateFeatureBlendMapping();
        }

        void CreateFeatureBlendMapping()
        {
            if (skinnedMeshRenderer == null || skinnedMeshRenderer.sharedMesh == null)
            {
                return;
            }
#if UNITY_IOS && !UNITY_EDITOR
            const string strPrefix = "blendShape2.";
            m_FaceArkitBlendShapeIndexMap = new Dictionary<ARKitBlendShapeLocation, int>();
            m_FaceArkitBlendShapeIndexMap[ARKitBlendShapeLocation.BrowDownLeft] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "browDown_L");
            m_FaceArkitBlendShapeIndexMap[ARKitBlendShapeLocation.BrowDownRight] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "browDown_R");
            m_FaceArkitBlendShapeIndexMap[ARKitBlendShapeLocation.BrowInnerUp] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "browInnerUp");
            m_FaceArkitBlendShapeIndexMap[ARKitBlendShapeLocation.BrowOuterUpLeft] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "browOuterUp_L");
            m_FaceArkitBlendShapeIndexMap[ARKitBlendShapeLocation.BrowOuterUpRight] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "browOuterUp_R");
            m_FaceArkitBlendShapeIndexMap[ARKitBlendShapeLocation.CheekPuff] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "cheekPuff");
            m_FaceArkitBlendShapeIndexMap[ARKitBlendShapeLocation.CheekSquintLeft] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "cheekSquint_L");
            m_FaceArkitBlendShapeIndexMap[ARKitBlendShapeLocation.CheekSquintRight] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "cheekSquint_R");
            m_FaceArkitBlendShapeIndexMap[ARKitBlendShapeLocation.EyeBlinkLeft] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "eyeBlink_L");
            m_FaceArkitBlendShapeIndexMap[ARKitBlendShapeLocation.EyeBlinkRight] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "eyeBlink_R");
            m_FaceArkitBlendShapeIndexMap[ARKitBlendShapeLocation.EyeLookDownLeft] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "eyeLookDown_L");
            m_FaceArkitBlendShapeIndexMap[ARKitBlendShapeLocation.EyeLookDownRight] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "eyeLookDown_R");
            m_FaceArkitBlendShapeIndexMap[ARKitBlendShapeLocation.EyeLookInLeft] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "eyeLookIn_L");
            m_FaceArkitBlendShapeIndexMap[ARKitBlendShapeLocation.EyeLookInRight] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "eyeLookIn_R");
            m_FaceArkitBlendShapeIndexMap[ARKitBlendShapeLocation.EyeLookOutLeft] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "eyeLookOut_L");
            m_FaceArkitBlendShapeIndexMap[ARKitBlendShapeLocation.EyeLookOutRight] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "eyeLookOut_R");
            m_FaceArkitBlendShapeIndexMap[ARKitBlendShapeLocation.EyeLookUpLeft] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "eyeLookUp_L");
            m_FaceArkitBlendShapeIndexMap[ARKitBlendShapeLocation.EyeLookUpRight] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "eyeLookUp_R");
            m_FaceArkitBlendShapeIndexMap[ARKitBlendShapeLocation.EyeSquintLeft] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "eyeSquint_L");
            m_FaceArkitBlendShapeIndexMap[ARKitBlendShapeLocation.EyeSquintRight] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "eyeSquint_R");
            m_FaceArkitBlendShapeIndexMap[ARKitBlendShapeLocation.EyeWideLeft] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "eyeWide_L");
            m_FaceArkitBlendShapeIndexMap[ARKitBlendShapeLocation.EyeWideRight] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "eyeWide_R");
            m_FaceArkitBlendShapeIndexMap[ARKitBlendShapeLocation.JawForward] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "jawForward");
            m_FaceArkitBlendShapeIndexMap[ARKitBlendShapeLocation.JawLeft] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "jawLeft");
            m_FaceArkitBlendShapeIndexMap[ARKitBlendShapeLocation.JawOpen] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "jawOpen");
            m_FaceArkitBlendShapeIndexMap[ARKitBlendShapeLocation.JawRight] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "jawRight");
            m_FaceArkitBlendShapeIndexMap[ARKitBlendShapeLocation.MouthClose] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "mouthClose");
            m_FaceArkitBlendShapeIndexMap[ARKitBlendShapeLocation.MouthDimpleLeft] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "mouthDimple_L");
            m_FaceArkitBlendShapeIndexMap[ARKitBlendShapeLocation.MouthDimpleRight] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "mouthDimple_R");
            m_FaceArkitBlendShapeIndexMap[ARKitBlendShapeLocation.MouthFrownLeft] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "mouthFrown_L");
            m_FaceArkitBlendShapeIndexMap[ARKitBlendShapeLocation.MouthFrownRight] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "mouthFrown_R");
            m_FaceArkitBlendShapeIndexMap[ARKitBlendShapeLocation.MouthFunnel] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "mouthFunnel");
            m_FaceArkitBlendShapeIndexMap[ARKitBlendShapeLocation.MouthLeft] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "mouthLeft");
            m_FaceArkitBlendShapeIndexMap[ARKitBlendShapeLocation.MouthLowerDownLeft] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "mouthLowerDown_L");
            m_FaceArkitBlendShapeIndexMap[ARKitBlendShapeLocation.MouthLowerDownRight] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "mouthLowerDown_R");
            m_FaceArkitBlendShapeIndexMap[ARKitBlendShapeLocation.MouthPressLeft] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "mouthPress_L");
            m_FaceArkitBlendShapeIndexMap[ARKitBlendShapeLocation.MouthPressRight] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "mouthPress_R");
            m_FaceArkitBlendShapeIndexMap[ARKitBlendShapeLocation.MouthPucker] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "mouthPucker");
            m_FaceArkitBlendShapeIndexMap[ARKitBlendShapeLocation.MouthRight] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "mouthRight");
            m_FaceArkitBlendShapeIndexMap[ARKitBlendShapeLocation.MouthRollLower] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "mouthRollLower");
            m_FaceArkitBlendShapeIndexMap[ARKitBlendShapeLocation.MouthRollUpper] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "mouthRollUpper");
            m_FaceArkitBlendShapeIndexMap[ARKitBlendShapeLocation.MouthShrugLower] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "mouthShrugLower");
            m_FaceArkitBlendShapeIndexMap[ARKitBlendShapeLocation.MouthShrugUpper] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "mouthShrugUpper");
            m_FaceArkitBlendShapeIndexMap[ARKitBlendShapeLocation.MouthSmileLeft] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "mouthSmile_L");
            m_FaceArkitBlendShapeIndexMap[ARKitBlendShapeLocation.MouthSmileRight] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "mouthSmile_R");
            m_FaceArkitBlendShapeIndexMap[ARKitBlendShapeLocation.MouthStretchLeft] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "mouthStretch_L");
            m_FaceArkitBlendShapeIndexMap[ARKitBlendShapeLocation.MouthStretchRight] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "mouthStretch_R");
            m_FaceArkitBlendShapeIndexMap[ARKitBlendShapeLocation.MouthUpperUpLeft] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "mouthUpperUp_L");
            m_FaceArkitBlendShapeIndexMap[ARKitBlendShapeLocation.MouthUpperUpRight] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "mouthUpperUp_R");
            m_FaceArkitBlendShapeIndexMap[ARKitBlendShapeLocation.NoseSneerLeft] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "noseSneer_L");
            m_FaceArkitBlendShapeIndexMap[ARKitBlendShapeLocation.NoseSneerRight] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "noseSneer_R");
            m_FaceArkitBlendShapeIndexMap[ARKitBlendShapeLocation.TongueOut] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "tongueOut");
#endif
        }

        void SetVisible(bool visible)
        {
            if (skinnedMeshRenderer == null) return;

            skinnedMeshRenderer.enabled = visible;
        }

        void UpdateVisibility()
        {
            var visible =
                enabled &&
                (m_Face.trackingState == TrackingState.Tracking) &&
                (ARSession.state > ARSessionState.Ready);

            SetVisible(visible);
        }

        void OnEnable()
        {
#if UNITY_IOS && !UNITY_EDITOR
            var faceManager = FindObjectOfType<ARFaceManager>();
            if (faceManager != null)
            {
                m_ARKitFaceSubsystem = (ARKitFaceSubsystem)faceManager.subsystem;
            }
#endif
            UpdateVisibility();
            m_Face.updated += OnUpdated;
            ARSession.stateChanged += OnSystemStateChanged;
        }

        void OnDisable()
        {
            m_Face.updated -= OnUpdated;
            ARSession.stateChanged -= OnSystemStateChanged;
        }

        void OnSystemStateChanged(ARSessionStateChangedEventArgs eventArgs)
        {
            UpdateVisibility();
        }

        void OnUpdated(ARFaceUpdatedEventArgs eventArgs)
        {
            UpdateVisibility();
            UpdateFaceFeatures();
        }

        void UpdateFaceFeatures()
        {
            if (skinnedMeshRenderer == null || !skinnedMeshRenderer.enabled || skinnedMeshRenderer.sharedMesh == null)
            {
                return;
            }
#if UNITY_IOS && !UNITY_EDITOR
            using (var blendShapes = m_ARKitFaceSubsystem.GetBlendShapeCoefficients(m_Face.trackableId, Allocator.Temp))
            {
                foreach (var featureCoefficient in blendShapes)
                {
                    int mappedBlendShapeIndex;
                    if (m_FaceArkitBlendShapeIndexMap.TryGetValue(featureCoefficient.blendShapeLocation, out mappedBlendShapeIndex))
                    {
                        if (mappedBlendShapeIndex >= 0)
                        {
                            skinnedMeshRenderer.SetBlendShapeWeight(mappedBlendShapeIndex, featureCoefficient.coefficient * coefficientScale);
                        }
                    }
                }
            }
#endif
        }
    }
}
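The "blendShape2." prefix is specific to the sample's face rig; a different skinned mesh will expose its own blend shape names, and any entry GetBlendShapeIndex cannot find comes back as -1 (which the mappedBlendShapeIndex >= 0 check above filters out). A quick way to discover the names on another rig (a sketch; run it once against the target renderer):

// Log every blend shape name on the renderer's shared mesh so the
// ARKitBlendShapeLocation mapping above can be adapted to a different rig.
var mesh = skinnedMeshRenderer.sharedMesh;
for (int i = 0; i < mesh.blendShapeCount; ++i)
    Debug.Log($"{i}: {mesh.GetBlendShapeName(i)}");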

497
Assets/Scripts/ARWorldMapController.cs


using UnityEngine.XR.ARKit;
#endif

namespace UnityEngine.XR.ARFoundation.Samples
{
    /// <summary>
    /// Demonstrates the saving and loading of an
    /// <a href="https://developer.apple.com/documentation/arkit/arworldmap">ARWorldMap</a>
    /// </summary>
    /// <remarks>
    /// ARWorldMaps are only supported by ARKit, so this API is in the
    /// <c>UnityEngine.XR.ARKit</c> namespace.
    /// </remarks>
    public class ARWorldMapController : MonoBehaviour
    {
        [Tooltip("The ARSession component controlling the session from which to generate ARWorldMaps.")]
        [SerializeField]
        ARSession m_ARSession;

        /// <summary>
        /// The ARSession component controlling the session from which to generate ARWorldMaps.
        /// </summary>
        public ARSession arSession
        {
            get { return m_ARSession; }
            set { m_ARSession = value; }
        }

        [Tooltip("UI Text component to display error messages")]
        [SerializeField]
        Text m_ErrorText;

        /// <summary>
        /// The UI Text component used to display error messages
        /// </summary>
        public Text errorText
        {
            get { return m_ErrorText; }
            set { m_ErrorText = value; }
        }

        [Tooltip("The UI Text element used to display log messages.")]
        [SerializeField]
        Text m_LogText;

        /// <summary>
        /// The UI Text element used to display log messages.
        /// </summary>
        public Text logText
        {
            get { return m_LogText; }
            set { m_LogText = value; }
        }

        [Tooltip("The UI Text element used to display the current AR world mapping status.")]
        [SerializeField]
        Text m_MappingStatusText;

        /// <summary>
        /// The UI Text element used to display the current AR world mapping status.
        /// </summary>
        public Text mappingStatusText
        {
            get { return m_MappingStatusText; }
            set { m_MappingStatusText = value; }
        }

        [Tooltip("A UI button component which will generate an ARWorldMap and save it to disk.")]
        [SerializeField]
        Button m_SaveButton;

        /// <summary>
        /// A UI button component which will generate an ARWorldMap and save it to disk.
        /// </summary>
        public Button saveButton
        {
            get { return m_SaveButton; }
            set { m_SaveButton = value; }
        }

        [Tooltip("A UI button component which will load a previously saved ARWorldMap from disk and apply it to the current session.")]
        [SerializeField]
        Button m_LoadButton;

        /// <summary>
        /// A UI button component which will load a previously saved ARWorldMap from disk and apply it to the current session.
        /// </summary>
        public Button loadButton
        {
            get { return m_LoadButton; }
            set { m_LoadButton = value; }
        }

        /// <summary>
        /// Create an <c>ARWorldMap</c> and save it to disk.
        /// </summary>
        public void OnSaveButton()
        {
#if UNITY_IOS
            StartCoroutine(Save());
#endif
        }

        /// <summary>
        /// Load an <c>ARWorldMap</c> from disk and apply it
        /// to the current session.
        /// </summary>
        public void OnLoadButton()
        {
#if UNITY_IOS
            StartCoroutine(Load());
#endif
        }

        /// <summary>
        /// Reset the <c>ARSession</c>, destroying any existing trackables,
        /// such as planes. Upon loading a saved <c>ARWorldMap</c>, saved
        /// trackables will be restored.
        /// </summary>
        public void OnResetButton()
        {
            m_ARSession.Reset();
        }

#if UNITY_IOS
        IEnumerator Save()
        {
            var sessionSubsystem = (ARKitSessionSubsystem)m_ARSession.subsystem;
            if (sessionSubsystem == null)
            {
                Log("No session subsystem available. Could not save.");
                yield break;
            }

            var request = sessionSubsystem.GetARWorldMapAsync();

            while (!request.status.IsDone())
                yield return null;

            if (request.status.IsError())
            {
                Log(string.Format("Session serialization failed with status {0}", request.status));
                yield break;
            }

            var worldMap = request.GetWorldMap();
            request.Dispose();

            SaveAndDisposeWorldMap(worldMap);
        }

        IEnumerator Load()
        {
            var sessionSubsystem = (ARKitSessionSubsystem)m_ARSession.subsystem;
            if (sessionSubsystem == null)
            {
                Log("No session subsystem available. Could not load.");
                yield break;
            }

            var file = File.Open(path, FileMode.Open);
            if (file == null)
            {
                Log(string.Format("File {0} does not exist.", path));
                yield break;
            }

            Log(string.Format("Reading {0}...", path));

            int bytesPerFrame = 1024 * 10;
            var bytesRemaining = file.Length;
            var binaryReader = new BinaryReader(file);
            var allBytes = new List<byte>();
            while (bytesRemaining > 0)
            {
                var bytes = binaryReader.ReadBytes(bytesPerFrame);
                allBytes.AddRange(bytes);
                bytesRemaining -= bytesPerFrame;
                yield return null;
            }

            var data = new NativeArray<byte>(allBytes.Count, Allocator.Temp);
            data.CopyFrom(allBytes.ToArray());

            Log("Deserializing to ARWorldMap...");
            ARWorldMap worldMap;
            if (ARWorldMap.TryDeserialize(data, out worldMap))
                data.Dispose();

            if (worldMap.valid)
            {
                Log("Deserialized successfully.");
            }
            else
            {
                Debug.LogError("Data is not a valid ARWorldMap.");
                yield break;
            }

            Log("Apply ARWorldMap to current session.");
            sessionSubsystem.ApplyWorldMap(worldMap);
        }

        void SaveAndDisposeWorldMap(ARWorldMap worldMap)
        {
            Log("Serializing ARWorldMap to byte array...");
            var data = worldMap.Serialize(Allocator.Temp);
            Log(string.Format("ARWorldMap has {0} bytes.", data.Length));

            var file = File.Open(path, FileMode.Create);
            var writer = new BinaryWriter(file);
            writer.Write(data.ToArray());
            writer.Close();
            data.Dispose();
            worldMap.Dispose();
            Log(string.Format("ARWorldMap written to {0}", path));
        }
#endif

        string path
        {
            get
            {
                return Path.Combine(Application.persistentDataPath, "my_session.worldmap");
            }
        }

        bool supported
        {
            get
            {
#if UNITY_IOS
                return m_ARSession.subsystem is ARKitSessionSubsystem && ARKitSessionSubsystem.worldMapSupported;
#else
                return false;
#endif
            }
        }

        void Awake()
        {
            m_LogMessages = new List<string>();
        }

        void Log(string logMessage)
        {
            m_LogMessages.Add(logMessage);
        }

        static void SetActive(Button button, bool active)
        {
            if (button != null)
                button.gameObject.SetActive(active);
        }

        static void SetActive(Text text, bool active)
        {
            if (text != null)
                text.gameObject.SetActive(active);
        }

        static void SetText(Text text, string value)
        {
            if (text != null)
                text.text = value;
        }

        void Update()
        {
            if (supported)
            {
                SetActive(errorText, false);
                SetActive(saveButton, true);
                SetActive(loadButton, true);
                SetActive(mappingStatusText, true);
            }
            else
            {
                SetActive(errorText, true);
                SetActive(saveButton, false);
                SetActive(loadButton, false);
                SetActive(mappingStatusText, false);
            }

#if UNITY_IOS
            var sessionSubsystem = (ARKitSessionSubsystem)m_ARSession.subsystem;
#else
            XRSessionSubsystem sessionSubsystem = null;
#endif
            if (sessionSubsystem == null)
                return;

            var numLogsToShow = 20;
            string msg = "";
            for (int i = Mathf.Max(0, m_LogMessages.Count - numLogsToShow); i < m_LogMessages.Count; ++i)
            {
                msg += m_LogMessages[i];
                msg += "\n";
            }
            SetText(logText, msg);

#if UNITY_IOS
            SetText(mappingStatusText, string.Format("Mapping Status: {0}", sessionSubsystem.worldMappingStatus));
#endif
        }

        List<string> m_LogMessages;
    }
}
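The controller exposes its handlers as public methods and the sample binds them in the scene. A minimal wiring sketch (hypothetical setup script, not part of the sample) that binds them from code instead; on non-iOS platforms each handler is a no-op because the coroutine calls are compiled out behind #if UNITY_IOS:

using UnityEngine;
using UnityEngine.UI;
using UnityEngine.XR.ARFoundation.Samples;

public class WorldMapMenu : MonoBehaviour
{
    [SerializeField] ARWorldMapController m_Controller;
    [SerializeField] Button m_Save;
    [SerializeField] Button m_Load;
    [SerializeField] Button m_Reset;

    void Awake()
    {
        // Forward each button press to the controller's public handler.
        m_Save.onClick.AddListener(m_Controller.OnSaveButton);
        m_Load.onClick.AddListener(m_Controller.OnLoadButton);
        m_Reset.onClick.AddListener(m_Controller.OnResetButton);
    }
}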

80
Assets/Scripts/AnchorCreator.cs


using UnityEngine.XR.ARFoundation;
using UnityEngine.XR.ARSubsystems;

namespace UnityEngine.XR.ARFoundation.Samples
{
    [RequireComponent(typeof(ARAnchorManager))]
    [RequireComponent(typeof(ARRaycastManager))]
    public class AnchorCreator : MonoBehaviour
    {
        public void RemoveAllAnchors()
        {
            foreach (var anchor in m_Anchors)
            {
                m_AnchorManager.RemoveAnchor(anchor);
            }
            m_Anchors.Clear();
        }

        void Awake()
        {
            m_RaycastManager = GetComponent<ARRaycastManager>();
            m_AnchorManager = GetComponent<ARAnchorManager>();
            m_Anchors = new List<ARAnchor>();
        }

        void Update()
        {
            if (Input.touchCount == 0)
                return;

            var touch = Input.GetTouch(0);
            if (touch.phase != TouchPhase.Began)
                return;

            if (m_RaycastManager.Raycast(touch.position, s_Hits, TrackableType.FeaturePoint))
            {
                // Raycast hits are sorted by distance, so the first one
                // will be the closest hit.
                var hitPose = s_Hits[0].pose;
                var anchor = m_AnchorManager.AddAnchor(hitPose);
                if (anchor == null)
                {
                    Logger.Log("Error creating anchor");
                }
                else
                {
                    m_Anchors.Add(anchor);
                }
            }
        }

        static List<ARRaycastHit> s_Hits = new List<ARRaycastHit>();

        List<ARAnchor> m_Anchors;

        ARRaycastManager m_RaycastManager;

        ARAnchorManager m_AnchorManager;
    }
}
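The raycast above targets feature points, which can drift. A hedged variant (not the sample's code) that raycasts against detected plane interiors and attaches the anchor to the hit plane, which usually yields a more stable pose:

using System.Collections.Generic;
using UnityEngine;
using UnityEngine.XR.ARFoundation;
using UnityEngine.XR.ARSubsystems;

[RequireComponent(typeof(ARAnchorManager))]
[RequireComponent(typeof(ARPlaneManager))]
[RequireComponent(typeof(ARRaycastManager))]
public class PlaneAnchorCreator : MonoBehaviour
{
    static List<ARRaycastHit> s_Hits = new List<ARRaycastHit>();

    ARAnchorManager m_AnchorManager;
    ARPlaneManager m_PlaneManager;
    ARRaycastManager m_RaycastManager;

    void Awake()
    {
        m_AnchorManager = GetComponent<ARAnchorManager>();
        m_PlaneManager = GetComponent<ARPlaneManager>();
        m_RaycastManager = GetComponent<ARRaycastManager>();
    }

    void Update()
    {
        if (Input.touchCount == 0)
            return;

        var touch = Input.GetTouch(0);
        if (touch.phase != TouchPhase.Began)
            return;

        // Raycast against the interior of detected planes only.
        if (m_RaycastManager.Raycast(touch.position, s_Hits, TrackableType.PlaneWithinPolygon))
        {
            var hit = s_Hits[0];
            var plane = m_PlaneManager.GetPlane(hit.trackableId);
            if (plane != null)
            {
                // AttachAnchor parents the new anchor to the plane, so the
                // anchor follows plane refinements over time.
                m_AnchorManager.AttachAnchor(plane, hit.pose);
            }
        }
    }
}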

319
Assets/Scripts/BoneController.cs


using System.Collections.Generic;
using System;

namespace UnityEngine.XR.ARFoundation.Samples
{
    public class BoneController : MonoBehaviour
    {
        // 3D joint skeleton
        enum JointIndices
        {
            Invalid = -1,
            Root = 0, // parent: <none> [-1]
            Hips = 1, // parent: Root [0]
            LeftUpLeg = 2, // parent: Hips [1]
            LeftLeg = 3, // parent: LeftUpLeg [2]
            LeftFoot = 4, // parent: LeftLeg [3]
            LeftToes = 5, // parent: LeftFoot [4]
            LeftToesEnd = 6, // parent: LeftToes [5]
            RightUpLeg = 7, // parent: Hips [1]
            RightLeg = 8, // parent: RightUpLeg [7]
            RightFoot = 9, // parent: RightLeg [8]
            RightToes = 10, // parent: RightFoot [9]
            RightToesEnd = 11, // parent: RightToes [10]
            Spine1 = 12, // parent: Hips [1]
            Spine2 = 13, // parent: Spine1 [12]
            Spine3 = 14, // parent: Spine2 [13]
            Spine4 = 15, // parent: Spine3 [14]
            Spine5 = 16, // parent: Spine4 [15]
            Spine6 = 17, // parent: Spine5 [16]
            Spine7 = 18, // parent: Spine6 [17]
            LeftShoulder1 = 19, // parent: Spine7 [18]
            LeftArm = 20, // parent: LeftShoulder1 [19]
            LeftForearm = 21, // parent: LeftArm [20]
            LeftHand = 22, // parent: LeftForearm [21]
            LeftHandIndexStart = 23, // parent: LeftHand [22]
            LeftHandIndex1 = 24, // parent: LeftHandIndexStart [23]
            LeftHandIndex2 = 25, // parent: LeftHandIndex1 [24]
            LeftHandIndex3 = 26, // parent: LeftHandIndex2 [25]
            LeftHandIndexEnd = 27, // parent: LeftHandIndex3 [26]
            LeftHandMidStart = 28, // parent: LeftHand [22]
            LeftHandMid1 = 29, // parent: LeftHandMidStart [28]
            LeftHandMid2 = 30, // parent: LeftHandMid1 [29]
            LeftHandMid3 = 31, // parent: LeftHandMid2 [30]
            LeftHandMidEnd = 32, // parent: LeftHandMid3 [31]
            LeftHandPinkyStart = 33, // parent: LeftHand [22]
            LeftHandPinky1 = 34, // parent: LeftHandPinkyStart [33]
            LeftHandPinky2 = 35, // parent: LeftHandPinky1 [34]
            LeftHandPinky3 = 36, // parent: LeftHandPinky2 [35]
            LeftHandPinkyEnd = 37, // parent: LeftHandPinky3 [36]
            LeftHandRingStart = 38, // parent: LeftHand [22]
            LeftHandRing1 = 39, // parent: LeftHandRingStart [38]
            LeftHandRing2 = 40, // parent: LeftHandRing1 [39]
            LeftHandRing3 = 41, // parent: LeftHandRing2 [40]
            LeftHandRingEnd = 42, // parent: LeftHandRing3 [41]
            LeftHandThumbStart = 43, // parent: LeftHand [22]
            LeftHandThumb1 = 44, // parent: LeftHandThumbStart [43]
            LeftHandThumb2 = 45, // parent: LeftHandThumb1 [44]
            LeftHandThumbEnd = 46, // parent: LeftHandThumb2 [45]
            Neck1 = 47, // parent: Spine7 [18]
            Neck2 = 48, // parent: Neck1 [47]
            Neck3 = 49, // parent: Neck2 [48]
            Neck4 = 50, // parent: Neck3 [49]
            Head = 51, // parent: Neck4 [50]
            Jaw = 52, // parent: Head [51]
            Chin = 53, // parent: Jaw [52]
            LeftEye = 54, // parent: Head [51]
            LeftEyeLowerLid = 55, // parent: LeftEye [54]
            LeftEyeUpperLid = 56, // parent: LeftEye [54]
            LeftEyeball = 57, // parent: LeftEye [54]
            Nose = 58, // parent: Head [51]
            RightEye = 59, // parent: Head [51]
            RightEyeLowerLid = 60, // parent: RightEye [59]
            RightEyeUpperLid = 61, // parent: RightEye [59]
            RightEyeball = 62, // parent: RightEye [59]
            RightShoulder1 = 63, // parent: Spine7 [18]
            RightArm = 64, // parent: RightShoulder1 [63]
            RightForearm = 65, // parent: RightArm [64]
            RightHand = 66, // parent: RightForearm [65]
            RightHandIndexStart = 67, // parent: RightHand [66]
            RightHandIndex1 = 68, // parent: RightHandIndexStart [67]
            RightHandIndex2 = 69, // parent: RightHandIndex1 [68]
            RightHandIndex3 = 70, // parent: RightHandIndex2 [69]
            RightHandIndexEnd = 71, // parent: RightHandIndex3 [70]
            RightHandMidStart = 72, // parent: RightHand [66]
            RightHandMid1 = 73, // parent: RightHandMidStart [72]
            RightHandMid2 = 74, // parent: RightHandMid1 [73]
            RightHandMid3 = 75, // parent: RightHandMid2 [74]
            RightHandMidEnd = 76, // parent: RightHandMid3 [75]
            RightHandPinkyStart = 77, // parent: RightHand [66]
            RightHandPinky1 = 78, // parent: RightHandPinkyStart [77]
            RightHandPinky2 = 79, // parent: RightHandPinky1 [78]
            RightHandPinky3 = 80, // parent: RightHandPinky2 [79]
            RightHandPinkyEnd = 81, // parent: RightHandPinky3 [80]
            RightHandRingStart = 82, // parent: RightHand [66]
            RightHandRing1 = 83, // parent: RightHandRingStart [82]
            RightHandRing2 = 84, // parent: RightHandRing1 [83]
            RightHandRing3 = 85, // parent: RightHandRing2 [84]
            RightHandRingEnd = 86, // parent: RightHandRing3 [85]
            RightHandThumbStart = 87, // parent: RightHand [66]
            RightHandThumb1 = 88, // parent: RightHandThumbStart [87]
            RightHandThumb2 = 89, // parent: RightHandThumb1 [88]
            RightHandThumbEnd = 90, // parent: RightHandThumb2 [89]
        }

        const int k_NumSkeletonJoints = 91;

        [SerializeField]
        [Tooltip("The root bone of the skeleton.")]
        Transform m_SkeletonRoot;

        /// <summary>
        /// Get/Set the root bone of the skeleton.
        /// </summary>
        public Transform skeletonRoot
        {
            get
            {
                return m_SkeletonRoot;
            }
            set
            {
                m_SkeletonRoot = value;
            }
        }

        Transform[] m_BoneMapping = new Transform[k_NumSkeletonJoints];

        public void InitializeSkeletonJoints()
        {
            // Walk through all the child joints in the skeleton and
            // store the skeleton joints at the corresponding index in the m_BoneMapping array.
            // This assumes that the bones in the skeleton are named as per the
            // JointIndices enum above.
            Queue<Transform> nodes = new Queue<Transform>();
            nodes.Enqueue(m_SkeletonRoot);
            while (nodes.Count > 0)
            {
                Transform next = nodes.Dequeue();
                for (int i = 0; i < next.childCount; ++i)
                {
                    nodes.Enqueue(next.GetChild(i));
                }
                ProcessJoint(next);
            }
        }

        public void ApplyBodyPose(ARHumanBody body)
        {
            var joints = body.joints;
            if (!joints.IsCreated)
                return;

            for (int i = 0; i < k_NumSkeletonJoints; ++i)
            {
                XRHumanBodyJoint joint = joints[i];
                var bone = m_BoneMapping[i];
                if (bone != null)
                {
                    bone.transform.localPosition = joint.localPose.position;
                    bone.transform.localRotation = joint.localPose.rotation;
                }
            }
        }

        void ProcessJoint(Transform joint)
        {
            int index = GetJointIndex(joint.name);
            if (index >= 0 && index < k_NumSkeletonJoints)
            {
                m_BoneMapping[index] = joint;
            }
            else
            {
                Debug.LogWarning($"{joint.name} was not found.");
            }
        }

        // Returns the integer value corresponding to the JointIndices enum value
        // passed in as a string.
        int GetJointIndex(string jointName)
        {
            JointIndices val;
            if (Enum.TryParse(jointName, out val))
            {
                return (int)val;
            }
            return -1;
        }
    }
}
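BoneController only applies poses; something must feed it ARHumanBody updates. The repo's HumanBodyTracker.cs (also in this change set) plays that role; the following is a hedged sketch of that wiring under assumed names, not the sample's actual file:

using System.Collections.Generic;
using UnityEngine;
using UnityEngine.XR.ARFoundation;
using UnityEngine.XR.ARFoundation.Samples;
using UnityEngine.XR.ARSubsystems;

public class BodyPoseDriver : MonoBehaviour
{
    [SerializeField] ARHumanBodyManager m_BodyManager;
    [SerializeField] GameObject m_SkeletonPrefab;

    readonly Dictionary<TrackableId, BoneController> m_Skeletons =
        new Dictionary<TrackableId, BoneController>();

    void OnEnable() => m_BodyManager.humanBodiesChanged += OnBodiesChanged;
    void OnDisable() => m_BodyManager.humanBodiesChanged -= OnBodiesChanged;

    void OnBodiesChanged(ARHumanBodiesChangedEventArgs args)
    {
        foreach (var body in args.added)
        {
            // Spawn a rigged skeleton under the tracked body and map its bones once.
            var rig = Instantiate(m_SkeletonPrefab, body.transform);
            var bones = rig.GetComponent<BoneController>();
            bones.InitializeSkeletonJoints();
            m_Skeletons.Add(body.trackableId, bones);
        }

        foreach (var body in args.updated)
        {
            if (m_Skeletons.TryGetValue(body.trackableId, out var bones))
                bones.ApplyBodyPose(body);
        }

        foreach (var body in args.removed)
        {
            if (m_Skeletons.TryGetValue(body.trackableId, out var bones))
            {
                Destroy(bones.gameObject);
                m_Skeletons.Remove(body.trackableId);
            }
        }
    }
}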

167
Assets/Scripts/CameraConfigController.cs


using UnityEngine.UI;
using UnityEngine.XR.ARFoundation;

namespace UnityEngine.XR.ARFoundation.Samples
{
    /// <summary>
    /// Populates a drop down UI element with all the supported
    /// camera configurations and changes the active camera
    /// configuration when the user changes the selection in the dropdown.
    ///
    /// The camera configuration affects the resolution (and possibly framerate)
    /// of the hardware camera during an AR session.
    /// </summary>
    [RequireComponent(typeof(Dropdown))]
    public class CameraConfigController : MonoBehaviour
    {
        List<string> m_ConfigurationNames;

        Dropdown m_Dropdown;

        [SerializeField]
        [Tooltip("The ARCameraManager which will produce frame events.")]
        ARCameraManager m_CameraManager;

        /// <summary>
        /// Get or set the <c>ARCameraManager</c>.
        /// </summary>
        public ARCameraManager cameraManager
        {
            get { return m_CameraManager; }
            set { m_CameraManager = value; }
        }

        /// <summary>
        /// Callback invoked when <see cref="m_Dropdown"/> changes. This
        /// lets us change the camera configuration when the user changes
        /// the selection in the UI.
        /// </summary>
        /// <param name="dropdown">The <c>Dropdown</c> which changed.</param>
        void OnDropdownValueChanged(Dropdown dropdown)
        {
            if ((cameraManager == null) || (cameraManager.subsystem == null) || !cameraManager.subsystem.running)
            {
                return;
            }

            var configurationIndex = dropdown.value;

            // Check that the value makes sense
            using (var configurations = cameraManager.GetConfigurations(Allocator.Temp))
            {
                if (configurationIndex >= configurations.Length)
                {
                    return;
                }

                // Get that configuration by index
                var configuration = configurations[configurationIndex];

                // Make it the active one
                cameraManager.currentConfiguration = configuration;
            }
        }

        void Awake()
        {
            m_Dropdown = GetComponent<Dropdown>();
            m_Dropdown.ClearOptions();
            m_Dropdown.onValueChanged.AddListener(delegate { OnDropdownValueChanged(m_Dropdown); });
            m_ConfigurationNames = new List<string>();
        }

        void PopulateDropdown()
        {
            if ((cameraManager == null) || (cameraManager.subsystem == null) || !cameraManager.subsystem.running)
                return;

            // No configurations available probably means this feature
            // isn't supported by the current device.
            using (var configurations = cameraManager.GetConfigurations(Allocator.Temp))
            {
                if (!configurations.IsCreated || (configurations.Length <= 0))
                {
                    return;
                }

                // There are two ways to enumerate the camera configurations.

                // 1. Use a foreach to iterate over all the available configurations
                foreach (var config in configurations)
                {
                    m_ConfigurationNames.Add(config.ToString());
                }
                m_Dropdown.AddOptions(m_ConfigurationNames);

                // 2. Use a normal for...loop
                var currentConfig = cameraManager.currentConfiguration;
                for (int i = 0; i < configurations.Length; ++i)
                {
                    // Find the current configuration and update the drop down value
                    if (currentConfig == configurations[i])
                    {
                        m_Dropdown.value = i;
                    }
                }
            }
        }

        void Update()
        {
            if (m_ConfigurationNames.Count == 0)
                PopulateDropdown();
        }
    }
}
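Beyond the dropdown's config.ToString() labels, each XRCameraConfiguration exposes its resolution and an optional framerate. A minimal sketch (hypothetical helper, not in the sample) that logs them once the camera subsystem is running:

using Unity.Collections;
using UnityEngine;
using UnityEngine.XR.ARFoundation;

public class CameraConfigLogger : MonoBehaviour
{
    [SerializeField] ARCameraManager m_CameraManager;

    public void LogConfigurations()
    {
        if (m_CameraManager == null || m_CameraManager.subsystem == null || !m_CameraManager.subsystem.running)
            return;

        using (var configurations = m_CameraManager.GetConfigurations(Allocator.Temp))
        {
            foreach (var config in configurations)
            {
                // framerate is nullable; not every configuration reports one.
                Debug.Log($"{config.resolution.x}x{config.resolution.y} @ {config.framerate?.ToString() ?? "?"} fps");
            }
        }
    }
}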

395
Assets/Scripts/CpuImageSample.cs


using UnityEngine.XR.ARFoundation;
using UnityEngine.XR.ARSubsystems;

namespace UnityEngine.XR.ARFoundation.Samples
{
    /// <summary>
    /// This component tests getting the latest camera image
    /// and converting it to RGBA format. If successful,
    /// it displays the image on the screen as a RawImage
    /// and also displays information about the image.
    ///
    /// This is useful for computer vision applications where
    /// you need to access the raw pixels from camera image
    /// on the CPU.
    ///
    /// This is different from the ARCameraBackground component, which
    /// efficiently displays the camera image on the screen. If you
    /// just want to blit the camera texture to the screen, use
    /// the ARCameraBackground, or use Graphics.Blit to create
    /// a GPU-friendly RenderTexture.
    ///
    /// In this example, we get the camera image data on the CPU,
    /// convert it to an RGBA format, then display it on the screen
    /// as a RawImage texture to demonstrate it is working.
    /// This is done as an example; do not use this technique simply
    /// to render the camera image on screen.
    /// </summary>
    public class CpuImageSample : MonoBehaviour
    {
        [SerializeField]
        [Tooltip("The ARCameraManager which will produce frame events.")]
        ARCameraManager m_CameraManager;

        /// <summary>
        /// Get or set the <c>ARCameraManager</c>.
        /// </summary>
        public ARCameraManager cameraManager
        {
            get => m_CameraManager;
            set => m_CameraManager = value;
        }

        [SerializeField]
        RawImage m_RawCameraImage;

        /// <summary>
        /// The UI RawImage used to display the camera image on screen.
        /// </summary>
        public RawImage rawCameraImage
        {
            get => m_RawCameraImage;
            set => m_RawCameraImage = value;
        }

        [SerializeField]
        [Tooltip("The AROcclusionManager which will produce human depth and stencil textures.")]
        AROcclusionManager m_OcclusionManager;

        public AROcclusionManager occlusionManager
        {
            get => m_OcclusionManager;
            set => m_OcclusionManager = value;
        }

        [SerializeField]
        RawImage m_RawHumanDepthImage;

        /// <summary>
        /// The UI RawImage used to display the human depth image on screen.
        /// </summary>
        public RawImage rawHumanDepthImage
        {
            get => m_RawHumanDepthImage;
            set => m_RawHumanDepthImage = value;
        }

        [SerializeField]
        RawImage m_RawHumanStencilImage;

        /// <summary>
        /// The UI RawImage used to display the human stencil image on screen.
        /// </summary>
        public RawImage rawHumanStencilImage
        {
            get => m_RawHumanStencilImage;
            set => m_RawHumanStencilImage = value;
        }

        [SerializeField]
        Text m_ImageInfo;

        /// <summary>
        /// The UI Text used to display information about the image on screen.
        /// </summary>
        public Text imageInfo
        {
            get => m_ImageInfo;
            set => m_ImageInfo = value;
        }

        void OnEnable()
        {
            if (m_CameraManager != null)
            {
                m_CameraManager.frameReceived += OnCameraFrameReceived;
            }
        }

        void OnDisable()
        {
            if (m_CameraManager != null)
            {
                m_CameraManager.frameReceived -= OnCameraFrameReceived;
            }
        }

        unsafe void UpdateCameraImage()
        {
            // Attempt to get the latest camera image. If this method succeeds,
            // it acquires a native resource that must be disposed (see below).
            if (!cameraManager.TryAcquireLatestCpuImage(out XRCpuImage image))
            {
                return;
            }

            // Display some information about the camera image
            m_ImageInfo.text = string.Format(
                "Image info:\n\twidth: {0}\n\theight: {1}\n\tplaneCount: {2}\n\ttimestamp: {3}\n\tformat: {4}",
                image.width, image.height, image.planeCount, image.timestamp, image.format);

            // Once we have a valid XRCpuImage, we can access the individual image "planes"
            // (the separate channels in the image). XRCpuImage.GetPlane provides
            // low-overhead access to this data. This could then be passed to a
            // computer vision algorithm. Here, we will convert the camera image
            // to an RGBA texture and draw it on the screen.

            // Choose an RGBA format.
            // See XRCpuImage.FormatSupported for a complete list of supported formats.
            var format = TextureFormat.RGBA32;

            if (m_CameraTexture == null || m_CameraTexture.width != image.width || m_CameraTexture.height != image.height)
            {
                m_CameraTexture = new Texture2D(image.width, image.height, format, false);
            }

            // Convert the image to format, flipping the image across the Y axis.
            // We can also get a sub rectangle, but we'll get the full image here.
            var conversionParams = new XRCpuImage.ConversionParams(image, format, XRCpuImage.Transformation.MirrorY);

            // Texture2D allows us write directly to the raw texture data
            // This allows us to do the conversion in-place without making any copies.
            var rawTextureData = m_CameraTexture.GetRawTextureData<byte>();
            try
            {
                image.Convert(conversionParams, new IntPtr(rawTextureData.GetUnsafePtr()), rawTextureData.Length);
            }
            finally
            {
                // We must dispose of the XRCpuImage after we're finished
                // with it to avoid leaking native resources.
                image.Dispose();
            }

            // Apply the updated texture data to our texture
            m_CameraTexture.Apply();

            // Set the RawImage's texture so we can visualize it.
            m_RawCameraImage.texture = m_CameraTexture;
        }

        void UpdateHumanDepthImage()
        {
            if (m_RawHumanDepthImage == null)
                return;

            // Attempt to get the latest human depth image. If this method succeeds,
            // it acquires a native resource that must be disposed (see below).
            if (occlusionManager && occlusionManager.TryAcquireHumanDepthCpuImage(out XRCpuImage image))
            {
                using (image)
                {
                    UpdateRawImage(m_RawHumanDepthImage, image);
                }
            }
            else
            {
                m_RawHumanDepthImage.enabled = false;
            }
        }

        void UpdateHumanStencilImage()
        {
            if (m_RawHumanStencilImage == null)
                return;

            // Attempt to get the latest human stencil image. If this method succeeds,
            // it acquires a native resource that must be disposed (see below).
            if (occlusionManager && occlusionManager.TryAcquireHumanStencilCpuImage(out XRCpuImage image))
            {
                using (image)
                {
                    UpdateRawImage(m_RawHumanStencilImage, image);
                }
            }
            else
            {
                m_RawHumanStencilImage.enabled = false;
            }
        }

        static void UpdateRawImage(RawImage rawImage, XRCpuImage cpuImage)
        {
            // Get the texture associated with the UI.RawImage that we wish to display on screen.
            var texture = rawImage.texture as Texture2D;

            // If the texture hasn't yet been created, or if its dimensions have changed, (re)create the texture.
            // Note: Although texture dimensions do not normally change frame-to-frame, they can change in response to
            // a change in the camera resolution (for camera images) or changes to the quality of the human depth
            // and human stencil buffers.
            if (texture == null || texture.width != cpuImage.width || texture.height != cpuImage.height)
            {
                texture = new Texture2D(cpuImage.width, cpuImage.height, cpuImage.format.AsTextureFormat(), false);
                rawImage.texture = texture;
            }

            // For display, we need to mirror about the vertical axis.
            var conversionParams = new XRCpuImage.ConversionParams(cpuImage, cpuImage.format.AsTextureFormat(), XRCpuImage.Transformation.MirrorY);

            // Get the Texture2D's underlying pixel buffer.
            var rawTextureData = texture.GetRawTextureData<byte>();

            // Make sure the destination buffer is large enough to hold the converted data (they should be the same size)
            Debug.Assert(rawTextureData.Length == cpuImage.GetConvertedDataSize(conversionParams.outputDimensions, conversionParams.outputFormat),
                "The Texture2D is not the same size as the converted data.");

            // Perform the conversion.
            cpuImage.Convert(conversionParams, rawTextureData);

            // "Apply" the new pixel data to the Texture2D.
            texture.Apply();

            // Make sure it's enabled.
            rawImage.enabled = true;
        }

        void OnCameraFrameReceived(ARCameraFrameEventArgs eventArgs)
        {
            UpdateCameraImage();
            UpdateHumanDepthImage();
            UpdateHumanStencilImage();
        }

        Texture2D m_CameraTexture;
    }
}
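The Convert call above blocks the frame-event callback until the conversion finishes. XRCpuImage also offers ConvertAsync, which spreads the work over several frames; a hedged coroutine sketch (hypothetical component, class and field names assumed):

using System.Collections;
using Unity.Collections;
using UnityEngine;
using UnityEngine.XR.ARFoundation;
using UnityEngine.XR.ARSubsystems;

public class AsyncCpuImageReader : MonoBehaviour
{
    [SerializeField] ARCameraManager m_CameraManager;

    public void ReadFrame()
    {
        if (m_CameraManager.TryAcquireLatestCpuImage(out XRCpuImage image))
            StartCoroutine(ConvertAsync(image));
    }

    IEnumerator ConvertAsync(XRCpuImage image)
    {
        var conversionParams = new XRCpuImage.ConversionParams(
            image, TextureFormat.RGBA32, XRCpuImage.Transformation.MirrorY);

        using (var request = image.ConvertAsync(conversionParams))
        {
            // The source image can be disposed as soon as the request exists.
            image.Dispose();

            while (!request.status.IsDone())
                yield return null;

            if (request.status == XRCpuImage.AsyncConversionStatus.Ready)
            {
                NativeArray<byte> pixels = request.GetData<byte>();
                Debug.Log($"Converted {pixels.Length} bytes of RGBA data.");
                // Copy or process 'pixels' here; the buffer becomes invalid
                // when the request is disposed at the end of this using block.
            }
        }
    }
}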

79
Assets/Scripts/DisableVerticalPlanes.cs


using UnityEngine.XR.ARFoundation;
using UnityEngine.XR.ARSubsystems;

namespace UnityEngine.XR.ARFoundation.Samples
{
    /// <summary>
    /// This example demonstrates disabling vertical planes as they are
    /// detected and instantiated by the <c>ARPlaneManager</c>.
    /// </summary>
    [RequireComponent(typeof(ARPlaneManager))]
    public class DisableVerticalPlanes : MonoBehaviour
    {
        [Tooltip("The UI Text element used to display log messages.")]
        [SerializeField]
        Text m_LogText;

        /// <summary>
        /// The UI Text element used to display log messages.
        /// </summary>
        public Text logText
        {
            get { return m_LogText; }
            set { m_LogText = value; }
        }

        void OnEnable()
        {
            GetComponent<ARPlaneManager>().planesChanged += OnPlaneAdded;
        }

        void OnDisable()
        {
            GetComponent<ARPlaneManager>().planesChanged -= OnPlaneAdded;
        }

        void OnPlaneAdded(ARPlanesChangedEventArgs eventArgs)
        {
            foreach (var plane in eventArgs.added)
                DisableIfVertical(plane);
        }

        void DisableIfVertical(ARPlane plane)
        {
            // Check whether the plane is a vertical plane.
            if (plane.alignment == PlaneAlignment.Vertical)
            {
                // Disable the entire GameObject.
                plane.gameObject.SetActive(false);

                // Add to our log so the user knows something happened.
                if (logText != null)
                    logText.text = string.Format("\n{0}", plane.trackableId);
            }
        }
    }
}
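Disabling planes after they are detected still spends tracking effort on them. A hedged alternative (not the sample's approach): ask the subsystem to detect horizontal planes only, where the platform supports it.

using UnityEngine;
using UnityEngine.XR.ARFoundation;
using UnityEngine.XR.ARSubsystems;

[RequireComponent(typeof(ARPlaneManager))]
public class HorizontalPlanesOnly : MonoBehaviour
{
    void OnEnable()
    {
        // requestedDetectionMode is a request; the platform may not honor it,
        // in which case a post-detection filter like DisableVerticalPlanes is
        // still useful.
        GetComponent<ARPlaneManager>().requestedDetectionMode = PlaneDetectionMode.Horizontal;
    }
}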

45
Assets/Scripts/EnvironmentProbeVisualizer.cs


using UnityEngine;

namespace UnityEngine.XR.ARFoundation.Samples
{
    [RequireComponent(typeof(MeshRenderer))]
    public class EnvironmentProbeVisualizer : MonoBehaviour
    {
        [SerializeField]
        ReflectionProbe m_ReflectionProbe;

        public ReflectionProbe reflectionProbe
        {
            get { return m_ReflectionProbe; }
            set { m_ReflectionProbe = value; }
        }

        void Update()
        {
            if (m_ReflectionProbe == null)
            {
                GetComponent<MeshRenderer>().enabled = false;
            }
            else
            {
                GetComponent<MeshRenderer>().enabled = true;
                transform.localPosition = m_ReflectionProbe.center;
                transform.localScale = m_ReflectionProbe.size;

                // Unity doesn't yet support rotated reflection probes, so the visualizer
                // needs to unrotate in order to display the box that will actually be used.
                transform.localRotation = Quaternion.Inverse(m_ReflectionProbe.transform.rotation);
            }
        }
    }
}
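The unrotation works because a child's world rotation is parent.rotation * localRotation, so setting localRotation to the inverse of the parent's world rotation yields an identity world rotation, and the box renders axis-aligned like the probe's actual bounds. A tiny check of that identity (hypothetical script, not in the sample):

using UnityEngine;

public class UnrotateCheck : MonoBehaviour
{
    void Update()
    {
        if (transform.parent == null)
            return;

        transform.localRotation = Quaternion.Inverse(transform.parent.rotation);
        // world rotation = parent.rotation * localRotation = identity
        Debug.Assert(Quaternion.Angle(transform.rotation, Quaternion.identity) < 0.01f);
    }
}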

122
Assets/Scripts/EyePoseVisualizer.cs


using UnityEngine.XR.ARKit;
#endif

namespace UnityEngine.XR.ARFoundation.Samples
{
    /// <summary>
    /// Visualizes the eye poses for an <see cref="ARFace"/>.
    /// </summary>
    /// <remarks>
    /// Face space is the space where the origin is the transform of an <see cref="ARFace"/>.
    /// </remarks>
    [RequireComponent(typeof(ARFace))]
    public class EyePoseVisualizer : MonoBehaviour
    {
        [SerializeField]
        GameObject m_EyePrefab;

        public GameObject eyePrefab
        {
            get => m_EyePrefab;
            set => m_EyePrefab = value;
        }

        GameObject m_LeftEyeGameObject;
        GameObject m_RightEyeGameObject;

        ARFace m_Face;
        XRFaceSubsystem m_FaceSubsystem;

        void Awake()
        {
            m_Face = GetComponent<ARFace>();
        }

        void CreateEyeGameObjectsIfNecessary()
        {
            if (m_Face.leftEye != null && m_LeftEyeGameObject == null)
            {
                m_LeftEyeGameObject = Instantiate(m_EyePrefab, m_Face.leftEye);
                m_LeftEyeGameObject.SetActive(false);
            }
            if (m_Face.rightEye != null && m_RightEyeGameObject == null)
            {
                m_RightEyeGameObject = Instantiate(m_EyePrefab, m_Face.rightEye);
                m_RightEyeGameObject.SetActive(false);
            }
        }

        void SetVisible(bool visible)
        {
            if (m_LeftEyeGameObject != null && m_RightEyeGameObject != null)
            {
                m_LeftEyeGameObject.SetActive(visible);
                m_RightEyeGameObject.SetActive(visible);
            }
        }

        void OnEnable()
        {
            var faceManager = FindObjectOfType<ARFaceManager>();
            if (faceManager != null && faceManager.subsystem != null && faceManager.descriptor.supportsEyeTracking)
            {
                m_FaceSubsystem = (XRFaceSubsystem)faceManager.subsystem;
                SetVisible((m_Face.trackingState == TrackingState.Tracking) && (ARSession.state > ARSessionState.Ready));
                m_Face.updated += OnUpdated;
            }
            else
            {
                enabled = false;
            }
        }

        void OnDisable()
        {
            m_Face.updated -= OnUpdated;
            SetVisible(false);
        }

        void OnUpdated(ARFaceUpdatedEventArgs eventArgs)
        {
            CreateEyeGameObjectsIfNecessary();
            SetVisible((m_Face.trackingState == TrackingState.Tracking) && (ARSession.state > ARSessionState.Ready));
        }
    }
}
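Because the eye prefabs are instantiated as children of m_Face.leftEye and m_Face.rightEye, they track the eye poses automatically. Those same Transforms can also be read directly, e.g. to drive game logic rather than spawn visuals; a minimal sketch (hypothetical component, not in the sample):

using UnityEngine;
using UnityEngine.XR.ARFoundation;

[RequireComponent(typeof(ARFace))]
public class EyePoseLogger : MonoBehaviour
{
    ARFace m_Face;

    void Awake() => m_Face = GetComponent<ARFace>();
    void OnEnable() => m_Face.updated += OnUpdated;
    void OnDisable() => m_Face.updated -= OnUpdated;

    void OnUpdated(ARFaceUpdatedEventArgs eventArgs)
    {
        // leftEye/rightEye remain null until the subsystem provides eye data.
        if (m_Face.leftEye != null && m_Face.rightEye != null)
            Debug.Log($"L: {m_Face.leftEye.position} R: {m_Face.rightEye.position}");
    }
}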

41
Assets/Scripts/EyeTrackingUI.cs


using UnityEngine.XR.ARFoundation;
using UnityEngine.XR.ARSubsystems;

namespace UnityEngine.XR.ARFoundation.Samples
{
    [RequireComponent(typeof(Text))]
    public class EyeTrackingUI : MonoBehaviour
    {
        [SerializeField]
        ARFaceManager m_Manager;

        void OnEnable()
        {
            if (m_Manager == null)
            {
                m_Manager = FindObjectOfType<ARFaceManager>();
            }

            if (m_Manager != null && m_Manager.subsystem != null && m_Manager.descriptor.supportsEyeTracking)
            {
                var infoGO = GetComponent<Text>();
                infoGO.text = "This device supports eye tracking.";
            }
            else
            {
                var infoGO = GetComponent<Text>();
                infoGO.text = "This device does not support eye tracking.";
            }
        }
    }
}

57
Assets/Scripts/FaceMaterialSwitcher.cs


using System.Collections.Generic;
using UnityEngine.XR.ARFoundation;
using UnityEngine.XR.ARSubsystems;

namespace UnityEngine.XR.ARFoundation.Samples
{
    public class FaceMaterialSwitcher : MonoBehaviour
    {
        [SerializeField]
        [Tooltip("Materials to use for face meshes.")]
        Material[] m_FaceMaterials;

        /// <summary>
        /// Getter/setter for the Face Materials.
        /// </summary>
        public Material[] faceMaterials
        {
            get { return m_FaceMaterials; }
            set { m_FaceMaterials = value; }
        }

        static int s_CurrentMaterialIndex;

        static Dictionary<TrackableId, Material> s_FaceTracker = new Dictionary<TrackableId, Material>();

        void Start()
        {
            ARFace face = GetComponent<ARFace>();
            Material mat;
            if (!s_FaceTracker.TryGetValue(face.trackableId, out mat))
            {
                s_FaceTracker.Add(face.trackableId, m_FaceMaterials[s_CurrentMaterialIndex]);
                GetComponent<MeshRenderer>().material = m_FaceMaterials[s_CurrentMaterialIndex];
                s_CurrentMaterialIndex = (s_CurrentMaterialIndex + 1) % m_FaceMaterials.Length;
            }
            else
            {
                // Assign the material that was already used for the face's unique id.
                GetComponent<MeshRenderer>().material = mat;
            }
        }
    }
}
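Because both s_CurrentMaterialIndex and s_FaceTracker are static, the rotation above is shared by every FaceMaterialSwitcher instance for the app's lifetime: the first face seen takes m_FaceMaterials[0], the next takes [1], wrapping around modulo the array length, while a face that is lost and re-detected with the same trackableId keeps its original material. The same bookkeeping isolated as a sketch (class and member names are hypothetical, not part of the sample):

using System.Collections.Generic;
using UnityEngine;
using UnityEngine.XR.ARSubsystems;

// Hypothetical helper illustrating the round-robin assignment; not part of the sample.
static class FaceMaterialBookkeeping
{
    static readonly Dictionary<TrackableId, Material> s_Assignments = new Dictionary<TrackableId, Material>();
    static int s_NextIndex;

    // Returns a stable material per face id, cycling round-robin for faces not seen before.
    public static Material GetOrAssign(TrackableId faceId, Material[] materials)
    {
        Material mat;
        if (!s_Assignments.TryGetValue(faceId, out mat))
        {
            mat = materials[s_NextIndex];
            s_Assignments.Add(faceId, mat);
            s_NextIndex = (s_NextIndex + 1) % materials.Length;
        }
        return mat;
    }
}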

136
Assets/Scripts/FixationPoint2DVisualizer.cs


using UnityEngine.XR.ARFoundation;
using UnityEngine.XR.ARSubsystems;

namespace UnityEngine.XR.ARFoundation.Samples
{
    /// <summary>
    /// Visualizes the eye gaze position in face space for an <see cref="ARFace"/>.
    /// </summary>
    /// <remarks>
    /// Face space is the space where the origin is the transform of an <see cref="ARFace"/>.
    /// </remarks>
    [RequireComponent(typeof(ARFace))]
    public class FixationPoint2DVisualizer : MonoBehaviour
    {
        [SerializeField]
        GameObject m_GUIFixationReticlePrefab;

        public GameObject fixationReticlePrefab
        {
            get => m_GUIFixationReticlePrefab;
            set => m_GUIFixationReticlePrefab = value;
        }

        GameObject m_FixationReticleGameObject;

        Canvas m_Canvas;
        ARFace m_Face;
        XRFaceSubsystem m_FaceSubsystem;

        void Awake()
        {
            m_Face = GetComponent<ARFace>();
        }

        void CreateEyeGameObjectsIfNecessary()
        {
            var canvas = FindObjectOfType<Canvas>();
            if (m_Face.fixationPoint != null && canvas != null && m_FixationReticleGameObject == null)
            {
                m_FixationReticleGameObject = Instantiate(m_GUIFixationReticlePrefab, canvas.transform);
            }
        }

        void SetVisible(bool visible)
        {
            if (m_FixationReticleGameObject != null)
                m_FixationReticleGameObject.SetActive(visible);
        }

        void OnEnable()
        {
            var faceManager = FindObjectOfType<ARFaceManager>();
            if (faceManager != null && faceManager.subsystem != null && faceManager.descriptor.supportsEyeTracking)
            {
                m_FaceSubsystem = (XRFaceSubsystem)faceManager.subsystem;
                SetVisible((m_Face.trackingState == TrackingState.Tracking) && (ARSession.state > ARSessionState.Ready));
                m_Face.updated += OnUpdated;
            }
            else
            {
                enabled = false;
            }
        }

        void OnDisable()
        {
            m_Face.updated -= OnUpdated;
            SetVisible(false);
        }

        void OnUpdated(ARFaceUpdatedEventArgs eventArgs)
        {
            CreateEyeGameObjectsIfNecessary();
            SetVisible((m_Face.trackingState == TrackingState.Tracking) && (ARSession.state > ARSessionState.Ready));
            UpdateScreenReticle();
        }

        void UpdateScreenReticle()
        {
            var mainCamera = Camera.main;

            var fixationInViewSpace = mainCamera.WorldToViewportPoint(m_Face.fixationPoint.position);
            // The camera texture is mirrored so x and y must be changed to match where the fixation point is in relation to the face.
            var mirrorFixationInView = new Vector3(1 - fixationInViewSpace.x, 1 - fixationInViewSpace.y, fixationInViewSpace.z);

            if (m_FixationReticleGameObject != null)
            {
                m_FixationReticleGameObject.GetComponent<RectTransform>().anchoredPosition3D = mainCamera.ViewportToScreenPoint(mirrorFixationInView);
            }
        }
    }
}
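The mirroring step in UpdateScreenReticle is worth unpacking: the front-facing camera image is mirrored, so a viewport-space point (x, y) must be reflected to (1 - x, 1 - y) before converting to screen space. Just that conversion, as a self-contained sketch (the class and method names are illustrative):

using UnityEngine;

static class ViewportMirrorUtil
{
    // Reflect a viewport-space point to compensate for the mirrored selfie-camera image,
    // then convert it to screen space. E.g. viewport (0.25, 0.75) maps to (0.75, 0.25).
    public static Vector3 MirroredViewportToScreen(Camera camera, Vector3 viewportPoint)
    {
        var mirrored = new Vector3(1f - viewportPoint.x, 1f - viewportPoint.y, viewportPoint.z);
        return camera.ViewportToScreenPoint(mirrored);
    }
}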

120
Assets/Scripts/FixationPoint3DVisualizer.cs


using UnityEngine.XR.ARFoundation;
using UnityEngine.XR.ARSubsystems;

namespace UnityEngine.XR.ARFoundation.Samples
{
    /// <summary>
    /// Visualizes the eye gaze position in face space for an <see cref="ARFace"/>.
    /// </summary>
    /// <remarks>
    /// Face space is the space where the origin is the transform of an <see cref="ARFace"/>.
    /// </remarks>
    [RequireComponent(typeof(ARFace))]
    public class FixationPoint3DVisualizer : MonoBehaviour
    {
        [SerializeField]
        GameObject m_FixationRayPrefab;

        public GameObject fixationRayPrefab
        {
            get => m_FixationRayPrefab;
            set => m_FixationRayPrefab = value;
        }

        GameObject m_FixationRayGameObject;

        ARFace m_Face;
        XRFaceSubsystem m_FaceSubsystem;

        void Awake()
        {
            m_Face = GetComponent<ARFace>();
        }

        void CreateEyeGameObjectsIfNecessary()
        {
            if (m_FixationRayGameObject == null && m_Face.fixationPoint != null)
            {
                m_FixationRayGameObject = Instantiate(m_FixationRayPrefab, m_Face.transform);
                m_FixationRayGameObject.SetActive(false);
            }
        }

        void SetVisible(bool visible)
        {
            if (m_FixationRayGameObject != null)
                m_FixationRayGameObject.SetActive(visible);
        }

        void OnEnable()
        {
            var faceManager = FindObjectOfType<ARFaceManager>();
            if (faceManager != null && faceManager.subsystem != null && faceManager.descriptor.supportsEyeTracking)
            {
                m_FaceSubsystem = (XRFaceSubsystem)faceManager.subsystem;
                m_Face.updated += OnUpdated;
            }
            else
            {
                enabled = false;
            }
        }

        void OnDisable()
        {
            m_Face.updated -= OnUpdated;
            SetVisible(false);
        }

        void OnUpdated(ARFaceUpdatedEventArgs eventArgs)
        {
            CreateEyeGameObjectsIfNecessary();
            SetVisible((m_Face.trackingState == TrackingState.Tracking) && (ARSession.state > ARSessionState.Ready));
            UpdateFixationPoint();
        }

        void UpdateFixationPoint()
        {
            if (m_FixationRayGameObject != null)
            {
                m_FixationRayGameObject.transform.LookAt(m_Face.fixationPoint.position);
            }
        }
    }
}

121
Assets/Scripts/HumanBodyTracker.cs


using System.Collections.Generic;
using UnityEngine.XR.ARFoundation;
using UnityEngine.XR.ARSubsystems;

namespace UnityEngine.XR.ARFoundation.Samples
{
    public class HumanBodyTracker : MonoBehaviour
    {
        [SerializeField]
        [Tooltip("The Skeleton prefab to be controlled.")]
        GameObject m_SkeletonPrefab;

        [SerializeField]
        [Tooltip("The ARHumanBodyManager which will produce body tracking events.")]
        ARHumanBodyManager m_HumanBodyManager;

        /// <summary>
        /// Get/Set the <c>ARHumanBodyManager</c>.
        /// </summary>
        public ARHumanBodyManager humanBodyManager
        {
            get { return m_HumanBodyManager; }
            set { m_HumanBodyManager = value; }
        }

        /// <summary>
        /// Get/Set the skeleton prefab.
        /// </summary>
        public GameObject skeletonPrefab
        {
            get { return m_SkeletonPrefab; }
            set { m_SkeletonPrefab = value; }
        }

        Dictionary<TrackableId, BoneController> m_SkeletonTracker = new Dictionary<TrackableId, BoneController>();

        void OnEnable()
        {
            Debug.Assert(m_HumanBodyManager != null, "Human body manager is required.");
            m_HumanBodyManager.humanBodiesChanged += OnHumanBodiesChanged;
        }

        void OnDisable()
        {
            if (m_HumanBodyManager != null)
                m_HumanBodyManager.humanBodiesChanged -= OnHumanBodiesChanged;
        }

        void OnHumanBodiesChanged(ARHumanBodiesChangedEventArgs eventArgs)
        {
            BoneController boneController;

            foreach (var humanBody in eventArgs.added)
            {
                if (!m_SkeletonTracker.TryGetValue(humanBody.trackableId, out boneController))
                {
                    Debug.Log($"Adding a new skeleton [{humanBody.trackableId}].");
                    var newSkeletonGO = Instantiate(m_SkeletonPrefab, humanBody.transform);
                    boneController = newSkeletonGO.GetComponent<BoneController>();
                    m_SkeletonTracker.Add(humanBody.trackableId, boneController);
                }

                boneController.InitializeSkeletonJoints();
                boneController.ApplyBodyPose(humanBody);
            }

            foreach (var humanBody in eventArgs.updated)
            {
                if (m_SkeletonTracker.TryGetValue(humanBody.trackableId, out boneController))
                {
                    boneController.ApplyBodyPose(humanBody);
                }
            }

            foreach (var humanBody in eventArgs.removed)
            {
                Debug.Log($"Removing a skeleton [{humanBody.trackableId}].");
                if (m_SkeletonTracker.TryGetValue(humanBody.trackableId, out boneController))
                {
                    Destroy(boneController.gameObject);
                    m_SkeletonTracker.Remove(humanBody.trackableId);
                }
            }
        }
    }
}
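HumanBodyTracker relies on the skeleton prefab carrying the BoneController component it calls into, specifically InitializeSkeletonJoints() and ApplyBodyPose(ARHumanBody). For checking the event wiring without a full rig, a minimal stand-in showing that contract might look like this (a sketch only; the real component lives in the BoneController script and actually poses joints):

using UnityEngine;
using UnityEngine.XR.ARFoundation;

// Stand-in with the same surface the tracker calls; it logs instead of posing joints.
public class LoggingBoneController : MonoBehaviour
{
    public void InitializeSkeletonJoints() => Debug.Log("Skeleton joints initialized.");

    public void ApplyBodyPose(ARHumanBody humanBody) =>
        Debug.Log($"Pose update for body [{humanBody.trackableId}].");
}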

265
Assets/Scripts/LightEstimation.cs


using UnityEngine.Rendering;
using UnityEngine.XR.ARFoundation;

namespace UnityEngine.XR.ARFoundation.Samples
{
    /// <summary>
    /// A component that can be used to access the most
    /// recently received light estimation information
    /// for the physical environment as observed by an
    /// AR device.
    /// </summary>
    [RequireComponent(typeof(Light))]
    public class LightEstimation : MonoBehaviour
    {
        [SerializeField]
        [Tooltip("The ARCameraManager which will produce frame events containing light estimation information.")]
        ARCameraManager m_CameraManager;

        [SerializeField]
        Transform m_Arrow;

        public Transform arrow
        {
            get => m_Arrow;
            set => m_Arrow = value;
        }

        /// <summary>
        /// Get or set the <c>ARCameraManager</c>.
        /// </summary>
        public ARCameraManager cameraManager
        {
            get { return m_CameraManager; }
            set
            {
                if (m_CameraManager == value)
                    return;

                if (m_CameraManager != null)
                    m_CameraManager.frameReceived -= FrameChanged;

                m_CameraManager = value;

                if (m_CameraManager != null && enabled)
                    m_CameraManager.frameReceived += FrameChanged;
            }
        }

        /// <summary>
        /// The estimated brightness of the physical environment, if available.
        /// </summary>
        public float? brightness { get; private set; }

        /// <summary>
        /// The estimated color temperature of the physical environment, if available.
        /// </summary>
        public float? colorTemperature { get; private set; }

        /// <summary>
        /// The estimated color correction value of the physical environment, if available.
        /// </summary>
        public Color? colorCorrection { get; private set; }

        /// <summary>
        /// The estimated direction of the main light of the physical environment, if available.
        /// </summary>
        public Vector3? mainLightDirection { get; private set; }

        /// <summary>
        /// The estimated color of the main light of the physical environment, if available.
        /// </summary>
        public Color? mainLightColor { get; private set; }

        /// <summary>
        /// The estimated intensity in lumens of the main light of the physical environment, if available.
        /// </summary>
        public float? mainLightIntensityLumens { get; private set; }

        /// <summary>
        /// The estimated spherical harmonics coefficients of the physical environment, if available.
        /// </summary>
        public SphericalHarmonicsL2? sphericalHarmonics { get; private set; }

        void Awake()
        {
            m_Light = GetComponent<Light>();
        }

        void OnEnable()
        {
            if (m_CameraManager != null)
                m_CameraManager.frameReceived += FrameChanged;

            // Disable the arrow to start; enable it later if we get directional light info
            if (arrow)
            {
                arrow.gameObject.SetActive(false);
            }
            Application.onBeforeRender += OnBeforeRender;
        }

        void OnDisable()
        {
            Application.onBeforeRender -= OnBeforeRender;
            if (m_CameraManager != null)
                m_CameraManager.frameReceived -= FrameChanged;
        }

        void OnBeforeRender()
        {
            if (arrow && m_CameraManager)
            {
                var cameraTransform = m_CameraManager.GetComponent<Camera>().transform;
                arrow.position = cameraTransform.position + cameraTransform.forward * .25f;
            }
        }

        void FrameChanged(ARCameraFrameEventArgs args)
        {
            if (args.lightEstimation.averageBrightness.HasValue)
            {
                brightness = args.lightEstimation.averageBrightness.Value;
                m_Light.intensity = brightness.Value;
            }

            if (args.lightEstimation.averageColorTemperature.HasValue)
            {
                colorTemperature = args.lightEstimation.averageColorTemperature.Value;
                m_Light.colorTemperature = colorTemperature.Value;
            }

            if (args.lightEstimation.colorCorrection.HasValue)
            {
                colorCorrection = args.lightEstimation.colorCorrection.Value;
                m_Light.color = colorCorrection.Value;
            }

            if (args.lightEstimation.mainLightDirection.HasValue)
            {
                mainLightDirection = args.lightEstimation.mainLightDirection;
                m_Light.transform.rotation = Quaternion.LookRotation(mainLightDirection.Value);
                if (arrow)
                {
                    arrow.gameObject.SetActive(true);
                    arrow.rotation = Quaternion.LookRotation(mainLightDirection.Value);
                }
            }
            else
            {
                arrow?.gameObject.SetActive(false);
            }

            if (args.lightEstimation.mainLightColor.HasValue)
            {
                mainLightColor = args.lightEstimation.mainLightColor;
                m_Light.color = mainLightColor.Value;
            }

            if (args.lightEstimation.mainLightIntensityLumens.HasValue)
            {
                mainLightIntensityLumens = args.lightEstimation.mainLightIntensityLumens;
                m_Light.intensity = args.lightEstimation.averageMainLightBrightness.Value;
            }

            if (args.lightEstimation.ambientSphericalHarmonics.HasValue)
            {
                sphericalHarmonics = args.lightEstimation.ambientSphericalHarmonics;
                RenderSettings.ambientMode = AmbientMode.Skybox;
                RenderSettings.ambientProbe = sphericalHarmonics.Value;
            }
        }

        Light m_Light;
    }
}
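Every estimation property above is nullable and only populated when the active provider supplies that value, so consumers should test HasValue before dereferencing. A minimal consumer sketch (the component name and field are illustrative, not part of the samples):

using UnityEngine;
using UnityEngine.XR.ARFoundation.Samples;

public class BrightnessLogger : MonoBehaviour
{
    [SerializeField]
    LightEstimation m_LightEstimation;

    void Update()
    {
        // brightness stays null until a frame carrying an average brightness estimate arrives.
        if (m_LightEstimation != null && m_LightEstimation.brightness.HasValue)
            Debug.Log($"Estimated brightness: {m_LightEstimation.brightness.Value}");
    }
}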

227
Assets/Scripts/LightEstimationUI.cs


using System.Text;
using UnityEngine.Rendering;
using UnityEngine.UI;

namespace UnityEngine.XR.ARFoundation.Samples
{
    /// <summary>
    /// A simple UI controller to display light estimation information.
    /// </summary>
    [RequireComponent(typeof(LightEstimation))]
    public class LightEstimationUI : MonoBehaviour
    {
        [Tooltip("The UI Text element used to display the estimated brightness in the physical environment.")]
        [SerializeField]
        Text m_BrightnessText;

        /// <summary>
        /// The UI Text element used to display the estimated brightness value.
        /// </summary>
        public Text brightnessText
        {
            get { return m_BrightnessText; }
            set { m_BrightnessText = value; }
        }

        [Tooltip("The UI Text element used to display the estimated color temperature in the physical environment.")]
        [SerializeField]
        Text m_ColorTemperatureText;

        /// <summary>
        /// The UI Text element used to display the estimated color temperature in the scene.
        /// </summary>
        public Text colorTemperatureText
        {
            get { return m_ColorTemperatureText; }
            set { m_ColorTemperatureText = value; }
        }

        [Tooltip("The UI Text element used to display the estimated color correction value for the physical environment.")]
        [SerializeField]
        Text m_ColorCorrectionText;

        /// <summary>
        /// The UI Text element used to display the estimated color correction value for the scene.
        /// </summary>
        public Text colorCorrectionText
        {
            get { return m_ColorCorrectionText; }
            set { m_ColorCorrectionText = value; }
        }

        [Tooltip("The UI Text element used to display the estimated direction of the main light for the physical environment.")]
        [SerializeField]
        Text m_MainLightDirectionText;

        public Text mainLightDirectionText
        {
            get => m_MainLightDirectionText;
            set => m_MainLightDirectionText = value;
        }

        [Tooltip("The UI Text element used to display the estimated intensity in lumens of the main light for the physical environment.")]
        [SerializeField]
        Text m_MainLightIntensityLumens;

        public Text mainLightIntensityLumens
        {
            get => m_MainLightIntensityLumens;
            set => m_MainLightIntensityLumens = value;
        }

        [Tooltip("The UI Text element used to display the estimated color of the main light for the physical environment.")]
        [SerializeField]
        Text m_MainLightColor;

        public Text mainLightColorText
        {
            get => m_MainLightColor;
            set => m_MainLightColor = value;
        }

        [Tooltip("The UI Text element used to display the estimated spherical harmonics coefficients for the physical environment.")]
        [SerializeField]
        Text m_SphericalHarmonicsText;

        public Text ambientSphericalHarmonicsText
        {
            get => m_SphericalHarmonicsText;
            set => m_SphericalHarmonicsText = value;
        }

        StringBuilder m_SphericalHarmonicsStringBuilder = new StringBuilder("");

        void Awake()
        {
            m_LightEstimation = GetComponent<LightEstimation>();
        }

        void Update()
        {
            SetUIValue(m_LightEstimation.brightness, brightnessText);
            SetUIValue(m_LightEstimation.colorTemperature, colorTemperatureText);
            SetUIValue(m_LightEstimation.colorCorrection, colorCorrectionText);
            SetUIValue(m_LightEstimation.mainLightDirection, mainLightDirectionText);
            SetUIValue(m_LightEstimation.mainLightColor, mainLightColorText);
            SetUIValue(m_LightEstimation.mainLightIntensityLumens, mainLightIntensityLumens);
            SetSphericalHarmonicsUIValue(m_LightEstimation.sphericalHarmonics, ambientSphericalHarmonicsText);
        }

        void SetSphericalHarmonicsUIValue(SphericalHarmonicsL2? maybeAmbientSphericalHarmonics, Text text)
        {
            if (text != null)
            {
                if (maybeAmbientSphericalHarmonics.HasValue)
                {
                    m_SphericalHarmonicsStringBuilder.Clear();
                    for (int i = 0; i < 3; ++i)
                    {
                        if (i == 0)
                            m_SphericalHarmonicsStringBuilder.Append("R:[");
                        else if (i == 1)
                            m_SphericalHarmonicsStringBuilder.Append("G:[");
                        else
                            m_SphericalHarmonicsStringBuilder.Append("B:[");

                        for (int j = 0; j < 9; ++j)
                        {
                            m_SphericalHarmonicsStringBuilder.Append(j != 8 ? $"{maybeAmbientSphericalHarmonics.Value[i, j]}, " : $"{maybeAmbientSphericalHarmonics.Value[i, j]}]\n");
                        }
                    }
                    text.text = m_SphericalHarmonicsStringBuilder.ToString();
                }
                else
                {
                    text.text = k_UnavailableText;
                }
            }
        }

        void SetUIValue<T>(T? displayValue, Text text) where T : struct
        {
            if (text != null)
                text.text = displayValue.HasValue ? displayValue.Value.ToString() : k_UnavailableText;
        }

        const string k_UnavailableText = "Unavailable";

        LightEstimation m_LightEstimation;
    }
}

99
Assets/Scripts/Logger.cs


using System.Collections.Generic;
using System.Text;
using UnityEngine;
using UnityEngine.UI;

namespace UnityEngine.XR.ARFoundation.Samples
{
    public class Logger : MonoBehaviour
    {
        [SerializeField]
        Text m_LogText;

        public Text logText
        {
            get { return s_LogText; }
            set
            {
                m_LogText = value;
                s_LogText = value;
            }
        }

        [SerializeField]
        int m_VisibleMessageCount = 40;

        public int visibleMessageCount
        {
            get { return s_VisibleMessageCount; }
            set
            {
                m_VisibleMessageCount = value;
                s_VisibleMessageCount = value;
            }
        }

        int m_LastMessageCount;

        static int s_VisibleMessageCount;

        static Text s_LogText;

        static List<string> s_Log = new List<string>();

        static StringBuilder s_StringBuilder = new StringBuilder();

        void Awake()
        {
            s_LogText = m_LogText;
            s_VisibleMessageCount = m_VisibleMessageCount;
            Log("Log console initialized.");
        }

        void Update()
        {
            lock (s_Log)
            {
                if (m_LastMessageCount != s_Log.Count)
                {
                    s_StringBuilder.Clear();
                    var startIndex = Mathf.Max(s_Log.Count - s_VisibleMessageCount, 0);
                    for (int i = startIndex; i < s_Log.Count; ++i)
                    {
                        s_StringBuilder.Append($"{i:000}> {s_Log[i]}\n");
                    }

                    s_LogText.text = s_StringBuilder.ToString();
                }

                m_LastMessageCount = s_Log.Count;
            }
        }

        public static void Log(string message)
        {
            lock (s_Log)
            {
                if (s_Log == null)
                    s_Log = new List<string>();

                s_Log.Add(message);
            }
        }
    }
}
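Logger.Log is static and guarded by a lock, so messages can be appended from threads other than the main thread, while Update republishes the visible tail to the Text element on the main thread. Typical use from any script (the wrapper class here is illustrative):

using UnityEngine.XR.ARFoundation.Samples;

public static class LoggerUsageExample
{
    public static void Report(int anchorCount)
    {
        // Safe to call from any thread; the on-screen text refreshes on the next Update.
        Logger.Log($"Anchor count is now {anchorCount}.");
    }
}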

109
Assets/Scripts/MakeAppearOnPlane.cs


using System.Collections.Generic;
using UnityEngine.XR.ARFoundation;
using UnityEngine.XR.ARSubsystems;

namespace UnityEngine.XR.ARFoundation.Samples
{
    /// <summary>
    /// Moves the ARSessionOrigin in such a way that it makes the given content appear to be
    /// at a given location acquired via a raycast.
    /// </summary>
    [RequireComponent(typeof(ARSessionOrigin))]
    [RequireComponent(typeof(ARRaycastManager))]
    public class MakeAppearOnPlane : MonoBehaviour
    {
        [SerializeField]
        [Tooltip("A transform which should be made to appear to be at the touch point.")]
        Transform m_Content;

        /// <summary>
        /// A transform which should be made to appear to be at the touch point.
        /// </summary>
        public Transform content
        {
            get { return m_Content; }
            set { m_Content = value; }
        }

        [SerializeField]
        [Tooltip("The rotation the content should appear to have.")]
        Quaternion m_Rotation;

        /// <summary>
        /// The rotation the content should appear to have.
        /// </summary>
        public Quaternion rotation
        {
            get { return m_Rotation; }
            set
            {
                m_Rotation = value;
                if (m_SessionOrigin != null)
                    m_SessionOrigin.MakeContentAppearAt(content, content.transform.position, m_Rotation);
            }
        }

        void Awake()
        {
            m_SessionOrigin = GetComponent<ARSessionOrigin>();
            m_RaycastManager = GetComponent<ARRaycastManager>();
        }

        void Update()
        {
            if (Input.touchCount == 0 || m_Content == null)
                return;

            var touch = Input.GetTouch(0);

            if (m_RaycastManager.Raycast(touch.position, s_Hits, TrackableType.PlaneWithinPolygon))
            {
                // Raycast hits are sorted by distance, so the first one
                // will be the closest hit.
                var hitPose = s_Hits[0].pose;

                // This does not move the content; instead, it moves and orients the ARSessionOrigin
                // such that the content appears to be at the raycast hit position.
                m_SessionOrigin.MakeContentAppearAt(content, hitPose.position, m_Rotation);
            }
        }

        static List<ARRaycastHit> s_Hits = new List<ARRaycastHit>();

        ARSessionOrigin m_SessionOrigin;

        ARRaycastManager m_RaycastManager;
    }
}
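Note that MakeContentAppearAt never moves the content itself; it repositions and reorients the ARSessionOrigin so the content coincides with the hit pose. The public rotation setter re-applies this immediately, which is how RotationController further down drives it. A minimal sketch of driving it from code (the component name and the 45-degree value are illustrative):

using UnityEngine;
using UnityEngine.XR.ARFoundation.Samples;

public class SpinPlacedContent : MonoBehaviour
{
    [SerializeField]
    MakeAppearOnPlane m_MakeAppearOnPlane;

    // Re-anchor the content with a 45-degree yaw; the session origin moves, not the content.
    public void ApplyYaw()
    {
        m_MakeAppearOnPlane.rotation = Quaternion.AngleAxis(45f, Vector3.up);
    }
}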

81
Assets/Scripts/PlaceMultipleObjectsOnPlane.cs


using System;
using System.Collections.Generic;
using UnityEngine.XR.ARFoundation;
using UnityEngine.XR.ARSubsystems;

namespace UnityEngine.XR.ARFoundation.Samples
{
    [RequireComponent(typeof(ARRaycastManager))]
    public class PlaceMultipleObjectsOnPlane : MonoBehaviour
    {
        [SerializeField]
        [Tooltip("Instantiates this prefab on a plane at the touch location.")]
        GameObject m_PlacedPrefab;

        /// <summary>
        /// The prefab to instantiate on touch.
        /// </summary>
        public GameObject placedPrefab
        {
            get { return m_PlacedPrefab; }
            set { m_PlacedPrefab = value; }
        }

        /// <summary>
        /// The object instantiated as a result of a successful raycast intersection with a plane.
        /// </summary>
        public GameObject spawnedObject { get; private set; }

        /// <summary>
        /// Invoked whenever an object is placed on a plane.
        /// </summary>
        public static event Action onPlacedObject;

        ARRaycastManager m_RaycastManager;

        static List<ARRaycastHit> s_Hits = new List<ARRaycastHit>();

        void Awake()
        {
            m_RaycastManager = GetComponent<ARRaycastManager>();
        }

        void Update()
        {
            if (Input.touchCount > 0)
            {
                Touch touch = Input.GetTouch(0);

                if (touch.phase == TouchPhase.Began)
                {
                    if (m_RaycastManager.Raycast(touch.position, s_Hits, TrackableType.PlaneWithinPolygon))
                    {
                        Pose hitPose = s_Hits[0].pose;

                        spawnedObject = Instantiate(m_PlacedPrefab, hitPose.position, hitPose.rotation);

                        if (onPlacedObject != null)
                        {
                            onPlacedObject();
                        }
                    }
                }
            }
        }
    }
}
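onPlacedObject is a static C# event, so any script can observe placements without holding a reference to the placer; subscribers should unsubscribe symmetrically to avoid dangling handlers. A minimal listener sketch (the component name is illustrative):

using UnityEngine;
using UnityEngine.XR.ARFoundation.Samples;

public class PlacementCounter : MonoBehaviour
{
    int m_Count;

    void OnEnable() => PlaceMultipleObjectsOnPlane.onPlacedObject += OnPlaced;

    void OnDisable() => PlaceMultipleObjectsOnPlane.onPlacedObject -= OnPlaced;

    // Invoked each time a prefab is instantiated on a plane.
    void OnPlaced() => Debug.Log($"Objects placed so far: {++m_Count}");
}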

125
Assets/Scripts/PlaceOnPlane.cs


using System.Collections.Generic;
using UnityEngine.XR.ARFoundation;
using UnityEngine.XR.ARSubsystems;

namespace UnityEngine.XR.ARFoundation.Samples
{
    /// <summary>
    /// Listens for touch events and performs an AR raycast from the screen touch point.
    /// AR raycasts will only hit detected trackables like feature points and planes.
    ///
    /// If a raycast hits a trackable, the <see cref="placedPrefab"/> is instantiated
    /// and moved to the hit position.
    /// </summary>
    [RequireComponent(typeof(ARRaycastManager))]
    public class PlaceOnPlane : MonoBehaviour
    {
        [SerializeField]
        [Tooltip("Instantiates this prefab on a plane at the touch location.")]
        GameObject m_PlacedPrefab;

        /// <summary>
        /// The prefab to instantiate on touch.
        /// </summary>
        public GameObject placedPrefab
        {
            get { return m_PlacedPrefab; }
            set { m_PlacedPrefab = value; }
        }

        /// <summary>
        /// The object instantiated as a result of a successful raycast intersection with a plane.
        /// </summary>
        public GameObject spawnedObject { get; private set; }

        void Awake()
        {
            m_RaycastManager = GetComponent<ARRaycastManager>();
        }

        bool TryGetTouchPosition(out Vector2 touchPosition)
        {
#if UNITY_EDITOR
            if (Input.GetMouseButton(0))
            {
                var mousePosition = Input.mousePosition;
                touchPosition = new Vector2(mousePosition.x, mousePosition.y);
                return true;
            }
#else
            if (Input.touchCount > 0)
            {
                touchPosition = Input.GetTouch(0).position;
                return true;
            }
#endif
            touchPosition = default;
            return false;
        }

        void Update()
        {
            if (!TryGetTouchPosition(out Vector2 touchPosition))
                return;

            if (m_RaycastManager.Raycast(touchPosition, s_Hits, TrackableType.PlaneWithinPolygon))
            {
                // Raycast hits are sorted by distance, so the first one
                // will be the closest hit.
                var hitPose = s_Hits[0].pose;

                if (spawnedObject == null)
                {
                    spawnedObject = Instantiate(m_PlacedPrefab, hitPose.position, hitPose.rotation);
                }
                else
                {
                    spawnedObject.transform.position = hitPose.position;
                }
            }
        }

        static List<ARRaycastHit> s_Hits = new List<ARRaycastHit>();

        ARRaycastManager m_RaycastManager;
    }
}

99
Assets/Scripts/PlaneDetectionController.cs


using UnityEngine.UI;
using UnityEngine.XR.ARFoundation;

namespace UnityEngine.XR.ARFoundation.Samples
{
    /// <summary>
    /// This example demonstrates how to toggle plane detection,
    /// and also hide or show the existing planes.
    /// </summary>
    [RequireComponent(typeof(ARPlaneManager))]
    public class PlaneDetectionController : MonoBehaviour
    {
        [Tooltip("The UI Text element used to display plane detection messages.")]
        [SerializeField]
        Text m_TogglePlaneDetectionText;

        /// <summary>
        /// The UI Text element used to display plane detection messages.
        /// </summary>
        public Text togglePlaneDetectionText
        {
            get { return m_TogglePlaneDetectionText; }
            set { m_TogglePlaneDetectionText = value; }
        }

        /// <summary>
        /// Toggles plane detection and the visualization of the planes.
        /// </summary>
        public void TogglePlaneDetection()
        {
            m_ARPlaneManager.enabled = !m_ARPlaneManager.enabled;

            string planeDetectionMessage = "";
            if (m_ARPlaneManager.enabled)
            {
                planeDetectionMessage = "Disable Plane Detection and Hide Existing";
                SetAllPlanesActive(true);
            }
            else
            {
                planeDetectionMessage = "Enable Plane Detection and Show Existing";
                SetAllPlanesActive(false);
            }

            if (togglePlaneDetectionText != null)
                togglePlaneDetectionText.text = planeDetectionMessage;
        }

        /// <summary>
        /// Iterates over all the existing planes and activates
        /// or deactivates their <c>GameObject</c>s.
        /// </summary>
        /// <param name="value">Each plane's GameObject is SetActive with this value.</param>
        void SetAllPlanesActive(bool value)
        {
            foreach (var plane in m_ARPlaneManager.trackables)
                plane.gameObject.SetActive(value);
        }

        void Awake()
        {
            m_ARPlaneManager = GetComponent<ARPlaneManager>();
        }

        ARPlaneManager m_ARPlaneManager;
    }
}
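TogglePlaneDetection is public and parameterless, so it can be bound to a UI Button either in the Inspector or from code. A minimal wiring sketch (the binder class and field names are illustrative):

using UnityEngine;
using UnityEngine.UI;
using UnityEngine.XR.ARFoundation.Samples;

public class PlaneToggleButtonBinder : MonoBehaviour
{
    [SerializeField]
    Button m_Button;

    [SerializeField]
    PlaneDetectionController m_Controller;

    // Route button clicks to the controller's toggle, and detach when disabled.
    void OnEnable() => m_Button.onClick.AddListener(m_Controller.TogglePlaneDetection);

    void OnDisable() => m_Button.onClick.RemoveListener(m_Controller.TogglePlaneDetection);
}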

165
Assets/Scripts/RotationController.cs


using UnityEngine;
using UnityEngine.UI;

namespace UnityEngine.XR.ARFoundation.Samples
{
    /// <summary>
    /// Controls the orientation of content placed by the <see cref="MakeAppearOnPlane"/>
    /// component using a UI.Slider to affect the rotation about the Y axis.
    /// </summary>
    [RequireComponent(typeof(MakeAppearOnPlane))]
    public class RotationController : MonoBehaviour
    {
        MakeAppearOnPlane m_MakeAppearOnPlane;

        [SerializeField]
        [Tooltip("The slider used to control rotation.")]
        Slider m_Slider;

        /// <summary>
        /// The slider used to control rotation.
        /// </summary>
        public Slider slider
        {
            get { return m_Slider; }
            set { m_Slider = value; }
        }

        [SerializeField]
        [Tooltip("The text used to display the current rotation on the screen.")]
        Text m_Text;

        /// <summary>
        /// The text used to display the current rotation on the screen.
        /// </summary>
        public Text text
        {
            get { return m_Text; }
            set { m_Text = value; }
        }

        [SerializeField]
        [Tooltip("Minimum rotation angle in degrees.")]
        public float m_Min = 0f;

        /// <summary>
        /// Minimum angle in degrees.
        /// </summary>
        public float min
        {
            get { return m_Min; }
            set { m_Min = value; }
        }

        [SerializeField]
        [Tooltip("Maximum angle in degrees.")]
        public float m_Max = 360f;

        /// <summary>
        /// Maximum angle in degrees.
        /// </summary>
        public float max
        {
            get { return m_Max; }
            set { m_Max = value; }
        }

        /// <summary>
        /// Invoked when the slider's value changes.
        /// </summary>
        public void OnSliderValueChanged()
        {
            if (slider != null)
                angle = slider.value * (max - min) + min;
        }

        float angle
        {
            get
            {
                return m_MakeAppearOnPlane.rotation.eulerAngles.y;
            }
            set
            {
                m_MakeAppearOnPlane.rotation = Quaternion.AngleAxis(value, Vector3.up);
                UpdateText();
            }
        }

        void Awake()
        {
            m_MakeAppearOnPlane = GetComponent<MakeAppearOnPlane>();
        }

        void OnEnable()
        {
            if (slider != null)
                slider.value = (angle - min) / (max - min);
            UpdateText();
        }

        void UpdateText()
        {
            if (m_Text != null)
                m_Text.text = "Rotation: " + angle + " degrees";
        }
    }
}
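Both RotationController here and ScaleController below use the same linear remapping between the slider's normalized 0 to 1 value and a [min, max] range: value * (max - min) + min one way, and (x - min) / (max - min) back. For example, with min = 0 and max = 360, a slider value of 0.5 yields a 180-degree rotation. The pair of maps in isolation (class and method names are illustrative):

static class SliderRangeMath
{
    // Forward map: normalized slider value t in [0, 1] to a value in [min, max].
    // E.g. Remap(0.5f, 0f, 360f) == 180f.
    public static float Remap(float t, float min, float max) => t * (max - min) + min;

    // Inverse map: a value x in [min, max] back to the slider's normalized range.
    // E.g. Normalize(180f, 0f, 360f) == 0.5f.
    public static float Normalize(float x, float min, float max) => (x - min) / (max - min);
}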

163
Assets/Scripts/ScaleController.cs


using UnityEngine.UI;
using UnityEngine.XR.ARFoundation;

namespace UnityEngine.XR.ARFoundation.Samples
{
    /// <summary>
    /// Sets the scale of the ARSessionOrigin according to the value of a UI.Slider.
    /// </summary>
    [RequireComponent(typeof(ARSessionOrigin))]
    public class ScaleController : MonoBehaviour
    {
        [SerializeField]
        [Tooltip("The slider used to control the scale factor.")]
        Slider m_Slider;

        /// <summary>
        /// The slider used to control the scale factor.
        /// </summary>
        public Slider slider
        {
            get { return m_Slider; }
            set { m_Slider = value; }
        }

        [SerializeField]
        [Tooltip("The text used to display the current scale factor on the screen.")]
        Text m_Text;

        /// <summary>
        /// The text used to display the current scale factor on the screen.
        /// </summary>
        public Text text
        {
            get { return m_Text; }
            set { m_Text = value; }
        }

        [SerializeField]
        [Tooltip("Minimum scale factor.")]
        public float m_Min = .1f;

        /// <summary>
        /// Minimum scale factor.
        /// </summary>
        public float min
        {
            get { return m_Min; }
            set { m_Min = value; }
        }

        [SerializeField]
        [Tooltip("Maximum scale factor.")]
        public float m_Max = 10f;

        /// <summary>
        /// Maximum scale factor.
        /// </summary>
        public float max
        {
            get { return m_Max; }
            set { m_Max = value; }
        }

        /// <summary>
        /// Invoked whenever the slider's value changes.
        /// </summary>
        public void OnSliderValueChanged()
        {
            if (slider != null)
                scale = slider.value * (max - min) + min;
        }

        float scale
        {
            get
            {
                return m_SessionOrigin.transform.localScale.x;
            }
            set
            {
                m_SessionOrigin.transform.localScale = Vector3.one * value;
                UpdateText();
            }
        }

        void Awake()
        {
            m_SessionOrigin = GetComponent<ARSessionOrigin>();
        }

        void OnEnable()
        {
            if (slider != null)
                slider.value = (scale - min) / (max - min);
            UpdateText();
        }

        void UpdateText()
        {
            if (text != null)
                text.text = "Scale: " + scale;
        }

        ARSessionOrigin m_SessionOrigin;
    }
}

241
Assets/Scripts/ScreenSpaceJointVisualizer.cs


using System.Collections.Generic;
using Unity.Collections;
using UnityEngine;
using UnityEngine.XR.ARFoundation;
using UnityEngine.XR.ARSubsystems;

namespace UnityEngine.XR.ARFoundation.Samples
{
    public class ScreenSpaceJointVisualizer : MonoBehaviour
    {
        // 2D joint skeleton
        enum JointIndices
        {
            Invalid = -1,
            Head = 0, // parent: Neck1 [1]
            Neck1 = 1, // parent: Root [16]
            RightShoulder1 = 2, // parent: Neck1 [1]
            RightForearm = 3, // parent: RightShoulder1 [2]
            RightHand = 4, // parent: RightForearm [3]
            LeftShoulder1 = 5, // parent: Neck1 [1]
            LeftForearm = 6, // parent: LeftShoulder1 [5]
            LeftHand = 7, // parent: LeftForearm [6]
            RightUpLeg = 8, // parent: Root [16]
            RightLeg = 9, // parent: RightUpLeg [8]
            RightFoot = 10, // parent: RightLeg [9]
            LeftUpLeg = 11, // parent: Root [16]
            LeftLeg = 12, // parent: LeftUpLeg [11]
            LeftFoot = 13, // parent: LeftLeg [12]
            RightEye = 14, // parent: Head [0]
            LeftEye = 15, // parent: Head [0]
            Root = 16, // parent: <none> [-1]
        }

        [SerializeField]
        [Tooltip("The AR camera being used in the scene.")]
        Camera m_ARCamera;

        /// <summary>
        /// Get or set the <c>Camera</c>.
        /// </summary>
        public Camera arCamera
        {
            get { return m_ARCamera; }
            set { m_ARCamera = value; }
        }

        [SerializeField]
        [Tooltip("The ARHumanBodyManager which will produce human body anchors.")]
        ARHumanBodyManager m_HumanBodyManager;

        /// <summary>
        /// Get or set the <c>ARHumanBodyManager</c>.
        /// </summary>
        public ARHumanBodyManager humanBodyManager
        {
            get { return m_HumanBodyManager; }
            set { m_HumanBodyManager = value; }
        }

        [SerializeField]
        [Tooltip("A prefab that contains a LineRenderer component that will be used for rendering lines, representing the skeleton joints.")]
        GameObject m_LineRendererPrefab;

        /// <summary>
        /// Get or set the Line Renderer prefab.
        /// </summary>
        public GameObject lineRendererPrefab
        {
            get { return m_LineRendererPrefab; }
            set { m_LineRendererPrefab = value; }
        }

        Dictionary<int, GameObject> m_LineRenderers;
        static HashSet<int> s_JointSet = new HashSet<int>();

        void Awake()
        {
            m_LineRenderers = new Dictionary<int, GameObject>();
        }

        void UpdateRenderer(NativeArray<XRHumanBodyPose2DJoint> joints, int index)
        {
            GameObject lineRendererGO;
            if (!m_LineRenderers.TryGetValue(index, out lineRendererGO))
            {
                lineRendererGO = Instantiate(m_LineRendererPrefab, transform);
                m_LineRenderers.Add(index, lineRendererGO);
            }

            var lineRenderer = lineRendererGO.GetComponent<LineRenderer>();

            // Traverse hierarchy to determine the longest line set that needs to be drawn.
            var positions = new NativeArray<Vector2>(joints.Length, Allocator.Temp);
            try
            {
                var boneIndex = index;
                int jointCount = 0;
                while (boneIndex >= 0)
                {
                    var joint = joints[boneIndex];
                    if (joint.tracked)
                    {
                        positions[jointCount++] = joint.position;
                        if (!s_JointSet.Add(boneIndex))
                            break;
                    }
                    else
                        break;

                    boneIndex = joint.parentIndex;
                }

                // Render the joints as lines on the camera's near clip plane.
                lineRenderer.positionCount = jointCount;
                lineRenderer.startWidth = 0.001f;
                lineRenderer.endWidth = 0.001f;
                for (int i = 0; i < jointCount; ++i)
                {
                    var position = positions[i];
                    var worldPosition = m_ARCamera.ViewportToWorldPoint(
                        new Vector3(position.x, position.y, m_ARCamera.nearClipPlane));
                    lineRenderer.SetPosition(i, worldPosition);
                }
                lineRendererGO.SetActive(true);
            }
            finally
            {
                positions.Dispose();
            }
        }

        void Update()
        {
            Debug.Assert(m_HumanBodyManager != null, "Human body manager cannot be null");
            var joints = m_HumanBodyManager.GetHumanBodyPose2DJoints(Allocator.Temp);
            if (!joints.IsCreated)
            {
                HideJointLines();
                return;
            }

            using (joints)
            {
                s_JointSet.Clear();
                for (int i = joints.Length - 1; i >= 0; --i)
                {
                    if (joints[i].parentIndex != -1)
                        UpdateRenderer(joints, i);
                }
            }
        }

        void HideJointLines()
        {
            foreach (var lineRenderer in m_LineRenderers)
            {
                lineRenderer.Value.SetActive(false);
            }
        }
    }
}
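
The key step above is the viewport-to-world mapping: ARKit reports 2D joint positions in normalized (0..1) viewport coordinates, and placing them on the camera's near clip plane keeps the skeleton glued to the screen. A standalone sketch of just that projection step (the helper name is hypothetical):

using UnityEngine;

public static class ViewportProjectionExample
{
    // 'normalizedJoint' is a 0..1 viewport position, as reported for 2D joints.
    public static Vector3 ToNearPlaneWorldPoint(Camera camera, Vector2 normalizedJoint)
    {
        // The z component selects the plane the point lands on; using the near
        // clip plane makes the result effectively screen-space.
        return camera.ViewportToWorldPoint(
            new Vector3(normalizedJoint.x, normalizedJoint.y, camera.nearClipPlane));
    }
}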

197
Assets/Scripts/SupportChecker.cs


using System.Collections;
using UnityEngine;
using UnityEngine.UI;
using UnityEngine.XR.ARFoundation;

namespace UnityEngine.XR.ARFoundation.Samples
{
    /// <summary>
    /// This example shows how to check for AR support before the ARSession is enabled.
    /// For ARCore in particular, it is possible for a device to support ARCore but not
    /// have it installed. This example will detect this case and prompt the user to install ARCore.
    /// To test this feature yourself, use a supported device and uninstall ARCore.
    /// (Settings > Search for "ARCore" and uninstall or disable it.)
    /// </summary>
    public class SupportChecker : MonoBehaviour
    {
        [SerializeField]
        ARSession m_Session;

        public ARSession session
        {
            get { return m_Session; }
            set { m_Session = value; }
        }

        [SerializeField]
        Text m_LogText;

        public Text logText
        {
            get { return m_LogText; }
            set { m_LogText = value; }
        }

        [SerializeField]
        Button m_InstallButton;

        public Button installButton
        {
            get { return m_InstallButton; }
            set { m_InstallButton = value; }
        }

        void Log(string message)
        {
            m_LogText.text += $"{message}\n";
        }

        IEnumerator CheckSupport()
        {
            SetInstallButtonActive(false);

            Log("Checking for AR support...");

            yield return ARSession.CheckAvailability();

            if (ARSession.state == ARSessionState.NeedsInstall)
            {
                Log("Your device supports AR, but requires a software update.");
                Log("Attempting install...");
                yield return ARSession.Install();
            }

            if (ARSession.state == ARSessionState.Ready)
            {
                Log("Your device supports AR!");
                Log("Starting AR session...");

                // To start the ARSession, we just need to enable it.
                m_Session.enabled = true;
            }
            else
            {
                switch (ARSession.state)
                {
                    case ARSessionState.Unsupported:
                        Log("Your device does not support AR.");
                        break;
                    case ARSessionState.NeedsInstall:
                        Log("The software update failed, or you declined the update.");

                        // In this case, we enable a button which allows the user
                        // to try again in the event they decline the update the first time.
                        SetInstallButtonActive(true);
                        break;
                }

                Log("\n[Start non-AR experience instead]");

                //
                // Start a non-AR fallback experience here...
                //
            }
        }

        void SetInstallButtonActive(bool active)
        {
            if (m_InstallButton != null)
                m_InstallButton.gameObject.SetActive(active);
        }

        IEnumerator Install()
        {
            SetInstallButtonActive(false);

            if (ARSession.state == ARSessionState.NeedsInstall)
            {
                Log("Attempting install...");
                yield return ARSession.Install();

                if (ARSession.state == ARSessionState.NeedsInstall)
                {
                    Log("The software update failed, or you declined the update.");
                    SetInstallButtonActive(true);
                }
                else if (ARSession.state == ARSessionState.Ready)
                {
                    Log("Success! Starting AR session...");
                    m_Session.enabled = true;
                }
            }
            else
            {
                Log("Error: ARSession does not require install.");
            }
        }

        public void OnInstallButtonPressed()
        {
            StartCoroutine(Install());
        }

        void OnEnable()
        {
            StartCoroutine(CheckSupport());
        }
    }
}
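
SupportChecker polls ARSession.state from a coroutine. An alternative, event-driven sketch that reacts to the ARSession.stateChanged event instead; the m_Session wiring mirrors the field above, and SupportWatcher itself is hypothetical, not part of the sample:

using System.Collections;
using UnityEngine;
using UnityEngine.XR.ARFoundation;

public class SupportWatcher : MonoBehaviour
{
    [SerializeField]
    ARSession m_Session;

    void OnEnable() { ARSession.stateChanged += OnStateChanged; }
    void OnDisable() { ARSession.stateChanged -= OnStateChanged; }

    IEnumerator Start()
    {
        // Kick off the availability query; results arrive via stateChanged.
        yield return ARSession.CheckAvailability();
    }

    void OnStateChanged(ARSessionStateChangedEventArgs args)
    {
        if (args.state == ARSessionState.Ready)
            m_Session.enabled = true; // enabling the session starts AR
        else if (args.state == ARSessionState.Unsupported)
            Debug.Log("AR unsupported; start a non-AR fallback here.");
    }
}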

93
Assets/Scripts/TestBodyAnchorScale.cs


using System;
using UnityEngine;
using UnityEngine.UI;
using UnityEngine.XR.ARFoundation;

namespace UnityEngine.XR.ARFoundation.Samples
{
    public class TestBodyAnchorScale : MonoBehaviour
    {
        [SerializeField]
        [Tooltip("The ARHumanBodyManager which will produce frame events.")]
        ARHumanBodyManager m_HumanBodyManager;

        /// <summary>
        /// Get or set the <c>ARHumanBodyManager</c>.
        /// </summary>
        public ARHumanBodyManager humanBodyManager
        {
            get { return m_HumanBodyManager; }
            set { m_HumanBodyManager = value; }
        }

        [SerializeField]
        Text m_ImageInfo;

        /// <summary>
        /// The UI Text used to display information about the image on screen.
        /// </summary>
        public Text imageInfo
        {
            get { return m_ImageInfo; }
            set { m_ImageInfo = value; }
        }

        void OnEnable()
        {
            Debug.Assert(m_ImageInfo != null, "text field is required");
            m_ImageInfo.enabled = true;

            Debug.Assert(m_HumanBodyManager != null, "Human body manager is required.");
            m_HumanBodyManager.humanBodiesChanged += OnHumanBodiesChanged;
        }

        void OnDisable()
        {
            Debug.Assert(m_ImageInfo != null, "text field is required");
            m_ImageInfo.enabled = false;

            Debug.Assert(m_HumanBodyManager != null, "Human body manager is required.");
            m_HumanBodyManager.humanBodiesChanged -= OnHumanBodiesChanged;
        }

        void OnHumanBodiesChanged(ARHumanBodiesChangedEventArgs eventArgs)
        {
            // Currently, the ARKit provider only ever produces one body anchor, so just reference the first
            float scale = ((eventArgs.added.Count > 0) ? eventArgs.added[0].estimatedHeightScaleFactor
                           : ((eventArgs.updated.Count > 0) ? eventArgs.updated[0].estimatedHeightScaleFactor
                              : Single.NaN));

            Debug.Assert(m_ImageInfo != null, "text field is required");
            m_ImageInfo.text = scale.ToString("F10");
        }
    }
}
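
The nested ternary above picks the most recent body anchor's estimatedHeightScaleFactor, using NaN as a "no body this frame" sentinel. The same selection logic factored into a small helper for readability (BodyScaleUtil is hypothetical, not part of the sample):

using UnityEngine.XR.ARFoundation;

static class BodyScaleUtil
{
    // Returns the newest body's height scale factor, or NaN when no body
    // anchor was added or updated in this change event.
    public static float LatestScale(ARHumanBodiesChangedEventArgs eventArgs)
    {
        if (eventArgs.added.Count > 0)
            return eventArgs.added[0].estimatedHeightScaleFactor;
        if (eventArgs.updated.Count > 0)
            return eventArgs.updated[0].estimatedHeightScaleFactor;
        return float.NaN;
    }
}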

189
Assets/Scripts/TestDepthImage.cs


using System.Text;
using UnityEngine;
using UnityEngine.UI;
using UnityEngine.XR.ARFoundation;
using UnityEngine.XR.ARSubsystems;

namespace UnityEngine.XR.ARFoundation.Samples
{
    /// <summary>
    /// This component tests getting the latest camera image
    /// and converting it to RGBA format. If successful,
    /// it displays the image on the screen as a RawImage
    /// and also displays information about the image.
    ///
    /// This is useful for computer vision applications where
    /// you need to access the raw pixels from camera image
    /// on the CPU.
    ///
    /// This is different from the ARCameraBackground component, which
    /// efficiently displays the camera image on the screen. If you
    /// just want to blit the camera texture to the screen, use
    /// the ARCameraBackground, or use Graphics.Blit to create
    /// a GPU-friendly RenderTexture.
    ///
    /// In this example, we get the camera image data on the CPU,
    /// convert it to an RGBA format, then display it on the screen
    /// as a RawImage texture to demonstrate it is working.
    /// This is done as an example; do not use this technique simply
    /// to render the camera image on screen.
    /// </summary>
    public class TestDepthImage : MonoBehaviour
    {
        [SerializeField]
        [Tooltip("The AROcclusionManager which will produce frame events.")]
        AROcclusionManager m_OcclusionManager;

        /// <summary>
        /// Get or set the <c>AROcclusionManager</c>.
        /// </summary>
        public AROcclusionManager occlusionManager
        {
            get { return m_OcclusionManager; }
            set { m_OcclusionManager = value; }
        }

        [SerializeField]
        RawImage m_RawImage;

        /// <summary>
        /// The UI RawImage used to display the image on screen.
        /// </summary>
        public RawImage rawImage
        {
            get { return m_RawImage; }
            set { m_RawImage = value; }
        }

        [SerializeField]
        Text m_ImageInfo;

        /// <summary>
        /// The UI Text used to display information about the image on screen.
        /// </summary>
        public Text imageInfo
        {
            get { return m_ImageInfo; }
            set { m_ImageInfo = value; }
        }

        void LogTextureInfo(StringBuilder stringBuilder, string textureName, Texture2D texture)
        {
            stringBuilder.AppendFormat("texture : {0}\n", textureName);
            if (texture == null)
            {
                stringBuilder.AppendFormat("   <null>\n");
            }
            else
            {
                stringBuilder.AppendFormat("   format : {0}\n", texture.format.ToString());
                stringBuilder.AppendFormat("   width  : {0}\n", texture.width);
                stringBuilder.AppendFormat("   height : {0}\n", texture.height);
                stringBuilder.AppendFormat("   mipmap : {0}\n", texture.mipmapCount);
            }
        }

        void Update()
        {
            Debug.Assert(m_OcclusionManager != null, "no occlusion manager");
            var subsystem = m_OcclusionManager.subsystem;
            if (subsystem == null)
            {
                if (m_ImageInfo != null)
                {
                    m_ImageInfo.text = "Human Segmentation not supported.";
                }
                return;
            }

            StringBuilder sb = new StringBuilder();
            Texture2D humanStencil = m_OcclusionManager.humanStencilTexture;
            Texture2D humanDepth = m_OcclusionManager.humanDepthTexture;
            LogTextureInfo(sb, "stencil", humanStencil);
            LogTextureInfo(sb, "depth", humanDepth);

            if (m_ImageInfo != null)
            {
                m_ImageInfo.text = sb.ToString();
            }
            else
            {
                Debug.Log(sb.ToString());
            }

            // To use the stencil, be sure the HumanSegmentationStencilMode property on the AROcclusionManager is set to a
            // non-disabled value.
            m_RawImage.texture = humanStencil;

            // To use the depth, be sure the HumanSegmentationDepthMode property on the AROcclusionManager is set to a
            // non-disabled value.
            // m_RawImage.texture = humanDepth;
        }
    }
}
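
The doc comment above points to Graphics.Blit as the GPU-friendly route when no CPU pixel access is needed. A minimal sketch of that route for the human depth texture, assuming a pre-created RenderTexture target (DepthBlitExample and m_Target are hypothetical, not part of the sample):

using UnityEngine;
using UnityEngine.XR.ARFoundation;

public class DepthBlitExample : MonoBehaviour
{
    [SerializeField]
    AROcclusionManager m_OcclusionManager;

    [SerializeField]
    RenderTexture m_Target; // hypothetical pre-created target displayed by some UI

    void Update()
    {
        var depth = m_OcclusionManager.humanDepthTexture;
        if (depth != null)
            Graphics.Blit(depth, m_Target); // GPU-side copy; no CPU readback
    }
}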

371
Assets/Scripts/UX/ARSceneSelectUI.cs


using UnityEngine;
using UnityEngine.UI;
using UnityEngine.SceneManagement;

namespace UnityEngine.XR.ARFoundation.Samples
{
    public class ARSceneSelectUI : MonoBehaviour
    {
        [SerializeField]
        GameObject m_AllMenu;
        public GameObject allMenu
        {
            get { return m_AllMenu; }
            set { m_AllMenu = value; }
        }

        [SerializeField]
        GameObject m_FaceTrackingMenu;
        public GameObject faceTrackingMenu
        {
            get { return m_FaceTrackingMenu; }
            set { m_FaceTrackingMenu = value; }
        }

        [SerializeField]
        GameObject m_HumanSegmentationMenu;
        public GameObject humanSegmentationMenu
        {
            get { return m_HumanSegmentationMenu; }
            set { m_HumanSegmentationMenu = value; }
        }

        [SerializeField]
        GameObject m_PlaneDetectionMenu;
        public GameObject planeDetectionMenu
        {
            get { return m_PlaneDetectionMenu; }
            set { m_PlaneDetectionMenu = value; }
        }

        void Start()
        {
            if (ActiveMenu.currentMenu == MenuType.FaceTracking)
            {
                m_FaceTrackingMenu.SetActive(true);
                m_AllMenu.SetActive(false);
            }
            else if (ActiveMenu.currentMenu == MenuType.PlaneDetection)
            {
                m_PlaneDetectionMenu.SetActive(true);
                m_AllMenu.SetActive(false);
            }
            else if (ActiveMenu.currentMenu == MenuType.HumanSegmentation)
            {
                m_HumanSegmentationMenu.SetActive(true);
                m_AllMenu.SetActive(false);
            }
        }

        public void SimpleARButtonPressed()
        {
            SceneManager.LoadScene("SimpleAR", LoadSceneMode.Single);
        }

        public void ImageTrackableButtonPressed()
        {
            SceneManager.LoadScene("ImageTracking", LoadSceneMode.Single);
        }

        public void AnchorsButtonPressed()
        {
            SceneManager.LoadScene("Anchors", LoadSceneMode.Single);
        }

        public void ARCollaborationDataButtonPressed()
        {
            SceneManager.LoadScene("ARCollaborationDataExample", LoadSceneMode.Single);
        }

        public void ARKitCoachingOverlayButtonPressed()
        {
            SceneManager.LoadScene("ARKitCoachingOverlay", LoadSceneMode.Single);
        }

        public void ARWorldMapButtonPressed()
        {
            SceneManager.LoadScene("ARWorldMap", LoadSceneMode.Single);
        }

        public void CameraImageButtonPressed()
        {
            SceneManager.LoadScene("CameraImage", LoadSceneMode.Single);
        }

        public void CheckSupportButtonPressed()
        {
            SceneManager.LoadScene("Check Support", LoadSceneMode.Single);
        }

        public void EnvironmentProbesButtonPressed()
        {
            SceneManager.LoadScene("EnvironmentProbes", LoadSceneMode.Single);
        }

        public void ObjectTrackingButtonPressed()
        {
            SceneManager.LoadScene("ObjectTracking", LoadSceneMode.Single);
        }

        public void PlaneOcclusionButtonPressed()
        {
            SceneManager.LoadScene("PlaneOcclusion", LoadSceneMode.Single);
        }

        public void PointCloudButtonPressed()
        {
            SceneManager.LoadScene("AllPointCloudPoints", LoadSceneMode.Single);
        }

        public void ScaleButtonPressed()
        {
            SceneManager.LoadScene("Scale", LoadSceneMode.Single);
        }

        public void SampleUXButtonPressed()
        {
            SceneManager.LoadScene("SampleUXScene", LoadSceneMode.Single);
        }

        public void FaceTrackingMenuButtonPressed()
        {
            ActiveMenu.currentMenu = MenuType.FaceTracking;
            m_FaceTrackingMenu.SetActive(true);
            m_AllMenu.SetActive(false);
        }

        public void ARCoreFaceRegionsButtonPressed()
        {
            SceneManager.LoadScene("ARCoreFaceRegions", LoadSceneMode.Single);
        }

        public void ARKitFaceBlendShapesButtonPressed()
        {
            SceneManager.LoadScene("ARKitFaceBlendShapes", LoadSceneMode.Single);
        }

        public void EyeLasersButtonPressed()
        {
            SceneManager.LoadScene("EyeLasers", LoadSceneMode.Single);
        }

        public void EyePosesButtonPressed()
        {
            SceneManager.LoadScene("EyePoses", LoadSceneMode.Single);
        }

        public void FaceMeshButtonPressed()
        {
            SceneManager.LoadScene("FaceMesh", LoadSceneMode.Single);
        }

        public void FacePoseButtonPressed()
        {
            SceneManager.LoadScene("FacePose", LoadSceneMode.Single);
        }

        public void FixationPointButtonPressed()
        {
            SceneManager.LoadScene("FixationPoint", LoadSceneMode.Single);
        }

        public void RearCameraWithFrontCameraFaceMeshButtonPressed()
        {
            SceneManager.LoadScene("WorldCameraWithUserFacingFaceTracking", LoadSceneMode.Single);
        }

        public void HumanSegmentationMenuButtonPressed()
        {
            ActiveMenu.currentMenu = MenuType.HumanSegmentation;
            m_HumanSegmentationMenu.SetActive(true);
            m_AllMenu.SetActive(false);
        }

        public void HumanSegmentation2DButtonPressed()
        {
            SceneManager.LoadScene("HumanBodyTracking2D", LoadSceneMode.Single);
        }

        public void HumanSegmentation3DButtonPressed()
        {
            SceneManager.LoadScene("HumanBodyTracking3D", LoadSceneMode.Single);
        }

        public void HumanSegmentationImagesButtonPressed()
        {
            SceneManager.LoadScene("HumanSegmentationImages", LoadSceneMode.Single);
        }

        public void LightEstimationButtonPressed()
        {
            SceneManager.LoadScene("LightEstimation", LoadSceneMode.Single);
        }

        public void PlaneDetectionMenuButtonPressed()
        {
            ActiveMenu.currentMenu = MenuType.PlaneDetection;
            m_PlaneDetectionMenu.SetActive(true);
            m_AllMenu.SetActive(false);
        }

        public void FeatheredPlanesButtonPressed()
        {
            SceneManager.LoadScene("FeatheredPlanes", LoadSceneMode.Single);
        }

        public void PlaneClassificationButtonPressed()
        {
            SceneManager.LoadScene("PlaneClassification", LoadSceneMode.Single);
        }

        public void TogglePlaneDetectionButtonPressed()
        {
            SceneManager.LoadScene("TogglePlaneDetection", LoadSceneMode.Single);
        }

        public void BackButtonPressed()
        {
            ActiveMenu.currentMenu = MenuType.Main;
            m_FaceTrackingMenu.SetActive(false);
            m_PlaneDetectionMenu.SetActive(false);
            m_HumanSegmentationMenu.SetActive(false);
            m_AllMenu.SetActive(true);
        }
    }
}
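
Each handler above is a single SceneManager.LoadScene call. Purely as an illustration of an alternative wiring, the handlers could collapse into one string-parameter method bound per button in the Inspector (SceneLoadButton is hypothetical; the sample deliberately keeps explicit methods, which are easier to find and rename safely):

using UnityEngine;
using UnityEngine.SceneManagement;

public class SceneLoadButton : MonoBehaviour
{
    // Bind to a Button's OnClick in the Inspector and pass the scene name
    // as the string argument.
    public void LoadScene(string sceneName)
    {
        SceneManager.LoadScene(sceneName, LoadSceneMode.Single);
    }
}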

24
Assets/Scripts/UX/ActiveMenu.cs


using System.Collections.Generic;
using UnityEngine;

namespace UnityEngine.XR.ARFoundation.Samples
{
    public enum MenuType
    {
        Main,
        FaceTracking,
        PlaneDetection,
        HumanSegmentation,
    }

    public static class ActiveMenu
    {
        public static MenuType currentMenu { get; set; }
    }
}
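
ActiveMenu works because static fields survive SceneManager.LoadScene: the selected sub-menu is remembered when a sample scene returns to the menu scene. A hypothetical illustration of that round trip:

using UnityEngine;
using UnityEngine.SceneManagement;
using UnityEngine.XR.ARFoundation.Samples;

public class MenuStateDemo : MonoBehaviour
{
    void Start()
    {
        // Record which sub-menu we came from before leaving the menu scene.
        ActiveMenu.currentMenu = MenuType.PlaneDetection;
        SceneManager.LoadScene("TogglePlaneDetection", LoadSceneMode.Single);
        // When the "Menu" scene is loaded again, ActiveMenu.currentMenu is still
        // PlaneDetection, so ARSceneSelectUI.Start() reopens that sub-menu.
    }
}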

41
Assets/Scripts/UX/BackButton.cs


using UnityEngine;
using UnityEngine.SceneManagement;

namespace UnityEngine.XR.ARFoundation.Samples
{
    public class BackButton : MonoBehaviour
    {
        [SerializeField]
        GameObject m_BackButton;
        public GameObject backButton
        {
            get { return m_BackButton; }
            set { m_BackButton = value; }
        }

        void Start()
        {
            // Only show the back button when the Menu scene is in the build,
            // i.e. the sample was launched from the scene-select menu.
            if (Application.CanStreamedLevelBeLoaded("Menu"))
            {
                m_BackButton.SetActive(true);
            }
        }

        public void BackButtonPressed()
        {
            SceneManager.LoadScene("Menu", LoadSceneMode.Single);
        }
    }
}

481
Assets/Scripts/UX/CheckAvailableFeatures.cs


using System.Collections.Generic;
using UnityEngine;
using UnityEngine.UI;
using UnityEngine.XR.ARSubsystems;
#if UNITY_IOS
using UnityEngine.XR.ARKit;
#endif

namespace UnityEngine.XR.ARFoundation.Samples
{
    public class CheckAvailableFeatures : MonoBehaviour
    {
        [SerializeField]
        Button m_SimpleAR;
        public Button simpleAR
        {
            get { return m_SimpleAR; }
            set { m_SimpleAR = value; }
        }

        [SerializeField]
        Button m_ImageTracking;
        public Button imageTracking
        {
            get { return m_ImageTracking; }
            set { m_ImageTracking = value; }
        }

        [SerializeField]
        Button m_Anchors;
        public Button anchors
        {
            get { return m_Anchors; }
            set { m_Anchors = value; }
        }

        [SerializeField]
        Button m_ARWorldMap;
        public Button ARWorldMap
        {
            get { return m_ARWorldMap; }
            set { m_ARWorldMap = value; }
        }

        [SerializeField]
        Button m_CameraImage;
        public Button cameraImage
        {
            get { return m_CameraImage; }
            set { m_CameraImage = value; }
        }

        [SerializeField]
        Button m_EnvironmentProbes;
        public Button environmentProbes
        {
            get { return m_EnvironmentProbes; }
            set { m_EnvironmentProbes = value; }
        }

        [SerializeField]
        Button m_ARCollaborationData;
        public Button ARCollaborationData
        {
            get { return m_ARCollaborationData; }
            set { m_ARCollaborationData = value; }
        }

        [SerializeField]
        Button m_ARKitCoachingOverlay;
        public Button ARKitCoachingOverlay
        {
            get { return m_ARKitCoachingOverlay; }
            set { m_ARKitCoachingOverlay = value; }
        }

        [SerializeField]
        Button m_Scale;
        public Button scale
        {
            get { return m_Scale; }
            set { m_Scale = value; }
        }

        [SerializeField]
        Button m_ObjectTracking;
        public Button objectTracking
        {
            get { return m_ObjectTracking; }
            set { m_ObjectTracking = value; }
        }

        [SerializeField]
        Button m_PlaneOcclusion;
        public Button planeOcclusion
        {
            get { return m_PlaneOcclusion; }
            set { m_PlaneOcclusion = value; }
        }

        [SerializeField]
        Button m_PointCloud;
        public Button pointCloud
        {
            get { return m_PointCloud; }
            set { m_PointCloud = value; }
        }

        [SerializeField]
        Button m_FaceTracking;
        public Button faceTracking
        {
            get { return m_FaceTracking; }
            set { m_FaceTracking = value; }
        }

        [SerializeField]
        Button m_FaceBlendShapes;
        public Button faceBlendShapes
        {
            get { return m_FaceBlendShapes; }
            set { m_FaceBlendShapes = value; }
        }

        [SerializeField]
        Button m_HumanSegmentation;
        public Button humanSegmentation
        {
            get { return m_HumanSegmentation; }
            set { m_HumanSegmentation = value; }
        }

        [SerializeField]
        Button m_LightEstimation;
        public Button lightEstimation
        {
            get { return m_LightEstimation; }
            set { m_LightEstimation = value; }
        }

        [SerializeField]
        Button m_PlaneDetection;
        public Button planeDetection
        {
            get { return m_PlaneDetection; }
            set { m_PlaneDetection = value; }
        }

        [SerializeField]
        Button m_PlaneClassification;
        public Button planeClassification
        {
            get { return m_PlaneClassification; }
            set { m_PlaneClassification = value; }
        }

        // Start is called before the first frame update
        void Start()
        {
            List<XRPlaneSubsystemDescriptor> planeDescriptors = new List<XRPlaneSubsystemDescriptor>();
            SubsystemManager.GetSubsystemDescriptors<XRPlaneSubsystemDescriptor>(planeDescriptors);

            List<XRRaycastSubsystemDescriptor> rayCastDescriptors = new List<XRRaycastSubsystemDescriptor>();
            SubsystemManager.GetSubsystemDescriptors<XRRaycastSubsystemDescriptor>(rayCastDescriptors);

            List<XRFaceSubsystemDescriptor> faceDescriptors = new List<XRFaceSubsystemDescriptor>();
            SubsystemManager.GetSubsystemDescriptors<XRFaceSubsystemDescriptor>(faceDescriptors);

            List<XRImageTrackingSubsystemDescriptor> imageDescriptors = new List<XRImageTrackingSubsystemDescriptor>();
            SubsystemManager.GetSubsystemDescriptors<XRImageTrackingSubsystemDescriptor>(imageDescriptors);

            List<XREnvironmentProbeSubsystemDescriptor> envDescriptors = new List<XREnvironmentProbeSubsystemDescriptor>();
            SubsystemManager.GetSubsystemDescriptors<XREnvironmentProbeSubsystemDescriptor>(envDescriptors);

            List<XRAnchorSubsystemDescriptor> anchorDescriptors = new List<XRAnchorSubsystemDescriptor>();
            SubsystemManager.GetSubsystemDescriptors<XRAnchorSubsystemDescriptor>(anchorDescriptors);

            List<XRObjectTrackingSubsystemDescriptor> objectDescriptors = new List<XRObjectTrackingSubsystemDescriptor>();
            SubsystemManager.GetSubsystemDescriptors<XRObjectTrackingSubsystemDescriptor>(objectDescriptors);

            List<XRParticipantSubsystemDescriptor> participantDescriptors = new List<XRParticipantSubsystemDescriptor>();
            SubsystemManager.GetSubsystemDescriptors<XRParticipantSubsystemDescriptor>(participantDescriptors);

            List<XRDepthSubsystemDescriptor> depthDescriptors = new List<XRDepthSubsystemDescriptor>();
            SubsystemManager.GetSubsystemDescriptors<XRDepthSubsystemDescriptor>(depthDescriptors);

            List<XROcclusionSubsystemDescriptor> occlusionDescriptors = new List<XROcclusionSubsystemDescriptor>();
            SubsystemManager.GetSubsystemDescriptors<XROcclusionSubsystemDescriptor>(occlusionDescriptors);

            List<XRCameraSubsystemDescriptor> cameraDescriptors = new List<XRCameraSubsystemDescriptor>();
            SubsystemManager.GetSubsystemDescriptors<XRCameraSubsystemDescriptor>(cameraDescriptors);

            List<XRSessionSubsystemDescriptor> sessionDescriptors = new List<XRSessionSubsystemDescriptor>();
            SubsystemManager.GetSubsystemDescriptors<XRSessionSubsystemDescriptor>(sessionDescriptors);

            if (planeDescriptors.Count > 0 && rayCastDescriptors.Count > 0)
            {
                m_SimpleAR.interactable = true;
                m_Scale.interactable = true;
            }
            if (faceDescriptors.Count > 0)
            {
                m_FaceTracking.interactable = true;
#if UNITY_IOS
                m_FaceBlendShapes.interactable = true;
#endif
            }
            if (occlusionDescriptors.Count > 0)
            {
                foreach (XROcclusionSubsystemDescriptor occlusionDescriptor in occlusionDescriptors)
                {
                    if (occlusionDescriptor.supportsHumanSegmentationDepthImage && occlusionDescriptor.supportsHumanSegmentationStencilImage)
                    {
                        m_HumanSegmentation.interactable = true;
                        break;
                    }
                }
            }
            if (cameraDescriptors.Count > 0)
            {
                foreach (var cameraDescriptor in cameraDescriptors)
                {
                    if ((cameraDescriptor.supportsAverageBrightness || cameraDescriptor.supportsAverageIntensityInLumens) &&
                        cameraDescriptor.supportsAverageColorTemperature && cameraDescriptor.supportsCameraConfigurations &&
                        cameraDescriptor.supportsCameraImage)
                    {
                        m_LightEstimation.interactable = true;
                    }
                }
            }
            if (imageDescriptors.Count > 0)
            {
                m_ImageTracking.interactable = true;
            }
            if (envDescriptors.Count > 0)
            {
                m_EnvironmentProbes.interactable = true;
            }
            if (planeDescriptors.Count > 0)
            {
                m_PlaneDetection.interactable = true;
                foreach (var planeDescriptor in planeDescriptors)
                {
                    if (planeDescriptor.supportsClassification)
                    {
                        m_PlaneClassification.interactable = true;
                        break;
                    }
                }
            }
            if (anchorDescriptors.Count > 0)
            {
                m_Anchors.interactable = true;
            }
            if (objectDescriptors.Count > 0)
            {
                m_ObjectTracking.interactable = true;
            }
            if (cameraDescriptors.Count > 0)
            {
                foreach (var cameraDescriptor in cameraDescriptors)
                {
                    if (cameraDescriptor.supportsCameraImage)
                    {
                        m_CameraImage.interactable = true;
                        break;
                    }
                }
            }
#if UNITY_IOS
            if (sessionDescriptors.Count > 0 && ARKitSessionSubsystem.worldMapSupported)
            {
                m_ARWorldMap.interactable = true;
            }
            if (planeDescriptors.Count > 0 && rayCastDescriptors.Count > 0 && participantDescriptors.Count > 0 && ARKitSessionSubsystem.supportsCollaboration)
            {
                m_ARCollaborationData.interactable = true;
            }
            if (sessionDescriptors.Count > 0 && ARKitSessionSubsystem.coachingOverlaySupported)
            {
                m_ARKitCoachingOverlay.interactable = true;
            }
#endif
            if (depthDescriptors.Count > 0)
            {
                m_PointCloud.interactable = true;
            }
            if (planeDescriptors.Count > 0)
            {
                m_PlaneOcclusion.interactable = true;
            }
        }
    }
}
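
Every availability check above follows the same pattern: fill a list of subsystem descriptors, then treat a non-empty list as "supported". A hedged generic helper that captures this (SubsystemAvailability is hypothetical; it assumes the same SubsystemManager.GetSubsystemDescriptors overload used above):

using System.Collections.Generic;
using UnityEngine;

static class SubsystemAvailability
{
    // Returns true if at least one provider registered a descriptor of this type.
    public static bool Any<TDescriptor>() where TDescriptor : ISubsystemDescriptor
    {
        var descriptors = new List<TDescriptor>();
        SubsystemManager.GetSubsystemDescriptors(descriptors);
        return descriptors.Count > 0;
    }
}

// Example usage:
//   bool planes = SubsystemAvailability.Any<UnityEngine.XR.ARSubsystems.XRPlaneSubsystemDescriptor>();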

77
Assets/Scripts/UX/FadePlaneOnBoundaryChange.cs


using UnityEngine;
using UnityEngine.XR.ARFoundation;

namespace UnityEngine.XR.ARFoundation.Samples
{
    [RequireComponent(typeof(ARPlane))]
    [RequireComponent(typeof(Animator))]
    public class FadePlaneOnBoundaryChange : MonoBehaviour
    {
        const string k_FadeOffAnim = "FadeOff";
        const string k_FadeOnAnim = "FadeOn";
        const float k_TimeOut = 2.0f;

        Animator m_Animator;
        ARPlane m_Plane;

        float m_ShowTime = 0;
        bool m_UpdatingPlane = false;

        void OnEnable()
        {
            m_Plane = GetComponent<ARPlane>();
            m_Animator = GetComponent<Animator>();

            m_Plane.boundaryChanged += PlaneOnBoundaryChanged;
        }

        void OnDisable()
        {
            m_Plane.boundaryChanged -= PlaneOnBoundaryChanged;
        }

        void Update()
        {
            if (m_UpdatingPlane)
            {
                m_ShowTime -= Time.deltaTime;

                if (m_ShowTime <= 0)
                {
                    m_UpdatingPlane = false;
                    m_Animator.SetBool(k_FadeOffAnim, true);
                    m_Animator.SetBool(k_FadeOnAnim, false);
                }
            }
        }

        void PlaneOnBoundaryChanged(ARPlaneBoundaryChangedEventArgs obj)
        {
            m_Animator.SetBool(k_FadeOffAnim, false);
            m_Animator.SetBool(k_FadeOnAnim, true);
            m_UpdatingPlane = true;
            m_ShowTime = k_TimeOut;
        }
    }
}
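
The Update/PlaneOnBoundaryChanged pair above is a generic "reset-on-activity timeout": each boundary change restarts a countdown, and the fade-off only fires after k_TimeOut seconds of quiet. The same pattern stripped of AR types (ActivityTimeout is hypothetical):

using UnityEngine;

public class ActivityTimeout : MonoBehaviour
{
    const float k_Timeout = 2.0f;
    float m_Remaining;
    bool m_Active;

    // Call whenever activity occurs (e.g. a plane boundary changed).
    public void Ping()
    {
        m_Remaining = k_Timeout;
        m_Active = true;
    }

    void Update()
    {
        if (!m_Active)
            return;

        m_Remaining -= Time.deltaTime;
        if (m_Remaining <= 0)
        {
            m_Active = false;
            Debug.Log("Timed out; trigger the fade-off here.");
        }
    }
}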

61
Assets/Scripts/UX/Tooltip.cs


using UnityEngine;
using UnityEngine.EventSystems;
using UnityEngine.UI;

namespace UnityEngine.XR.ARFoundation.Samples
{
    public class Tooltip : MonoBehaviour, IPointerEnterHandler, IPointerExitHandler
    {
        [SerializeField]
        GameObject m_Tooltip;
        public GameObject toolTip
        {
            get { return m_Tooltip; }
            set { m_Tooltip = value; }
        }

        bool m_EnteredButton;
        Vector3 m_ToolTipOffset;

        void Start()
        {
            m_ToolTipOffset = new Vector3(-50, 100, 0);
        }

        void Update()
        {
            if (m_EnteredButton)
            {
                m_Tooltip.transform.position = Input.mousePosition + m_ToolTipOffset;
            }
        }

        public void OnPointerEnter(PointerEventData eventData)
        {
            m_EnteredButton = true;

            // Only show the tooltip when the button is disabled, to explain
            // why the feature is unavailable on this device.
            if (!gameObject.GetComponent<Button>().interactable)
            {
                m_Tooltip.SetActive(true);
            }
        }

        public void OnPointerExit(PointerEventData eventData)
        {
            m_EnteredButton = false;
            m_Tooltip.SetActive(false);
        }
    }
}

163
Assets/Scripts/UX/UIManager.cs


using System.Collections.Generic;
using UnityEngine;
using UnityEngine.XR.ARFoundation;

namespace UnityEngine.XR.ARFoundation.Samples
{
    public class UIManager : MonoBehaviour
    {
        [SerializeField]
        [Tooltip("The ARCameraManager which will produce frame events.")]
        ARCameraManager m_CameraManager;

        /// <summary>
        /// Get or set the <c>ARCameraManager</c>.
        /// </summary>
        public ARCameraManager cameraManager
        {
            get { return m_CameraManager; }
            set
            {
                if (m_CameraManager == value)
                    return;

                if (m_CameraManager != null)
                    m_CameraManager.frameReceived -= FrameChanged;

                m_CameraManager = value;

                if (m_CameraManager != null && enabled)
                    m_CameraManager.frameReceived += FrameChanged;
            }
        }

        const string k_FadeOffAnim = "FadeOff";
        const string k_FadeOnAnim = "FadeOn";

        [SerializeField]
        ARPlaneManager m_PlaneManager;

        public ARPlaneManager planeManager
        {
            get { return m_PlaneManager; }
            set { m_PlaneManager = value; }
        }

        [SerializeField]
        Animator m_MoveDeviceAnimation;

        public Animator moveDeviceAnimation
        {
            get { return m_MoveDeviceAnimation; }
            set { m_MoveDeviceAnimation = value; }
        }

        [SerializeField]
        Animator m_TapToPlaceAnimation;

        public Animator tapToPlaceAnimation
        {
            get { return m_TapToPlaceAnimation; }
            set { m_TapToPlaceAnimation = value; }
        }

        static List<ARPlane> s_Planes = new List<ARPlane>();

        bool m_ShowingTapToPlace = false;

        bool m_ShowingMoveDevice = true;

        void OnEnable()
        {
            if (m_CameraManager != null)
                m_CameraManager.frameReceived += FrameChanged;

            PlaceMultipleObjectsOnPlane.onPlacedObject += PlacedObject;
        }

        void OnDisable()
        {
            if (m_CameraManager != null)
                m_CameraManager.frameReceived -= FrameChanged;

            PlaceMultipleObjectsOnPlane.onPlacedObject -= PlacedObject;
        }

        void FrameChanged(ARCameraFrameEventArgs args)
        {
            if (PlanesFound() && m_ShowingMoveDevice)
            {
                if (moveDeviceAnimation)
                    moveDeviceAnimation.SetTrigger(k_FadeOffAnim);

                if (tapToPlaceAnimation)
                    tapToPlaceAnimation.SetTrigger(k_FadeOnAnim);

                m_ShowingTapToPlace = true;
                m_ShowingMoveDevice = false;
            }
        }

        bool PlanesFound()
        {
            if (planeManager == null)
                return false;

            return planeManager.trackables.count > 0;
        }

        void PlacedObject()
        {
            if (m_ShowingTapToPlace)
            {
                if (tapToPlaceAnimation)
                    tapToPlaceAnimation.SetTrigger(k_FadeOffAnim);

                m_ShowingTapToPlace = false;
            }
        }
    }
}
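
UIManager uses ARCameraManager.frameReceived as a cheap per-frame tick that only fires while the AR camera is producing frames. A minimal standalone sketch of that subscription with the same OnEnable/OnDisable symmetry (FrameLogger is hypothetical, not part of the sample):

using UnityEngine;
using UnityEngine.XR.ARFoundation;

public class FrameLogger : MonoBehaviour
{
    [SerializeField]
    ARCameraManager m_CameraManager;

    void OnEnable()
    {
        if (m_CameraManager != null)
            m_CameraManager.frameReceived += OnFrame;
    }

    void OnDisable()
    {
        if (m_CameraManager != null)
            m_CameraManager.frameReceived -= OnFrame;
    }

    void OnFrame(ARCameraFrameEventArgs args)
    {
        // Light estimation, display matrices, and timestamps ride along on the event args.
        if (args.timestampNs.HasValue)
            Debug.Log($"AR camera frame at {args.timestampNs.Value} ns");
    }
}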