
Remove (temporarily) unsupported samples

Branch: /1.5-preview
Tim Mowrer, 6 years ago
Current commit: 475f7859
10 files changed, 0 insertions(+), 1456 deletions(-)
  1. Assets/Scenes/CameraImage.unity (1001 deletions)
  2. Assets/Scenes/CameraImage.unity.meta (7 deletions)
  3. Assets/Scenes/FaceTracking.meta (8 deletions)
  4. Assets/Scripts/TestCameraImage.cs (118 deletions)
  5. Assets/Scripts/TestCameraImage.cs.meta (11 deletions)
  6. Assets/Scripts/CameraConfigController.cs (89 deletions)
  7. Assets/Scripts/CameraConfigController.cs.meta (11 deletions)
  8. Assets/Scripts/ARFaceArkitBlendShapeVisualizer.cs (200 deletions)
  9. Assets/Scripts/ARFaceArkitBlendShapeVisualizer.cs.meta (11 deletions)

Assets/Scenes/CameraImage.unity (1001 deletions)
File diff suppressed: too large to display.

Assets/Scenes/CameraImage.unity.meta (7 deletions)


fileFormatVersion: 2
guid: e769916924fd3814ab1d9474de816b22
DefaultImporter:
  externalObjects: {}
  userData:
  assetBundleName:
  assetBundleVariant:

Assets/Scenes/FaceTracking.meta (8 deletions)


fileFormatVersion: 2
guid: 8c1909290913d4c389b1517a0146bfca
folderAsset: yes
DefaultImporter:
  externalObjects: {}
  userData:
  assetBundleName:
  assetBundleVariant:

Assets/Scripts/TestCameraImage.cs (118 deletions)


using System;
using Unity.Collections.LowLevel.Unsafe;
using UnityEngine;
using UnityEngine.UI;
using UnityEngine.XR.ARExtensions;
using UnityEngine.XR.ARFoundation;

/// <summary>
/// This component tests getting the latest camera image
/// and converting it to RGBA format. If successful,
/// it displays the image on the screen as a RawImage
/// and also displays information about the image.
///
/// This is useful for computer vision applications where
/// you need to access the raw pixels from the camera image
/// on the CPU.
///
/// This is different from the ARCameraBackground component, which
/// efficiently displays the camera image on the screen. If you
/// just want to blit the camera texture to the screen, use
/// the ARCameraBackground, or use Graphics.Blit to create
/// a GPU-friendly RenderTexture.
///
/// In this example, we get the camera image data on the CPU,
/// convert it to an RGBA format, then display it on the screen
/// as a RawImage texture to demonstrate it is working.
/// This is done as an example; do not use this technique simply
/// to render the camera image on screen.
/// </summary>
public class TestCameraImage : MonoBehaviour
{
    [SerializeField]
    RawImage m_RawImage;

    /// <summary>
    /// The UI RawImage used to display the image on screen.
    /// </summary>
    public RawImage rawImage
    {
        get { return m_RawImage; }
        set { m_RawImage = value; }
    }

    [SerializeField]
    Text m_ImageInfo;

    /// <summary>
    /// The UI Text used to display information about the image on screen.
    /// </summary>
    public Text imageInfo
    {
        get { return m_ImageInfo; }
        set { m_ImageInfo = value; }
    }

    void OnEnable()
    {
        ARSubsystemManager.cameraFrameReceived += OnCameraFrameReceived;
    }

    void OnDisable()
    {
        ARSubsystemManager.cameraFrameReceived -= OnCameraFrameReceived;
    }

    unsafe void OnCameraFrameReceived(ARCameraFrameEventArgs eventArgs)
    {
        // Attempt to get the latest camera image. If this method succeeds,
        // it acquires a native resource that must be disposed (see below).
        CameraImage image;
        if (!ARSubsystemManager.cameraSubsystem.TryGetLatestImage(out image))
            return;

        // Display some information about the camera image
        m_ImageInfo.text = string.Format(
            "Image info:\n\twidth: {0}\n\theight: {1}\n\tplaneCount: {2}\n\ttimestamp: {3}\n\tformat: {4}",
            image.width, image.height, image.planeCount, image.timestamp, image.format);

        // Once we have a valid CameraImage, we can access the individual image "planes"
        // (the separate channels in the image). CameraImage.GetPlane provides
        // low-overhead access to this data. This could then be passed to a
        // computer vision algorithm. Here, we will convert the camera image
        // to an RGBA texture and draw it on the screen.

        // Choose an RGBA format.
        // See CameraImage.FormatSupported for a complete list of supported formats.
        var format = TextureFormat.RGBA32;

        if (m_Texture == null || m_Texture.width != image.width || m_Texture.height != image.height)
            m_Texture = new Texture2D(image.width, image.height, format, false);

        // Convert the image to the chosen format, flipping the image across the Y axis.
        // We could also get a sub rectangle, but we'll get the full image here.
        var conversionParams = new CameraImageConversionParams(image, format, CameraImageTransformation.MirrorY);

        // Texture2D allows us to write directly to the raw texture data.
        // This lets us do the conversion in-place without making any copies.
        var rawTextureData = m_Texture.GetRawTextureData<byte>();
        try
        {
            image.Convert(conversionParams, new IntPtr(rawTextureData.GetUnsafePtr()), rawTextureData.Length);
        }
        finally
        {
            // We must dispose of the CameraImage after we're finished
            // with it to avoid leaking native resources.
            image.Dispose();
        }

        // Apply the updated texture data to our texture
        m_Texture.Apply();

        // Set the RawImage's texture so we can visualize it.
        m_RawImage.texture = m_Texture;
    }

    Texture2D m_Texture;
}
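
The comment block above mentions CameraImage.GetPlane without ever calling it. A minimal sketch of per-plane CPU access follows, assuming the same 1.x ARExtensions preview API (in particular that the plane type exposes data, rowStride, and pixelStride fields, as it did in this preview); ProcessLuminancePlane is a hypothetical helper, not part of the deleted sample:

// Hypothetical helper (not in the sample): read the luminance (Y) plane
// of a YUV camera image so it can be fed to a computer vision algorithm.
// Assumes the AR Foundation 1.x preview API used above.
void ProcessLuminancePlane(CameraImage image)
{
    // Plane 0 is the Y plane in the YUV formats these cameras produce.
    var plane = image.GetPlane(0);

    // plane.data is a NativeArray<byte>; rows are rowStride bytes apart
    // and horizontally adjacent pixels are pixelStride bytes apart.
    for (int row = 0; row < image.height; ++row)
    {
        int rowOffset = row * plane.rowStride;
        for (int col = 0; col < image.width; ++col)
        {
            byte luma = plane.data[rowOffset + col * plane.pixelStride];
            // ... consume luma here ...
        }
    }
}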

Assets/Scripts/TestCameraImage.cs.meta (11 deletions)


fileFormatVersion: 2
guid: c6e58200659977344ad301e3a0f6e8d7
MonoImporter:
  externalObjects: {}
  serializedVersion: 2
  defaultReferences: []
  executionOrder: 0
  icon: {instanceID: 0}
  userData:
  assetBundleName:
  assetBundleVariant:

Assets/Scripts/CameraConfigController.cs (89 deletions)


using System.Collections.Generic;
using UnityEngine;
using UnityEngine.UI;
using UnityEngine.XR.ARExtensions;
using UnityEngine.XR.ARFoundation;

/// <summary>
/// Populates a dropdown UI element with all the supported
/// camera configurations and changes the active camera
/// configuration when the user changes the selection in the dropdown.
///
/// The camera configuration affects the resolution (and possibly framerate)
/// of the hardware camera during an AR session.
/// </summary>
[RequireComponent(typeof(Dropdown))]
public class CameraConfigController : MonoBehaviour
{
    List<string> m_ConfigurationNames;

    Dropdown m_Dropdown;

    /// <summary>
    /// Callback invoked when <see cref="m_Dropdown"/> changes. This
    /// lets us change the camera configuration when the user changes
    /// the selection in the UI.
    /// </summary>
    /// <param name="dropdown">The <c>Dropdown</c> which changed.</param>
    public void OnValueChanged(Dropdown dropdown)
    {
        var cameraSubsystem = ARSubsystemManager.cameraSubsystem;
        if (cameraSubsystem == null)
            return;

        var configurationIndex = dropdown.value;

        // Check that the value makes sense
        var configurations = cameraSubsystem.Configurations();
        if (configurationIndex >= configurations.count)
            return;

        // Get that configuration by index
        var configuration = configurations[configurationIndex];

        // Make it the active one
        cameraSubsystem.SetCurrentConfiguration(configuration);
    }

    void Awake()
    {
        m_Dropdown = GetComponent<Dropdown>();
        m_Dropdown.ClearOptions();
        m_ConfigurationNames = new List<string>();
    }

    void PopulateDropdown()
    {
        var cameraSubsystem = ARSubsystemManager.cameraSubsystem;
        if (cameraSubsystem == null)
            return;

        // No configurations available probably means this feature
        // isn't supported by the current device.
        var configurations = cameraSubsystem.Configurations();
        if (configurations.count == 0)
            return;

        // There are two ways to enumerate the camera configurations.

        // 1. Use a foreach to iterate over all the available configurations
        foreach (var config in configurations)
            m_ConfigurationNames.Add(config.ToString());
        m_Dropdown.AddOptions(m_ConfigurationNames);

        // 2. Use a normal for...loop
        var currentConfig = cameraSubsystem.GetCurrentConfiguration();
        for (int i = 0; i < configurations.count; i++)
        {
            // Find the current configuration and update the dropdown value
            if (currentConfig == configurations[i])
                m_Dropdown.value = i;
        }
    }

    void Update()
    {
        if (m_ConfigurationNames.Count == 0)
            PopulateDropdown();
    }
}
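
OnValueChanged above is public so it can be bound to the Dropdown's On Value Changed event in the Inspector. A minimal sketch of the equivalent wiring from code (this Start method is hypothetical, not part of the deleted sample):

// Hypothetical wiring (not in the sample): subscribe OnValueChanged from
// code rather than the Inspector. Dropdown.onValueChanged passes the
// selected index; the handler re-reads it from dropdown.value.
void Start()
{
    var dropdown = GetComponent<Dropdown>();
    dropdown.onValueChanged.AddListener(index => OnValueChanged(dropdown));
}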

Assets/Scripts/CameraConfigController.cs.meta (11 deletions)


fileFormatVersion: 2
guid: 94b653a1faa6f4749ad83e77bcddfab4
MonoImporter:
  externalObjects: {}
  serializedVersion: 2
  defaultReferences: []
  executionOrder: 0
  icon: {instanceID: 0}
  userData:
  assetBundleName:
  assetBundleVariant:

Assets/Scripts/ARFaceArkitBlendShapeVisualizer.cs (200 deletions)


using System.Collections.Generic;
using UnityEngine;
using UnityEngine.Experimental.XR;
using UnityEngine.XR.ARFoundation;
#if UNITY_IOS
using UnityEngine.XR.ARKit;
#endif

/// <summary>
/// Populates the action unit coefficients for an <see cref="ARFace"/>.
/// </summary>
/// <remarks>
/// If this <c>GameObject</c> has a <c>SkinnedMeshRenderer</c>,
/// this component will generate the blend shape coefficients from the underlying <c>ARFace</c>.
/// </remarks>
[RequireComponent(typeof(ARFace))]
public class ARFaceArkitBlendShapeVisualizer : MonoBehaviour
{
    [SerializeField]
    float m_CoefficientScale = 100.0f;

    public float coefficientScale
    {
        get { return m_CoefficientScale; }
        set { m_CoefficientScale = value; }
    }

    [SerializeField]
    SkinnedMeshRenderer m_SkinnedMeshRenderer;

    public SkinnedMeshRenderer skinnedMeshRenderer
    {
        get
        {
            return m_SkinnedMeshRenderer;
        }
        set
        {
            m_SkinnedMeshRenderer = value;
            CreateFeatureBlendMapping();
        }
    }

#if UNITY_IOS
    static List<XRFaceArkitBlendShapeCoefficient> s_FaceArkitBlendShapeCoefficients;

    ARKitFaceSubsystem m_ArkitFaceSubsystem;

    Dictionary<XRArkitBlendShapeLocation, int> m_FaceArkitBlendShapeIndexMap;
#endif

    ARFace m_Face;

    void Awake()
    {
#if UNITY_IOS
        s_FaceArkitBlendShapeCoefficients = new List<XRFaceArkitBlendShapeCoefficient>();
#endif
        m_Face = GetComponent<ARFace>();
        CreateFeatureBlendMapping();
    }

    void CreateFeatureBlendMapping()
    {
        if (skinnedMeshRenderer == null || skinnedMeshRenderer.sharedMesh == null)
        {
            return;
        }
#if UNITY_IOS
        const string strPrefix = "blendShape2.";
        m_FaceArkitBlendShapeIndexMap = new Dictionary<XRArkitBlendShapeLocation, int>();
        m_FaceArkitBlendShapeIndexMap[XRArkitBlendShapeLocation.BrowDownLeft] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "browDown_L");
        m_FaceArkitBlendShapeIndexMap[XRArkitBlendShapeLocation.BrowDownRight] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "browDown_R");
        m_FaceArkitBlendShapeIndexMap[XRArkitBlendShapeLocation.BrowInnerUp] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "browInnerUp");
        m_FaceArkitBlendShapeIndexMap[XRArkitBlendShapeLocation.BrowOuterUpLeft] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "browOuterUp_L");
        m_FaceArkitBlendShapeIndexMap[XRArkitBlendShapeLocation.BrowOuterUpRight] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "browOuterUp_R");
        m_FaceArkitBlendShapeIndexMap[XRArkitBlendShapeLocation.CheekPuff] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "cheekPuff");
        m_FaceArkitBlendShapeIndexMap[XRArkitBlendShapeLocation.CheekSquintLeft] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "cheekSquint_L");
        m_FaceArkitBlendShapeIndexMap[XRArkitBlendShapeLocation.CheekSquintRight] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "cheekSquint_R");
        m_FaceArkitBlendShapeIndexMap[XRArkitBlendShapeLocation.EyeBlinkLeft] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "eyeBlink_L");
        m_FaceArkitBlendShapeIndexMap[XRArkitBlendShapeLocation.EyeBlinkRight] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "eyeBlink_R");
        m_FaceArkitBlendShapeIndexMap[XRArkitBlendShapeLocation.EyeLookDownLeft] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "eyeLookDown_L");
        m_FaceArkitBlendShapeIndexMap[XRArkitBlendShapeLocation.EyeLookDownRight] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "eyeLookDown_R");
        m_FaceArkitBlendShapeIndexMap[XRArkitBlendShapeLocation.EyeLookInLeft] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "eyeLookIn_L");
        m_FaceArkitBlendShapeIndexMap[XRArkitBlendShapeLocation.EyeLookInRight] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "eyeLookIn_R");
        m_FaceArkitBlendShapeIndexMap[XRArkitBlendShapeLocation.EyeLookOutLeft] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "eyeLookOut_L");
        m_FaceArkitBlendShapeIndexMap[XRArkitBlendShapeLocation.EyeLookOutRight] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "eyeLookOut_R");
        m_FaceArkitBlendShapeIndexMap[XRArkitBlendShapeLocation.EyeLookUpLeft] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "eyeLookUp_L");
        m_FaceArkitBlendShapeIndexMap[XRArkitBlendShapeLocation.EyeLookUpRight] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "eyeLookUp_R");
        m_FaceArkitBlendShapeIndexMap[XRArkitBlendShapeLocation.EyeSquintLeft] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "eyeSquint_L");
        m_FaceArkitBlendShapeIndexMap[XRArkitBlendShapeLocation.EyeSquintRight] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "eyeSquint_R");
        m_FaceArkitBlendShapeIndexMap[XRArkitBlendShapeLocation.EyeWideLeft] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "eyeWide_L");
        m_FaceArkitBlendShapeIndexMap[XRArkitBlendShapeLocation.EyeWideRight] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "eyeWide_R");
        m_FaceArkitBlendShapeIndexMap[XRArkitBlendShapeLocation.JawForward] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "jawForward");
        m_FaceArkitBlendShapeIndexMap[XRArkitBlendShapeLocation.JawLeft] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "jawLeft");
        m_FaceArkitBlendShapeIndexMap[XRArkitBlendShapeLocation.JawOpen] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "jawOpen");
        m_FaceArkitBlendShapeIndexMap[XRArkitBlendShapeLocation.JawRight] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "jawRight");
        m_FaceArkitBlendShapeIndexMap[XRArkitBlendShapeLocation.MouthClose] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "mouthClose");
        m_FaceArkitBlendShapeIndexMap[XRArkitBlendShapeLocation.MouthDimpleLeft] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "mouthDimple_L");
        m_FaceArkitBlendShapeIndexMap[XRArkitBlendShapeLocation.MouthDimpleRight] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "mouthDimple_R");
        m_FaceArkitBlendShapeIndexMap[XRArkitBlendShapeLocation.MouthFrownLeft] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "mouthFrown_L");
        m_FaceArkitBlendShapeIndexMap[XRArkitBlendShapeLocation.MouthFrownRight] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "mouthFrown_R");
        m_FaceArkitBlendShapeIndexMap[XRArkitBlendShapeLocation.MouthFunnel] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "mouthFunnel");
        m_FaceArkitBlendShapeIndexMap[XRArkitBlendShapeLocation.MouthLeft] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "mouthLeft");
        m_FaceArkitBlendShapeIndexMap[XRArkitBlendShapeLocation.MouthLowerDownLeft] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "mouthLowerDown_L");
        m_FaceArkitBlendShapeIndexMap[XRArkitBlendShapeLocation.MouthLowerDownRight] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "mouthLowerDown_R");
        m_FaceArkitBlendShapeIndexMap[XRArkitBlendShapeLocation.MouthPressLeft] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "mouthPress_L");
        m_FaceArkitBlendShapeIndexMap[XRArkitBlendShapeLocation.MouthPressRight] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "mouthPress_R");
        m_FaceArkitBlendShapeIndexMap[XRArkitBlendShapeLocation.MouthPucker] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "mouthPucker");
        m_FaceArkitBlendShapeIndexMap[XRArkitBlendShapeLocation.MouthRight] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "mouthRight");
        m_FaceArkitBlendShapeIndexMap[XRArkitBlendShapeLocation.MouthRollLower] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "mouthRollLower");
        m_FaceArkitBlendShapeIndexMap[XRArkitBlendShapeLocation.MouthRollUpper] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "mouthRollUpper");
        m_FaceArkitBlendShapeIndexMap[XRArkitBlendShapeLocation.MouthShrugLower] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "mouthShrugLower");
        m_FaceArkitBlendShapeIndexMap[XRArkitBlendShapeLocation.MouthShrugUpper] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "mouthShrugUpper");
        m_FaceArkitBlendShapeIndexMap[XRArkitBlendShapeLocation.MouthSmileLeft] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "mouthSmile_L");
        m_FaceArkitBlendShapeIndexMap[XRArkitBlendShapeLocation.MouthSmileRight] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "mouthSmile_R");
        m_FaceArkitBlendShapeIndexMap[XRArkitBlendShapeLocation.MouthStretchLeft] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "mouthStretch_L");
        m_FaceArkitBlendShapeIndexMap[XRArkitBlendShapeLocation.MouthStretchRight] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "mouthStretch_R");
        m_FaceArkitBlendShapeIndexMap[XRArkitBlendShapeLocation.MouthUpperUpLeft] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "mouthUpperUp_L");
        m_FaceArkitBlendShapeIndexMap[XRArkitBlendShapeLocation.MouthUpperUpRight] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "mouthUpperUp_R");
        m_FaceArkitBlendShapeIndexMap[XRArkitBlendShapeLocation.NoseSneerLeft] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "noseSneer_L");
        m_FaceArkitBlendShapeIndexMap[XRArkitBlendShapeLocation.NoseSneerRight] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "noseSneer_R");
        m_FaceArkitBlendShapeIndexMap[XRArkitBlendShapeLocation.TongueOut] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + "tongueOut");
#endif
    }

    void SetVisible(bool visible)
    {
        if (skinnedMeshRenderer == null)
            return;

        skinnedMeshRenderer.enabled = visible;
    }

    void UpdateVisibility()
    {
        var visible = enabled &&
            (m_Face.trackingState != TrackingState.Unavailable) &&
            (ARSubsystemManager.systemState > ARSystemState.Ready);
        SetVisible(visible);
    }

    void OnEnable()
    {
#if UNITY_IOS
        // Use 'as' so a non-ARKit face subsystem yields null here
        // instead of throwing an InvalidCastException.
        m_ArkitFaceSubsystem = ARSubsystemManager.faceSubsystem as ARKitFaceSubsystem;
        if (m_ArkitFaceSubsystem == null)
            return;

        m_Face.updated += OnUpdated;
        ARSubsystemManager.systemStateChanged += OnSystemStateChanged;
        UpdateVisibility();
#endif
    }

    void OnDisable()
    {
        m_Face.updated -= OnUpdated;
        ARSubsystemManager.systemStateChanged -= OnSystemStateChanged;
    }

    void OnSystemStateChanged(ARSystemStateChangedEventArgs eventArgs)
    {
        UpdateVisibility();
    }

    void OnUpdated(ARFace face)
    {
        UpdateVisibility();
        UpdateFaceFeatures();
    }

    void UpdateFaceFeatures()
    {
        if (skinnedMeshRenderer == null || !skinnedMeshRenderer.enabled || skinnedMeshRenderer.sharedMesh == null)
        {
            return;
        }
#if UNITY_IOS
        if (!m_ArkitFaceSubsystem.TryGetFaceARKitBlendShapeCoefficients(m_Face.xrFace.trackableId, s_FaceArkitBlendShapeCoefficients))
            return;

        foreach (var xrFaceFeatureCoefficient in s_FaceArkitBlendShapeCoefficients)
        {
            int mappedBlendShapeIndex;
            if (m_FaceArkitBlendShapeIndexMap.TryGetValue(xrFaceFeatureCoefficient.arkitBlendShapeLocation, out mappedBlendShapeIndex))
            {
                if (mappedBlendShapeIndex >= 0)
                {
                    skinnedMeshRenderer.SetBlendShapeWeight(mappedBlendShapeIndex, xrFaceFeatureCoefficient.coefficient * coefficientScale);
                }
            }
        }
#endif
    }
}
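
The long run of map assignments in CreateFeatureBlendMapping follows a single pattern (blend shape location to "blendShape2." plus a camelCase mesh name), so it could equally be driven by a lookup table. A minimal sketch under that assumption; s_MeshNames and CreateFeatureBlendMappingFromTable are hypothetical names, not part of the deleted sample, and the elided entries repeat the pattern shown in the code above:

// Hypothetical refactor (not in the sample): build the blend shape index
// map from a location -> mesh-name table instead of 52 assignments.
static readonly Dictionary<XRArkitBlendShapeLocation, string> s_MeshNames =
    new Dictionary<XRArkitBlendShapeLocation, string>
{
    { XRArkitBlendShapeLocation.BrowDownLeft,  "browDown_L" },
    { XRArkitBlendShapeLocation.BrowDownRight, "browDown_R" },
    // ... remaining entries follow the same pattern as the code above ...
    { XRArkitBlendShapeLocation.TongueOut,     "tongueOut" },
};

void CreateFeatureBlendMappingFromTable()
{
    const string strPrefix = "blendShape2.";
    m_FaceArkitBlendShapeIndexMap = new Dictionary<XRArkitBlendShapeLocation, int>();
    foreach (var pair in s_MeshNames)
        m_FaceArkitBlendShapeIndexMap[pair.Key] =
            skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(strPrefix + pair.Value);
}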

Assets/Scripts/ARFaceArkitBlendShapeVisualizer.cs.meta (11 deletions)


fileFormatVersion: 2
guid: 77136493c3caf4805ac5a602f75072e0
MonoImporter:
  externalObjects: {}
  serializedVersion: 2
  defaultReferences: []
  executionOrder: 0
  icon: {instanceID: 0}
  userData:
  assetBundleName:
  assetBundleVariant: