浏览代码

Resurrecting the camera image scene with the new camera image functionality.

/1.5-preview
Todd Stinson 6 年前
当前提交
6e44ca68
共有 11 个文件被更改,包括 1332 次插入和 8 次删除
  1. 1
      .gitignore
  2. 5
      Packages/manifest.json
  3. 44
      ProjectSettings/EditorBuildSettings.asset
  4. 1
      ProjectSettings/ProjectSettings.asset
  5. 4
      ProjectSettings/ProjectVersion.txt
  6. 1001
      Assets/Scenes/CameraImage.unity
  7. 7
      Assets/Scenes/CameraImage.unity.meta
  8. 114
      Assets/Scripts/CameraConfigController.cs
  9. 11
      Assets/Scripts/CameraConfigController.cs.meta
  10. 141
      Assets/Scripts/TestCameraImage.cs
  11. 11
      Assets/Scripts/TestCameraImage.cs.meta

1
.gitignore


[Ll]ibrary/
[Ll]ogs/
[Tt]emp/
[Oo]bj/
[Bb]uild/

5
Packages/manifest.json


{
"dependencies": {
"com.unity.2d.sprite": "1.0.0",
"com.unity.ads": "2.3.1",
"com.unity.ads": "3.0.3",
"com.unity.analytics": "3.2.2",
"com.unity.collab-proxy": "1.2.16",
"com.unity.multiplayer-hlapi": "1.0.1",

"com.unity.xr.arcore": "2.0.2",
"com.unity.xr.arfoundation": "2.0.2",
"com.unity.xr.arkit": "2.0.1",
"com.unity.xr.legacyinputhelpers": "1.0.0",
"com.unity.xr.facesubsystem": "1.0.0-preview.5",
"com.unity.xr.legacyinputhelpers": "2.0.2",
"com.unity.modules.ai": "1.0.0",
"com.unity.modules.androidjni": "1.0.0",
"com.unity.modules.animation": "1.0.0",

44
ProjectSettings/EditorBuildSettings.asset


m_ObjectHideFlags: 0
serializedVersion: 2
m_Scenes:
- enabled: 1
path: Assets/Scenes/SampleScene.unity
guid: 99c9720ab356a0642a771bea13969a05
- enabled: 0
path: Assets/Scenes/ARWorldMap.unity
guid: 8d8902fd0b55449dcbf3572b49d6b1b1
- enabled: 0
path: Assets/Scenes/CameraImage.unity
guid: e769916924fd3814ab1d9474de816b22
path: Assets/Scenes/FeatheredPlaneScene.unity
path: Assets/Scenes/EnvironmentProbes.unity
guid: d568d188cadf141e38769dbabd3fe385
- enabled: 0
path: Assets/Scenes/FaceTracking/ARCoreFaceRegions.unity
guid: d1739ac106ca65e4b83528a440314bd9
- enabled: 0
path: Assets/Scenes/FaceTracking/ARKitFaceBlendShapes.unity
guid: e9280441e79e84bdbbf6b724880ae2d9
- enabled: 0
path: Assets/Scenes/FaceTracking/FaceMesh.unity
guid: 047514b96a5d44d6bb0acbfa06ace943
- enabled: 0
path: Assets/Scenes/FaceTracking/FacePose.unity
guid: d8b65509a10894e1dbb93c799ececfd9
- enabled: 0
path: Assets/Scenes/ImageTracking/ImageTracking.unity
guid: e0b018a3c3b5cfd4c8e316706ec0a18c
- enabled: 0
path: Assets/Scenes/LightEstimation.unity
guid: 2f0cbc82480584c258f99f0d9e4ba302
- enabled: 0
path: Assets/Scenes/Plane Detection/FeatheredPlanes.unity
- enabled: 0
path: Assets/Scenes/Plane Detection/TogglePlaneDetection.unity
guid: eb4bbb3a5e81acc41a903b1c5f1c827b
- enabled: 0
path: Assets/Scenes/Scale.unity
guid: 13ac91f677dc22f4ca320ef8ae727b68
- enabled: 0
path: Assets/Scenes/SimpleAR.unity
guid: 99c9720ab356a0642a771bea13969a05
- enabled: 0
path: Assets/Scenes/UX/SampleUXScene.unity
guid: 3b42d67a72bbc4004bf713db0e312326
m_configObjects:
com.unity.xr.arkit.PlayerSettings: {fileID: 11400000, guid: 2d5fb8e61571b4e3897f221189089fd6,
type: 2}

1
ProjectSettings/ProjectSettings.asset


oculus:
sharedDepthBuffer: 0
dashSupport: 0
lowOverheadMode: 0
enable360StereoCapture: 0
isWsaHolographicRemotingEnabled: 0
protectGraphicsMemory: 0

4
ProjectSettings/ProjectVersion.txt


m_EditorVersion: 2019.2.0a8
m_EditorVersionWithRevision: 2019.2.0a8 (18a4512f903f)
m_EditorVersion: 2019.2.0a11
m_EditorVersionWithRevision: 2019.2.0a11 (50bfd5f1a2f4)

1001
Assets/Scenes/CameraImage.unity
文件差异内容过多而无法显示
查看文件

7
Assets/Scenes/CameraImage.unity.meta


fileFormatVersion: 2
guid: e769916924fd3814ab1d9474de816b22
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

114
Assets/Scripts/CameraConfigController.cs


using System.Collections.Generic;
using Unity.Collections;
using UnityEngine;
using UnityEngine.UI;
using UnityEngine.XR.ARFoundation;
/// <summary>
/// Populates a drop down UI element with all the supported
/// camera configurations and changes the active camera
/// configuration when the user changes the selection in the dropdown.
///
/// The camera configuration affects the resolution (and possibly framerate)
/// of the hardware camera during an AR session.
/// </summary>
[RequireComponent(typeof(Dropdown))]
public class CameraConfigController : MonoBehaviour
{
    // Display names for each configuration, in the same order as the
    // dropdown options. Empty until PopulateDropdown succeeds.
    List<string> m_ConfigurationNames;

    Dropdown m_Dropdown;

    [SerializeField]
    [Tooltip("The ARCameraManager which will produce frame events.")]
    ARCameraManager m_CameraManager;

    /// <summary>
    /// Get or set the <c>ARCameraManager</c>.
    /// </summary>
    public ARCameraManager cameraManager
    {
        get { return m_CameraManager; }
        set { m_CameraManager = value; }
    }

    /// <summary>
    /// Callback invoked when <see cref="m_Dropdown"/> changes. This
    /// lets us change the camera configuration when the user changes
    /// the selection in the UI.
    /// </summary>
    /// <param name="dropdown">The <c>Dropdown</c> which changed.</param>
    public void OnValueChanged(Dropdown dropdown)
    {
        if ((cameraManager == null) || (cameraManager.subsystem == null) || !cameraManager.subsystem.running)
        {
            return;
        }

        var configurationIndex = dropdown.value;

        // Check that the value makes sense
        using (var configurations = cameraManager.GetConfigurations(Allocator.Temp))
        {
            // Guard IsCreated as well (matching PopulateDropdown): indexing
            // an uncreated NativeArray would throw.
            if (!configurations.IsCreated || (configurationIndex >= configurations.Length))
            {
                return;
            }

            // Get that configuration by index
            var configuration = configurations[configurationIndex];

            // Make it the active one
            cameraManager.currentConfiguration = configuration;
        }
    }

    void Awake()
    {
        m_Dropdown = GetComponent<Dropdown>();
        m_Dropdown.ClearOptions();
        m_ConfigurationNames = new List<string>();
    }

    // Fills the dropdown with all supported camera configurations and
    // selects the currently-active one. No-ops until the camera subsystem
    // is running and reports at least one configuration.
    void PopulateDropdown()
    {
        if ((cameraManager == null) || (cameraManager.subsystem == null) || !cameraManager.subsystem.running)
            return;

        // No configurations available probably means this feature
        // isn't supported by the current device.
        using (var configurations = cameraManager.GetConfigurations(Allocator.Temp))
        {
            if (!configurations.IsCreated || (configurations.Length <= 0))
            {
                return;
            }

            // There are two ways to enumerate the camera configurations.

            // 1. Use a foreach to iterate over all the available configurations
            foreach (var config in configurations)
            {
                m_ConfigurationNames.Add(config.ToString());
            }
            m_Dropdown.AddOptions(m_ConfigurationNames);

            // 2. Use a normal for...loop
            var currentConfig = cameraManager.currentConfiguration;
            for (int i = 0; i < configurations.Length; ++i)
            {
                // Find the current configuration and update the drop down value
                if (currentConfig == configurations[i])
                {
                    m_Dropdown.value = i;
                }
            }
        }
    }

    void Update()
    {
        // Poll until the subsystem comes up and the dropdown can be filled;
        // once m_ConfigurationNames is non-empty this becomes a no-op check.
        if (m_ConfigurationNames.Count == 0)
            PopulateDropdown();
    }
}

11
Assets/Scripts/CameraConfigController.cs.meta


fileFormatVersion: 2
guid: 94b653a1faa6f4749ad83e77bcddfab4
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

141
Assets/Scripts/TestCameraImage.cs


using System;
using Unity.Collections.LowLevel.Unsafe;
using UnityEngine;
using UnityEngine.UI;
using UnityEngine.XR.ARFoundation;
using UnityEngine.XR.ARSubsystems;
/// <summary>
/// This component tests getting the latest camera image
/// and converting it to RGBA format. If successful,
/// it displays the image on the screen as a RawImage
/// and also displays information about the image.
///
/// This is useful for computer vision applications where
/// you need to access the raw pixels from camera image
/// on the CPU.
///
/// This is different from the ARCameraBackground component, which
/// efficiently displays the camera image on the screen. If you
/// just want to blit the camera texture to the screen, use
/// the ARCameraBackground, or use Graphics.Blit to create
/// a GPU-friendly RenderTexture.
///
/// In this example, we get the camera image data on the CPU,
/// convert it to an RGBA format, then display it on the screen
/// as a RawImage texture to demonstrate it is working.
/// This is done as an example; do not use this technique simply
/// to render the camera image on screen.
/// </summary>
public class TestCameraImage : MonoBehaviour
{
    [SerializeField]
    [Tooltip("The ARCameraManager which will produce frame events.")]
    ARCameraManager m_CameraManager;

    /// <summary>
    /// Get or set the <c>ARCameraManager</c>.
    /// </summary>
    public ARCameraManager cameraManager
    {
        get { return m_CameraManager; }
        set { m_CameraManager = value; }
    }

    [SerializeField]
    RawImage m_RawImage;

    /// <summary>
    /// The UI RawImage used to display the image on screen.
    /// </summary>
    public RawImage rawImage
    {
        get { return m_RawImage; }
        set { m_RawImage = value; }
    }

    [SerializeField]
    Text m_ImageInfo;

    /// <summary>
    /// The UI Text used to display information about the image on screen.
    /// </summary>
    public Text imageInfo
    {
        get { return m_ImageInfo; }
        set { m_ImageInfo = value; }
    }

    void OnEnable()
    {
        if (m_CameraManager != null)
        {
            m_CameraManager.frameReceived += OnCameraFrameReceived;
        }
    }

    void OnDisable()
    {
        // Unsubscribe to avoid the manager holding a reference to a
        // disabled component.
        if (m_CameraManager != null)
        {
            m_CameraManager.frameReceived -= OnCameraFrameReceived;
        }
    }

    unsafe void OnCameraFrameReceived(ARCameraFrameEventArgs eventArgs)
    {
        // Attempt to get the latest camera image. If this method succeeds,
        // it acquires a native resource that must be disposed (see below).
        XRCameraImage image;
        if (!cameraManager.TryGetLatestImage(out image))
        {
            return;
        }

        // The try/finally covers everything that happens while we hold the
        // image: previously only the Convert call was guarded, so an
        // exception in string.Format, Texture2D allocation, or
        // GetRawTextureData would leak the native resource.
        try
        {
            // Display some information about the camera image
            m_ImageInfo.text = string.Format(
                "Image info:\n\twidth: {0}\n\theight: {1}\n\tplaneCount: {2}\n\ttimestamp: {3}\n\tformat: {4}",
                image.width, image.height, image.planeCount, image.timestamp, image.format);

            // Once we have a valid XRCameraImage, we can access the individual image "planes"
            // (the separate channels in the image). XRCameraImage.GetPlane provides
            // low-overhead access to this data. This could then be passed to a
            // computer vision algorithm. Here, we will convert the camera image
            // to an RGBA texture and draw it on the screen.

            // Choose an RGBA format.
            // See XRCameraImage.FormatSupported for a complete list of supported formats.
            var format = TextureFormat.RGBA32;

            // Reallocate the texture only when the incoming image size changes.
            if (m_Texture == null || m_Texture.width != image.width || m_Texture.height != image.height)
            {
                m_Texture = new Texture2D(image.width, image.height, format, false);
            }

            // Convert the image to format, flipping the image across the Y axis.
            // We can also get a sub rectangle, but we'll get the full image here.
            var conversionParams = new XRCameraImageConversionParams(image, format, CameraImageTransformation.MirrorY);

            // Texture2D allows us write directly to the raw texture data
            // This allows us to do the conversion in-place without making any copies.
            var rawTextureData = m_Texture.GetRawTextureData<byte>();
            image.Convert(conversionParams, new IntPtr(rawTextureData.GetUnsafePtr()), rawTextureData.Length);
        }
        finally
        {
            // We must dispose of the XRCameraImage after we're finished
            // with it to avoid leaking native resources.
            image.Dispose();
        }

        // Apply the updated texture data to our texture
        m_Texture.Apply();

        // Set the RawImage's texture so we can visualize it.
        m_RawImage.texture = m_Texture;
    }

    // Reused conversion target; lazily (re)created to match the image size.
    Texture2D m_Texture;
}

11
Assets/Scripts/TestCameraImage.cs.meta


fileFormatVersion: 2
guid: c6e58200659977344ad301e3a0f6e8d7
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:
正在加载...
取消
保存