
Merge pull request #38 from unity/fix_multi_cam

Fix multi cam
/0.9.0.preview.1_staging
GitHub Enterprise committed 3 years ago
Current commit: 43db91ba
16 files changed, with 2,172 additions and 6 deletions
  1. TestProjects/PerceptionURP/Packages/packages-lock.json (7 changes)
  2. com.unity.perception/CHANGELOG.md (2 changes)
  3. com.unity.perception/Runtime/GroundTruth/Labelers/InstanceSegmentationLabeler.cs (6 changes)
  4. com.unity.perception/Runtime/GroundTruth/RenderPasses/CrossPipelinePasses/SemanticSegmentationCrossPipelinePass.cs (6 changes)
  5. com.unity.perception/Runtime/GroundTruth/RenderPasses/HdrpPasses/InstanceSegmentationPass.cs (3 changes)
  6. com.unity.perception/Runtime/GroundTruth/RenderPasses/HdrpPasses/SemanticSegmentationPass.cs (3 changes)
  7. com.unity.perception/Runtime/GroundTruth/RenderPasses/UrpPasses/InstanceSegmentationUrpPass.cs (1 change)
  8. com.unity.perception/Runtime/GroundTruth/RenderPasses/UrpPasses/SemanticSegmentationUrpPass.cs (1 change)
  9. com.unity.perception/Tests/Runtime/GroundTruthTests/SegmentationGroundTruthTests.cs (54 changes)
  10. TestProjects/PerceptionHDRP/Assets/Scenes/MultiCameraExample.unity (1001 changes)
  11. TestProjects/PerceptionHDRP/Assets/Scenes/MultiCameraExample.unity.meta (7 changes)
  12. TestProjects/PerceptionURP/Assets/Scenes/MultiCamera.unity (1001 changes)
  13. TestProjects/PerceptionURP/Assets/Scenes/MultiCamera.unity.meta (7 changes)
  14. TestProjects/PerceptionURP/Assets/Scenes/MultiCameraSettings.lighting (63 changes)
  15. TestProjects/PerceptionURP/Assets/Scenes/MultiCameraSettings.lighting.meta (8 changes)
  16. TestProjects/PerceptionURP/Assets/Scenes/SampleScene.meta (8 changes)

TestProjects/PerceptionURP/Packages/packages-lock.json (7 changes)


},
"url": "https://packages.unity.com"
},
"com.unity.scripting.python": {
"version": "4.0.0-exp.5",
"depth": 0,
"source": "registry",
"dependencies": {},
"url": "https://packages.unity.com"
},
"com.unity.searcher": {
"version": "4.3.2",
"depth": 2,

com.unity.perception/CHANGELOG.md (2 changes)


Users can now delay the current iteration for one frame from within randomizers by calling the `DelayIteration` function of the active scenario.
Added support for multiple cameras capturing simultaneously (visualization not yet supported)
### Changed
Changed the JSON serialization key of Normal Sampler's standard deviation property from "standardDeviation" to "stddev". Scenario JSON configurations that were generated using previous versions will need to be manually updated to reflect this change.
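
For context on the new `DelayIteration` entry above: a minimal sketch of calling it from a randomizer. Only the `DelayIteration` function on the active scenario is named by the changelog; the `ScenarioBase.activeScenario` accessor, the `AddRandomizerMenu` attribute, and the `OnIterationStart` override are assumptions about the randomization API and may differ between package versions.

using System;
using UnityEngine.Perception.Randomization.Randomizers;
using UnityEngine.Perception.Randomization.Scenarios;

[Serializable]
[AddRandomizerMenu("Perception/Delay Example Randomizer")]
public class DelayExampleRandomizer : Randomizer
{
    protected override void OnIterationStart()
    {
        // Illustrative sketch: postpone the current iteration by one frame,
        // e.g. to let physics or asset loading settle before captures run.
        // DelayIteration is the function named in the changelog entry above;
        // reaching it through ScenarioBase.activeScenario is an assumption.
        ScenarioBase.activeScenario.DelayIteration();
    }
}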

com.unity.perception/Runtime/GroundTruth/Labelers/InstanceSegmentationLabeler.cs (6 changes)


/// <inheritdoc/>
protected override bool supportsVisualization => true;
- static readonly string k_Directory = "InstanceSegmentation" + Guid.NewGuid().ToString();
+ string directory = "InstanceSegmentation" + Guid.NewGuid().ToString();
const string k_FilePrefix = "Instance_";
/// <summary>

{
m_CurrentTexture = renderTexture;
- m_InstancePath = $"{k_Directory}/{k_FilePrefix}{frameCount}.png";
- var localPath = $"{Manager.Instance.GetDirectoryFor(k_Directory)}/{k_FilePrefix}{frameCount}.png";
+ m_InstancePath = $"{directory}/{k_FilePrefix}{frameCount}.png";
+ var localPath = $"{Manager.Instance.GetDirectoryFor(directory)}/{k_FilePrefix}{frameCount}.png";
var colors = new NativeArray<Color32>(data, Allocator.Persistent);
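
The hunk above is the core of the multi-camera fix for this labeler: the output directory changes from a static field shared by every labeler to a per-instance field, so each PerceptionCamera's InstanceSegmentationLabeler writes its PNGs into its own folder. A simplified sketch of the difference, with illustrative class names (not the package source):

using System;

// Before: one directory for ALL labeler instances. Two cameras capturing the
// same frame would both write "Instance_<frame>.png" into the same folder and
// overwrite each other's output.
class SharedDirectoryLabeler
{
    static readonly string k_Directory = "InstanceSegmentation" + Guid.NewGuid().ToString();
}

// After: one directory PER labeler instance, so each camera's labeler gets a
// unique "InstanceSegmentation<guid>" folder, as in the diff above.
class PerInstanceDirectoryLabeler
{
    string directory = "InstanceSegmentation" + Guid.NewGuid().ToString();
}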

com.unity.perception/Runtime/GroundTruth/RenderPasses/CrossPipelinePasses/SemanticSegmentationCrossPipelinePass.cs (6 changes)


const string k_ShaderName = "Perception/SemanticSegmentation";
static readonly int k_LabelingId = Shader.PropertyToID("LabelingId");
- static int s_LastFrameExecuted = -1;
+ int lastFrameExecuted = -1;
SemanticSegmentationLabelConfig m_LabelConfig;

protected override void ExecutePass(
ScriptableRenderContext renderContext, CommandBuffer cmd, Camera camera, CullingResults cullingResult)
{
- if (s_LastFrameExecuted == Time.frameCount)
+ if (lastFrameExecuted == Time.frameCount)
- s_LastFrameExecuted = Time.frameCount;
+ lastFrameExecuted = Time.frameCount;
var renderList = CreateRendererListDesc(camera, cullingResult, "FirstPass", 0, m_OverrideMaterial, -1);
cmd.ClearRenderTarget(true, true, m_LabelConfig.skyColor);
DrawRendererList(renderContext, cmd, RendererList.Create(renderList));
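
The change above removes another piece of cross-camera shared state: with a static s_LastFrameExecuted, the first camera's pass to run in a frame made every other camera's pass skip that frame, so only one camera ever produced segmentation output. A hypothetical sketch of the per-instance guard pattern (names are illustrative, not the package source):

class OncePerFrameCameraPass
{
    // Per-instance (i.e. per-camera) record of the last frame this pass ran.
    // Making this field static would let one camera's pass block all others.
    int m_LastFrameExecuted = -1;

    public void Execute(int frameCount)
    {
        if (m_LastFrameExecuted == frameCount)
            return; // this camera's pass already ran this frame
        m_LastFrameExecuted = frameCount;
        // ... issue the segmentation draw calls here ...
    }
}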

com.unity.perception/Runtime/GroundTruth/RenderPasses/HdrpPasses/InstanceSegmentationPass.cs (3 changes)


protected override void Execute(ScriptableRenderContext renderContext, CommandBuffer cmd, HDCamera hdCamera, CullingResults cullingResult)
{
#endif
+ if (targetCamera != hdCamera.camera)
+ return;
CoreUtils.SetRenderTarget(cmd, targetTexture, ClearFlag.All);
m_InstanceSegmentationCrossPipelinePass.Execute(renderContext, cmd, hdCamera.camera, cullingResult);
}

com.unity.perception/Runtime/GroundTruth/RenderPasses/HdrpPasses/SemanticSegmentationPass.cs (3 changes)


protected override void Execute(ScriptableRenderContext renderContext, CommandBuffer cmd, HDCamera hdCamera, CullingResults cullingResult)
{
#endif
+ if (targetCamera != hdCamera.camera)
+ return;
CoreUtils.SetRenderTarget(cmd, targetTexture);
m_SemanticSegmentationCrossPipelinePass.Execute(renderContext, cmd, hdCamera.camera, cullingResult);
}
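
Both HDRP passes above gain the same guard: HDRP executes a custom pass for every camera that renders it, so each Perception pass now returns early unless the camera currently being rendered is the camera the pass was created for. A simplified sketch of the pattern (class and member names are illustrative, not the package source):

using UnityEngine;
using UnityEngine.Rendering;

class TargetCameraFilteredPass
{
    Camera targetCamera;         // the camera this ground-truth pass belongs to
    RenderTexture targetTexture; // the output texture for that camera

    public void Execute(CommandBuffer cmd, Camera renderingCamera)
    {
        // The render pipeline invokes this for every camera; without the
        // check below, every camera would overwrite targetTexture.
        if (targetCamera != renderingCamera)
            return;

        CoreUtils.SetRenderTarget(cmd, targetTexture, ClearFlag.All);
        // ... draw the labeled renderers into targetTexture ...
    }
}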

com.unity.perception/Runtime/GroundTruth/RenderPasses/UrpPasses/InstanceSegmentationUrpPass.cs (1 change)


{
m_InstanceSegmentationPass = new InstanceSegmentationCrossPipelinePass(camera);
ConfigureTarget(targetTexture, targetTexture.depthBuffer);
ConfigureClear(ClearFlag.None, Color.black);
m_InstanceSegmentationPass.Setup();
}

com.unity.perception/Runtime/GroundTruth/RenderPasses/UrpPasses/SemanticSegmentationUrpPass.cs (1 change)


{
m_SemanticSegmentationCrossPipelinePass = new SemanticSegmentationCrossPipelinePass(camera, labelConfig);
ConfigureTarget(targetTexture, targetTexture.depthBuffer);
ConfigureClear(ClearFlag.None, Color.black);
m_SemanticSegmentationCrossPipelinePass.Setup();
}

com.unity.perception/Tests/Runtime/GroundTruthTests/SegmentationGroundTruthTests.cs (54 changes)


    Assert.AreEqual(1, timesSegmentationImageReceived);
}

[UnityTest]
public IEnumerator SegmentationPass_WithMultiplePerceptionCameras_ProducesCorrectValues(
    [Values(SegmentationKind.Instance, SegmentationKind.Semantic)] SegmentationKind segmentationKind)
{
    int timesSegmentationImageReceived = 0;
    var color1 = segmentationKind == SegmentationKind.Instance ?
        k_InstanceSegmentationPixelValue :
        k_SemanticPixelValue;
    var color2 = segmentationKind == SegmentationKind.Instance ?
        new Color32(0, 74, Byte.MaxValue, Byte.MaxValue) :
        new Color32(0, 0, 0, Byte.MaxValue);

    void OnCam1SegmentationImageReceived(NativeArray<Color32> data)
    {
        CollectionAssert.AreEqual(Enumerable.Repeat(color1, data.Length), data);
        timesSegmentationImageReceived++;
    }

    void OnCam2SegmentationImageReceived(NativeArray<Color32> data)
    {
        Assert.AreEqual(color1, data[data.Length / 4]);
        Assert.AreEqual(color2, data[data.Length * 3 / 4]);
        timesSegmentationImageReceived++;
    }

    GameObject cameraObject;
    GameObject cameraObject2;
    if (segmentationKind == SegmentationKind.Instance)
    {
        cameraObject = SetupCameraInstanceSegmentation((frame, data, renderTexture) => OnCam1SegmentationImageReceived(data));
        cameraObject2 = SetupCameraInstanceSegmentation((frame, data, renderTexture) => OnCam2SegmentationImageReceived(data));
    }
    else
    {
        cameraObject = SetupCameraSemanticSegmentation((args) => OnCam1SegmentationImageReceived(args.data), false);
        cameraObject2 = SetupCameraSemanticSegmentation((args) => OnCam2SegmentationImageReceived(args.data), false);
    }

    //position camera to point straight at the top edge of plane1, such that plane1 takes up the bottom half of
    //the image and plane2 takes up the top half
    cameraObject2.transform.localPosition = Vector3.up * 2.5f;
    var plane1 = TestHelper.CreateLabeledPlane(.5f);
    var plane2 = TestHelper.CreateLabeledPlane(.5f, "label2");
    plane2.transform.localPosition = plane2.transform.localPosition + Vector3.up * 5f;
    AddTestObjectForCleanup(plane1);
    AddTestObjectForCleanup(plane2);
    yield return null;

    //destroy the object to force all pending segmented image readbacks to finish and events to be fired.
    DestroyTestObject(cameraObject);
    DestroyTestObject(cameraObject2);
    Assert.AreEqual(2, timesSegmentationImageReceived);
}

[UnityTest]
public IEnumerator SegmentationPassProducesCorrectValuesEachFrame(
    [Values(SegmentationKind.Instance, SegmentationKind.Semantic)] SegmentationKind segmentationKind)

TestProjects/PerceptionHDRP/Assets/Scenes/MultiCameraExample.unity (1001 changes)
File diff is too large to display.

TestProjects/PerceptionHDRP/Assets/Scenes/MultiCameraExample.unity.meta (7 changes)


fileFormatVersion: 2
guid: c422f81d640566845a68cab39c36e776
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

TestProjects/PerceptionURP/Assets/Scenes/MultiCamera.unity (1001 changes)
File diff is too large to display.

TestProjects/PerceptionURP/Assets/Scenes/MultiCamera.unity.meta (7 changes)


fileFormatVersion: 2
guid: dedd985999ab74747b95df66909e8029
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

TestProjects/PerceptionURP/Assets/Scenes/MultiCameraSettings.lighting (63 changes)


%YAML 1.1
%TAG !u! tag:unity3d.com,2011:
--- !u!850595691 &4890085278179872738
LightingSettings:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_Name: MultiCameraSettings
serializedVersion: 3
m_GIWorkflowMode: 1
m_EnableBakedLightmaps: 0
m_EnableRealtimeLightmaps: 1
m_RealtimeEnvironmentLighting: 1
m_BounceScale: 1
m_AlbedoBoost: 1
m_IndirectOutputScale: 1
m_UsingShadowmask: 1
m_BakeBackend: 1
m_LightmapMaxSize: 1024
m_BakeResolution: 40
m_Padding: 2
m_TextureCompression: 1
m_AO: 0
m_AOMaxDistance: 1
m_CompAOExponent: 1
m_CompAOExponentDirect: 0
m_ExtractAO: 0
m_MixedBakeMode: 2
m_LightmapsBakeMode: 1
m_FilterMode: 1
m_LightmapParameters: {fileID: 15204, guid: 0000000000000000f000000000000000, type: 0}
m_ExportTrainingData: 0
m_TrainingDataDestination: TrainingData
m_RealtimeResolution: 2
m_ForceWhiteAlbedo: 0
m_ForceUpdates: 0
m_FinalGather: 0
m_FinalGatherRayCount: 256
m_FinalGatherFiltering: 1
m_PVRCulling: 1
m_PVRSampling: 1
m_PVRDirectSampleCount: 32
m_PVRSampleCount: 500
m_PVREnvironmentSampleCount: 500
m_PVREnvironmentReferencePointCount: 2048
m_LightProbeSampleCountMultiplier: 4
m_PVRBounces: 2
m_PVRMinBounces: 2
m_PVREnvironmentMIS: 0
m_PVRFilteringMode: 2
m_PVRDenoiserTypeDirect: 0
m_PVRDenoiserTypeIndirect: 0
m_PVRDenoiserTypeAO: 0
m_PVRFilterTypeDirect: 0
m_PVRFilterTypeIndirect: 0
m_PVRFilterTypeAO: 0
m_PVRFilteringGaussRadiusDirect: 1
m_PVRFilteringGaussRadiusIndirect: 5
m_PVRFilteringGaussRadiusAO: 2
m_PVRFilteringAtrousPositionSigmaDirect: 0.5
m_PVRFilteringAtrousPositionSigmaIndirect: 2
m_PVRFilteringAtrousPositionSigmaAO: 1

TestProjects/PerceptionURP/Assets/Scenes/MultiCameraSettings.lighting.meta (8 changes)


fileFormatVersion: 2
guid: 75bbcdcbda2820b4a933657ce3456134
NativeFormatImporter:
externalObjects: {}
mainObjectFileID: 4890085278179872738
userData:
assetBundleName:
assetBundleVariant:

TestProjects/PerceptionURP/Assets/Scenes/SampleScene.meta (8 changes)


fileFormatVersion: 2
guid: bce914f0f01b5644c9e61e46a023e596
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant: