
Merge branch 'HDRP/refactor-reflectionProbe/merge-probe-editors' of https://github.com/Unity-Technologies/ScriptableRenderPipeline into HDRP/refactor-reflectionProbe/merge-probe-editors

/main
Frédéric Vauchelles, 6 years ago
Current commit: 0b5f6f18
14 files changed, 265 insertions(+), 61 deletions(-)
  1. com.unity.render-pipelines.high-definition/HDRP/Editor/Lighting/Reflection/HDReflectionProbeEditor.Gizmos.cs (2 changes)
  2. com.unity.render-pipelines.high-definition/HDRP/Editor/Lighting/Reflection/HDReflectionProbeEditor.Handles.cs (5 changes)
  3. com.unity.render-pipelines.high-definition/HDRP/Editor/Lighting/Reflection/HDReflectionProbeEditor.Preview.cs (34 changes)
  4. com.unity.render-pipelines.high-definition/HDRP/Editor/Lighting/Reflection/HDReflectionProbeEditor.cs (28 changes)
  5. com.unity.render-pipelines.high-definition/HDRP/Editor/Lighting/Reflection/PlanarReflectionProbeUI.cs (16 changes)
  6. com.unity.render-pipelines.high-definition/HDRP/Lighting/Reflection/HDAdditionalReflectionData.cs (15 changes)
  7. com.unity.render-pipelines.high-definition/HDRP/Lighting/Reflection/HDProbe.cs (5 changes)
  8. com.unity.render-pipelines.high-definition/HDRP/Lighting/Reflection/PlanarReflectionProbe.cs (3 changes)
  9. com.unity.render-pipelines.high-definition/HDRP/Lighting/Reflection/ProbeWrapper.cs (16 changes)
  10. com.unity.render-pipelines.high-definition/HDRP/Lighting/Reflection/ReflectionSystem.cs (10 changes)
  11. com.unity.render-pipelines.high-definition/HDRP/Lighting/Reflection/ReflectionSystemInternal.cs (166 changes)
  12. com.unity.render-pipelines.high-definition/HDRP/Lighting/Reflection/ReflectionSystemParameters.cs (14 changes)
  13. com.unity.render-pipelines.high-definition/HDRP/RenderPipeline/HDRenderPipeline.cs (8 changes)
  14. com.unity.render-pipelines.high-definition/HDRP/RenderPipeline/HDRenderPipelineAsset.cs (4 changes)

com.unity.render-pipelines.high-definition/HDRP/Editor/Lighting/Reflection/HDReflectionProbeEditor.Gizmos.cs (2 changes)


{
material = new Material(Shader.Find("Debug/ReflectionProbePreview"));
}
material.SetTexture("_Cubemap", p.texture);
material.SetTexture("_Cubemap", e.GetTexture());
material.SetPass(0);
Graphics.DrawMeshNow(sphere, Matrix4x4.TRS(p.transform.position, Quaternion.identity, Vector3.one));
}

com.unity.render-pipelines.high-definition/HDRP/Editor/Lighting/Reflection/HDReflectionProbeEditor.Handles.cs (5 changes)


var p = m_SerializedHDProbe as SerializedHDReflectionProbe;
var o = this;
BakeRealtimeProbeIfPositionChanged(s, p, o);
if(EditMode.editMode == EditMode.SceneViewEditMode.ReflectionProbeOrigin)
{
Handle_OriginEditing(s, p, o);
}
}
static void Handle_OriginEditing(HDReflectionProbeUI s, SerializedHDReflectionProbe sp, Editor o)

com.unity.render-pipelines.high-definition/HDRP/Editor/Lighting/Reflection/HDReflectionProbeEditor.Preview.cs (34 changes)


using UnityEngine;
using UnityEngine.Experimental.Rendering.HDPipeline;
namespace UnityEditor.Experimental.Rendering.HDPipeline
{

return false; // We only handle one preview for reflection probes
// Ensure valid cube map editor (if possible)
Texture texture = GetTexture();
if (m_CubemapEditor != null && m_CubemapEditor.target as Texture != texture)
{
DestroyImmediate(m_CubemapEditor);
m_CubemapEditor = null;
}
CreateCachedEditor(((ReflectionProbe)target).texture, typeof(HDCubemapInspector), ref editor);
CreateCachedEditor(GetTexture(), typeof(HDCubemapInspector), ref editor);
m_CubemapEditor = editor as HDCubemapInspector;
}

GUILayout.EndHorizontal();
return;
}
var p = target as ReflectionProbe;
if (p != null && p.texture != null && targets.Length == 1)
Texture tex = GetTexture();
if (tex != null && targets.Length == 1)
var p = target as ReflectionProbe;
return p != null && p.texture != null;
return GetTexture() != null;
}
Texture GetTexture()
{
HDProbe additional = GetTarget(target);
if (additional != null && additional.mode == UnityEngine.Rendering.ReflectionProbeMode.Realtime)
{
return additional.realtimeTexture;
}
else
{
var p = target as ReflectionProbe;
if (p != null)
return p.texture;
}
return null;
}
private void OnDestroy()

com.unity.render-pipelines.high-definition/HDRP/Editor/Lighting/Reflection/HDReflectionProbeEditor.cs (28 changes)


return editMode == EditMode.SceneViewEditMode.ReflectionProbeBox || editMode == EditMode.SceneViewEditMode.Collider || editMode == EditMode.SceneViewEditMode.GridBox ||
editMode == EditMode.SceneViewEditMode.ReflectionProbeOrigin;
}
void BakeRealtimeProbeIfPositionChanged(HDReflectionProbeUI s, SerializedHDReflectionProbe sp, Editor o)
{
if (Application.isPlaying
|| ((ReflectionProbeMode)sp.mode.intValue) != ReflectionProbeMode.Realtime)
{
m_PositionHash = 0;
return;
}
var hash = 0;
for (var i = 0; i < sp.serializedLegacyObject.targetObjects.Length; i++)
{
var p = (ReflectionProbe)sp.serializedLegacyObject.targetObjects[i];
var tr = p.GetComponent<Transform>();
hash ^= tr.position.GetHashCode();
}
if (hash != m_PositionHash)
{
m_PositionHash = hash;
for (var i = 0; i < sp.serializedLegacyObject.targetObjects.Length; i++)
{
var p = (ReflectionProbe)sp.serializedLegacyObject.targetObjects[i];
p.RenderProbe();
}
}
}
}
}
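The editor change above detects probe movement by XOR-combining the hash codes of every selected probe's Transform position and re-baking only when that combined hash changes. A minimal, self-contained sketch of the pattern (not the HDRP source; the class name is made up for illustration):

using UnityEngine;

// Reports whether any of a set of transforms has moved since the last check,
// using the same XOR-of-position-hashes trick as BakeRealtimeProbeIfPositionChanged.
class PositionChangeDetector
{
    int m_PositionHash;

    public bool HasAnyMoved(Transform[] targets)
    {
        var hash = 0;
        for (var i = 0; i < targets.Length; i++)
            hash ^= targets[i].position.GetHashCode();

        if (hash == m_PositionHash)
            return false;          // nothing moved (up to hash collisions)

        m_PositionHash = hash;     // remember the new combined state
        return true;
    }
}

The XOR can collide or cancel out (for example when two probes swap positions), but a spurious or skipped editor re-render is cheap, so the simple hash is an acceptable trade-off.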

com.unity.render-pipelines.high-definition/HDRP/Editor/Lighting/Reflection/PlanarReflectionProbeUI.cs (16 changes)


serialized.Apply();
}
GUI.enabled = false;
EditorGUILayout.LabelField(resolutionContent, CoreEditorUtils.GetContent(((int)hdrp.GetRenderPipelineSettings().lightLoopSettings.reflectionCubemapSize).ToString()));
EditorGUILayout.LabelField(shadowDistanceContent, EditorStyles.label);
EditorGUILayout.LabelField(cullingMaskContent, EditorStyles.label);
EditorGUILayout.LabelField(useOcclusionCullingContent, EditorStyles.label);
EditorGUILayout.LabelField(nearClipCullingContent, EditorStyles.label);
EditorGUILayout.LabelField(farClipCullingContent, EditorStyles.label);
GUI.enabled = true;
//GUI.enabled = false;
//EditorGUILayout.LabelField(resolutionContent, CoreEditorUtils.GetContent(((int)hdrp.GetRenderPipelineSettings().lightLoopSettings.reflectionCubemapSize).ToString()));
//EditorGUILayout.LabelField(shadowDistanceContent, EditorStyles.label);
//EditorGUILayout.LabelField(cullingMaskContent, EditorStyles.label);
//EditorGUILayout.LabelField(useOcclusionCullingContent, EditorStyles.label);
//EditorGUILayout.LabelField(nearClipCullingContent, EditorStyles.label);
//EditorGUILayout.LabelField(farClipCullingContent, EditorStyles.label);
//GUI.enabled = true;
}
internal PlanarReflectionProbeUI()

com.unity.render-pipelines.high-definition/HDRP/Lighting/Reflection/HDAdditionalReflectionData.cs (15 changes)


MigrateToUseInfluenceVolume();
if (needMigrateToMergeEditors)
MigrateToMergeEditors();
ReflectionSystem.RegisterProbe(this);
}
void OnDisable()
{
ReflectionSystem.UnregisterProbe(this);
}
void OnValidate()
{
ReflectionSystem.UnregisterProbe(this);
if (isActiveAndEnabled)
ReflectionSystem.RegisterProbe(this);
}
void MigrateToHDProbeChild()
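The OnEnable/OnDisable/OnValidate trio above keeps the probe's registration with ReflectionSystem in sync with the component's state; OnValidate unregisters and then re-registers so that an Inspector edit (for example switching the mode to Realtime) moves the probe into the correct update bucket. A hedged sketch of that lifecycle against a hypothetical static registry:

using System.Collections.Generic;
using UnityEngine;

// Stand-in for ReflectionSystem in this sketch.
static class ProbeRegistry
{
    static readonly HashSet<MonoBehaviour> s_Probes = new HashSet<MonoBehaviour>();
    public static void Register(MonoBehaviour probe)   { s_Probes.Add(probe); }
    public static void Unregister(MonoBehaviour probe) { s_Probes.Remove(probe); }
}

class RegisteredProbe : MonoBehaviour
{
    void OnEnable()  { ProbeRegistry.Register(this); }
    void OnDisable() { ProbeRegistry.Unregister(this); }

    void OnValidate()
    {
        // Unregister first, then register again only if still active, so an
        // Inspector change never leaves a stale or duplicate entry behind.
        ProbeRegistry.Unregister(this);
        if (isActiveAndEnabled)
            ProbeRegistry.Register(this);
    }
}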

com.unity.render-pipelines.high-definition/HDRP/Lighting/Reflection/HDProbe.cs (5 changes)


ReflectionProbeMode m_Mode = ReflectionProbeMode.Baked;
[SerializeField]
ReflectionProbeRefreshMode m_RefreshMode = ReflectionProbeRefreshMode.OnAwake;
RenderTexture m_RealtimeTexture = null;
/// <summary>ProxyVolume currently used by this probe.</summary>
public ReflectionProxyVolumeComponent proxyVolume { get { return m_ProxyVolume; } }

/// <summary>Weight for blending amongst probes (non PBR parameter).</summary>
public float weight { get { return m_Weight; } set { m_Weight = value; } }
/// <summary>Get the realtime acquired Render Texture</summary>
public RenderTexture realtimeTexture { get { return m_RealtimeTexture; } internal set { m_RealtimeTexture = value; } }
/// <summary>The capture mode.</summary>
public virtual ReflectionProbeMode mode

com.unity.render-pipelines.high-definition/HDRP/Lighting/Reflection/PlanarReflectionProbe.cs (3 changes)


[Range(0, 180)]
float m_FieldOfViewOverride = 90;
RenderTexture m_RealtimeTexture;
public bool overrideFieldOfView { get { return m_OverrideFieldOfView; } }
public float fieldOfViewOverride { get { return m_FieldOfViewOverride; } }

}
public Texture customTexture { get { return m_CustomTexture; } set { m_CustomTexture = value; } }
public Texture bakedTexture { get { return m_BakedTexture; } set { m_BakedTexture = value; }}
public RenderTexture realtimeTexture { get { return m_RealtimeTexture; } internal set { m_RealtimeTexture = value; } }
public float captureNearPlane { get { return m_CaptureNearPlane; } }
public float captureFarPlane { get { return m_CaptureFarPlane; } }
public CapturePositionMode capturePositionMode { get { return m_CapturePositionMode; } }

com.unity.render-pipelines.high-definition/HDRP/Lighting/Reflection/ProbeWrapper.cs (16 changes)


}
}
public override Texture texture { get { return probe.texture; } }
public override Texture texture
{
get
{
if(additional.mode == ReflectionProbeMode.Realtime)
{
return additional.realtimeTexture;
}
else
{
return probe.texture;
}
}
}
public override ReflectionProbeMode mode { get { return probe.probe.mode; } }
public override EnvShapeType influenceShapeType { get { return ConvertShape(additional.influenceVolume.shape); } }
public override float weight { get { return additional.weight; } }

com.unity.render-pipelines.high-definition/HDRP/Lighting/Reflection/ReflectionSystem.cs (10 changes)


s_Instance.RegisterProbe(planarProbe);
}
public static void RegisterProbe(HDAdditionalReflectionData probe)
{
s_Instance.RegisterProbe(probe);
}
}
public static void UnregisterProbe(HDAdditionalReflectionData probe)
{
s_Instance.UnregisterProbe(probe);
}
public static void RequestRealtimeRender(PlanarReflectionProbe probe)
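ReflectionSystem is a static facade over a swappable ReflectionSystemInternal instance; when parameters change, the instance is rebuilt and the previous instance is handed to the new constructor so registrations survive (see the UnionWith calls in the next file). A simplified sketch of that pattern with made-up names:

using System.Collections.Generic;

// Static facade: callers never hold the instance, so it can be replaced at any time.
static class ProbeSystem
{
    static ProbeSystemInternal s_Instance = new ProbeSystemInternal(128, null);

    public static void SetParameters(int maxProbes)
    {
        // Rebuild the internal state, migrating registered probes from the old instance.
        s_Instance = new ProbeSystemInternal(maxProbes, s_Instance);
    }

    public static void RegisterProbe(object probe)   { s_Instance.RegisterProbe(probe); }
    public static void UnregisterProbe(object probe) { s_Instance.UnregisterProbe(probe); }
}

class ProbeSystemInternal
{
    readonly HashSet<object> m_Probes = new HashSet<object>();
    readonly int m_MaxProbes;   // would size preallocated work arrays (omitted here)

    public ProbeSystemInternal(int maxProbes, ProbeSystemInternal previous)
    {
        m_MaxProbes = maxProbes;
        if (previous != null)
            m_Probes.UnionWith(previous.m_Probes);  // keep registrations across rebuilds
    }

    public void RegisterProbe(object probe)   { m_Probes.Add(probe); }
    public void UnregisterProbe(object probe) { m_Probes.Remove(probe); }
}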

com.unity.render-pipelines.high-definition/HDRP/Lighting/Reflection/ReflectionSystemInternal.cs (166 changes)


{
static Camera s_RenderCamera = null;
static HDAdditionalCameraData s_RenderCameraData;
static int frame = Time.frameCount;
HashSet<HDAdditionalReflectionData> m_AdditionalDataReflectionProbes;
HashSet<HDAdditionalReflectionData> m_AdditionalDataReflectionProbe_RealtimeUpdate;
HashSet<HDAdditionalReflectionData> m_AdditionalDataReflectionProbe_RequestRealtimeRender;
HDAdditionalReflectionData[] m_AdditionalDataReflectionProbe_RealtimeUpdate_WorkArray;
HashSet<PlanarReflectionProbe> m_PlanarReflectionProbes;
HashSet<PlanarReflectionProbe> m_PlanarReflectionProbe_DirtyBounds;

m_PlanarReflectionProbesArray = new PlanarReflectionProbe[parameters.maxActivePlanarReflectionProbe];
m_PlanarReflectionProbeBoundsArray = new BoundingSphere[parameters.maxActivePlanarReflectionProbe];
m_PlanarReflectionProbe_RealtimeUpdate_WorkArray = new PlanarReflectionProbe[parameters.maxPlanarReflectionProbePerCamera];
m_AdditionalDataReflectionProbe_RealtimeUpdate_WorkArray = new HDAdditionalReflectionData[parameters.maxPlanarReflectionProbePerCamera]; ;
m_AdditionalDataReflectionProbes = new HashSet<HDAdditionalReflectionData>();
m_AdditionalDataReflectionProbe_RealtimeUpdate = new HashSet<HDAdditionalReflectionData>();
m_AdditionalDataReflectionProbe_RequestRealtimeRender = new HashSet<HDAdditionalReflectionData>();
m_PlanarReflectionProbes = new HashSet<PlanarReflectionProbe>();
m_PlanarReflectionProbe_DirtyBounds = new HashSet<PlanarReflectionProbe>();
m_PlanarReflectionProbe_RequestRealtimeRender = new HashSet<PlanarReflectionProbe>();

if (previous != null)
{
m_PlanarReflectionProbes.UnionWith(previous.m_PlanarReflectionProbes);
m_PlanarReflectionProbes.UnionWith(previous.m_PlanarReflectionProbes);
m_AdditionalDataReflectionProbe_RequestRealtimeRender.UnionWith(previous.m_AdditionalDataReflectionProbe_RequestRealtimeRender);
m_AdditionalDataReflectionProbe_RealtimeUpdate.UnionWith(previous.m_AdditionalDataReflectionProbe_RealtimeUpdate);
m_PlanarReflectionProbe_DirtyBounds.UnionWith(m_PlanarReflectionProbes);
m_PlanarReflectionProbe_RequestRealtimeRender.UnionWith(previous.m_PlanarReflectionProbe_RequestRealtimeRender);
m_PlanarReflectionProbe_RealtimeUpdate.UnionWith(previous.m_PlanarReflectionProbe_RealtimeUpdate);

}
}
public void RegisterProbe(HDAdditionalReflectionData additional)
{
m_AdditionalDataReflectionProbes.Add(additional);
//SetProbeBoundsDirty(probe);
if (additional.mode == ReflectionProbeMode.Realtime)
{
switch (additional.refreshMode)
{
case ReflectionProbeRefreshMode.OnAwake:
m_AdditionalDataReflectionProbe_RequestRealtimeRender.Add(additional);
break;
case ReflectionProbeRefreshMode.EveryFrame:
m_AdditionalDataReflectionProbe_RealtimeUpdate.Add(additional);
break;
}
}
}
public void UnregisterProbe(PlanarReflectionProbe planarProbe)
{
m_PlanarReflectionProbes.Remove(planarProbe);

m_PlanarReflectionProbe_PerCamera_RealtimeUpdate.Remove(planarProbe);
}
public void UnregisterProbe(HDAdditionalReflectionData additional)
{
m_AdditionalDataReflectionProbes.Remove(additional);
m_AdditionalDataReflectionProbe_RequestRealtimeRender.Remove(additional);
m_AdditionalDataReflectionProbe_RealtimeUpdate.Remove(additional);
}
public void PrepareCull(Camera camera, ReflectionProbeCullResults results)
{
UpdateAllPlanarReflectionProbeBounds();

{
var probe = m_PlanarReflectionProbe_RealtimeUpdate_WorkArray[i];
if (!IsRealtimeTextureValid(probe.realtimeTexture))
if (!IsRealtimeTextureValid(probe.realtimeTexture, true))
{
if (probe.realtimeTexture != null)
probe.realtimeTexture.Release();

{
var probe = m_PlanarReflectionProbe_RealtimeUpdate_WorkArray[i];
if (!IsRealtimeTextureValid(probe.realtimeTexture))
if (!IsRealtimeTextureValid(probe.realtimeTexture, true))
{
if (probe.realtimeTexture != null)
probe.realtimeTexture.Release();

Render(probe, probe.realtimeTexture);
}
}
if ((probeTypes & ReflectionProbeType.ReflectionProbe) != 0 && frame != Time.frameCount)
{
//do only one per frame
frame = Time.frameCount;
// Discard disabled probes in requested render probes
m_AdditionalDataReflectionProbe_RequestRealtimeRender.IntersectWith(m_AdditionalDataReflectionProbes);
// Include all realtime probe modes
m_AdditionalDataReflectionProbe_RequestRealtimeRender.UnionWith(m_AdditionalDataReflectionProbe_RealtimeUpdate);
var length = Mathf.Min(m_AdditionalDataReflectionProbe_RequestRealtimeRender.Count, m_AdditionalDataReflectionProbe_RealtimeUpdate_WorkArray.Length);
m_AdditionalDataReflectionProbe_RequestRealtimeRender.CopyTo(m_AdditionalDataReflectionProbe_RealtimeUpdate_WorkArray);
m_AdditionalDataReflectionProbe_RequestRealtimeRender.Clear();
// 1. Allocate if necessary target texture
for (var i = 0; i < length; i++)
{
var additional = m_AdditionalDataReflectionProbe_RealtimeUpdate_WorkArray[i];
if (!IsRealtimeTextureValid(additional.realtimeTexture, false))
{
if (additional.realtimeTexture != null)
additional.realtimeTexture.Release();
additional.realtimeTexture = NewRenderTarget(additional);
}
}
// 2. Render
for (var i = 0; i < length; i++)
{
var additional = m_AdditionalDataReflectionProbe_RealtimeUpdate_WorkArray[i];
Render(additional, additional.realtimeTexture);
}
}
}
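The cube-probe branch above runs at most once per frame (the frame != Time.frameCount guard) and then builds its work list with HashSet operations: IntersectWith drops probes that were unregistered after requesting a render, UnionWith pulls in every EveryFrame probe, and CopyTo fills a preallocated work array so no per-frame garbage is produced. A compact sketch of that scheduling pattern with illustrative names:

using System.Collections.Generic;
using UnityEngine;

class RealtimeProbeScheduler
{
    readonly HashSet<Object> m_Registered    = new HashSet<Object>();  // currently enabled probes
    readonly HashSet<Object> m_RequestedOnce = new HashSet<Object>();  // OnAwake-style one-shot requests
    readonly HashSet<Object> m_EveryFrame    = new HashSet<Object>();  // EveryFrame probes
    readonly Object[] m_WorkArray = new Object[128];                   // preallocated, reused every frame
    int m_LastFrame = -1;

    // Returns how many entries of m_WorkArray should be rendered this frame.
    public int GatherProbesToRender()
    {
        if (m_LastFrame == Time.frameCount)
            return 0;                                   // already gathered this frame
        m_LastFrame = Time.frameCount;

        m_RequestedOnce.IntersectWith(m_Registered);    // discard probes disabled since the request
        m_RequestedOnce.UnionWith(m_EveryFrame);        // always include every-frame probes

        var count = Mathf.Min(m_RequestedOnce.Count, m_WorkArray.Length);
        m_RequestedOnce.CopyTo(m_WorkArray, 0, count);  // copy at most the array capacity
        m_RequestedOnce.Clear();                        // one-shot requests are now consumed
        return count;
    }
}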
public RenderTexture NewRenderTarget(PlanarReflectionProbe probe)

return rt;
}
public RenderTexture NewRenderTarget(HDAdditionalReflectionData probe)
{
var rt = new RenderTexture(m_Parameters.reflectionProbeSize, m_Parameters.reflectionProbeSize, 0, RenderTextureFormat.ARGBHalf);
// No hide and don't save for this one
rt.useMipMap = true;
rt.autoGenerateMips = false;
rt.name = CoreUtils.GetRenderTargetAutoName(m_Parameters.reflectionProbeSize, m_Parameters.reflectionProbeSize, 1, RenderTextureFormat.ARGBHalf, "ProbeRT");
rt.dimension = TextureDimension.Cube;
rt.Create();
return rt;
}
//public float GetCaptureCameraFOVFor(PlanarReflectionProbe probe, Camera viewerCamera)
//{
// switch (probe.influenceVolume.shapeType)

// }
//}
bool IsRealtimeTextureValid(RenderTexture renderTexture)
bool IsRealtimeTextureValid(RenderTexture renderTexture, bool isPlanar)
return renderTexture != null
&& renderTexture.width == m_Parameters.planarReflectionProbeSize
&& renderTexture.height == m_Parameters.planarReflectionProbeSize
&& renderTexture.format == RenderTextureFormat.ARGBHalf
&& renderTexture.useMipMap;
if(isPlanar)
return renderTexture != null
&& renderTexture.width == m_Parameters.planarReflectionProbeSize
&& renderTexture.height == m_Parameters.planarReflectionProbeSize
&& renderTexture.format == RenderTextureFormat.ARGBHalf
&& renderTexture.useMipMap;
else
return renderTexture != null
&& renderTexture.width == m_Parameters.reflectionProbeSize
&& renderTexture.height == m_Parameters.reflectionProbeSize
&& renderTexture.format == RenderTextureFormat.ARGBHalf
&& renderTexture.useMipMap;
}
public void RequestRealtimeRender(PlanarReflectionProbe probe)

renderCamera.Render();
GL.invertCulling = false;
renderCamera.targetTexture = null;
target.IncrementUpdateCount();
}
public void Render(HDAdditionalReflectionData additional, RenderTexture target, Camera viewerCamera = null)
{
var renderCamera = GetRenderCamera();
// Copy current frameSettings of this probe to the HDAdditionalData of the render camera
//probe.frameSettings.CopyTo(s_RenderCameraData.GetFrameSettings());
SetupCameraForRender(renderCamera, additional, viewerCamera);
renderCamera.RenderToCubemap(target);
target.IncrementUpdateCount();
}
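Rendering a cube probe is deliberately the simple path: position the shared capture camera and let Unity's Camera.RenderToCubemap fill all six faces of the cube render target created by NewRenderTarget. A standalone sketch of that generic pattern (the helper name and parameters are illustrative, not HDRP API):

using UnityEngine;

static class CubemapCapture
{
    // Renders the scene as seen from 'position' into a cube RenderTexture.
    public static void Capture(Camera renderCamera, Vector3 position, RenderTexture cubeTarget)
    {
        renderCamera.transform.position = position;
        renderCamera.aspect = 1f;
        renderCamera.fieldOfView = 90f;           // each cube face spans 90 degrees

        renderCamera.RenderToCubemap(cubeTarget); // renders all six faces in one call
        cubeTarget.IncrementUpdateCount();        // tell samplers the content changed
    }
}

RenderToCubemap expects the target's dimension to be TextureDimension.Cube, which is exactly what NewRenderTarget(HDAdditionalReflectionData) sets up above.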

ctr.rotation = captureRotation;
}
static void SetupCameraForRender(Camera camera, HDAdditionalReflectionData additional, Camera viewerCamera = null)
{
float nearClipPlane, farClipPlane, aspect, fov;
Color backgroundColor;
CameraClearFlags clearFlags;
Vector3 capturePosition;
Quaternion captureRotation;
Matrix4x4 worldToCamera, projection;
CalculateCaptureCameraProperties(additional,
out nearClipPlane, out farClipPlane,
out aspect, out fov, out clearFlags, out backgroundColor,
out worldToCamera, out projection,
out capturePosition, out captureRotation, viewerCamera);
camera.farClipPlane = farClipPlane;
camera.nearClipPlane = nearClipPlane;
camera.fieldOfView = fov;
camera.aspect = aspect;
camera.clearFlags = clearFlags;
camera.backgroundColor = camera.backgroundColor;
camera.projectionMatrix = projection;
camera.worldToCameraMatrix = worldToCamera;
var ctr = camera.transform;
ctr.position = capturePosition;
ctr.rotation = captureRotation;
}
public static void CalculateCaptureCameraViewProj(PlanarReflectionProbe probe, out Matrix4x4 worldToCamera, out Matrix4x4 projection, out Vector3 capturePosition, out Quaternion captureRotation, Camera viewerCamera = null)
{
float nearClipPlane, farClipPlane, aspect, fov;

CalculateMirroredCaptureCameraProperties(probe, viewerCamera, out nearClipPlane, out farClipPlane, out aspect, out fov, out clearFlags, out backgroundColor, out worldToCamera, out projection, out capturePosition, out captureRotation);
else
CalculateStaticCaptureCameraProperties(probe, out nearClipPlane, out farClipPlane, out aspect, out fov, out clearFlags, out backgroundColor, out worldToCamera, out projection, out capturePosition, out captureRotation);
}
public static void CalculateCaptureCameraProperties(HDAdditionalReflectionData additional, out float nearClipPlane, out float farClipPlane, out float aspect, out float fov, out CameraClearFlags clearFlags, out Color backgroundColor, out Matrix4x4 worldToCamera, out Matrix4x4 projection, out Vector3 capturePosition, out Quaternion captureRotation, Camera viewerCamera = null)
{
nearClipPlane = additional.reflectionProbe.nearClipPlane;
farClipPlane = additional.reflectionProbe.farClipPlane;
aspect = 1f;
fov = 90f;
clearFlags = CameraClearFlags.Nothing;
backgroundColor = Color.white;
capturePosition = additional.transform.TransformPoint(additional.transform.position);
captureRotation = Quaternion.identity;
worldToCamera = GeometryUtils.CalculateWorldToCameraMatrixRHS(capturePosition, captureRotation);
projection = Matrix4x4.Perspective(fov, aspect, nearClipPlane, farClipPlane);
}
static bool IsProbeCaptureMirrored(PlanarReflectionProbe probe, Camera viewerCamera)

com.unity.render-pipelines.high-definition/HDRP/Lighting/Reflection/ReflectionSystemParameters.cs (14 changes)


{
maxPlanarReflectionProbePerCamera = 128,
maxActivePlanarReflectionProbe = 512,
planarReflectionProbeSize = 128
planarReflectionProbeSize = 128,
maxActiveReflectionProbe = 512,
reflectionProbeSize = 128
};
/// <summary>

/// Size of the planar probe textures.
/// </summary>
public int planarReflectionProbeSize;
/// <summary>
/// Maximum number of active non planar reflection in the world.
/// </summary>
public int maxActiveReflectionProbe;
/// <summary>
/// Size of the non planar probe textures.
/// </summary>
public int reflectionProbeSize;
}
}

com.unity.render-pipelines.high-definition/HDRP/RenderPipeline/HDRenderPipeline.cs (8 changes)


}
// TODO: Render only visible probes
var isReflection = cameras.Any(c => c.cameraType == CameraType.Reflection);
if (!isReflection)
ReflectionSystem.RenderAllRealtimeProbes(ReflectionProbeType.PlanarReflection);
var probeTypeToRender = ReflectionProbeType.ReflectionProbe;
var isPlanarReflection = cameras.Any(c => c.cameraType == CameraType.Reflection);
if (!isPlanarReflection)
probeTypeToRender |= ReflectionProbeType.PlanarReflection;
ReflectionSystem.RenderAllRealtimeProbes(probeTypeToRender);
// We first update the state of asset frame settings as they can be use by various camera
// but we keep the dirty state to correctly reset other camera that use RenderingPath.Default.
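The change above turns the planar-only call into a flags value: cube reflection probes are always queued for realtime update, and planar probes are added only when none of the cameras being rendered is itself a reflection camera, which prevents planar captures from recursively triggering more planar captures. A hedged sketch of the selection logic with an illustrative flags enum (the real ReflectionProbeType is defined in the HDRP source):

using System;
using System.Linq;
using UnityEngine;

[Flags]
enum ProbeTypeFlags
{
    None             = 0,
    ReflectionProbe  = 1 << 0,
    PlanarReflection = 1 << 1,
}

static class ProbeRenderSelection
{
    public static ProbeTypeFlags Select(Camera[] cameras)
    {
        var types = ProbeTypeFlags.ReflectionProbe;        // cube probes: always rendered
        var isPlanarReflection = cameras.Any(c => c.cameraType == CameraType.Reflection);
        if (!isPlanarReflection)
            types |= ProbeTypeFlags.PlanarReflection;      // planar probes: only outside a reflection render
        return types;
    }
}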

com.unity.render-pipelines.high-definition/HDRP/RenderPipeline/HDRenderPipelineAsset.cs (4 changes)


{
maxPlanarReflectionProbePerCamera = renderPipelineSettings.lightLoopSettings.planarReflectionProbeCacheSize,
maxActivePlanarReflectionProbe = 512,
planarReflectionProbeSize = (int)renderPipelineSettings.lightLoopSettings.planarReflectionTextureSize
planarReflectionProbeSize = (int)renderPipelineSettings.lightLoopSettings.planarReflectionTextureSize,
maxActiveReflectionProbe = 512,
reflectionProbeSize = (int)renderPipelineSettings.lightLoopSettings.reflectionCubemapSize
};
}
}
