
Merge upstream/master

/main
Evgenii Golubev, 6 years ago
Current commit 8d663ac4
236 files changed, with 5,066 insertions and 3,764 deletions
  1. CHANGELOG.md (12)
  2. ImageTemplates/HDRenderPipeline/Scenes/1xxx_Materials/1102_Unlit_Distortion.unity.png (999)
  3. ImageTemplates/HDRenderPipeline/Scenes/1xxx_Materials/1103_Unlit_Distortion_DepthTest.unity.png (998)
  4. ImageTemplates/HDRenderPipeline/Scenes/1xxx_Materials/1203_Lit_Transparent.unity.png (998)
  5. ImageTemplates/HDRenderPipeline/Scenes/1xxx_Materials/1205_Lit_Transparent_Refraction.unity.png (999)
  6. ImageTemplates/HDRenderPipeline/Scenes/1xxx_Materials/1206_Lit_Transparent_Distortion.unity.png (999)
  7. ImageTemplates/HDRenderPipeline/Scenes/9xxx_Other/9002_Deferred-and-Forward.unity.png (999)
  8. ScriptableRenderPipeline/Core/CHANGELOG.md (4)
  9. ScriptableRenderPipeline/Core/CoreRP/CoreResources/GPUCopy.compute (16)
  10. ScriptableRenderPipeline/Core/CoreRP/CoreResources/GPUCopy.cs (81)
  11. ScriptableRenderPipeline/Core/CoreRP/CoreResources/GPUCopyAsset.cs (109)
  12. ScriptableRenderPipeline/Core/CoreRP/Debugging/DebugUI.Panel.cs (1)
  13. ScriptableRenderPipeline/Core/CoreRP/Debugging/DebugUI.cs (3)
  14. ScriptableRenderPipeline/Core/CoreRP/Editor/Debugging/DebugWindow.cs (31)
  15. ScriptableRenderPipeline/Core/CoreRP/Editor/ShaderGenerator/CSharpToHLSL.cs (5)
  16. ScriptableRenderPipeline/Core/CoreRP/ShaderLibrary/Common.hlsl (15)
  17. ScriptableRenderPipeline/Core/CoreRP/ShaderLibrary/EntityLighting.hlsl (2)
  18. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Camera/HDAdditionalCameraData.cs (15)
  19. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Debug/DebugColorPicker.shader (3)
  20. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Debug/DebugDisplay.cs (295)
  21. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Debug/DebugDisplay.cs.hlsl (121)
  22. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Debug/DebugDisplay.hlsl (5)
  23. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Debug/DebugDisplayLatlong.shader (3)
  24. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Debug/DebugFullScreen.shader (84)
  25. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Debug/DebugViewMaterialGBuffer.shader (6)
  26. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Debug/LightingDebug.cs (20)
  27. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Debug/LightingDebug.cs.hlsl (16)
  28. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Decal/DecalProjectorComponent.cs (12)
  29. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Decal/DecalSystem.cs (5)
  30. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/Camera/SerializedHDCamera.cs (6)
  31. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/EditorRenderPipelineResources/ReflectionProbesPreview.shader (2)
  32. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/HDAssetFactory.cs (4)
  33. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/Lighting/HDLightEditor.cs (10)
  34. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/Lighting/Reflection/PlanarReflectionProbeUI.Drawers.cs (11)
  35. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/Lighting/Reflection/PlanarReflectionProbeUI.Handles.cs (17)
  36. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/Material/LayeredLit/LayeredLitUI.cs (45)
  37. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/Material/Lit/LitUI.cs (34)
  38. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/RenderLoopSettings/GlobalLightLoopSettingsUI.cs (4)
  39. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/RenderLoopSettings/HDRenderPipelineEditor.cs (4)
  40. ScriptableRenderPipeline/HDRenderPipeline/HDRP/HDRenderPipeline.cs (208)
  41. ScriptableRenderPipeline/HDRenderPipeline/HDRP/HDRenderPipelineAsset.asset (4)
  42. ScriptableRenderPipeline/HDRenderPipeline/HDRP/HDStringConstants.cs (28)
  43. ScriptableRenderPipeline/HDRenderPipeline/HDRP/HDUtils.cs (34)
  44. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Light/HDAdditionalLightData.cs (12)
  45. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightEvaluation.hlsl (1)
  46. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightLoop/LightLoop.cs (96)
  47. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightLoop/LightLoop.cs.hlsl (3)
  48. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightLoop/LightLoop.hlsl (161)
  49. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightLoop/LightLoopDef.hlsl (24)
  50. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightLoop/lightlistbuild-bigtile.compute (5)
  51. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightLoop/lightlistbuild-clustered.compute (14)
  52. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightLoop/lightlistbuild.compute (21)
  53. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightLoop/materialflags.compute (2)
  54. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightLoop/scrbound.compute (2)
  55. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/HomogeneousDensityVolume.cs (10)
  56. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/HomogeneousDensityVolume.cs.meta (2)
  57. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/VolumetricLighting.cs (324)
  58. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/VolumetricLighting.cs.hlsl (10)
  59. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/Decal/DBufferManager.cs (1)
  60. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/Decal/Decal.hlsl (1)
  61. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/Decal/DecalData.hlsl (1)
  62. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/Decal/DecalUtilities.hlsl (48)
  63. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/DiffusionProfile/DiffusionProfileSettings.cs (3)
  64. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/GGXConvolution/RuntimeFilterIBL.cs (79)
  65. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/LayeredLit/LayeredLit.shader (2)
  66. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/LayeredLit/LayeredLitData.hlsl (17)
  67. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/LayeredLit/LayeredLitTessellation.shader (4)
  68. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/Lit/Lit.cs (19)
  69. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/Lit/Lit.cs.hlsl (15)
  70. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/Lit/Lit.hlsl (177)
  71. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/Lit/Lit.shader (4)
  72. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/Lit/LitData.hlsl (4)
  73. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/Lit/LitDataIndividualLayer.hlsl (2)
  74. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/Lit/LitTessellation.shader (8)
  75. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/Lit/ShaderPass/LitDepthPass.hlsl (6)
  76. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/Lit/ShaderPass/LitVelocityPass.hlsl (6)
  77. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/SubsurfaceScattering/SubsurfaceScatteringManager.cs (4)
  78. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/Unlit/Unlit.shader (2)
  79. ScriptableRenderPipeline/HDRenderPipeline/HDRP/RenderPipelineResources/ApplyDistorsion.compute (10)
  80. ScriptableRenderPipeline/HDRenderPipeline/HDRP/RenderPipelineResources/BufferPyramid.cs (128)
  81. ScriptableRenderPipeline/HDRenderPipeline/HDRP/RenderPipelineResources/CopyDepthBuffer.shader (1)
  82. ScriptableRenderPipeline/HDRenderPipeline/HDRP/RenderPipelineResources/DepthPyramid.compute (58)
  83. ScriptableRenderPipeline/HDRenderPipeline/HDRP/RenderPipelineResources/HDRenderPipelineResources.asset (5)
  84. ScriptableRenderPipeline/HDRenderPipeline/HDRP/RenderPipelineResources/RenderPipelineResources.cs (2)
  85. ScriptableRenderPipeline/HDRenderPipeline/HDRP/ShaderPass/ShaderPass.cs (1)
  86. ScriptableRenderPipeline/HDRenderPipeline/HDRP/ShaderPass/ShaderPass.cs.hlsl (5)
  87. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Sky/HDRISky/HDRISkyRenderer.cs (2)
  88. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Sky/ProceduralSky/ProceduralSkyRenderer.cs (2)
  89. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Sky/SkyManager.cs (14)
  90. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Sky/SkyRenderer.cs (10)
  91. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Sky/SkyRenderingContext.cs (4)
  92. ScriptableRenderPipeline/LightweightPipeline/LWRP/LightweightPipeline.cs (39)
  93. ScriptableRenderPipeline/LightweightPipeline/LWRP/ShaderLibrary/Core.hlsl (7)
  94. ScriptableRenderPipeline/LightweightPipeline/LWRP/ShaderLibrary/InputBuiltin.hlsl (53)
  95. ScriptableRenderPipeline/LightweightPipeline/LWRP/ShaderLibrary/InputSurface.hlsl (19)
  96. ScriptableRenderPipeline/LightweightPipeline/LWRP/ShaderLibrary/Lighting.hlsl (19)
  97. ScriptableRenderPipeline/LightweightPipeline/LWRP/ShaderLibrary/LightweightPassLit.hlsl (66)
  98. ScriptableRenderPipeline/LightweightPipeline/LWRP/ShaderLibrary/LightweightPassMeta.hlsl (15)
  99. ScriptableRenderPipeline/LightweightPipeline/LWRP/ShaderLibrary/Shadows.hlsl (10)
  100. ScriptableRenderPipeline/LightweightPipeline/LWRP/Shaders/LightweightStandard.shader (3)

CHANGELOG.md (12 lines changed)


The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/)
and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html).
## [Unreleased]
### Added
- Planar Reflection Probe support roughness (gaussian convolution of captured probe)
- Screen Space Refraction projection model (Proxy raycasting, HiZ raymarching)
- Screen Space Refraction settings as volume component
### Changed
- Depth and color pyramid are properly computed and sampled when the camera renders inside a viewport of a RTHandle.
- Forced Planar Probe update modes to (Realtime, Every Update, Mirror Camera)
- Removed Planar Probe mirror plane position and normal fields in inspector, always display mirror plane and normal gizmos
- Screen Space Refraction proxy model uses the proxy of the first environment light (Reflection probe/Planar probe) or the sky
## [0.1.6] - 2018-xx-yy
### Changelog starting

ImageTemplates/HDRenderPipeline/Scenes/1xxx_Materials/1102_Unlit_Distortion.unity.png (999): diff too large to display
ImageTemplates/HDRenderPipeline/Scenes/1xxx_Materials/1103_Unlit_Distortion_DepthTest.unity.png (998): diff too large to display
ImageTemplates/HDRenderPipeline/Scenes/1xxx_Materials/1203_Lit_Transparent.unity.png (998): diff too large to display
ImageTemplates/HDRenderPipeline/Scenes/1xxx_Materials/1205_Lit_Transparent_Refraction.unity.png (999): diff too large to display
ImageTemplates/HDRenderPipeline/Scenes/1xxx_Materials/1206_Lit_Transparent_Distortion.unity.png (999): diff too large to display
ImageTemplates/HDRenderPipeline/Scenes/9xxx_Other/9002_Deferred-and-Forward.unity.png (999): diff too large to display

ScriptableRenderPipeline/Core/CHANGELOG.md (4 lines changed)


The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/)
and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html).
## [Unreleased]
### Changed
- Moved root files into folders for easier maintenance
## [0.1.6] - 2018-xx-yy
### Changelog starting

ScriptableRenderPipeline/Core/CoreRP/CoreResources/GPUCopy.compute (16 lines changed)


// Autogenerated file. Do not edit by hand
#pragma only_renderers d3d11 ps4 xboxone vulkan metal
CBUFFER_START (UnityCBuffer)
uint2 _RectOffset;
CBUFFER_END
#pragma kernel KSampleCopy4_1_x
[numthreads(8, 8, 1)]
void KSampleCopy4_1_x(uint2 dispatchThreadId : SV_DispatchThreadID)
#pragma kernel KSampleCopy4_1_x_8 KERNEL_NAME=KSampleCopy4_1_x_8 KERNEL_SIZE=8
#pragma kernel KSampleCopy4_1_x_1 KERNEL_NAME=KSampleCopy4_1_x_1 KERNEL_SIZE=1
[numthreads(KERNEL_SIZE, KERNEL_SIZE, 1)]
void KERNEL_NAME(uint2 dispatchThreadId : SV_DispatchThreadID)
_Result1[dispatchThreadId] = LOAD_TEXTURE2D(_Source4, dispatchThreadId).x;
_Result1[_RectOffset + dispatchThreadId] = LOAD_TEXTURE2D(_Source4, _RectOffset + dispatchThreadId).x;
}

ScriptableRenderPipeline/Core/CoreRP/CoreResources/GPUCopy.cs (81 lines changed)


public class GPUCopy
{
ComputeShader m_Shader;
int k_SampleKernel_xyzw2x;
int k_SampleKernel_xyzw2x_8;
int k_SampleKernel_xyzw2x_1;
k_SampleKernel_xyzw2x = m_Shader.FindKernel("KSampleCopy4_1_x");
k_SampleKernel_xyzw2x_8 = m_Shader.FindKernel("KSampleCopy4_1_x_8");
k_SampleKernel_xyzw2x_1 = m_Shader.FindKernel("KSampleCopy4_1_x_1");
static readonly int _RectOffset = Shader.PropertyToID("_RectOffset");
public void SampleCopyChannel_xyzw2x(CommandBuffer cmd, RenderTargetIdentifier source, RenderTargetIdentifier target, Vector2 size)
void SampleCopyChannel(
CommandBuffer cmd,
RectInt rect,
int _source,
RenderTargetIdentifier source,
int _target,
RenderTargetIdentifier target,
int kernel8,
int kernel1)
{
RectInt main, topRow, rightCol, topRight;
unsafe
cmd.SetComputeTextureParam(m_Shader, k_SampleKernel_xyzw2x, _Source4, source);
cmd.SetComputeTextureParam(m_Shader, k_SampleKernel_xyzw2x, _Result1, target);
cmd.DispatchCompute(m_Shader, k_SampleKernel_xyzw2x, (int)Mathf.Max((size.x) / 8, 1), (int)Mathf.Max((size.y) / 8, 1), 1);
RectInt* dispatch1Rects = stackalloc RectInt[3];
int dispatch1RectCount = 0;
RectInt dispatch8Rect = RectInt.zero;
if (TileLayoutUtils.TryLayoutByTiles(
rect,
8,
out main,
out topRow,
out rightCol,
out topRight))
{
if (topRow.width > 0 && topRow.height > 0)
{
dispatch1Rects[dispatch1RectCount] = topRow;
++dispatch1RectCount;
}
if (rightCol.width > 0 && rightCol.height > 0)
{
dispatch1Rects[dispatch1RectCount] = rightCol;
++dispatch1RectCount;
}
if (topRight.width > 0 && topRight.height > 0)
{
dispatch1Rects[dispatch1RectCount] = topRight;
++dispatch1RectCount;
}
dispatch8Rect = main;
}
else if (rect.width > 0 && rect.height > 0)
{
dispatch1Rects[dispatch1RectCount] = rect;
++dispatch1RectCount;
}
cmd.SetComputeTextureParam(m_Shader, kernel8, _source, source);
cmd.SetComputeTextureParam(m_Shader, kernel1, _source, source);
cmd.SetComputeTextureParam(m_Shader, kernel8, _target, target);
cmd.SetComputeTextureParam(m_Shader, kernel1, _target, target);
if (dispatch8Rect.width > 0 && dispatch8Rect.height > 0)
{
var r = dispatch8Rect;
cmd.SetComputeIntParams(m_Shader, _RectOffset, (int)r.x, (int)r.y);
cmd.DispatchCompute(m_Shader, kernel8, (int)Mathf.Max(r.width / 8, 1), (int)Mathf.Max(r.height / 8, 1), 1);
}
for (int i = 0, c = dispatch1RectCount; i < c; ++i)
{
var r = dispatch1Rects[i];
cmd.SetComputeIntParams(m_Shader, _RectOffset, (int)r.x, (int)r.y);
cmd.DispatchCompute(m_Shader, kernel1, (int)Mathf.Max(r.width, 1), (int)Mathf.Max(r.height, 1), 1);
}
}
public void SampleCopyChannel_xyzw2x(CommandBuffer cmd, RenderTargetIdentifier source, RenderTargetIdentifier target, RectInt rect)
{
SampleCopyChannel(cmd, rect, _Source4, source, _Result1, target, k_SampleKernel_xyzw2x_8, k_SampleKernel_xyzw2x_1);
}
}
}
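The new RectInt overload replaces the old Vector2 size entry point so a copy can be restricted to the camera's viewport inside a larger RTHandle. Below is a minimal usage sketch; only SampleCopyChannel_xyzw2x and its RectInt signature come from the diff above, while the caller, the resource identifiers, and the namespace import are assumptions for illustration.

using UnityEngine;
using UnityEngine.Rendering;
using UnityEngine.Experimental.Rendering; // assumed namespace for GPUCopy

// Hypothetical caller: copies the .x channel of an RGBA source into a single-channel
// target, limited to the viewport rect actually rendered by the camera.
public static class GPUCopyUsageSketch
{
    public static void CopyViewportChannel(
        GPUCopy gpuCopy,                     // constructed from the GPUCopy.compute asset
        CommandBuffer cmd,
        RenderTargetIdentifier source,       // e.g. a packed RT whose .x holds the value to copy
        RenderTargetIdentifier target,       // single-channel destination
        int viewportWidth, int viewportHeight)
    {
        // Internally this dispatches the 8x8 kernel over the tile-aligned interior
        // and the 1x1 kernel over the leftover border strips (see TryLayoutByTiles above).
        var rect = new RectInt(0, 0, viewportWidth, viewportHeight);
        gpuCopy.SampleCopyChannel_xyzw2x(cmd, source, target, rect);
    }
}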

ScriptableRenderPipeline/Core/CoreRP/CoreResources/GPUCopyAsset.cs (109 lines changed)


}
ccp.AppendLine();
ccp.AppendLine("CBUFFER_START (UnityCBuffer)");
ccp.AppendLine(" uint2 _RectOffset;");
ccp.AppendLine("CBUFFER_END");
ccp.AppendLine();
csm.AppendLine(" static readonly int _RectOffset = Shader.PropertyToID(\"_RectOffset\");");
ccp.AppendLine();
ccp.AppendLine();
csm.AppendLine(@" void SampleCopyChannel(
CommandBuffer cmd,
RectInt rect,
int _source,
RenderTargetIdentifier source,
int _target,
RenderTargetIdentifier target,
int kernel8,
int kernel1)
{
RectInt main, topRow, rightCol, topRight;
unsafe
{
RectInt* dispatch1Rects = stackalloc RectInt[3];
int dispatch1RectCount = 0;
RectInt dispatch8Rect = RectInt.zero;
if (TileLayoutUtils.TryLayoutByTiles(
rect,
8,
out main,
out topRow,
out rightCol,
out topRight))
{
if (topRow.width > 0 && topRow.height > 0)
{
dispatch1Rects[dispatch1RectCount] = topRow;
++dispatch1RectCount;
}
if (rightCol.width > 0 && rightCol.height > 0)
{
dispatch1Rects[dispatch1RectCount] = rightCol;
++dispatch1RectCount;
}
if (topRight.width > 0 && topRight.height > 0)
{
dispatch1Rects[dispatch1RectCount] = topRight;
++dispatch1RectCount;
}
dispatch8Rect = main;
}
else if (rect.width > 0 && rect.height > 0)
{
dispatch1Rects[dispatch1RectCount] = rect;
++dispatch1RectCount;
}
cmd.SetComputeTextureParam(m_Shader, kernel8, _source, source);
cmd.SetComputeTextureParam(m_Shader, kernel1, _source, source);
cmd.SetComputeTextureParam(m_Shader, kernel8, _target, target);
cmd.SetComputeTextureParam(m_Shader, kernel1, _target, target);
if (dispatch8Rect.width > 0 && dispatch8Rect.height > 0)
{
var r = dispatch8Rect;
cmd.SetComputeIntParams(m_Shader, _RectOffset, (int)r.x, (int)r.y);
cmd.DispatchCompute(m_Shader, kernel8, (int)Mathf.Max(r.width / 8, 1), (int)Mathf.Max(r.height / 8, 1), 1);
}
for (int i = 0, c = dispatch1RectCount; i < c; ++i)
{
var r = dispatch1Rects[i];
cmd.SetComputeIntParams(m_Shader, _RectOffset, (int)r.x, (int)r.y);
cmd.DispatchCompute(m_Shader, kernel1, (int)Mathf.Max(r.width, 1), (int)Mathf.Max(r.height, 1), 1);
}
}
}");
csc.AppendLine(" public GPUCopy(ComputeShader shader)");
csc.AppendLine(" {");

var o = operations[i];
// Compute kernel
var kernelName = string.Format("KSampleCopy{0}_{1}_{2}", o.sourceChannel.ToString(), o.targetChannel.ToString(), o.subscript);
cck.AppendLine(string.Format("#pragma kernel {0}", kernelName));
cck.AppendLine(string.Format(@"[numthreads({0}, {0}, 1)]",
k_KernelSize.ToString(), k_KernelSize.ToString()));
cck.AppendLine(string.Format(@"void {0}(uint2 dispatchThreadId : SV_DispatchThreadID)", kernelName));
var kernelName8 = string.Format("KSampleCopy{0}_{1}_{2}_8", o.sourceChannel.ToString(), o.targetChannel.ToString(), o.subscript);
var kernelName1 = string.Format("KSampleCopy{0}_{1}_{2}_1", o.sourceChannel.ToString(), o.targetChannel.ToString(), o.subscript);
cck.AppendLine(string.Format("#pragma kernel {0} KERNEL_NAME={0} KERNEL_SIZE=8", kernelName8));
cck.AppendLine(string.Format("#pragma kernel {0} KERNEL_NAME={0} KERNEL_SIZE=1", kernelName1));
cck.AppendLine(@"[numthreads(KERNEL_SIZE, KERNEL_SIZE, 1)]");
cck.AppendLine(@"void KERNEL_NAME(uint2 dispatchThreadId : SV_DispatchThreadID)");
cck.AppendLine(string.Format(" _Result{0}[dispatchThreadId] = LOAD_TEXTURE2D(_Source{1}, dispatchThreadId).{2};",
cck.AppendLine(string.Format(" _Result{0}[_RectOffset + dispatchThreadId] = LOAD_TEXTURE2D(_Source{1}, _RectOffset + dispatchThreadId).{2};",
o.targetChannel.ToString(), o.sourceChannel.ToString(), o.subscript));
cck.AppendLine("}");
cck.AppendLine();

var kernelIndexName = string.Format("k_SampleKernel_{0}2{1}", channelName, o.subscript);
csp.AppendLine(string.Format(" int {0};", kernelIndexName));
var kernelIndexName8 = string.Format("k_SampleKernel_{0}2{1}_8", channelName, o.subscript);
var kernelIndexName1 = string.Format("k_SampleKernel_{0}2{1}_1", channelName, o.subscript);
csp.AppendLine(string.Format(" int {0};", kernelIndexName8));
csp.AppendLine(string.Format(" int {0};", kernelIndexName1));
csc.AppendLine(string.Format(" {0} = m_Shader.FindKernel(\"{1}\");", kernelIndexName, kernelName));
csc.AppendLine(string.Format(" {0} = m_Shader.FindKernel(\"{1}\");", kernelIndexName8, kernelName8));
csc.AppendLine(string.Format(" {0} = m_Shader.FindKernel(\"{1}\");", kernelIndexName1, kernelName1));
csm.AppendLine(string.Format(@" public void SampleCopyChannel_{0}2{1}(CommandBuffer cmd, RenderTargetIdentifier source, RenderTargetIdentifier target, Vector2 size)", channelName, o.subscript));
csm.AppendLine(" {");
csm.AppendLine(string.Format(" cmd.SetComputeTextureParam(m_Shader, {0}, _Source{1}, source);", kernelIndexName, o.sourceChannel.ToString()));
csm.AppendLine(string.Format(" cmd.SetComputeTextureParam(m_Shader, {0}, _Result{1}, target);", kernelIndexName, o.targetChannel.ToString()));
csm.AppendLine(string.Format(" cmd.DispatchCompute(m_Shader, {0}, (int)Mathf.Max((size.x) / {1}, 1), (int)Mathf.Max((size.y) / {1}, 1), 1);", kernelIndexName, k_KernelSize.ToString()));
csm.AppendLine(" }");
csm.AppendLine(string.Format(@" public void SampleCopyChannel_{0}2{1}(CommandBuffer cmd, RenderTargetIdentifier source, RenderTargetIdentifier target, RectInt rect)", channelName, o.subscript));
csm.AppendLine (" {");
csm.AppendLine(string.Format(" SampleCopyChannel(cmd, rect, _Source{0}, source, _Result{1}, target, {2}, {3});", o.sourceChannel.ToString(), o.targetChannel.ToString(), kernelIndexName8, kernelIndexName1));
csm.AppendLine (" }");
}
csc.AppendLine(" }");

cc.AppendLine("#pragma only_renderers d3d11 ps4 xboxone vulkan metal");
cc.AppendLine(@"#include ""../ShaderLibrary/Common.hlsl""");
cc.AppendLine(ccp.ToString()); // Properties
cc.AppendLine(cck.ToString()); // Kernels

ScriptableRenderPipeline/Core/CoreRP/Debugging/DebugUI.Panel.cs (1 line changed)


public bool isEditorOnly { get { return (flags & Flags.EditorOnly) != 0; } }
public bool isRuntimeOnly { get { return (flags & Flags.RuntimeOnly) != 0; } }
public bool editorForceUpdate { get { return (flags & Flags.EditorForceUpdate) != 0; } }
public ObservableList<Widget> children { get; private set; }
public event Action<Panel> onSetDirty = delegate { };

ScriptableRenderPipeline/Core/CoreRP/Debugging/DebugUI.cs (3 lines changed)


{
None = 0,
EditorOnly = 1 << 1,
RuntimeOnly = 1 << 2
RuntimeOnly = 1 << 2,
EditorForceUpdate = 1 << 3
}
// Base class for all debug UI widgets
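The new EditorForceUpdate flag lets a panel ask the debug window to keep repainting while it is selected; the screen-space tracing panel registered later in this commit uses it. A small sketch of opting a panel in follows. The panel name is hypothetical and the namespace import is an assumption; DebugManager.instance.GetPanel and the flag itself are taken from this diff.

using UnityEngine.Experimental.Rendering; // assumed namespace for DebugManager/DebugUI

public static class ForceUpdatePanelSketch
{
    public static void Register()
    {
        // Create (or fetch) the panel, then mark it so DebugWindow subscribes
        // itself to EditorApplication.update while this panel is selected.
        var panel = DebugManager.instance.GetPanel("My Tracing Panel", true);
        panel.flags |= DebugUI.Flags.EditorForceUpdate;
    }
}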

ScriptableRenderPipeline/Core/CoreRP/Editor/Debugging/DebugWindow.cs (31 lines changed)


if (m_Settings == null)
m_Settings = CreateInstance<DebugWindowSettings>();
if (m_WidgetStates == null)
// States are ScriptableObjects (necessary for Undo/Redo) but are not saved on disk so when the editor is closed then reopened, any existing debug window will have its states set to null
// Since we don't care about persistence in this case, we just re-init everything.
if (m_WidgetStates == null || !AreWidgetStatesValid())
m_WidgetStates = new WidgetStateDictionary();
if (s_WidgetStateMap == null || s_WidgetDrawerMap == null || s_TypeMapDirty)

// First init
m_DebugTreeState = DebugManager.instance.GetState();
UpdateWidgetStates();
EditorApplication.update -= Repaint;
var panels = DebugManager.instance.panels;
var selectedPanelIndex = m_Settings.selectedPanel;
if (selectedPanelIndex >= 0
&& selectedPanelIndex < panels.Count
&& panels[selectedPanelIndex].editorForceUpdate)
EditorApplication.update += Repaint;
}
// Note: this won't get called if the window is opened when the editor itself is closed

m_WidgetStates.Clear();
}
}
bool AreWidgetStatesValid()
{
foreach (var state in m_WidgetStates)
{
if(state.Value == null)
{
return false;
}
}
return true;
}
void MarkDirty()

if (EditorGUI.EndChangeCheck())
{
Undo.RegisterCompleteObjectUndo(m_Settings, "Debug Panel Selection");
var previousPanel = m_Settings.selectedPanel >= 0 && m_Settings.selectedPanel < panels.Count
? panels[m_Settings.selectedPanel]
: null;
if (previousPanel != null && previousPanel.editorForceUpdate && !panel.editorForceUpdate)
EditorApplication.update -= Repaint;
else if ((previousPanel == null || !previousPanel.editorForceUpdate) && panel.editorForceUpdate)
EditorApplication.update += Repaint;
m_Settings.selectedPanel = i;
}
}

ScriptableRenderPipeline/Core/CoreRP/Editor/ShaderGenerator/CSharpToHLSL.cs (5 lines changed)


s_TypeName = new Dictionary<string, ShaderTypeGenerator>();
// Iterate over assemblyList, discover all applicable types with fully qualified names
var assemblyList = AppDomain.CurrentDomain.GetAssemblies().ToList();
var assemblyList = AppDomain.CurrentDomain.GetAssemblies()
// We need to exclude dynamic assemblies (their type can't be queried, throwing an exception below)
.Where(ass => !(ass.ManifestModule is System.Reflection.Emit.ModuleBuilder))
.ToList();
foreach (var assembly in assemblyList)
{
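The fix above skips dynamic assemblies because reflecting over their types throws. A standalone sketch of the same filter follows (the class name and console output are illustrative only).

using System;
using System.Linq;
using System.Reflection.Emit;

public static class AssemblyScanSketch
{
    public static void Main()
    {
        // Dynamic assemblies expose a ModuleBuilder as their manifest module;
        // excluding them avoids the exception thrown when querying their types.
        var assemblies = AppDomain.CurrentDomain.GetAssemblies()
            .Where(a => !(a.ManifestModule is ModuleBuilder))
            .ToList();

        foreach (var assembly in assemblies)
            Console.WriteLine(assembly.FullName);
    }
}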

ScriptableRenderPipeline/Core/CoreRP/ShaderLibrary/Common.hlsl (15 lines changed)


#define real3x3 half3x3
#define real3x4 half3x4
#define real4x3 half4x3
#define real4x4 half4x4
#define half min16float
#define half2 min16float2
#define half3 min16float3
#define half4 min16float4
#define half2x2 min16float2x2
#define half2x3 min16float2x3
#define half3x2 min16float3x2
#define half3x3 min16float3x3
#define half3x4 min16float3x4
#define half4x3 min16float4x3
#define half4x4 min16float4x4
#define REAL_MIN HALF_MIN
#define REAL_MAX HALF_MAX

ScriptableRenderPipeline/Core/CoreRP/ShaderLibrary/EntityLighting.hlsl (2 lines changed)


#ifndef UNITY_ENTITY_LIGHTING_INCLUDED
#define UNITY_ENTITY_LIGHTING_INCLUDED
#include "common.hlsl"
#include "Common.hlsl"
// TODO: Check if PI is correctly handled!

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Camera/HDAdditionalCameraData.cs (15 lines changed)


// Tag as dirty so frameSettings are correctly initialized at the next HDRenderPipeline.Render() call
m_frameSettingsIsDirty = true;
}
// This is called at the creation of the HD Additional Camera Data, to convert the legacy camera settings to HD
public static void InitDefaultHDAdditionalCameraData(HDAdditionalCameraData cameraData)
{
var camera = cameraData.gameObject.GetComponent<Camera>();
cameraData.clearDepth = camera.clearFlags != CameraClearFlags.Nothing;
if (camera.clearFlags == CameraClearFlags.Skybox)
cameraData.clearColorMode = ClearColorMode.Sky;
else if (camera.clearFlags == CameraClearFlags.SolidColor)
cameraData.clearColorMode = ClearColorMode.BackgroundColor;
else // None
cameraData.clearColorMode = ClearColorMode.None;
}
}
}

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Debug/DebugColorPicker.shader (3 lines changed)


}
float4 mouseResult = SAMPLE_TEXTURE2D(_DebugColorPickerTexture, sampler_DebugColorPickerTexture, mousePixelCoord.zw);
// Reverse debug exposure in order to display the real values.
// _DebugExposure will be set to zero if the debug view does not need it so we don't need to make a special case here. It's handled in only one place in C#
mouseResult = mouseResult / exp2(_DebugExposure);
float4 finalResult = DisplayPixelInformationAtMousePosition(input, result, mouseResult, mousePixelCoord);
return finalResult;
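For reference, the exposure convention used here and in the other debug shaders of this commit: debug views scale their output by exp2(_DebugExposure), and the color picker divides by the same factor to print the original, pre-exposure value. A small C# sketch of that pair (helper and class names are illustrative, not part of the commit):

using UnityEngine;

public static class DebugExposureSketch
{
    // Matches "* exp2(_DebugExposure)" in DebugDisplayLatlong / DebugViewMaterialGBuffer below.
    public static float ApplyDebugExposure(float value, float debugExposure)
    {
        return value * Mathf.Pow(2f, debugExposure);
    }

    // Matches "/ exp2(_DebugExposure)" in DebugColorPicker above: recovers the
    // original value so the picker can display what was actually rendered.
    public static float RemoveDebugExposure(float displayed, float debugExposure)
    {
        return displayed / Mathf.Pow(2f, debugExposure);
    }
}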

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Debug/DebugDisplay.cs (295 lines changed)


using System;
using System.Collections.Generic;
using System.Linq;
using UnityEditor;
using UnityEngine.Experimental.Rendering.HDPipeline.Attributes;

PreRefractionColorPyramid,
DepthPyramid,
FinalColorPyramid,
ScreenSpaceTracing,
MaxLightingFullScreenDebug,
// Rendering

MaxRenderingFullScreenDebug
}
[GenerateHLSL]
public struct ScreenSpaceTracingDebug
{
// Used to debug SSRay model
// 1x32 bits
public Lit.RefractionSSRayModel tracingModel;
// 6x32 bits
public uint loopStartPositionSSX; // Proxy, HiZ
public uint loopStartPositionSSY; // Proxy, HiZ
public float loopStartLinearDepth; // Proxy, HiZ
public Vector3 loopRayDirectionSS; // HiZ
public uint loopMipLevelMax; // HiZ
public uint loopIterationMax; // HiZ
// 9x32 bits
public Vector3 iterationPositionSS; // HiZ
public uint iterationMipLevel; // HiZ
public uint iteration; // HiZ
public float iterationLinearDepthBuffer; // HiZ
public Lit.HiZIntersectionKind iterationIntersectionKind; // HiZ
public uint iterationCellSizeW; // HiZ
public uint iterationCellSizeH; // HiZ
public EnvShapeType proxyShapeType; // Proxy
public float projectionDistance; // Proxy
// 4x32 bits
public bool endHitSuccess; // Proxy, HiZ
public float endLinearDepth; // Proxy, HiZ
public uint endPositionSSX; // Proxy, HiZ
public uint endPositionSSY; // Proxy, HiZ
// 0x32 bits (padding)
public Vector2 loopStartPositionSS { get { return new Vector2(loopStartPositionSSX, loopStartPositionSSY); } }
public Vector2 endPositionSS { get { return new Vector2(endPositionSSX, endPositionSSY); } }
public Vector2 iterationCellId { get { return new Vector2(((int)iterationPositionSS.x) >> (int)iterationMipLevel, ((int)iterationPositionSS.y) >> (int)iterationMipLevel); } }
public Vector2 iterationCellSize { get { return new Vector2(iterationCellSizeW, iterationCellSizeH); } }
}
public class DebugDisplaySettings
{
public static string k_PanelDisplayStats = "Display Stats";

public static string k_PanelScreenSpaceTracing = "Screen Space Tracing";
static readonly string[] k_HiZIntersectionKind = { "None", "Depth", "Cell" };
DebugUI.Widget[] m_DebugScreenSpaceTracingItems;
public bool showSSRayGrid = true;
public bool showSSRayDepthPyramid = true;
public MaterialDebugSettings materialDebugSettings = new MaterialDebugSettings();
public LightingDebugSettings lightingDebugSettings = new LightingDebugSettings();

public static int[] lightingFullScreenDebugValues = null;
public static GUIContent[] renderingFullScreenDebugStrings = null;
public static int[] renderingFullScreenDebugValues = null;
public static GUIContent[] debugScreenSpaceTracingProxyStrings = null;
public static int[] debugScreenSpaceTracingProxyValues = null;
public static GUIContent[] debugScreenSpaceTracingHiZStrings = null;
public static int[] debugScreenSpaceTracingHiZValues = null;
Lit.RefractionSSRayModel m_LastSSRayModel = Lit.RefractionSSRayModel.None;
ScreenSpaceTracingDebug m_ScreenSpaceTracingDebugData;
public ScreenSpaceTracingDebug screenSpaceTracingDebugData
{
get { return m_ScreenSpaceTracingDebugData; }
internal set
{
m_ScreenSpaceTracingDebugData = value;
if (m_LastSSRayModel != m_ScreenSpaceTracingDebugData.tracingModel)
{
m_LastSSRayModel = m_ScreenSpaceTracingDebugData.tracingModel;
RefreshScreenSpaceTracingDebug<Lit.RefractionSSRayModel>(null, m_LastSSRayModel);
}
if (m_ScreenSpaceTracingDebugData.tracingModel != Lit.RefractionSSRayModel.HiZ)
{
showSSRayDepthPyramid = false;
showSSRayGrid = false;
}
}
}
var debugScreenSpaceTracingStrings = Enum.GetNames(typeof(DebugScreenSpaceTracing))
.Select(s => new GUIContent(s))
.ToArray();
var debugScreenSpaceTracingValues = (int[])Enum.GetValues(typeof(DebugScreenSpaceTracing));
var debugScreenSpaceTracingHiZStringsList = new List<GUIContent>();
var debugScreenSpaceTracingProxyStringsList = new List<GUIContent>();
var debugScreenSpaceTracingHiZValueList = new List<int>();
var debugScreenSpaceTracingProxyValueList = new List<int>();
for (int i = 0, c = debugScreenSpaceTracingStrings.Length; i < c; ++i)
{
var g = debugScreenSpaceTracingStrings[i];
var v = debugScreenSpaceTracingValues[i];
if (!g.text.StartsWith("Proxy"))
{
debugScreenSpaceTracingHiZStringsList.Add(g);
debugScreenSpaceTracingHiZValueList.Add(v);
}
if (!g.text.StartsWith("HiZ"))
{
debugScreenSpaceTracingProxyStringsList.Add(g);
debugScreenSpaceTracingProxyValueList.Add(v);
}
}
debugScreenSpaceTracingHiZStrings = debugScreenSpaceTracingHiZStringsList.ToArray();
debugScreenSpaceTracingHiZValues = debugScreenSpaceTracingHiZValueList.ToArray();
debugScreenSpaceTracingProxyStrings = debugScreenSpaceTracingProxyStringsList.ToArray();
debugScreenSpaceTracingProxyValues = debugScreenSpaceTracingProxyValueList.ToArray();
}
public int GetDebugMaterialIndex()

return lightingDebugSettings.debugLightingMode;
}
public int GetDebugLightingSubMode()
{
switch (lightingDebugSettings.debugLightingMode)
{
case DebugLightingMode.ScreenSpaceTracingRefraction:
case DebugLightingMode.ScreenSpaceTracingReflection:
return (int)lightingDebugSettings.debugScreenSpaceTracingMode;
default:
return 0;
}
}
public DebugMipMapMode GetDebugMipMapMode()
{
return mipMapDebugSettings.debugMipMapMode;

{
return materialDebugSettings.IsDebugDisplayEnabled() || lightingDebugSettings.IsDebugDisplayEnabled() || mipMapDebugSettings.IsDebugDisplayEnabled();
return materialDebugSettings.IsDebugDisplayEnabled() || lightingDebugSettings.IsDebugDisplayEnabled() || mipMapDebugSettings.IsDebugDisplayEnabled() || IsDebugFullScreenEnabled();
}
public bool IsDebugMaterialDisplayEnabled()

public bool IsDebugFullScreenEnabled()
{
return fullScreenDebugMode != FullScreenDebugMode.None;
}
public bool IsDebugMipMapDisplayEnabled()
{
return mipMapDebugSettings.IsDebugDisplayEnabled();

mipMapDebugSettings.debugMipMapMode = value;
}
bool IsScreenSpaceTracingRefractionDebugEnabled()
{
return fullScreenDebugMode == FullScreenDebugMode.ScreenSpaceTracing
&& lightingDebugSettings.debugLightingMode == DebugLightingMode.ScreenSpaceTracingRefraction;
}
void SetScreenSpaceTracingRefractionDebugEnabled(bool value)
{
if (value)
{
lightingDebugSettings.debugLightingMode = DebugLightingMode.ScreenSpaceTracingRefraction;
fullScreenDebugMode = FullScreenDebugMode.ScreenSpaceTracing;
}
else
{
lightingDebugSettings.debugLightingMode = DebugLightingMode.None;
fullScreenDebugMode = FullScreenDebugMode.None;
}
}
void SetScreenSpaceTracingRefractionDebugMode(int value)
{
var val = (DebugScreenSpaceTracing)value;
if (val != DebugScreenSpaceTracing.None)
{
lightingDebugSettings.debugLightingMode = DebugLightingMode.ScreenSpaceTracingRefraction;
lightingDebugSettings.debugScreenSpaceTracingMode = (DebugScreenSpaceTracing)value;
}
else
{
lightingDebugSettings.debugLightingMode = DebugLightingMode.None;
lightingDebugSettings.debugScreenSpaceTracingMode = DebugScreenSpaceTracing.None;
}
}
public void UpdateMaterials()
{
//if (mipMapDebugSettings.debugMipMapMode != 0)

public bool DebugNeedsExposure()
{
DebugLightingMode debugLighting = lightingDebugSettings.debugLightingMode;
DebugViewGbuffer debugGBuffer = (DebugViewGbuffer)materialDebugSettings.debugViewGBuffer;
return (debugLighting == DebugLightingMode.DiffuseLighting || debugLighting == DebugLightingMode.SpecularLighting) ||
(debugGBuffer == DebugViewGbuffer.BakeDiffuseLightingWithAlbedoPlusEmissive) ||
(fullScreenDebugMode == FullScreenDebugMode.PreRefractionColorPyramid || fullScreenDebugMode == FullScreenDebugMode.FinalColorPyramid);
}
void RegisterDisplayStatsDebug()
{
m_DebugDisplayStatsItems = new DebugUI.Widget[]

panel.children.Add(m_DebugDisplayStatsItems);
}
void RegisterScreenSpaceTracingDebug()
{
var list = new List<DebugUI.Container>();
var refractionContainer = new DebugUI.Container
{
displayName = "Refraction",
children =
{
new DebugUI.BoolField { displayName = "Debug Enabled", getter = IsScreenSpaceTracingRefractionDebugEnabled, setter = SetScreenSpaceTracingRefractionDebugEnabled, onValueChanged = RefreshScreenSpaceTracingDebug },
}
};
list.Add(refractionContainer);
if (IsScreenSpaceTracingRefractionDebugEnabled())
{
var debugSettingsContainer = new DebugUI.Container
{
displayName = "Debug Settings",
children =
{
new DebugUI.Value { displayName = string.Empty, getter = () => "Click in the scene view, or press 'End' key to select the pixel under the mouse in the scene view to debug." },
new DebugUI.Value { displayName = "SSRay Model", getter = () => screenSpaceTracingDebugData.tracingModel }
}
};
refractionContainer.children.Add(debugSettingsContainer);
switch (screenSpaceTracingDebugData.tracingModel)
{
case Lit.RefractionSSRayModel.Proxy:
{
debugSettingsContainer.children.Add(
new DebugUI.EnumField { displayName = "Debug Mode", getter = GetDebugLightingSubMode, setter = SetScreenSpaceTracingRefractionDebugMode, enumNames = debugScreenSpaceTracingProxyStrings, enumValues = debugScreenSpaceTracingProxyValues, onValueChanged = RefreshScreenSpaceTracingDebug }
);
refractionContainer.children.Add(
new DebugUI.Container
{
displayName = "Debug Values",
children =
{
new DebugUI.Value { displayName = "Hit Success", getter = () => screenSpaceTracingDebugData.endHitSuccess },
new DebugUI.Value { displayName = "Proxy Shape", getter = () => screenSpaceTracingDebugData.proxyShapeType },
new DebugUI.Value { displayName = "Projection Distance", getter = () => screenSpaceTracingDebugData.projectionDistance },
new DebugUI.Value { displayName = "Start Position", getter = () => screenSpaceTracingDebugData.loopStartPositionSS },
new DebugUI.Value { displayName = "Start Linear Depth", getter = () => screenSpaceTracingDebugData.loopStartLinearDepth },
new DebugUI.Value { displayName = "End Linear Depth", getter = () => screenSpaceTracingDebugData.endLinearDepth },
new DebugUI.Value { displayName = "End Position", getter = () => screenSpaceTracingDebugData.endPositionSS },
}
}
);
break;
}
case Lit.RefractionSSRayModel.HiZ:
{
debugSettingsContainer.children.Insert(1, new DebugUI.Value { displayName = string.Empty, getter = () => "Press PageUp/PageDown to Increase/Decrease the HiZ step." });
debugSettingsContainer.children.Add(
new DebugUI.EnumField { displayName = "Debug Mode", getter = GetDebugLightingSubMode, setter = SetScreenSpaceTracingRefractionDebugMode, enumNames = debugScreenSpaceTracingHiZStrings, enumValues = debugScreenSpaceTracingHiZValues, onValueChanged = RefreshScreenSpaceTracingDebug },
new DebugUI.BoolField { displayName = "Display Grid", getter = () => showSSRayGrid, setter = v => showSSRayGrid = v },
new DebugUI.BoolField { displayName = "Display Depth", getter = () => showSSRayDepthPyramid, setter = v => showSSRayDepthPyramid = v }
);
refractionContainer.children.Add(
new DebugUI.Container
{
displayName = "Debug Values (loop)",
children =
{
new DebugUI.Value { displayName = "Hit Success", getter = () => screenSpaceTracingDebugData.endHitSuccess },
new DebugUI.Value { displayName = "Start Position", getter = () => screenSpaceTracingDebugData.loopStartPositionSS },
new DebugUI.Value { displayName = "Start Linear Depth", getter = () => screenSpaceTracingDebugData.loopStartLinearDepth },
new DebugUI.Value { displayName = "Ray Direction SS", getter = () => new Vector2(screenSpaceTracingDebugData.loopRayDirectionSS.x, screenSpaceTracingDebugData.loopRayDirectionSS.y) },
new DebugUI.Value { displayName = "Ray Depth", getter = () => 1f / screenSpaceTracingDebugData.loopRayDirectionSS.z },
new DebugUI.Value { displayName = "End Position", getter = () => screenSpaceTracingDebugData.endPositionSS },
new DebugUI.Value { displayName = "End Linear Depth", getter = () => screenSpaceTracingDebugData.endLinearDepth },
}
},
new DebugUI.Container
{
displayName = "Debug Values (iteration)",
children =
{
new DebugUI.Value { displayName = "Iteration", getter = () => string.Format("{0}/{1}", screenSpaceTracingDebugData.iteration, screenSpaceTracingDebugData.loopIterationMax) },
new DebugUI.Value { displayName = "Position SS", getter = () => new Vector2(screenSpaceTracingDebugData.iterationPositionSS.x, screenSpaceTracingDebugData.iterationPositionSS.y) },
new DebugUI.Value { displayName = "Depth", getter = () => 1f / screenSpaceTracingDebugData.iterationPositionSS.z },
new DebugUI.Value { displayName = "Depth Buffer", getter = () => screenSpaceTracingDebugData.iterationLinearDepthBuffer },
new DebugUI.Value { displayName = "Mip Level", getter = () => string.Format("{0}/{1}", screenSpaceTracingDebugData.iterationMipLevel, screenSpaceTracingDebugData.loopMipLevelMax) },
new DebugUI.Value { displayName = "Intersection kind", getter = () => screenSpaceTracingDebugData.iterationIntersectionKind },
new DebugUI.Value { displayName = "Cell Id", getter = () => screenSpaceTracingDebugData.iterationCellId },
new DebugUI.Value { displayName = "Cell Size", getter = () => screenSpaceTracingDebugData.iterationCellSize },
}
}
);
break;
}
}
}
m_DebugScreenSpaceTracingItems = list.ToArray();
var panel = DebugManager.instance.GetPanel(k_PanelScreenSpaceTracing, true);
panel.flags |= DebugUI.Flags.EditorForceUpdate;
panel.children.Add(m_DebugScreenSpaceTracingItems);
}
public void RegisterMaterialDebug()
{
m_DebugMaterialItems = new DebugUI.Widget[]

RegisterLightingDebug();
}
void RefreshScreenSpaceTracingDebug<T>(DebugUI.Field<T> field, T value)
{
UnregisterDebugItems(k_PanelScreenSpaceTracing, m_DebugScreenSpaceTracingItems);
RegisterScreenSpaceTracingDebug();
}
public void RegisterLightingDebug()
{
var list = new List<DebugUI.Widget>();

{
case FullScreenDebugMode.FinalColorPyramid:
case FullScreenDebugMode.PreRefractionColorPyramid:
id = HDShaderIDs._GaussianPyramidColorMipSize;
id = HDShaderIDs._ColorPyramidScale;
id = HDShaderIDs._DepthPyramidMipSize;
id = HDShaderIDs._DepthPyramidScale;
break;
}
var size = Shader.GetGlobalVector(id);

{
case FullScreenDebugMode.FinalColorPyramid:
case FullScreenDebugMode.PreRefractionColorPyramid:
id = HDShaderIDs._GaussianPyramidColorMipSize;
id = HDShaderIDs._ColorPyramidScale;
id = HDShaderIDs._DepthPyramidMipSize;
id = HDShaderIDs._DepthPyramidScale;
break;
}
var size = Shader.GetGlobalVector(id);

{
case FullScreenDebugMode.FinalColorPyramid:
case FullScreenDebugMode.PreRefractionColorPyramid:
id = HDShaderIDs._GaussianPyramidColorMipSize;
id = HDShaderIDs._ColorPyramidScale;
id = HDShaderIDs._DepthPyramidMipSize;
id = HDShaderIDs._DepthPyramidScale;
break;
}
var size = Shader.GetGlobalVector(id);

});
}
if (DebugNeedsExposure())
list.Add(new DebugUI.FloatField { displayName = "Debug Exposure", getter = () => lightingDebugSettings.debugExposure, setter = value => lightingDebugSettings.debugExposure = value });
m_DebugLightingItems = list.ToArray();
var panel = DebugManager.instance.GetPanel(k_PanelLighting, true);
panel.children.Add(m_DebugLightingItems);

RegisterMaterialDebug();
RegisterLightingDebug();
RegisterRenderingDebug();
RegisterScreenSpaceTracingDebug();
}
public void UnregisterDebug()

UnregisterDebugItems(k_PanelLighting, m_DebugLightingItems);
UnregisterDebugItems(k_PanelRendering, m_DebugRenderingItems);
UnregisterDebugItems(k_PanelScreenSpaceTracing, m_DebugScreenSpaceTracingItems);
}
void UnregisterDebugItems(string panelName, DebugUI.Widget[] items)
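To round out this file, here is a consumer-side sketch of the DebugUI registration pattern used throughout it: describe widgets as data, fetch a panel from DebugManager, and add the widgets as children. The panel name, widget names, and namespace import are hypothetical; the types and calls (DebugUI.Container, DebugUI.Value, DebugUI.BoolField, GetPanel, children.Add) all appear in the diff above.

using UnityEngine.Experimental.Rendering; // assumed namespace for DebugManager/DebugUI

public class MyDebugPanelSketch
{
    bool m_Enabled;
    DebugUI.Widget[] m_Items;

    public void Register()
    {
        // Widgets are plain descriptions; their getters/setters are evaluated by the debug window.
        m_Items = new DebugUI.Widget[]
        {
            new DebugUI.Container
            {
                displayName = "My Feature",
                children =
                {
                    new DebugUI.BoolField { displayName = "Enabled", getter = () => m_Enabled, setter = v => m_Enabled = v },
                    new DebugUI.Value { displayName = "Status", getter = () => m_Enabled ? "On" : "Off" }
                }
            }
        };

        var panel = DebugManager.instance.GetPanel("My Panel", true); // create if missing
        panel.children.Add(m_Items);
    }
}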

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Debug/DebugDisplay.cs.hlsl (121 lines changed)


#define FULLSCREENDEBUGMODE_PRE_REFRACTION_COLOR_PYRAMID (4)
#define FULLSCREENDEBUGMODE_DEPTH_PYRAMID (5)
#define FULLSCREENDEBUGMODE_FINAL_COLOR_PYRAMID (6)
#define FULLSCREENDEBUGMODE_MAX_LIGHTING_FULL_SCREEN_DEBUG (7)
#define FULLSCREENDEBUGMODE_MIN_RENDERING_FULL_SCREEN_DEBUG (8)
#define FULLSCREENDEBUGMODE_MOTION_VECTORS (9)
#define FULLSCREENDEBUGMODE_NAN_TRACKER (10)
#define FULLSCREENDEBUGMODE_MAX_RENDERING_FULL_SCREEN_DEBUG (11)
#define FULLSCREENDEBUGMODE_SCREEN_SPACE_TRACING (7)
#define FULLSCREENDEBUGMODE_MAX_LIGHTING_FULL_SCREEN_DEBUG (8)
#define FULLSCREENDEBUGMODE_MIN_RENDERING_FULL_SCREEN_DEBUG (9)
#define FULLSCREENDEBUGMODE_MOTION_VECTORS (10)
#define FULLSCREENDEBUGMODE_NAN_TRACKER (11)
#define FULLSCREENDEBUGMODE_MAX_RENDERING_FULL_SCREEN_DEBUG (12)
// Generated from UnityEngine.Experimental.Rendering.HDPipeline.ScreenSpaceTracingDebug
// PackingRules = Exact
struct ScreenSpaceTracingDebug
{
int tracingModel;
uint loopStartPositionSSX;
uint loopStartPositionSSY;
float loopStartLinearDepth;
float3 loopRayDirectionSS;
uint loopMipLevelMax;
uint loopIterationMax;
float3 iterationPositionSS;
uint iterationMipLevel;
uint iteration;
float iterationLinearDepthBuffer;
int iterationIntersectionKind;
uint iterationCellSizeW;
uint iterationCellSizeH;
int proxyShapeType;
float projectionDistance;
bool endHitSuccess;
float endLinearDepth;
uint endPositionSSX;
uint endPositionSSY;
};
//
// Accessors for UnityEngine.Experimental.Rendering.HDPipeline.ScreenSpaceTracingDebug
//
int GetTracingModel(ScreenSpaceTracingDebug value)
{
return value.tracingModel;
}
uint GetLoopStartPositionSSX(ScreenSpaceTracingDebug value)
{
return value.loopStartPositionSSX;
}
uint GetLoopStartPositionSSY(ScreenSpaceTracingDebug value)
{
return value.loopStartPositionSSY;
}
float GetLoopStartLinearDepth(ScreenSpaceTracingDebug value)
{
return value.loopStartLinearDepth;
}
float3 GetLoopRayDirectionSS(ScreenSpaceTracingDebug value)
{
return value.loopRayDirectionSS;
}
uint GetLoopMipLevelMax(ScreenSpaceTracingDebug value)
{
return value.loopMipLevelMax;
}
uint GetLoopIterationMax(ScreenSpaceTracingDebug value)
{
return value.loopIterationMax;
}
float3 GetIterationPositionSS(ScreenSpaceTracingDebug value)
{
return value.iterationPositionSS;
}
uint GetIterationMipLevel(ScreenSpaceTracingDebug value)
{
return value.iterationMipLevel;
}
uint GetIteration(ScreenSpaceTracingDebug value)
{
return value.iteration;
}
float GetIterationLinearDepthBuffer(ScreenSpaceTracingDebug value)
{
return value.iterationLinearDepthBuffer;
}
int GetIterationIntersectionKind(ScreenSpaceTracingDebug value)
{
return value.iterationIntersectionKind;
}
uint GetIterationCellSizeW(ScreenSpaceTracingDebug value)
{
return value.iterationCellSizeW;
}
uint GetIterationCellSizeH(ScreenSpaceTracingDebug value)
{
return value.iterationCellSizeH;
}
int GetProxyShapeType(ScreenSpaceTracingDebug value)
{
return value.proxyShapeType;
}
float GetProjectionDistance(ScreenSpaceTracingDebug value)
{
return value.projectionDistance;
}
bool GetEndHitSuccess(ScreenSpaceTracingDebug value)
{
return value.endHitSuccess;
}
float GetEndLinearDepth(ScreenSpaceTracingDebug value)
{
return value.endLinearDepth;
}
uint GetEndPositionSSX(ScreenSpaceTracingDebug value)
{
return value.endPositionSSX;
}
uint GetEndPositionSSY(ScreenSpaceTracingDebug value)
{
return value.endPositionSSY;
}
#endif

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Debug/DebugDisplay.hlsl (5 lines changed)


CBUFFER_START(UnityDebugDisplay)
// Set of parameters available when switching to debug shader mode
int _DebugLightingMode; // Match enum DebugLightingMode
int _DebugLightingSubMode;
int _DebugStep;
float4 _MouseClickPixelCoord; // xy unorm, zw norm
float _DebugExposure;
RWStructuredBuffer<ScreenSpaceTracingDebug> _DebugScreenSpaceTracingData;
void GetPropertiesDataDebug(uint paramId, inout float3 result, inout bool needLinearToSRGB)
{

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Debug/DebugDisplayLatlong.shader (3 lines changed)


SAMPLER(sampler_InputCubemap);
float _Mipmap;
float _RequireToFlipInputTexture;
float _DebugExposure;
struct Attributes
{

width = height = depth = mipCount = 0;
_InputCubemap.GetDimensions(width, height, depth, mipCount);
mipCount = clamp(mipCount, 0, UNITY_SPECCUBE_LOD_STEPS);
return SAMPLE_TEXTURECUBE_LOD(_InputCubemap, sampler_InputCubemap, LatlongToDirectionCoordinate(input.texcoord.xy), _Mipmap * mipCount);
return SAMPLE_TEXTURECUBE_LOD(_InputCubemap, sampler_InputCubemap, LatlongToDirectionCoordinate(input.texcoord.xy), _Mipmap * mipCount) * exp2(_DebugExposure);
}
ENDHLSL

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Debug/DebugFullScreen.shader (84 lines changed)


#include "CoreRP/ShaderLibrary/Common.hlsl"
#include "CoreRP/ShaderLibrary/Color.hlsl"
#include "CoreRP/ShaderLibrary/Debug.hlsl"
#include "HDRP/Material/Lit/Lit.cs.hlsl"
TEXTURE2D(_DebugFullScreenTexture);
CBUFFER_START (UnityDebug)
float _ShowGrid;
float _ShowDepthPyramidDebug;
CBUFFER_END
TEXTURE2D(_DebugFullScreenTexture);
TEXTURE2D(_DepthPyramidTexture);
StructuredBuffer<ScreenSpaceTracingDebug> _DebugScreenSpaceTracingData;
struct Attributes
{

PositionInputs posInput = GetPositionInput(input.positionCS.xy, _ScreenSize.zw, depth, UNITY_MATRIX_I_VP, UNITY_MATRIX_V);
float linearDepth = frac(posInput.linearDepth * 0.1);
return float4(linearDepth.xxx, 1.0);
}
if (_FullScreenDebugMode == FULLSCREENDEBUGMODE_SCREEN_SPACE_TRACING)
{
const float circleRadius = 3.5;
const float ringSize = 1.5;
float4 color = SAMPLE_TEXTURE2D(_DebugFullScreenTexture, s_point_clamp_sampler, input.texcoord);
ScreenSpaceTracingDebug debug = _DebugScreenSpaceTracingData[0];
// Fetch Depth Buffer and Position Inputs
const float deviceDepth = LOAD_TEXTURE2D_LOD(_DepthPyramidTexture, int2(input.positionCS.xy) >> debug.iterationMipLevel, debug.iterationMipLevel).r;
PositionInputs posInput = GetPositionInput(input.positionCS.xy, _ScreenSize.zw, deviceDepth, UNITY_MATRIX_I_VP, UNITY_MATRIX_VP);
float4 col = float4(0, 0, 0, 1);
// Common Pre Specific
// Fetch debug data
const uint2 loopStartPositionSS = uint2(debug.loopStartPositionSSX, debug.loopStartPositionSSY);
const uint2 endPositionSS = uint2(debug.endPositionSSX, debug.endPositionSSY);
float distanceToPosition = FLT_MAX;
float positionSDF = 0;
// Start position dot rendering
const float distanceToStartPosition = length(int2(posInput.positionSS) - int2(loopStartPositionSS));
const float startPositionSDF = clamp(circleRadius - distanceToStartPosition, 0, 1);
// Line rendering
const float distanceToRaySegment = DistanceToSegment(posInput.positionSS, loopStartPositionSS, endPositionSS);
const float raySegmentSDF = clamp(1 - distanceToRaySegment, 0, 1);
float cellSDF = 0;
float debugLinearDepth = 0;
if (debug.tracingModel == REFRACTIONSSRAYMODEL_HI_Z)
{
const uint2 iterationCellSize = uint2(debug.iterationCellSizeW, debug.iterationCellSizeH);
const float hasData = iterationCellSize.x != 0 || iterationCellSize.y != 0;
// Position dot rendering
distanceToPosition = length(int2(posInput.positionSS) - int2(debug.iterationPositionSS.xy));
positionSDF = clamp(circleRadius - distanceToPosition, 0, 1);
// Grid rendering
float2 distanceToCell = float2(posInput.positionSS % iterationCellSize);
distanceToCell = min(distanceToCell, float2(iterationCellSize) - distanceToCell);
distanceToCell = clamp(1 - distanceToCell, 0, 1);
cellSDF = max(distanceToCell.x, distanceToCell.y) * _ShowGrid;
debugLinearDepth = posInput.linearDepth;
}
col = float4(
( GetIndexColor(1) * startPositionSDF
+ GetIndexColor(3) * positionSDF
+ GetIndexColor(5) * cellSDF
+ GetIndexColor(7) * raySegmentSDF
),
col.a
);
// Common Post Specific
// Calculate SDF to draw a ring on both dots
const float startPositionRingDistance = abs(distanceToStartPosition - circleRadius);
const float startPositionRingSDF = clamp(ringSize - startPositionRingDistance, 0, 1);
const float positionRingDistance = abs(distanceToPosition - circleRadius);
const float positionRingSDF = clamp(ringSize - positionRingDistance, 0, 1);
const float w = clamp(1 - startPositionRingSDF - positionRingSDF, 0, 1);
col.rgb = col.rgb * w + float3(1, 1, 1) * (1 - w);
if (_ShowDepthPyramidDebug == 1)
color.rgb = frac(float3(debugLinearDepth, debugLinearDepth, debugLinearDepth) * 0.1);
col = float4(col.rgb * col.a + color.rgb, 1);
return col;
}
return float4(0.0, 0.0, 0.0, 0.0);

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Debug/DebugViewMaterialGBuffer.shader (6 lines changed)


// Caution: This value is not the same as the builtin data bakeDiffuseLighting. It also includes emissive and is multiplied by the albedo
else if (_DebugViewMaterial == DEBUGVIEWGBUFFER_BAKE_DIFFUSE_LIGHTING_WITH_ALBEDO_PLUS_EMISSIVE)
{
// TODO: require a remap
// TODO: we should not gamma correct, but easier to debug for now without correct high range value
result = bakeLightingData.bakeDiffuseLighting; needLinearToSRGB = true;
result = bakeLightingData.bakeDiffuseLighting;;
result *= exp2(_DebugExposure);
needLinearToSRGB = true;
}
#ifdef SHADOWS_SHADOWMASK
else if (_DebugViewMaterial == DEBUGVIEWGBUFFER_BAKE_SHADOW_MASK0)

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Debug/LightingDebug.cs (20 lines changed)


IndirectSpecularGtaoFromSsao,
EnvironmentProxyVolume,
EnvironmentSampleCoordinates,
ScreenSpaceTracingRefraction,
ScreenSpaceTracingReflection
}
[GenerateHLSL]
public enum DebugScreenSpaceTracing
{
None,
Color,
RayDirWS,
HitDepth,
HitSuccess,
HiZPositionNDC,
HiZRayDirNDC,
HiZIterationCount,
HiZMaxUsedMipLevel,
HiZIntersectionKind
}
public enum ShadowMapDebugMode

}
public DebugLightingMode debugLightingMode = DebugLightingMode.None;
public DebugScreenSpaceTracing debugScreenSpaceTracingMode = DebugScreenSpaceTracing.None;
public ShadowMapDebugMode shadowDebugMode = ShadowMapDebugMode.None;
public bool shadowDebugUseSelection = false;
public uint shadowMapIndex = 0;

public float skyReflectionMipmap = 0.0f;
public float environmentProxyDepthScale = 20;
public float debugExposure = 0.0f;
public LightLoop.TileClusterDebug tileClusterDebug = LightLoop.TileClusterDebug.None;
public LightLoop.TileClusterCategoryDebug tileClusterDebugByCategory = LightLoop.TileClusterCategoryDebug.Punctual;

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Debug/LightingDebug.cs.hlsl (16 lines changed)


#define DEBUGLIGHTINGMODE_INDIRECT_SPECULAR_GTAO_FROM_SSAO (8)
#define DEBUGLIGHTINGMODE_ENVIRONMENT_PROXY_VOLUME (9)
#define DEBUGLIGHTINGMODE_ENVIRONMENT_SAMPLE_COORDINATES (10)
#define DEBUGLIGHTINGMODE_SCREEN_SPACE_TRACING_REFRACTION (11)
#define DEBUGLIGHTINGMODE_SCREEN_SPACE_TRACING_REFLECTION (12)
//
// UnityEngine.Experimental.Rendering.HDPipeline.DebugScreenSpaceTracing: static fields
//
#define DEBUGSCREENSPACETRACING_NONE (0)
#define DEBUGSCREENSPACETRACING_COLOR (1)
#define DEBUGSCREENSPACETRACING_RAY_DIR_WS (2)
#define DEBUGSCREENSPACETRACING_HIT_DEPTH (3)
#define DEBUGSCREENSPACETRACING_HIT_SUCCESS (4)
#define DEBUGSCREENSPACETRACING_HI_ZPOSITION_NDC (5)
#define DEBUGSCREENSPACETRACING_HI_ZRAY_DIR_NDC (6)
#define DEBUGSCREENSPACETRACING_HI_ZITERATION_COUNT (7)
#define DEBUGSCREENSPACETRACING_HI_ZMAX_USED_MIP_LEVEL (8)
#define DEBUGSCREENSPACETRACING_HI_ZINTERSECTION_KIND (9)
#endif

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Decal/DecalProjectorComponent.cs (12 lines changed)


}
}
public void Update()
{
if (m_Handle != null)
{
if (transform.hasChanged == true)
{
DecalSystem.instance.UpdateCachedData(transform, m_DrawDistance, m_FadeScale, m_Handle);
transform.hasChanged = false;
}
}
}
private void DrawGizmo(bool selected)
{
var col = new Color(0.0f, 0.7f, 1f, 1.0f);

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Decal/DecalSystem.cs (5 lines changed)


{
public class DecalSystem
{
public const int kInvalidIndex = -1;
public const int kDecalAtlasSize = 128;
public class DecalHandle
{

if (m_DecalAtlas == null)
{
m_DecalAtlas = new TextureCache2D("DecalAtlas");
m_DecalAtlas.AllocTextureArray(2048, 128, 128, TextureFormat.RGBA32, true);
m_DecalAtlas.AllocTextureArray(2048, kDecalAtlasSize, kDecalAtlasSize, TextureFormat.ARGB32, true);
}
return m_DecalAtlas;
}

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/Camera/SerializedHDCamera.cs (6 lines changed)


public SerializedHDCamera(SerializedObject serializedObject)
{
this.serializedObject = serializedObject;
var additionals = CoreEditorUtils.GetAdditionalData<HDAdditionalCameraData>(serializedObject.targetObjects);
var additionals = CoreEditorUtils.GetAdditionalData<HDAdditionalCameraData>(serializedObject.targetObjects, HDAdditionalCameraData.InitDefaultHDAdditionalCameraData);
hideFlags.intValue = (int)HideFlags.HideInInspector;
// We don't hide additional camera data anymore, per UX team request. To stay compatible with already authored scenes we force it to be visible
//hideFlags.intValue = (int)HideFlags.HideInInspector;
hideFlags.intValue = (int)HideFlags.None;
serializedAdditionalDataObject.ApplyModifiedProperties();
//backgroundColor = serializedObject.FindProperty("m_BackGroundColor");

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/EditorRenderPipelineResources/ReflectionProbesPreview.shader (2 lines changed)


#pragma vertex vert
#pragma fragment frag
#include "CoreRP/ShaderLibrary/common.hlsl"
#include "CoreRP/ShaderLibrary/Common.hlsl"
#include "HDRP/ShaderVariables.hlsl"
struct appdata

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/HDAssetFactory.cs (4 lines changed)


newAsset.colorPyramidCS = Load<ComputeShader>(HDRenderPipelinePath + "RenderPipelineResources/ColorPyramid.compute");
newAsset.depthPyramidCS = Load<ComputeShader>(HDRenderPipelinePath + "RenderPipelineResources/DepthPyramid.compute");
newAsset.copyChannelCS = Load<ComputeShader>(CorePath + "CoreResources/GPUCopy.compute");
newAsset.texturePaddingCS = Load<ComputeShader>(CorePath + "CoreResources/TexturePadding.compute");
newAsset.applyDistortionCS = Load<ComputeShader>(HDRenderPipelinePath + "RenderPipelineResources/ApplyDistorsion.compute");
newAsset.clearDispatchIndirectShader = Load<ComputeShader>(HDRenderPipelinePath + "Lighting/LightLoop/cleardispatchindirect.compute");

newAsset.deferredComputeShader = Load<ComputeShader>(HDRenderPipelinePath + "Lighting/LightLoop/Deferred.compute");
newAsset.deferredDirectionalShadowComputeShader = Load<ComputeShader>(HDRenderPipelinePath + "Lighting/DeferredDirectionalShadow.compute");
newAsset.volumetricLightingCS = Load<ComputeShader>(HDRenderPipelinePath + "Lighting/Volumetrics/Resources/VolumetricLighting.compute");
newAsset.volumeVoxelizationCS = Load<ComputeShader>(HDRenderPipelinePath + "Lighting/Volumetrics/VolumeVoxelization.compute");
newAsset.volumetricLightingCS = Load<ComputeShader>(HDRenderPipelinePath + "Lighting/Volumetrics/VolumetricLighting.compute");
newAsset.subsurfaceScatteringCS = Load<ComputeShader>(HDRenderPipelinePath + "Material/SubsurfaceScattering/SubsurfaceScattering.compute");
newAsset.subsurfaceScattering = Load<Shader>(HDRenderPipelinePath + "Material/SubsurfaceScattering/SubsurfaceScattering.shader");

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/Lighting/HDLightEditor.cs (10 lines changed)


// Caution: this function must match the one in HDAdditionalLightData.ConvertPhysicalLightIntensityToLightIntensity - any change needs to be replicated
void UpdateLightIntensity()
{
// Clamp negative values.
m_AdditionalLightData.directionalIntensity.floatValue = Mathf.Max(0, m_AdditionalLightData.directionalIntensity.floatValue);
m_AdditionalLightData.punctualIntensity.floatValue = Mathf.Max(0, m_AdditionalLightData.punctualIntensity.floatValue);
m_AdditionalLightData.areaIntensity.floatValue = Mathf.Max(0, m_AdditionalLightData.areaIntensity.floatValue);
switch (m_LightShape)
{
case LightShape.Directional:

// Internal utilities
void ApplyAdditionalComponentsVisibility(bool hide)
{
var flags = hide ? HideFlags.HideInInspector : HideFlags.None;
// The UX team decided that we should always show the component in the inspector.
// However, already authored scenes have saved this setting, so force the component to be visible
// var flags = hide ? HideFlags.HideInInspector : HideFlags.None;
var flags = HideFlags.None;
foreach (var t in m_SerializedAdditionalLightData.targetObjects)
((HDAdditionalLightData)t).hideFlags = flags;

11
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/Lighting/Reflection/PlanarReflectionProbeUI.Drawers.cs


static void Drawer_SectionCaptureMirror(PlanarReflectionProbeUI s, SerializedPlanarReflectionProbe d, Editor o)
{
EditorGUILayout.PropertyField(d.captureMirrorPlaneLocalPosition, _.GetContent("Plane Position"));
EditorGUILayout.PropertyField(d.captureMirrorPlaneLocalNormal, _.GetContent("Plane Normal"));
// EditorGUILayout.PropertyField(d.captureMirrorPlaneLocalPosition, _.GetContent("Plane Position"));
// EditorGUILayout.PropertyField(d.captureMirrorPlaneLocalNormal, _.GetContent("Plane Normal"));
}
static void Drawer_SectionCaptureSettings(PlanarReflectionProbeUI s, SerializedPlanarReflectionProbe d, Editor o)

static void Drawer_SectionProbeModeRealtimeSettings(PlanarReflectionProbeUI s, SerializedPlanarReflectionProbe d, Editor o)
{
GUI.enabled = false;
d.refreshMode.enumValueIndex = (int)ReflectionProbeRefreshMode.EveryFrame;
d.capturePositionMode.enumValueIndex = (int)PlanarReflectionProbe.CapturePositionMode.MirrorCamera;
GUI.enabled = true;
}
static void Drawer_SectionInfluenceSettings(PlanarReflectionProbeUI s, SerializedPlanarReflectionProbe d, Editor o)

static void Drawer_FieldCaptureType(PlanarReflectionProbeUI s, SerializedPlanarReflectionProbe d, Editor o)
{
GUI.enabled = false;
d.mode.enumValueIndex = (int)ReflectionProbeMode.Realtime;
GUI.enabled = true;
}
static void Drawer_FieldProxyVolumeReference(PlanarReflectionProbeUI s, SerializedPlanarReflectionProbe d, Editor o)

17
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/Lighting/Reflection/PlanarReflectionProbeUI.Handles.cs


}
}
if (d.useMirrorPlane)
{
var m = Handles.matrix;
Handles.matrix = mat;
Handles.color = k_GizmoMirrorPlaneCamera;
Handles.ArrowHandleCap(
0,
d.captureMirrorPlaneLocalPosition,
Quaternion.LookRotation(d.captureMirrorPlaneLocalNormal),
HandleUtility.GetHandleSize(d.captureMirrorPlaneLocalPosition),
Event.current.type
);
Handles.matrix = m;
}
if (d.proxyVolumeReference != null)
ReflectionProxyVolumeComponentUI.DrawHandles_EditNone(s.reflectionProxyVolume, d.proxyVolumeReference);
}

var showFrustrum = s.showCaptureHandles
|| EditMode.editMode == EditCenter;
var showCaptureMirror = (s.showCaptureHandles && d.useMirrorPlane)
var showCaptureMirror = d.useMirrorPlane
|| EditMode.editMode == EditMirrorPosition
|| EditMode.editMode == EditMirrorRotation;

45
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/Material/LayeredLit/LayeredLitUI.cs


bool mainLayerInfluenceEnable = useMainLayerInfluence.floatValue > 0.0f;
if(mainLayerInfluenceEnable) // This is the only case where we need this sub category.
// Main layer does not have any options but height base blend.
if (layerIndex > 0)
// Main layer does not have any options but height base blend.
if (layerIndex > 0)
{
int paramIndex = layerIndex - 1;
int paramIndex = layerIndex - 1;
m_MaterialEditor.ShaderProperty(opacityAsDensity[layerIndex], styles.opacityAsDensityText);
m_MaterialEditor.ShaderProperty(opacityAsDensity[layerIndex], styles.opacityAsDensityText);
if (mainLayerInfluenceEnable)
{
m_MaterialEditor.ShaderProperty(inheritBaseColor[paramIndex], styles.inheritBaseColorText);
m_MaterialEditor.ShaderProperty(inheritBaseNormal[paramIndex], styles.inheritBaseNormalText);
// Main height influence is only available if the shader uses the heightmap for displacement (per vertex or per level)
// We always display it as it can be tricky to know when per pixel displacement is enabled or not
m_MaterialEditor.ShaderProperty(inheritBaseHeight[paramIndex], styles.inheritBaseHeightText);
}
}
else
if (mainLayerInfluenceEnable)
if (!useMainLayerInfluence.hasMixedValue && useMainLayerInfluence.floatValue != 0.0f)
{
m_MaterialEditor.TexturePropertySingleLine(styles.layerInfluenceMapMaskText, layerInfluenceMaskMap);
}
m_MaterialEditor.ShaderProperty(inheritBaseColor[paramIndex], styles.inheritBaseColorText);
m_MaterialEditor.ShaderProperty(inheritBaseNormal[paramIndex], styles.inheritBaseNormalText);
// Main height influence is only available if the shader uses the heightmap for displacement (per vertex or per level)
// We always display it as it can be tricky to know when per pixel displacement is enabled or not
m_MaterialEditor.ShaderProperty(inheritBaseHeight[paramIndex], styles.inheritBaseHeightText);
EditorGUILayout.Space();
else
{
if (!useMainLayerInfluence.hasMixedValue && useMainLayerInfluence.floatValue != 0.0f)
{
EditorGUILayout.LabelField(styles.layeringOptionText, EditorStyles.boldLabel);
EditorGUI.indentLevel++;
m_MaterialEditor.TexturePropertySingleLine(styles.layerInfluenceMapMaskText, layerInfluenceMaskMap);
EditorGUI.indentLevel--;
}
}
EditorGUILayout.Space();
DoLayerGUI(material, layerIndex, true, m_UseHeightBasedBlend);

34
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/Material/Lit/LitUI.cs


public static GUIContent normalMapSpaceWarning = new GUIContent("Object space normal can't be used with triplanar mapping.");
// Transparency
public static string refractionModeText = "Refraction Mode";
public static string refractionModelText = "Refraction Model";
public static GUIContent refractionSSRayModelText = new GUIContent("SSRay Model", "Screen Space Ray Model");
public static GUIContent refractionIorText = new GUIContent("Index of refraction", "Index of refraction");
public static GUIContent refractionThicknessText = new GUIContent("Refraction Thickness", "Thickness for rough refraction");
public static GUIContent refractionThicknessMultiplierText = new GUIContent("Refraction Thickness multiplier (m)", "Thickness multiplier");

protected const string kATDistance = "_ATDistance";
protected MaterialProperty thicknessMultiplier = null;
protected const string kThicknessMultiplier = "_ThicknessMultiplier";
protected MaterialProperty refractionMode = null;
protected const string kRefractionMode = "_RefractionMode";
protected MaterialProperty refractionModel = null;
protected const string kRefractionModel = "_RefractionModel";
protected MaterialProperty refractionSSRayModel = null;
protected const string kRefractionSSRayModel = "_RefractionSSRayModel";
get { return refractionMode == null || refractionMode.floatValue == 0f; }
get { return refractionModel == null || refractionModel.floatValue == 0f; }
}
protected void FindMaterialLayerProperties(MaterialProperty[] props)

coatMaskMap = FindProperty(kCoatMaskMap, props);
// Transparency
refractionMode = FindProperty(kRefractionMode, props, false);
refractionModel = FindProperty(kRefractionModel, props, false);
refractionSSRayModel = FindProperty(kRefractionSSRayModel, props, false);
transmittanceColor = FindProperty(kTransmittanceColor, props, false);
transmittanceColorMap = FindProperty(kTransmittanceColorMap, props, false);
atDistance = FindProperty(kATDistance, props, false);

var surfaceTypeValue = (SurfaceType)surfaceType.floatValue;
if (surfaceTypeValue == SurfaceType.Transparent
&& refractionMode != null)
&& refractionModel != null)
{
EditorGUILayout.Space();
EditorGUILayout.LabelField(StylesBaseUnlit.TransparencyInputsText, EditorStyles.boldLabel);

if (refractionMode != null
if (refractionModel != null
m_MaterialEditor.ShaderProperty(refractionMode, Styles.refractionModeText);
var mode = (Lit.RefractionMode)refractionMode.floatValue;
if (mode != Lit.RefractionMode.None)
m_MaterialEditor.ShaderProperty(refractionModel, Styles.refractionModelText);
var mode = (Lit.RefractionModel)refractionModel.floatValue;
if (mode != Lit.RefractionModel.None)
m_MaterialEditor.ShaderProperty(refractionSSRayModel, Styles.refractionSSRayModelText);
m_MaterialEditor.ShaderProperty(ior, Styles.refractionIorText);
blendMode.floatValue = (float)BlendMode.Alpha;

CoreUtils.SetKeyword(material, "_MATERIAL_FEATURE_IRIDESCENCE", materialId == BaseLitGUI.MaterialId.LitIridescence);
CoreUtils.SetKeyword(material, "_MATERIAL_FEATURE_SPECULAR_COLOR", materialId == BaseLitGUI.MaterialId.LitSpecular);
var refractionModeValue = (Lit.RefractionMode)material.GetFloat(kRefractionMode);
var refractionModelValue = (Lit.RefractionModel)material.GetFloat(kRefractionModel);
var refractionSSRayModelValue = (Lit.RefractionSSRayModel)material.GetFloat(kRefractionSSRayModel);
CoreUtils.SetKeyword(material, "_REFRACTION_PLANE", (refractionModeValue == Lit.RefractionMode.Plane) && canHaveRefraction);
CoreUtils.SetKeyword(material, "_REFRACTION_SPHERE", (refractionModeValue == Lit.RefractionMode.Sphere) && canHaveRefraction);
CoreUtils.SetKeyword(material, "_REFRACTION_PLANE", (refractionModelValue == Lit.RefractionModel.Plane) && canHaveRefraction);
CoreUtils.SetKeyword(material, "_REFRACTION_SPHERE", (refractionModelValue == Lit.RefractionModel.Sphere) && canHaveRefraction);
CoreUtils.SetKeyword(material, "_REFRACTION_SSRAY_PROXY", (refractionSSRayModelValue == Lit.RefractionSSRayModel.Proxy) && canHaveRefraction);
CoreUtils.SetKeyword(material, "_REFRACTION_SSRAY_HIZ", (refractionSSRayModelValue == Lit.RefractionSSRayModel.HiZ) && canHaveRefraction);
}
}
} // namespace UnityEditor

4
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/RenderLoopSettings/GlobalLightLoopSettingsUI.cs


++EditorGUI.indentLevel;
EditorGUILayout.PropertyField(d.skyReflectionSize, _.GetContent("Sky Reflection Size"));
EditorGUILayout.PropertyField(d.skyLightingOverrideLayerMask, _.GetContent("Sky Lighting Override Mask|This layer mask will define in which layers the sky system will look for sky settings volumes for lighting override"));
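// Note: a LayerMask serialized as -1 corresponds to "Everything", which is why the value is checked below.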
if(d.skyLightingOverrideLayerMask.intValue == -1)
{
EditorGUILayout.HelpBox("Be careful, Sky Lighting Override Mask is set to Everything. This is most likely a mistake as it serves no purpose.", MessageType.Warning);
}
--EditorGUI.indentLevel;
}
}

4
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/RenderLoopSettings/HDRenderPipelineEditor.cs


public override void OnInspectorGUI()
{
var hdPipeline = RenderPipelineManager.currentPipeline as HDRenderPipeline;
if (hdPipeline == null)
return;
var s = m_HDRenderPipelineUI;
var d = m_SerializedHDRenderPipeline;
var o = this;

208
ScriptableRenderPipeline/HDRenderPipeline/HDRP/HDRenderPipeline.cs


readonly SkyManager m_SkyManager = new SkyManager();
readonly LightLoop m_LightLoop = new LightLoop();
readonly ShadowSettings m_ShadowSettings = new ShadowSettings();
readonly VolumetricLightingModule m_VolumetricLightingModule = new VolumetricLightingModule();
readonly VolumetricLightingSystem m_VolumetricLightingSystem = new VolumetricLightingSystem();
// Debugging
MaterialPropertyBlock m_SharedPropertyBlock = new MaterialPropertyBlock();

RTHandle m_DebugColorPickerBuffer;
RTHandle m_DebugFullScreenTempBuffer;
bool m_FullScreenDebugPushed;
bool m_ValidAPI; // True means we render normally; false means we don't render anything
ComputeBuffer m_DebugScreenSpaceTracingData = null;
ScreenSpaceTracingDebug[] m_DebugScreenSpaceTracingDataArray = new ScreenSpaceTracingDebug[1];
SetRenderingFeatures();
m_ValidAPI = true;
if (!SetRenderingFeatures())
{
m_ValidAPI = false;
return ;
}
m_BufferPyramid = new BufferPyramid(
var bufferPyramidProcessor = new BufferPyramidProcessor(
m_GPUCopy);
m_GPUCopy,
new TexturePadding(asset.renderPipelineResources.texturePaddingCS)
);
m_BufferPyramid = new BufferPyramid(bufferPyramidProcessor);
EncodeBC6H.DefaultInstance = EncodeBC6H.DefaultInstance ?? new EncodeBC6H(asset.renderPipelineResources.encodeBC6HCS);

m_MaterialList.ForEach(material => material.Build(asset));
m_IBLFilterGGX = new IBLFilterGGX(asset.renderPipelineResources);
m_IBLFilterGGX = new IBLFilterGGX(asset.renderPipelineResources, bufferPyramidProcessor);
m_VolumetricLightingModule.Build(asset);
m_VolumetricLightingSystem.Build(asset);
m_DebugDisplaySettings.RegisterDebug();
#if UNITY_EDITOR

m_DebugScreenSpaceTracingData = new ComputeBuffer(1, System.Runtime.InteropServices.Marshal.SizeOf(typeof(ScreenSpaceTracingDebug)));
InitializeRenderTextures();

RTHandle.Release(m_DebugColorPickerBuffer);
RTHandle.Release(m_DebugFullScreenTempBuffer);
m_DebugScreenSpaceTracingData.Release();
void SetRenderingFeatures()
bool SetRenderingFeatures()
{
// Set subshader pipeline tag
Shader.globalRenderPipeline = "HDRenderPipeline";

Debug.LogError("High Definition Render Pipeline doesn't support Gamma mode, change to Linear mode");
}
#endif
if (!IsSupportedPlatform())
{
Debug.LogError("Platform " + SystemInfo.operatingSystem + " with device " + SystemInfo.graphicsDeviceType.ToString() + " is not supported, no rendering will occur");
#if UNITY_EDITOR
foreach (UnityEditor.SceneView sv in Resources.FindObjectsOfTypeAll(typeof(UnityEditor.SceneView)))
sv.ShowNotification(new GUIContent("Platform " + SystemInfo.operatingSystem + " with device " + SystemInfo.graphicsDeviceType.ToString() + " is not supported, no rendering will occur"));
#endif
return false;
}
return true;
}
bool IsSupportedPlatform()
{
if (!SystemInfo.supportsComputeShaders)
return false;
if (SystemInfo.graphicsDeviceType == GraphicsDeviceType.Direct3D11 ||
SystemInfo.graphicsDeviceType == GraphicsDeviceType.Direct3D12 ||
SystemInfo.graphicsDeviceType == GraphicsDeviceType.PlayStation4 ||
SystemInfo.graphicsDeviceType == GraphicsDeviceType.XboxOne ||
SystemInfo.graphicsDeviceType == GraphicsDeviceType.XboxOneD3D12 ||
SystemInfo.graphicsDeviceType == GraphicsDeviceType.Vulkan)
{
return true;
}
if (SystemInfo.graphicsDeviceType == GraphicsDeviceType.Metal)
{
string os = SystemInfo.operatingSystem;
// Metal support depends on OS version:
// macOS 10.11.x doesn't have tessellation / earlydepthstencil support, early driver versions were buggy in general
// macOS 10.12.x should usually work with AMD, but there are issues with Intel/Nvidia GPUs. Regardless of the GPU, there are issues with MTLCompilerService crashing with some shaders
// macOS 10.13.x is expected to work, and if it's a driver/shader compiler issue, there's still hope of getting it fixed in the next shipping OS patch release
//
// Has worked experimentally with iOS in the past, but it's not currently supported
//
if (os.StartsWith("Mac"))
{
// TODO: Expose the OS version number in C#; for now assume "Mac OS X 10.10.4" format with major version at least 10
int startIndex = os.LastIndexOf(" ");
var parts = os.Substring(startIndex + 1).Split('.');
int a = Convert.ToInt32(parts[0]);
int b = Convert.ToInt32(parts[1]);
// In case in the future there's a need to disable specific patch releases
// int c = Convert.ToInt32(parts[2]);
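// Example (hypothetical value): "Mac OS X 10.13.2" -> parts = { "10", "13", "2" }, so a = 10, b = 13 and the check below passes.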
if (a >= 10 && b >= 13)
return true;
}
}
return false;
}
void UnsetRenderingFeatures()

public override void Dispose()
{
UnsetRenderingFeatures();
if (!m_ValidAPI)
return ;
base.Dispose();
m_DebugDisplaySettings.UnregisterDebug();

m_SSSBufferManager.Cleanup();
m_SkyManager.Cleanup();
m_VolumetricLightingModule.Cleanup();
m_VolumetricLightingSystem.Cleanup();
UnsetRenderingFeatures();
#if UNITY_EDITOR
SceneViewDrawMode.ResetDrawMode();
FrameSettings.UnRegisterDebug("Scene View");

}
// Warning: (resolutionChanged == false) if you open a new Editor tab of the same size!
m_VolumetricLightingModule.ResizeVBuffer(hdCamera, hdCamera.actualWidth, hdCamera.actualHeight);
m_VolumetricLightingSystem.ResizeVBuffer(hdCamera, hdCamera.actualWidth, hdCamera.actualHeight);
// update recorded window resolution
m_CurrentWidth = hdCamera.actualWidth;

m_DbufferManager.PushGlobalParams(cmd, m_FrameSettings);
m_VolumetricLightingModule.PushGlobalParams(hdCamera, cmd);
m_VolumetricLightingSystem.PushGlobalParams(hdCamera, cmd);
var ssrefraction = VolumeManager.instance.stack.GetComponent<ScreenSpaceRefraction>()
?? ScreenSpaceRefraction.@default;
ssrefraction.PushShaderParameters(cmd);
// Set up UnityPerView CBuffer.
hdCamera.SetupGlobalParams(cmd);

ReflectionProbeCullResults m_ReflectionProbeCullResults;
public override void Render(ScriptableRenderContext renderContext, Camera[] cameras)
{
if (!m_ValidAPI)
return;
base.Render(renderContext, cameras);
RenderPipeline.BeginFrameRendering(cameras);

}
else
{
// Temporary hack. For scene view, by default, we don't want to have the lighting override layers in the current sky.
// Temporary hack:
// For scene view, by default, we use the "main" camera volume layer mask if it exists
// Otherwise we just remove the lighting override layers in the current sky to avoid conflicts
layerMask = (-1 & ~m_Asset.renderPipelineSettings.lightLoopSettings.skyLightingOverrideLayerMask);
var mainCamera = Camera.main;
bool needFallback = true;
if (mainCamera != null)
{
var mainCamAdditionalData = mainCamera.GetComponent<HDAdditionalCameraData>();
if (mainCamAdditionalData != null)
{
layerMask = mainCamAdditionalData.volumeLayerMask;
needFallback = false;
}
}
if (needFallback)
{
// If the override layer is "Everything", we fall-back to "Everything" for the current layer mask to avoid issues by having no current layer
// In practice we should never have "Everything" as an override mask as it does not make sense (a warning is issued in the UI)
if (m_Asset.renderPipelineSettings.lightLoopSettings.skyLightingOverrideLayerMask == -1)
layerMask = -1;
else
layerMask = (-1 & ~m_Asset.renderPipelineSettings.lightLoopSettings.skyLightingOverrideLayerMask);
}
}
}
VolumeManager.instance.Update(camera.transform, layerMask);

var postProcessLayer = camera.GetComponent<PostProcessLayer>();
// Disable post process if we enable debug mode or if the post process layer is disabled
if (m_CurrentDebugDisplaySettings.fullScreenDebugMode != FullScreenDebugMode.None || m_CurrentDebugDisplaySettings.IsDebugDisplayEnabled() || !CoreUtils.IsPostProcessingActive(postProcessLayer))
if (m_CurrentDebugDisplaySettings.IsDebugDisplayEnabled() || !CoreUtils.IsPostProcessingActive(postProcessLayer))
{
m_FrameSettings.enablePostprocess = false;
}

continue;
}
// Frustum cull density volumes on the CPU. Can be performed as soon as the camera is set up.
DensityVolumeList densityVolumes = m_VolumetricLightingSystem.PrepareVisibleDensityVolumeList(hdCamera, cmd);
// Note: Legacy Unity behaves like this for ShadowMask
// When you select ShadowMask in the Lighting panel it recompiles shaders on the fly with the SHADOW_MASK keyword.
// However there is no C# function we can query to know what mode has been selected in the Lighting Panel, and it would be wrong anyway: the Lighting Panel sets up what the next bake mode will be, but until the lights are baked, it is wrong.

bool enableBakeShadowMask;
using (new ProfilingSample(cmd, "TP_PrepareLightsForGPU", CustomSamplerId.TPPrepareLightsForGPU.GetSampler()))
{
enableBakeShadowMask = m_LightLoop.PrepareLightsForGPU(cmd, m_ShadowSettings, m_CullResults, m_ReflectionProbeCullResults, camera) && m_FrameSettings.enableShadowMask;
enableBakeShadowMask = m_LightLoop.PrepareLightsForGPU(cmd, camera, m_ShadowSettings, m_CullResults, m_ReflectionProbeCullResults, densityVolumes) && m_FrameSettings.enableShadowMask;
}
ConfigureForShadowMask(enableBakeShadowMask, cmd);
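For illustration only (not part of this change): ConfigureForShadowMask presumably toggles the shadow-mask shader keyword globally for the frame based on the flag computed above. A minimal sketch; the method body and the SHADOWS_SHADOWMASK keyword name are assumptions, not taken from this diff:
    // Hypothetical sketch, not the actual HDRP implementation.
    void ConfigureForShadowMaskSketch(bool enableBakeShadowMask, CommandBuffer cmd)
    {
        // Keyword name is assumed for illustration.
        if (enableBakeShadowMask)
            cmd.EnableShaderKeyword("SHADOWS_SHADOWMASK");
        else
            cmd.DisableShaderKeyword("SHADOWS_SHADOWMASK");
    }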

renderContext.ExecuteCommandBuffer(cmd);
cmd.Clear();
buildGPULightListsCompleteFence = m_LightLoop.BuildGPULightListsAsyncBegin(hdCamera, renderContext, m_CameraDepthStencilBuffer, m_CameraStencilBufferCopy, startFence, m_SkyManager.IsSkyValid());
buildGPULightListsCompleteFence = m_LightLoop.BuildGPULightListsAsyncBegin(hdCamera, renderContext, m_CameraDepthStencilBuffer, m_CameraStencilBufferCopy, startFence, m_SkyManager.IsLightingSkyValid());
}
using (new ProfilingSample(cmd, "Render shadows", CustomSamplerId.RenderShadows.GetSampler()))

{
using (new ProfilingSample(cmd, "Build Light list", CustomSamplerId.BuildLightList.GetSampler()))
{
m_LightLoop.BuildGPULightLists(hdCamera, cmd, m_CameraDepthStencilBuffer, m_CameraStencilBufferCopy, m_SkyManager.IsSkyValid());
m_LightLoop.BuildGPULightLists(hdCamera, cmd, m_CameraDepthStencilBuffer, m_CameraStencilBufferCopy, m_SkyManager.IsLightingSkyValid());
// The pass only requires the volume properties, and can run async.
m_VolumetricLightingModule.VoxelizeDensityVolumes(hdCamera, cmd);
// Perform the voxelization step which fills the density 3D texture.
// Requires the clustered lighting data structure to be built, and can run async.
m_VolumetricLightingSystem.VolumeVoxelizationPass(densityVolumes, hdCamera, cmd, m_FrameSettings);
m_VolumetricLightingModule.VolumetricLightingPass(hdCamera, cmd, m_FrameSettings);
m_VolumetricLightingSystem.VolumetricLightingPass(hdCamera, cmd, m_FrameSettings);
RenderDeferredLighting(hdCamera, cmd);

}
}
#endif
PushFullScreenDebugTexture(cmd, m_CameraColorBuffer, hdCamera, FullScreenDebugMode.ScreenSpaceTracing);
// Caution: RenderDebug needs to take into account that we have flipped the screen (so anything captured before the flip will be flipped)
RenderDebug(hdCamera, cmd);

CommandBufferPool.Release(cmd);
renderContext.Submit();
if (m_CurrentDebugDisplaySettings.fullScreenDebugMode == FullScreenDebugMode.ScreenSpaceTracing)
{
m_DebugScreenSpaceTracingData.GetData(m_DebugScreenSpaceTracingDataArray);
var data = m_DebugScreenSpaceTracingDataArray[0];
m_CurrentDebugDisplaySettings.screenSpaceTracingDebugData = data;
}
} // For each camera
}

uint x, y, z;
m_applyDistortionCS.GetKernelThreadGroupSizes(m_applyDistortionKernel, out x, out y, out z);
cmd.SetComputeTextureParam(m_applyDistortionCS, m_applyDistortionKernel, HDShaderIDs._DistortionTexture, m_DistortionBuffer);
cmd.SetComputeTextureParam(m_applyDistortionCS, m_applyDistortionKernel, HDShaderIDs._GaussianPyramidColorTexture, m_BufferPyramid.colorPyramid);
cmd.SetComputeTextureParam(m_applyDistortionCS, m_applyDistortionKernel, HDShaderIDs._ColorPyramidTexture, m_BufferPyramid.colorPyramid);
cmd.SetComputeTextureParam(m_applyDistortionCS, m_applyDistortionKernel, HDShaderIDs._CameraColorTexture, m_CameraColorBuffer);
cmd.SetComputeVectorParam(m_applyDistortionCS, HDShaderIDs._Size, size);

var visualEnv = VolumeManager.instance.stack.GetComponent<VisualEnvironment>();
visualEnv.PushFogShaderParameters(cmd, m_FrameSettings);
m_SkyManager.RenderSky(hdCamera, m_LightLoop.GetCurrentSunLight(), m_CameraColorBuffer, m_CameraDepthStencilBuffer, cmd);
m_SkyManager.RenderSky(hdCamera, m_LightLoop.GetCurrentSunLight(), m_CameraColorBuffer, m_CameraDepthStencilBuffer, m_CurrentDebugDisplaySettings, cmd);
if (visualEnv.fogType != FogType.None || m_VolumetricLightingModule.preset != VolumetricLightingModule.VolumetricLightingPreset.Off)
if (visualEnv.fogType != FogType.None || m_VolumetricLightingSystem.preset != VolumetricLightingSystem.VolumetricLightingPreset.Off)
m_SkyManager.RenderOpaqueAtmosphericScattering(cmd);
}

}
else
{
// Assign debug data
if (m_CurrentDebugDisplaySettings.fullScreenDebugMode == FullScreenDebugMode.ScreenSpaceTracing
&& pass == ForwardPass.Transparent)
{
cmd.SetGlobalBuffer(HDShaderIDs._DebugScreenSpaceTracingData, m_DebugScreenSpaceTracingData);
cmd.SetRandomWriteTarget(1, m_DebugScreenSpaceTracingData);
}
HDUtils.SetRenderTarget(cmd, hdCamera, m_CameraColorBuffer, m_CameraDepthStencilBuffer);
if (m_FrameSettings.enableDBuffer) // the enable d-buffer flag is now interpreted more like "enable decals in general" now that we have clustered decals
{

if (m_CurrentDebugDisplaySettings.fullScreenDebugMode == FullScreenDebugMode.ScreenSpaceTracing
&& pass == ForwardPass.Transparent)
{
cmd.ClearRandomWriteTargets();
}
}
}
}

context.command = cmd;
context.camera = hdcamera.camera;
context.sourceFormat = RenderTextureFormat.ARGBHalf;
context.flip = true;
context.flip = hdcamera.camera.targetTexture == null;
layer.Render(context);
}

{
if (m_CurrentDebugDisplaySettings.IsDebugDisplayEnabled() ||
m_CurrentDebugDisplaySettings.fullScreenDebugMode != FullScreenDebugMode.None ||
m_CurrentDebugDisplaySettings.colorPickerDebugSettings.colorPickerMode != ColorPickerDebugMode.None)
{
// globally enable the DEBUG_DISPLAY keyword on shaders that support it via multi_compile

cmd.SetGlobalInt(HDShaderIDs._DebugViewMaterial, (int)m_CurrentDebugDisplaySettings.GetDebugMaterialIndex());
cmd.SetGlobalInt(HDShaderIDs._DebugLightingMode, (int)m_CurrentDebugDisplaySettings.GetDebugLightingMode());
cmd.SetGlobalInt(HDShaderIDs._DebugLightingSubMode, (int)m_CurrentDebugDisplaySettings.GetDebugLightingSubMode());
cmd.SetGlobalInt(HDShaderIDs._DebugMipMapMode, (int)m_CurrentDebugDisplaySettings.GetDebugMipMapMode());
cmd.SetGlobalVector(HDShaderIDs._DebugLightingAlbedo, debugAlbedo);

cmd.SetGlobalVector(HDShaderIDs._MousePixelCoord, HDUtils.GetMouseCoordinates(hdCamera));
cmd.SetGlobalVector(HDShaderIDs._MouseClickPixelCoord, HDUtils.GetMouseClickCoordinates(hdCamera));
cmd.SetGlobalInt(HDShaderIDs._DebugStep, HDUtils.debugStep);
cmd.SetGlobalInt(HDShaderIDs._ShowGrid, m_CurrentDebugDisplaySettings.showSSRayGrid ? 1 : 0);
cmd.SetGlobalInt(HDShaderIDs._ShowDepthPyramidDebug, m_CurrentDebugDisplaySettings.showSSRayDepthPyramid ? 1 : 0);
// The DebugNeedsExposure test allows us to set a neutral value if exposure is not needed. This way we don't need to make various tests inside shaders but only in this function.
cmd.SetGlobalFloat(HDShaderIDs._DebugExposure, m_CurrentDebugDisplaySettings.DebugNeedsExposure() ? lightingDebugSettings.debugExposure : 0.0f);
}
else
{

{
if (m_CurrentDebugDisplaySettings.colorPickerDebugSettings.colorPickerMode != ColorPickerDebugMode.None)
{
HDUtils.BlitCameraTexture(cmd, hdCamera, textureID, m_DebugColorPickerBuffer);
using (new ProfilingSample(cmd, "Push To Color Picker"))
{
HDUtils.BlitCameraTexture(cmd, hdCamera, textureID, m_DebugColorPickerBuffer);
}
}
}

if (m_CurrentDebugDisplaySettings.colorPickerDebugSettings.colorPickerMode != ColorPickerDebugMode.None)
{
HDUtils.BlitCameraTexture(cmd, hdCamera, textureID, m_DebugColorPickerBuffer);
using (new ProfilingSample(cmd, "Push To Color Picker"))
{
HDUtils.BlitCameraTexture(cmd, hdCamera, textureID, m_DebugColorPickerBuffer);
}
}
}

// Everything we have captured is flipped (as it happens before FinalPass/postprocess/Blit). So if we are not in SceneView
// (i.e. we have performed a flip), we need to flip the input texture
m_DebugFullScreen.SetFloat(HDShaderIDs._RequireToFlipInputTexture, hdCamera.camera.cameraType != CameraType.SceneView ? 1.0f : 0.0f);
m_DebugFullScreen.SetBuffer(HDShaderIDs._DebugScreenSpaceTracingData, m_DebugScreenSpaceTracingData);
m_DebugFullScreen.SetTexture(HDShaderIDs._DepthPyramidTexture, m_BufferPyramid.depthPyramid);
HDUtils.DrawFullScreen(cmd, hdCamera, m_DebugFullScreen, (RenderTargetIdentifier)BuiltinRenderTextureType.CameraTarget);
PushColorPickerDebugTexture(cmd, (RenderTargetIdentifier)BuiltinRenderTextureType.CameraTarget, hdCamera);

m_SharedPropertyBlock.SetTexture(HDShaderIDs._InputCubemap, skyReflection);
m_SharedPropertyBlock.SetFloat(HDShaderIDs._Mipmap, lightingDebug.skyReflectionMipmap);
m_SharedPropertyBlock.SetFloat(HDShaderIDs._RequireToFlipInputTexture, hdCamera.camera.cameraType != CameraType.SceneView ? 1.0f : 0.0f);
m_SharedPropertyBlock.SetFloat(HDShaderIDs._DebugExposure, lightingDebug.debugExposure);
cmd.SetViewport(new Rect(x, y, overlaySize, overlaySize));
cmd.DrawProcedural(Matrix4x4.identity, m_DebugDisplayLatlong, 0, MeshTopology.Triangles, 3, 1, m_SharedPropertyBlock);
HDUtils.NextOverlayCoord(ref x, ref y, overlaySize, overlaySize, hdCamera.actualWidth);

{
if (hdCamera.clearColorMode == HDAdditionalCameraData.ClearColorMode.BackgroundColor ||
// If we want the sky but the sky doesn't exist, still clear with the background color
(hdCamera.clearColorMode == HDAdditionalCameraData.ClearColorMode.Sky && !m_SkyManager.IsSkyValid()) ||
(hdCamera.clearColorMode == HDAdditionalCameraData.ClearColorMode.Sky && !m_SkyManager.IsVisualSkyValid()) ||
// Special handling for Preview: we force clearing with the background color (i.e. black)
// Note that the sky used in this case is the last one set up. If there is no scene or game, no sky is used as a reflection in the preview
hdCamera.camera.cameraType == CameraType.Preview

4
ScriptableRenderPipeline/HDRenderPipeline/HDRP/HDRenderPipelineAsset.asset


pointCookieSize: 512
cubeCookieTexArraySize: 16
reflectionProbeCacheSize: 128
planarReflectionProbeCacheSize: 128
planarReflectionProbeCacheSize: 4
planarReflectionTextureSize: 128
planarReflectionTextureSize: 1024
reflectionCacheCompressed: 0
planarReflectionCacheCompressed: 0
maxPlanarReflectionProbes: 128

28
ScriptableRenderPipeline/HDRenderPipeline/HDRP/HDStringConstants.cs


public static readonly int g_LayeredSingleIdxBuffer = Shader.PropertyToID("g_LayeredSingleIdxBuffer");
public static readonly int _EnvLightIndexShift = Shader.PropertyToID("_EnvLightIndexShift");
public static readonly int _DensityVolumeIndexShift = Shader.PropertyToID("_DensityVolumeIndexShift");
public static readonly int g_isOrthographic = Shader.PropertyToID("g_isOrthographic");
public static readonly int g_iNrVisibLights = Shader.PropertyToID("g_iNrVisibLights");

public static readonly int _ViewTilesFlags = Shader.PropertyToID("_ViewTilesFlags");
public static readonly int _MousePixelCoord = Shader.PropertyToID("_MousePixelCoord");
public static readonly int _MouseClickPixelCoord = Shader.PropertyToID("_MouseClickPixelCoord");
public static readonly int _DebugStep = Shader.PropertyToID("_DebugStep");
public static readonly int _DebugExposure = Shader.PropertyToID("_DebugExposure");
public static readonly int _DebugScreenSpaceTracingData = Shader.PropertyToID("_DebugScreenSpaceTracingData");
public static readonly int _ShowGrid = Shader.PropertyToID("_ShowGrid");
public static readonly int _ShowDepthPyramidDebug = Shader.PropertyToID("_ShowDepthPyramidDebug");
public static readonly int _DebugLightingSubMode = Shader.PropertyToID("_DebugLightingSubMode");
public static readonly int _DebugLightingAlbedo = Shader.PropertyToID("_DebugLightingAlbedo");
public static readonly int _DebugLightingSmoothness = Shader.PropertyToID("_DebugLightingSmoothness");
public static readonly int _DebugLightingNormal = Shader.PropertyToID("_DebugLightingNormal");

public static readonly int _IrradianceSource = Shader.PropertyToID("_IrradianceSource");
public static readonly int _EnableDBuffer = Shader.PropertyToID("_EnableDBuffer");
public static readonly int _DecalAtlasResolution = Shader.PropertyToID("_DecalAtlasResolution");
public static readonly int[] _GBufferTexture =
{

Shader.PropertyToID("_SSSBufferTexture3"),
};
public static readonly int _SSRefractionRayMinLevel = Shader.PropertyToID("_SSRefractionRayMinLevel");
public static readonly int _SSRefractionRayMaxLevel = Shader.PropertyToID("_SSRefractionRayMaxLevel");
public static readonly int _SSRefractionRayMaxIterations = Shader.PropertyToID("_SSRefractionRayMaxIterations");
public static readonly int _SSRefractionRayDepthSuccessBias = Shader.PropertyToID("_SSRefractionRayDepthSuccessBias");
public static readonly int _SSRefractionInvScreenWeightDistance = Shader.PropertyToID("_SSRefractionInvScreenWeightDistance");
public static readonly int _GaussianPyramidColorTexture = Shader.PropertyToID("_GaussianPyramidColorTexture");
public static readonly int _PyramidDepthTexture = Shader.PropertyToID("_PyramidDepthTexture");
public static readonly int _GaussianPyramidColorMipSize = Shader.PropertyToID("_GaussianPyramidColorMipSize");
public static readonly int _DepthPyramidMipSize = Shader.PropertyToID("_PyramidDepthMipSize");
public static readonly int _ColorPyramidTexture = Shader.PropertyToID("_ColorPyramidTexture");
public static readonly int _DepthPyramidTexture = Shader.PropertyToID("_DepthPyramidTexture");
public static readonly int _ColorPyramidSize = Shader.PropertyToID("_ColorPyramidSize");
public static readonly int _ColorPyramidScale = Shader.PropertyToID("_ColorPyramidScale");
public static readonly int _DepthPyramidSize = Shader.PropertyToID("_DepthPyramidSize");
public static readonly int _DepthPyramidScale = Shader.PropertyToID("_DepthPyramidScale");
public static readonly int _DebugColorPickerTexture = Shader.PropertyToID("_DebugColorPickerTexture");
public static readonly int _ColorPickerParam = Shader.PropertyToID("_ColorPickerParam");

public static readonly int _VBufferLightingHistory = Shader.PropertyToID("_VBufferLightingHistory");
public static readonly int _VBufferLightingFeedback = Shader.PropertyToID("_VBufferLightingFeedback");
public static readonly int _VBufferSampleOffset = Shader.PropertyToID("_VBufferSampleOffset");
public static readonly int _VolumeBounds = Shader.PropertyToID("_VolumeBounds");
public static readonly int _VolumeProperties = Shader.PropertyToID("_VolumeProperties");
public static readonly int _NumVisibleDensityVolumes = Shader.PropertyToID("_NumVisibleDensityVolumes");
}
}

34
ScriptableRenderPipeline/HDRenderPipeline/HDRP/HDUtils.cs


return null;
}
}
public static int debugStep { get { return MousePositionDebug.instance.debugStep; } }
static MaterialPropertyBlock s_PropertyBlock = new MaterialPropertyBlock();

return Matrix4x4.Transpose(worldToViewMatrix.transpose * viewSpaceRasterTransform);
}
private static void SetViewportAndClear(CommandBuffer cmd, HDCamera camera, RTHandle buffer, ClearFlag clearFlag, Color clearColor)
{
// Clearing a partial viewport currently does not go through the hardware clear.
// Instead it goes through a quad rendered with a specific shader.
// When enabling wireframe mode in the scene view, unfortunately it overrides this shader, thus breaking every clear.
// That's why in the editor we don't set the viewport before clearing (it's set to full screen by the previous SetRenderTarget) but AFTER, so that we benefit from the non-buggy hardware clear.
// We consider that the small loss in performance is acceptable in the editor.
// A refactor of wireframe is needed before we can fix this properly (by not doing anything!)
#if !UNITY_EDITOR
SetViewport(cmd, camera, buffer);
#endif
CoreUtils.ClearRenderTarget(cmd, clearFlag, clearColor);
#if UNITY_EDITOR
SetViewport(cmd, camera, buffer);
#endif
}
SetViewport(cmd, camera, buffer);
CoreUtils.ClearRenderTarget(cmd, clearFlag, clearColor);
SetViewportAndClear(cmd, camera, buffer, clearFlag, clearColor);
}
public static void SetRenderTarget(CommandBuffer cmd, HDCamera camera, RTHandle buffer, ClearFlag clearFlag = ClearFlag.None, int miplevel = 0, CubemapFace cubemapFace = CubemapFace.Unknown, int depthSlice = 0)

public static void SetRenderTarget(CommandBuffer cmd, HDCamera camera, RTHandle colorBuffer, RTHandle depthBuffer, ClearFlag clearFlag, Color clearColor, int miplevel = 0, CubemapFace cubemapFace = CubemapFace.Unknown, int depthSlice = 0)
{
CoreUtils.SetRenderTarget(cmd, colorBuffer, depthBuffer, miplevel, cubemapFace, depthSlice);
SetViewport(cmd, camera, colorBuffer);
CoreUtils.ClearRenderTarget(cmd, clearFlag, clearColor);
SetViewportAndClear(cmd, camera, colorBuffer, clearFlag, clearColor);
}
public static void SetRenderTarget(CommandBuffer cmd, HDCamera camera, RenderTargetIdentifier[] colorBuffers, RTHandle depthBuffer)

public static void SetRenderTarget(CommandBuffer cmd, HDCamera camera, RenderTargetIdentifier[] colorBuffers, RTHandle depthBuffer, ClearFlag clearFlag = ClearFlag.None)
{
CoreUtils.SetRenderTarget(cmd, colorBuffers, depthBuffer); // Don't clear here, viewport needs to be set before we do.
SetViewport(cmd, camera, depthBuffer);
CoreUtils.ClearRenderTarget(cmd, clearFlag, CoreUtils.clearColorAllBlack);
SetViewportAndClear(cmd, camera, depthBuffer, clearFlag, CoreUtils.clearColorAllBlack);
}
public static void SetRenderTarget(CommandBuffer cmd, HDCamera camera, RenderTargetIdentifier[] colorBuffers, RTHandle depthBuffer, ClearFlag clearFlag, Color clearColor)

public static Vector4 GetMouseCoordinates(HDCamera camera)
{
Vector2 mousePixelCoord = MousePositionDebug.instance.GetMousePosition(camera.screenSize.y);
return new Vector4(mousePixelCoord.x, mousePixelCoord.y, camera.scaleBias.x * mousePixelCoord.x / camera.screenSize.x, camera.scaleBias.y * mousePixelCoord.y / camera.screenSize.y);
}
// Returns mouse click coordinates: (x,y) in pixels and (z,w) normalized inside the render target (not the viewport)
public static Vector4 GetMouseClickCoordinates(HDCamera camera)
{
Vector2 mousePixelCoord = MousePositionDebug.instance.GetMouseClickPosition(camera.screenSize.y);
return new Vector4(mousePixelCoord.x, mousePixelCoord.y, camera.scaleBias.x * mousePixelCoord.x / camera.screenSize.x, camera.scaleBias.y * mousePixelCoord.y / camera.screenSize.y);
}
}
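For reference, a minimal usage sketch of the (x,y)/(z,w) packing documented above (hypothetical call site; hdCamera and cmd are assumed to be in scope):
    Vector4 mouse = HDUtils.GetMouseClickCoordinates(hdCamera);
    Vector2 pixelCoord      = new Vector2(mouse.x, mouse.y); // pixel position in the render target
    Vector2 normalizedCoord = new Vector2(mouse.z, mouse.w); // normalized inside the render target (not the viewport)
    cmd.SetGlobalVector(HDShaderIDs._MouseClickPixelCoord, mouse); // as done in HDRenderPipeline.cs above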

12
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Light/HDAdditionalLightData.cs


// As we have our own default value, we need to initialize the light intensity correctly
public static void InitDefaultHDAdditionalLightData(HDAdditionalLightData lightData)
{
// At first init we need to correctly initialize the default value
lightData.ConvertPhysicalLightIntensityToLightIntensity();
// Special treatment for Unity builtin area light. Change it to our rectangle light
// Special treatment for Unity built-in area light. Change it to our rectangle light
var light = lightData.gameObject.GetComponent<Light>();
// Sanity check: lightData.lightTypeExtent is initialized to LightTypeExtent.Punctual (in case, for unknown reasons, we recreate additional data on an existing light)

light.type = LightType.Point; // Same as in HDLightEditor
#if UNITY_EDITOR
light.lightmapBakeType = LightmapBakeType.Realtime;
#endif
// At first init we need to correctly initialize the default value
lightData.ConvertPhysicalLightIntensityToLightIntensity();
}
}

1
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightEvaluation.hlsl


lightData.angleScale, lightData.angleOffset);
#if (SHADEROPTIONS_VOLUMETRIC_LIGHTING_PRESET != 0)
// TODO: sample the extinction from the density V-buffer.
float distVol = (lightData.lightType == GPULIGHTTYPE_PROJECTOR_BOX) ? distances.w : distances.x;
attenuation *= TransmittanceHomogeneousMedium(_GlobalExtinction, distVol);
#endif

96
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightLoop/LightLoop.cs


Area,
Env,
Decal,
DensityVolume,
Count
}

EnvironmentAndPunctual = 5,
EnvironmentAndArea = 6,
EnvironmentAndAreaAndPunctual = 7,
Decal = 8
Decal = 8,
DensityVolumes = 16
};
public const int k_MaxDirectionalLightsOnScreen = 4;

int m_punctualLightCount = 0;
int m_areaLightCount = 0;
int m_lightCount = 0;
int m_densityVolumeCount = 0;
bool m_enableBakeShadowMask = false; // Track if any light require shadow mask. In this case we will need to enable the keyword shadow mask
float m_maxShadowDistance = 0.0f; // Save value from shadow settings

}
}
public void AddBoxVolumeDataAndBound(OrientedBBox obb, LightCategory category, LightFeatureFlags featureFlags, Matrix4x4 worldToView)
{
var bound = new SFiniteLightBound();
var volumeData = new LightVolumeData();
// transform to camera space (becomes a left hand coordinate frame in Unity since Determinant(worldToView)<0)
var positionVS = worldToView.MultiplyPoint(obb.center);
var rightVS = worldToView.MultiplyVector(obb.right);
var upVS = worldToView.MultiplyVector(obb.up);
var forwardVS = Vector3.Cross(upVS, rightVS);
var extents = new Vector3(obb.extentX, obb.extentY, obb.extentZ);
volumeData.lightVolume = (uint)LightVolumeType.Box;
volumeData.lightCategory = (uint)category;
volumeData.featureFlags = (uint)featureFlags;
bound.center = positionVS;
bound.boxAxisX = obb.extentX * rightVS;
bound.boxAxisY = obb.extentY * upVS;
bound.boxAxisZ = obb.extentZ * forwardVS;
bound.radius = extents.magnitude;
bound.scaleXY.Set(1.0f, 1.0f);
// The culling system culls pixels that are further
// than a threshold from the box influence extents.
// So we use an arbitrary threshold here (k_BoxCullingExtentOffset)
volumeData.lightPos = positionVS;
volumeData.lightAxisX = rightVS;
volumeData.lightAxisY = upVS;
volumeData.lightAxisZ = forwardVS;
volumeData.boxInnerDist = extents - k_BoxCullingExtentThreshold; // We have no blend range, but the culling code needs a small EPS value for some reason???
volumeData.boxInvRange.Set(1.0f / k_BoxCullingExtentThreshold.x, 1.0f / k_BoxCullingExtentThreshold.y, 1.0f / k_BoxCullingExtentThreshold.z);
m_lightList.bounds.Add(bound);
m_lightList.lightVolumes.Add(volumeData);
}
public int GetCurrentShadowCount()
{
return m_ShadowRequests.Count;

}
// Returns true if BakedShadowMask is enabled
public bool PrepareLightsForGPU(CommandBuffer cmd, ShadowSettings shadowSettings, CullResults cullResults, ReflectionProbeCullResults reflectionProbeCullResults, Camera camera)
public bool PrepareLightsForGPU(CommandBuffer cmd, Camera camera, ShadowSettings shadowSettings, CullResults cullResults,
ReflectionProbeCullResults reflectionProbeCullResults, DensityVolumeList densityVolumes)
{
using (new ProfilingSample(cmd, "Prepare Lights For GPU"))
{

if (ShaderConfig.s_CameraRelativeRendering != 0)
{
// Caution: 'DirectionalLightData.positionWS' is camera-relative after this point.
int n = m_lightList.directionalLights.Count;
DirectionalLightData lightData = m_lightList.directionalLights[n - 1];
int last = m_lightList.directionalLights.Count - 1;
DirectionalLightData lightData = m_lightList.directionalLights[last];
m_lightList.directionalLights[n - 1] = lightData;
m_lightList.directionalLights[last] = lightData;
}
}
continue;

if (ShaderConfig.s_CameraRelativeRendering != 0)
{
// Caution: 'LightData.positionWS' is camera-relative after this point.
int n = m_lightList.lights.Count;
LightData lightData = m_lightList.lights[n - 1];
int last = m_lightList.lights.Count - 1;
LightData lightData = m_lightList.lights[last];
m_lightList.lights[n - 1] = lightData;
m_lightList.lights[last] = lightData;
}
}
}

if (ShaderConfig.s_CameraRelativeRendering != 0)
{
// Caution: 'EnvLightData.positionWS' is camera-relative after this point.
int n = m_lightList.envLights.Count;
EnvLightData envLightData = m_lightList.envLights[n - 1];
int last = m_lightList.envLights.Count - 1;
EnvLightData envLightData = m_lightList.envLights[last];
m_lightList.envLights[n - 1] = envLightData;
m_lightList.envLights[last] = envLightData;
// Inject density volumes into the clustered data structure for efficient look up.
m_densityVolumeCount = densityVolumes.bounds != null ? densityVolumes.bounds.Count : 0;
Matrix4x4 worldToViewCR = worldToView;
if (ShaderConfig.s_CameraRelativeRendering != 0)
{
// The OBBs are camera-relative, the matrix is not. Fix it.
worldToViewCR.SetColumn(3, new Vector4(0, 0, 0, 1));
}
for (int i = 0, n = m_densityVolumeCount; i < n; i++)
{
// Density volumes are not lights and therefore should not affect light classification.
LightFeatureFlags featureFlags = 0;
AddBoxVolumeDataAndBound(densityVolumes.bounds[i], LightCategory.DensityVolume, featureFlags, worldToViewCR);
}
m_lightCount = m_lightList.lights.Count + m_lightList.envLights.Count;
Debug.Assert(m_lightList.bounds.Count == m_lightCount);
Debug.Assert(m_lightList.lightVolumes.Count == m_lightCount);
m_lightCount = m_lightList.lights.Count + m_lightList.envLights.Count + m_densityVolumeCount;
Debug.Assert(m_lightCount == m_lightList.bounds.Count);
Debug.Assert(m_lightCount == m_lightList.lightVolumes.Count);
int decalDatasCount = Math.Min(DecalSystem.m_DecalDatasCount, k_MaxDecalsOnScreen);
if (decalDatasCount > 0)

cmd.SetComputeBufferParam(buildPerVoxelLightListShader, s_ClearVoxelAtomicKernel, HDShaderIDs.g_LayeredSingleIdxBuffer, s_GlobalLightListAtomic);
cmd.DispatchCompute(buildPerVoxelLightListShader, s_ClearVoxelAtomicKernel, 1, 1, 1);
int decalDatasCount = Math.Min(DecalSystem.m_DecalDatasCount, k_MaxDecalsOnScreen);
cmd.SetComputeIntParam(buildPerVoxelLightListShader, HDShaderIDs._DensityVolumeIndexShift, m_lightList.lights.Count + m_lightList.envLights.Count + decalDatasCount);
cmd.SetComputeIntParam(buildPerVoxelLightListShader, HDShaderIDs.g_iNrVisibLights, m_lightCount);
cmd.SetComputeMatrixArrayParam(buildPerVoxelLightListShader, HDShaderIDs.g_mScrProjectionArr, projscrArr);
cmd.SetComputeMatrixArrayParam(buildPerVoxelLightListShader, HDShaderIDs.g_mInvScrProjectionArr, invProjscrArr);
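A worked example of the shift set just above, with hypothetical counts: with 10 punctual/area lights, 3 reflection probes and 2 decals in the combined cull list, _DensityVolumeIndexShift is 10 + 3 + 2 = 15, so the first density volume, stored at global index 15, maps back to per-category index 0 when the clustered list generation subtracts shiftIndex[lightCategory] (see lightlistbuild-clustered.compute below).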

else // Pixel shader evaluation
{
int index = GetDeferredLightingMaterialIndex( options.outputSplitLighting ? 1 : 0,
m_FrameSettings.lightLoopSettings.enableTileAndCluster ? 1 : 0,
m_enableBakeShadowMask ? 1 : 0,
m_FrameSettings.lightLoopSettings.enableTileAndCluster ? 1 : 0,
m_enableBakeShadowMask ? 1 : 0,
debugDisplaySettings.IsDebugDisplayEnabled() ? 1 : 0);
Material currentLightingMaterial = m_deferredLightingMaterial[index];

m_DebugViewTilesMaterial.SetInt(HDShaderIDs._NumTiles, numTiles);
m_DebugViewTilesMaterial.SetInt(HDShaderIDs._ViewTilesFlags, (int)lightingDebug.tileClusterDebugByCategory);
m_DebugViewTilesMaterial.SetVector(HDShaderIDs._MousePixelCoord, HDUtils.GetMouseCoordinates(hdCamera));
m_DebugViewTilesMaterial.SetVector(HDShaderIDs._MouseClickPixelCoord, HDUtils.GetMouseClickCoordinates(hdCamera));
m_DebugViewTilesMaterial.SetBuffer(HDShaderIDs.g_TileList, s_TileList);
m_DebugViewTilesMaterial.SetBuffer(HDShaderIDs.g_DispatchIndirectBuffer, s_DispatchIndirectBuffer);
m_DebugViewTilesMaterial.EnableKeyword("USE_FPTL_LIGHTLIST");

// lightCategories
m_DebugViewTilesMaterial.SetInt(HDShaderIDs._ViewTilesFlags, (int)lightingDebug.tileClusterDebugByCategory);
m_DebugViewTilesMaterial.SetVector(HDShaderIDs._MousePixelCoord, HDUtils.GetMouseCoordinates(hdCamera));
m_DebugViewTilesMaterial.SetVector(HDShaderIDs._MouseClickPixelCoord, HDUtils.GetMouseClickCoordinates(hdCamera));
m_DebugViewTilesMaterial.SetBuffer(HDShaderIDs.g_vLightListGlobal, bUseClustered ? s_PerVoxelLightLists : s_LightList);
m_DebugViewTilesMaterial.EnableKeyword(bUseClustered ? "USE_CLUSTERED_LIGHTLIST" : "USE_FPTL_LIGHTLIST");
m_DebugViewTilesMaterial.DisableKeyword(!bUseClustered ? "USE_CLUSTERED_LIGHTLIST" : "USE_FPTL_LIGHTLIST");

3
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightLoop/LightLoop.cs.hlsl


#define LIGHTCATEGORY_AREA (1)
#define LIGHTCATEGORY_ENV (2)
#define LIGHTCATEGORY_DECAL (3)
#define LIGHTCATEGORY_COUNT (4)
#define LIGHTCATEGORY_DENSITY_VOLUME (4)
#define LIGHTCATEGORY_COUNT (5)
//
// UnityEngine.Experimental.Rendering.HDPipeline.LightFeatureFlags: static fields

161
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightLoop/LightLoop.hlsl


#include "CoreRP/ShaderLibrary/Macros.hlsl"
//-----------------------------------------------------------------------------
// LightLoop
// ----------------------------------------------------------------------------

}
}
// We always apply exposure when in debug mode. The exposure value will be at a neutral 0.0 when not needed.
diffuseLighting *= exp2(_DebugExposure);
specularLighting *= exp2(_DebugExposure);
#endif
}

float reflectionHierarchyWeight = 0.0; // Max: 1.0
float refractionHierarchyWeight = 0.0; // Max: 1.0
if (featureFlags & LIGHTFEATUREFLAGS_SSREFRACTION)
// First loop iteration is:
// 1. Screen Space Refraction / Reflection
// 2. Environment Reflection / Refraction
// 3. Sky Reflection / Refraction
//
// Following loop iterations are:
// 1. Environment Reflection / Refraction
// 2. Sky Reflection / Refraction
// Common variable for all iterations
// Define macro for a better understanding of the loop
#ifdef LIGHTLOOP_TILE_PASS
uint envLightStart;
# define FETCHINDEX(index) FetchIndex(envLightStart, index);
#else
# define FETCHINDEX(index) index
#endif
uint envLightCount;
#define EVALUATE_BSDF_ENV(envLightData, TYPE, type) {\
IndirectLighting lighting = EvaluateBSDF_Env( context, V, posInput, preLightData, envLightData, bsdfData, \
envLightData.influenceShapeType, \
MERGE_NAME (GPUIMAGEBASEDLIGHTINGTYPE_, TYPE), MERGE_NAME (type, HierarchyWeight)); \
AccumulateIndirectLighting(lighting, aggregateLighting);\
}
// First loop iteration
if (featureFlags & (
LIGHTFEATUREFLAGS_ENV
| LIGHTFEATUREFLAGS_SKY
| LIGHTFEATUREFLAGS_SSREFRACTION
| LIGHTFEATUREFLAGS_SSREFLECTION
)
)
IndirectLighting lighting = EvaluateBSDF_SSLighting(
context,
V,
posInput,
preLightData,
bsdfData,
GPUIMAGEBASEDLIGHTINGTYPE_REFRACTION,
refractionHierarchyWeight);
AccumulateIndirectLighting(lighting, aggregateLighting);
}
// Fetch first env light to provide the scene proxy for screen space computation
EnvLightData envLightData;
ZERO_INITIALIZE(EnvLightData, envLightData);
{
#ifdef LIGHTLOOP_TILE_PASS
GetCountAndStart(posInput, LIGHTCATEGORY_ENV, envLightStart, envLightCount);
#else
envLightCount = _EnvLightCount;
#endif
if (envLightCount > 0)
{
uint envLightIndex = FETCHINDEX(0);
envLightData = _EnvLightDatas[envLightIndex];
}
else
envLightData = InitSkyEnvLightData(0);
}
if (featureFlags & LIGHTFEATUREFLAGS_SSREFRACTION)
{
IndirectLighting lighting = EvaluateBSDF_SSLighting( context, V, posInput, preLightData, bsdfData, envLightData,
GPUIMAGEBASEDLIGHTINGTYPE_REFRACTION, refractionHierarchyWeight);
AccumulateIndirectLighting(lighting, aggregateLighting);
}
if (featureFlags & LIGHTFEATUREFLAGS_SSREFLECTION)
{
IndirectLighting lighting = EvaluateBSDF_SSLighting( context, V, posInput, preLightData, bsdfData, envLightData,
GPUIMAGEBASEDLIGHTINGTYPE_REFLECTION, reflectionHierarchyWeight);
AccumulateIndirectLighting(lighting, aggregateLighting);
}
if ((featureFlags & LIGHTFEATUREFLAGS_ENV) && envLightCount > 0)
{
context.sampleReflection = SINGLE_PASS_CONTEXT_SAMPLE_REFLECTION_PROBES;
EVALUATE_BSDF_ENV(envLightData, REFLECTION, reflection);
if (featureFlags & LIGHTFEATUREFLAGS_SSREFRACTION)
EVALUATE_BSDF_ENV(envLightData, REFRACTION, refraction);
}
if (featureFlags & LIGHTFEATUREFLAGS_SSREFLECTION)
{
IndirectLighting lighting = EvaluateBSDF_SSLighting(
context,
V,
posInput,
preLightData,
bsdfData,
GPUIMAGEBASEDLIGHTINGTYPE_REFLECTION,
reflectionHierarchyWeight);
AccumulateIndirectLighting(lighting, aggregateLighting);
// Only apply the sky IBL if the sky texture is available
if ((featureFlags & LIGHTFEATUREFLAGS_SKY) && _EnvLightSkyEnabled)
{
// The sky is a single cubemap texture separate from the reflection probe texture array (different resolution and compression)
context.sampleReflection = SINGLE_PASS_CONTEXT_SAMPLE_SKY;
// The sky data are generated on the fly so the compiler can optimize the code
EnvLightData envLightSky = InitSkyEnvLightData(0);
EVALUATE_BSDF_ENV(envLightSky, REFLECTION, reflection);
if (featureFlags & LIGHTFEATUREFLAGS_SSREFRACTION)
EVALUATE_BSDF_ENV(envLightSky, REFRACTION, refraction);
}
// Following loop iterations
if (featureFlags & LIGHTFEATUREFLAGS_ENV || featureFlags & LIGHTFEATUREFLAGS_SKY)
{
// Reflection probes are sorted by volume (in increasing order).

#ifdef LIGHTLOOP_TILE_PASS
uint envLightStart;
uint envLightCount;
GetCountAndStart(posInput, LIGHTCATEGORY_ENV, envLightStart, envLightCount);
#else
uint envLightCount = _EnvLightCount;
#endif
#ifdef LIGHTLOOP_TILE_PASS
uint envLightIndex = FetchIndex(envLightStart, i);
#else
uint envLightIndex = i;
#endif
IndirectLighting lighting = EvaluateBSDF_Env( context, V, posInput, preLightData, _EnvLightDatas[envLightIndex], bsdfData,
_EnvLightDatas[envLightIndex].influenceShapeType,
GPUIMAGEBASEDLIGHTINGTYPE_REFLECTION, reflectionHierarchyWeight);
AccumulateIndirectLighting(lighting, aggregateLighting);
uint envLightIndex = FETCHINDEX(i);
EVALUATE_BSDF_ENV(_EnvLightDatas[envLightIndex], REFLECTION, reflection);
}
// Refraction probes and reflection probes process exactly the same weight. It would be good for performance to be able to share this computation

{
for (i = 0; i < envLightCount && refractionHierarchyWeight < 1.0; ++i)
{
#ifdef LIGHTLOOP_TILE_PASS
uint envLightIndex = FetchIndex(envLightStart, i);
#else
uint envLightIndex = i;
#endif
IndirectLighting lighting = EvaluateBSDF_Env( context, V, posInput, preLightData, _EnvLightDatas[envLightIndex], bsdfData,
_EnvLightDatas[envLightIndex].influenceShapeType,
GPUIMAGEBASEDLIGHTINGTYPE_REFRACTION, refractionHierarchyWeight);
AccumulateIndirectLighting(lighting, aggregateLighting);
uint envLightIndex = FETCHINDEX(i);
EVALUATE_BSDF_ENV(_EnvLightDatas[envLightIndex], REFRACTION, refraction);
}
}
}

// The sky data are generated on the fly so the compiler can optimize the code
EnvLightData envLightSky = InitSkyEnvLightData(0);
IndirectLighting lighting = EvaluateBSDF_Env( context, V, posInput, preLightData, envLightSky, bsdfData,
ENVSHAPETYPE_SKY,
GPUIMAGEBASEDLIGHTINGTYPE_REFLECTION, reflectionHierarchyWeight);
AccumulateIndirectLighting(lighting, aggregateLighting);
EVALUATE_BSDF_ENV(envLightSky, REFLECTION, reflection);
}
if (featureFlags & LIGHTFEATUREFLAGS_SSREFRACTION)

// The sky data are generated on the fly so the compiler can optimize the code
EnvLightData envLightSky = InitSkyEnvLightData(0);
IndirectLighting lighting = EvaluateBSDF_Env( context, V, posInput, preLightData, envLightSky, bsdfData,
ENVSHAPETYPE_SKY,
GPUIMAGEBASEDLIGHTINGTYPE_REFRACTION, refractionHierarchyWeight);
AccumulateIndirectLighting(lighting, aggregateLighting);
EVALUATE_BSDF_ENV(envLightSky, REFRACTION, refraction);
#undef EVALUATE_BSDF_ENV
#undef FETCHINDEX
// Also apply indirect diffuse (GI)
// PostEvaluateBSDF will perform any operation wanted by the material and sum everything into diffuseLighting and specularLighting

24
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightLoop/LightLoopDef.hlsl


//_Env2DCaptureVP is in capture space
float3 ndc = ComputeNormalizedDeviceCoordinatesWithZ(texCoord, _Env2DCaptureVP[index]);
color.rgb = SAMPLE_TEXTURE2D_ARRAY_LOD(_Env2DTextures, s_trilinear_clamp_sampler, ndc.xy, index, 0).rgb;
color.rgb = SAMPLE_TEXTURE2D_ARRAY_LOD(_Env2DTextures, s_trilinear_clamp_sampler, ndc.xy, index, lod).rgb;
color.a = any(ndc.xyz < 0) || any(ndc.xyz > 1) ? 0.0 : 1.0;
#ifdef DEBUG_DISPLAY

return 1;
}
uint FetchIndex(uint globalOffset, uint lightIndex)
{
return globalOffset + lightIndex;
}
uint FetchIndexWithBoundsCheck(uint start, uint count, uint i)
{
if (i < count)
{
return FetchIndex(start, i);
}
else
{
return UINT_MAX;
}
}
#ifdef LIGHTLOOP_TILE_PASS
#else
int j = start + i;
#endif
return _LightDatas[j];
}

5
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightLoop/lightlistbuild-bigtile.compute


#pragma kernel BigTileLightListGen
#include "CoreRP/ShaderLibrary/common.hlsl"
#include "CoreRP/ShaderLibrary/Common.hlsl"
#include "LightLoop.cs.hlsl"
#include "LightingConvexHullUtils.hlsl"
#include "SortingComputeUtils.hlsl"

uniform float g_fNearPlane;
uniform float g_fFarPlane;
uniform uint g_isOrthographic;
// TODO: These aren't used, we should remove them
uniform int _EnvLightIndexShift;
uniform int _DecalIndexShift;
StructuredBuffer<float3> g_vBoundsBuffer : register( t1 );
StructuredBuffer<LightVolumeData> _LightVolumeData : register(t2);

14
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightLoop/lightlistbuild-clustered.compute


#pragma kernel TileLightListGen_DepthRT_MSAA_SrcBigTile LIGHTLISTGEN=TileLightListGen_DepthRT_MSAA_SrcBigTile ENABLE_DEPTH_TEXTURE_BACKPLANE MSAA_ENABLED USE_TWO_PASS_TILED_LIGHTING
#pragma kernel ClearAtomic
#include "CoreRP/ShaderLibrary/common.hlsl"
#include "CoreRP/ShaderLibrary/Common.hlsl"
#include "ShaderBase.hlsl"
#include "LightLoop.cs.hlsl"
#include "LightingConvexHullUtils.hlsl"

uint g_isOrthographic;
int _EnvLightIndexShift;
int _DecalIndexShift;
int _DensityVolumeIndexShift;
float g_fClustScale;
float g_fClustBase;

int shiftIndex[LIGHTCATEGORY_COUNT];
ZERO_INITIALIZE_ARRAY(int, shiftIndex, LIGHTCATEGORY_COUNT);
// NOTE: Why is this indexed like this?
shiftIndex[LIGHTCATEGORY_COUNT - 2] = _EnvLightIndexShift;
shiftIndex[LIGHTCATEGORY_COUNT - 1] = _DecalIndexShift;
shiftIndex[LIGHTCATEGORY_COUNT - 3] = _EnvLightIndexShift;
shiftIndex[LIGHTCATEGORY_COUNT - 2] = _DecalIndexShift;
shiftIndex[LIGHTCATEGORY_COUNT - 1] = _DensityVolumeIndexShift;
int categoryListCount[LIGHTCATEGORY_COUNT]; // direct light count and reflection lights
int categoryListCount[LIGHTCATEGORY_COUNT]; // number of direct lights, reflection probes, decals and density volumes
ZERO_INITIALIZE_ARRAY(int, categoryListCount, LIGHTCATEGORY_COUNT);
uint offs = start;

const int lightVolIndex = GenerateLightCullDataIndex(coarseList[l], g_iNrVisibLights, eyeIndex);
uint lightCategory = _LightVolumeData[lightVolIndex].lightCategory;
++categoryListCount[lightCategory];
g_vLayeredLightList[offs++] = coarseList[l] - shiftIndex[lightCategory]; // reflection lights will be last since we sorted
}
g_vLayeredLightList[offs++] = coarseList[l] - shiftIndex[lightCategory];
}
}
#if !defined(SHADER_API_XBOXONE) && !defined(SHADER_API_PSSL)

21
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightLoop/lightlistbuild.compute


// #pragma enable_d3d11_debug_symbols
#include "CoreRP/ShaderLibrary/common.hlsl"
#include "CoreRP/ShaderLibrary/Common.hlsl"
#include "ShaderBase.hlsl"
#include "LightLoop.cs.hlsl"
#include "LightingConvexHullUtils.hlsl"

#define MAX_NR_COARSE_ENTRIES 64
#define MAX_NR_PRUNED_ENTRIES 24
#define CATEGORY_LIST_SIZE (LIGHTCATEGORY_COUNT - 1) // Skip density volumes
groupshared unsigned int coarseList[MAX_NR_COARSE_ENTRIES];
groupshared unsigned int prunedList[MAX_NR_COARSE_ENTRIES]; // temporarily support room for all 64 while in LDS

#endif
groupshared int ldsNrLightsFinal;
groupshared int ldsCategoryListCount[LIGHTCATEGORY_COUNT];
groupshared int ldsCategoryListCount[CATEGORY_LIST_SIZE];
#ifdef PERFORM_SPHERICAL_INTERSECTION_TESTS
groupshared uint lightOffsSph;

for(int l=(int) t; l<(int) g_iNrVisibLights; l += NR_THREADS)
{
#endif
// Skip density volumes (lights are sorted by category). TODO: improve data locality
if (_LightVolumeData[l].lightCategory == LIGHTCATEGORY_DENSITY_VOLUME) { break; }
const float3 vMi = g_vBoundsBuffer[l];
const float3 vMa = g_vBoundsBuffer[l+g_iNrVisibLights];

}
#endif
//
if(t<LIGHTCATEGORY_COUNT) ldsCategoryListCount[t]=0;
if(t<CATEGORY_LIST_SIZE) ldsCategoryListCount[t]=0;
#ifdef USE_FEATURE_FLAGS
if(t==0) ldsFeatureFlags=0;
#endif

// All our cull data is in the same list, but at render time env lights are separated, so we need to shift the index
// to make it work correctly
int shiftIndex[LIGHTCATEGORY_COUNT];
ZERO_INITIALIZE_ARRAY(int, shiftIndex, LIGHTCATEGORY_COUNT);
shiftIndex[LIGHTCATEGORY_COUNT - 2] = _EnvLightIndexShift;
shiftIndex[LIGHTCATEGORY_COUNT - 1] = _DecalIndexShift;
int shiftIndex[CATEGORY_LIST_SIZE];
ZERO_INITIALIZE_ARRAY(int, shiftIndex, CATEGORY_LIST_SIZE);
shiftIndex[CATEGORY_LIST_SIZE - 2] = _EnvLightIndexShift;
shiftIndex[CATEGORY_LIST_SIZE - 1] = _DecalIndexShift;
for(int category=0; category<LIGHTCATEGORY_COUNT; category++)
for(int category=0; category<CATEGORY_LIST_SIZE; category++)
{
int nrLightsFinal = ldsCategoryListCount[category];
int nrLightsFinalClamped = nrLightsFinal<MAX_NR_PRUNED_ENTRIES ? nrLightsFinal : MAX_NR_PRUNED_ENTRIES;
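
The shift table above exists because all culled volumes (lights, reflection probes, decals, density volumes) live in one category-sorted list during culling, while at render time each category indexes its own data array; subtracting the per-category shift converts a global list index into a per-category one. A standalone C# sketch of that remapping, with hypothetical category ordering and counts:

using System;

static class CategoryShiftExample
{
    // Suppose the culling list is sorted by category:
    // [punctual lights][env probes][decals], with these counts.
    const int PunctualCount = 5, EnvCount = 2;

    static void Main()
    {
        // Per-category shift: subtracting it turns a global list index
        // into an index local to that category's own data array.
        int[] shift = { 0, PunctualCount, PunctualCount + EnvCount };

        int globalIndex = 6;            // 7th entry of the sorted list
        int category = 1;               // it is an env probe
        int localIndex = globalIndex - shift[category];
        Console.WriteLine(localIndex);  // 1 -> second entry of the env probe array
    }
}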

2
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightLoop/materialflags.compute


// #pragma enable_d3d11_debug_symbols
#include "CoreRP/ShaderLibrary/common.hlsl"
#include "CoreRP/ShaderLibrary/Common.hlsl"
#include "ShaderBase.hlsl"
#include "LightLoop.cs.hlsl"

2
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightLoop/scrbound.compute


#pragma kernel ScreenBoundsAABB
#include "CoreRP/ShaderLibrary/common.hlsl"
#include "CoreRP/ShaderLibrary/Common.hlsl"
#include "LightLoop.cs.hlsl"
#include "LightCullUtils.hlsl"

10
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/HomogeneousDensityVolume.cs


[AddComponentMenu("Rendering/Homogeneous Density Volume", 1100)]
public class HomogeneousDensityVolume : MonoBehaviour
{
public VolumeParameters volumeParameters = new VolumeParameters();
public DensityVolumeParameters parameters = new DensityVolumeParameters();
private void Awake()
{

private void OnValidate()
{
volumeParameters.Constrain();
parameters.Constrain();
if (volumeParameters.IsLocalVolume())
if (parameters.IsLocalVolume())
Gizmos.color = volumeParameters.albedo;
Gizmos.color = parameters.albedo;
Gizmos.matrix = transform.localToWorldMatrix;
Gizmos.DrawWireCube(Vector3.zero, Vector3.one);
}

foreach (HomogeneousDensityVolume volume in volumes)
{
if (volume.enabled && !volume.volumeParameters.IsLocalVolume())
if (volume.enabled && !volume.parameters.IsLocalVolume())
{
globalVolume = volume;
break;

2
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/HomogeneousDensityVolume.cs.meta


fileFormatVersion: 2
guid: 1c273c50d71d46a4f98a1d23256a8c63
guid: e1fbb15bf92b84f40a1eb030765b5afe
MonoImporter:
externalObjects: {}
serializedVersion: 2

324
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/VolumetricLighting.cs


namespace UnityEngine.Experimental.Rendering.HDPipeline
{
[GenerateHLSL]
public struct VolumeProperties
public struct DensityVolumeProperties
public static VolumeProperties GetNeutralVolumeProperties()
public static DensityVolumeProperties GetNeutralProperties()
VolumeProperties properties = new VolumeProperties();
DensityVolumeProperties properties = new DensityVolumeProperties();
properties.scattering = Vector3.zero;
properties.extinction = 0;

} // struct VolumeProperties
[Serializable]
public class VolumeParameters
public class DensityVolumeParameters
public bool isLocal; // Enables voxelization
public Color albedo; // Single scattering albedo [0, 1]
public float meanFreePath; // In meters [1, inf]. Should be chromatic - this is an optimization!
public float asymmetry; // Single global parameter for all volumes. TODO: UX
public bool isLocal; // Enables voxelization
public Color albedo; // Single scattering albedo [0, 1]
public float meanFreePath; // In meters [1, inf]. Should be chromatic - this is an optimization!
public float asymmetry; // Only used if (isLocal == false)
public VolumeParameters()
public DensityVolumeParameters()
{
isLocal = true;
albedo = new Color(0.5f, 0.5f, 0.5f);

asymmetry = Mathf.Clamp(asymmetry, -1.0f, 1.0f);
}
public VolumeProperties GetProperties()
public DensityVolumeProperties GetProperties()
VolumeProperties properties = new VolumeProperties();
DensityVolumeProperties properties = new DensityVolumeProperties();
properties.scattering = GetScatteringCoefficient();
properties.extinction = GetExtinctionCoefficient();

} // class VolumeParameters
public class VolumetricLightingModule
public struct DensityVolumeList
{
public List<OrientedBBox> bounds;
public List<DensityVolumeProperties> properties;
}
public class VolumetricLightingSystem
{
public enum VolumetricLightingPreset
{

}
class VBuffer
{
public long viewID = -1; // -1 is invalid; positive for Game Views, 0 otherwise
public RenderTexture[] lightingRTEX = null;
public RenderTargetIdentifier[] lightingRTID = null;
public RenderTexture densityRTEX = null;
public RenderTargetIdentifier densityRTID = -1; // RenderTargetIdentifier cannot be NULL
const int k_IndexDensity = 0;
const int k_IndexIntegral = 1;
const int k_IndexHistory = 2; // Depends on frame ID
const int k_IndexFeedback = 3; // Depends on frame ID
long m_ViewID = -1; // -1 is invalid; positive for Game Views, 0 otherwise
RenderTexture[] m_Textures = null;
RenderTargetIdentifier[] m_Identifiers = null;
public RenderTargetIdentifier GetDensityBuffer()
{
Debug.Assert(m_ViewID >= 0);
return m_Identifiers[k_IndexDensity];
}
Debug.Assert(viewID >= 0);
return lightingRTID[0];
Debug.Assert(m_ViewID >= 0);
return m_Identifiers[k_IndexIntegral];
Debug.Assert(viewID > 0); // Game View only
return lightingRTID[1 + ((Time.renderedFrameCount + 0) & 1)];
Debug.Assert(m_ViewID > 0); // Game View only
return m_Identifiers[k_IndexHistory + (Time.renderedFrameCount & 1)];
Debug.Assert(viewID > 0); // Game View only
return lightingRTID[1 + ((Time.renderedFrameCount + 1) & 1)];
}
public RenderTargetIdentifier GetDensityBuffer()
{
Debug.Assert(viewID >= 0);
return densityRTID;
Debug.Assert(m_ViewID > 0); // Game View only
return m_Identifiers[k_IndexFeedback - (Time.renderedFrameCount & 1)];
}
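
The new VBuffer accessors pick the history and feedback slices by frame parity, so the two 3D textures swap roles every frame (read last frame's result, write this frame's). A minimal C# sketch of that ping-pong indexing, mirroring k_IndexHistory and k_IndexFeedback above:

using System;

static class PingPongExample
{
    const int IndexHistory  = 2; // read target for reprojection
    const int IndexFeedback = 3; // write target for this frame

    // Swap the history/feedback slots every frame using the frame parity.
    static (int history, int feedback) GetSlots(int frameCount)
    {
        int parity = frameCount & 1;
        return (IndexHistory + parity, IndexFeedback - parity);
    }

    static void Main()
    {
        Console.WriteLine(GetSlots(10)); // (2, 3)
        Console.WriteLine(GetSlots(11)); // (3, 2) -- roles swapped
    }
}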
public void Create(long viewID, int w, int h, int d)

// Clean up first.
Destroy();
// The required number of buffers depends on the view type.
// Only Game Views need history and feedback buffers.
int n = isGameView ? 3 : 1;
int n = isGameView ? 4 : 2;
this.viewID = viewID;
this.lightingRTEX = new RenderTexture[n];
this.lightingRTID = new RenderTargetIdentifier[n];
m_ViewID = viewID;
m_Textures = new RenderTexture[n];
m_Identifiers = new RenderTargetIdentifier[n];
this.lightingRTEX[i] = new RenderTexture(w, h, 0, RenderTextureFormat.ARGBHalf, RenderTextureReadWrite.Linear);
this.lightingRTEX[i].hideFlags = HideFlags.HideAndDontSave;
this.lightingRTEX[i].filterMode = FilterMode.Trilinear; // Custom
this.lightingRTEX[i].dimension = TextureDimension.Tex3D; // TODO: request the thick 3D tiling layout
this.lightingRTEX[i].volumeDepth = d;
this.lightingRTEX[i].enableRandomWrite = true;
this.lightingRTEX[i].name = CoreUtils.GetRenderTargetAutoName(w, h, RenderTextureFormat.ARGBHalf, String.Format("Volumetric{0}", i));
this.lightingRTEX[i].Create();
this.lightingRTID[i] = new RenderTargetIdentifier(this.lightingRTEX[i]);
m_Textures[i] = new RenderTexture(w, h, 0, RenderTextureFormat.ARGBHalf, RenderTextureReadWrite.Linear);
m_Textures[i].hideFlags = HideFlags.HideAndDontSave;
m_Textures[i].filterMode = FilterMode.Trilinear; // Custom
m_Textures[i].dimension = TextureDimension.Tex3D; // TODO: request the thick 3D tiling layout
m_Textures[i].volumeDepth = d;
m_Textures[i].enableRandomWrite = true;
m_Textures[i].name = CoreUtils.GetRenderTargetAutoName(w, h, RenderTextureFormat.ARGBHalf, String.Format("VBuffer{0}", i));
m_Textures[i].Create();
// TODO: clear the texture. Clearing 3D textures does not appear to work right now.
m_Identifiers[i] = new RenderTargetIdentifier(m_Textures[i]);
if (this.lightingRTEX != null)
if (m_Textures != null)
for (int i = 0, n = this.lightingRTEX.Length; i < n; i++)
for (int i = 0, n = m_Textures.Length; i < n; i++)
if (this.lightingRTEX[i] != null)
if (m_Textures[i] != null)
this.lightingRTEX[i].Release();
m_Textures[i].Release();
this.viewID = -1;
this.lightingRTEX = null;
this.lightingRTID = null;
m_ViewID = -1;
m_Textures = null;
m_Identifiers = null;
}
public void GetResolution(ref int w, ref int h, ref int d)
{
Debug.Assert(m_Textures != null);
Debug.Assert(m_Textures[0] != null);
Debug.Assert(m_Identifiers != null);
w = m_Textures[0].width;
h = m_Textures[0].height;
d = m_Textures[0].volumeDepth;
}
public long GetViewID()
{
return m_ViewID;
}
public bool IsValid()
{
return m_ViewID >= 0 && m_Textures != null && m_Textures[0] != null;
ComputeShader m_VolumeVoxelizationCS = null;
List<VBuffer> m_VBuffers = null;
List<OrientedBBox> m_VisibleVolumes = null;
List<VolumeProperties> m_VisibleVolumeProperties = null;
public const int k_MaxVisibleVolumeCount = 512;
List<VBuffer> m_VBuffers = null;
List<OrientedBBox> m_VisibleVolumeBounds = null;
List<DensityVolumeProperties> m_VisibleVolumeProperties = null;
public const int k_MaxVisibleVolumeCount = 512;
static ComputeBuffer s_VisibleVolumesBuffer = null;
static ComputeBuffer s_VisibleVolumeBoundsBuffer = null;
const float k_LogScale = 0.5f;
const float k_LogScale = 0.5f; // Tweak constant, controls the logarithmic depth distribution
m_VolumeVoxelizationCS = asset.renderPipelineResources.volumeVoxelizationCS;
m_VisibleVolumes = new List<OrientedBBox>();
m_VisibleVolumeProperties = new List<VolumeProperties>();
s_VisibleVolumesBuffer = new ComputeBuffer(k_MaxVisibleVolumeCount, System.Runtime.InteropServices.Marshal.SizeOf(typeof(OrientedBBox)));
s_VisibleVolumePropertiesBuffer = new ComputeBuffer(k_MaxVisibleVolumeCount, System.Runtime.InteropServices.Marshal.SizeOf(typeof(VolumeProperties)));
m_VisibleVolumeBounds = new List<OrientedBBox>();
m_VisibleVolumeProperties = new List<DensityVolumeProperties>();
s_VisibleVolumeBoundsBuffer = new ComputeBuffer(k_MaxVisibleVolumeCount, System.Runtime.InteropServices.Marshal.SizeOf(typeof(OrientedBBox)));
s_VisibleVolumePropertiesBuffer = new ComputeBuffer(k_MaxVisibleVolumeCount, System.Runtime.InteropServices.Marshal.SizeOf(typeof(DensityVolumeProperties)));
}
public void Cleanup()

m_VolumeVoxelizationCS = null;
m_VolumetricLightingCS = null;
for (int i = 0, n = m_VBuffers.Count; i < n; i++)

m_VBuffers = null;
m_VisibleVolumes = null;
m_VisibleVolumeBounds = null;
CoreUtils.SafeRelease(s_VisibleVolumesBuffer);
CoreUtils.SafeRelease(s_VisibleVolumeBoundsBuffer);
CoreUtils.SafeRelease(s_VisibleVolumePropertiesBuffer);
}

if (vBuffer != null)
{
Debug.Assert(vBuffer.lightingRTEX != null);
Debug.Assert(vBuffer.lightingRTEX[0] != null);
Debug.Assert(vBuffer.lightingRTID != null);
int width = 0, height = 0, depth = 0;
vBuffer.GetResolution(ref width, ref height, ref depth);
if (w == vBuffer.lightingRTEX[0].width &&
h == vBuffer.lightingRTEX[0].height &&
d == vBuffer.lightingRTEX[0].volumeDepth)
if (w == width && h == height && d == depth)
{
// Everything matches, nothing to do here.
return;

for (int i = 0; i < n; i++)
{
// Check whether domain reload killed it...
if (viewID == m_VBuffers[i].viewID && m_VBuffers[i].lightingRTEX != null && m_VBuffers[i].lightingRTEX[0] != null)
if (viewID == m_VBuffers[i].GetViewID() && m_VBuffers[i].IsValid())
{
vBuffer = m_VBuffers[i];
}

HomogeneousDensityVolume globalVolume = HomogeneousDensityVolume.GetGlobalHomogeneousDensityVolume();
// TODO: may want to cache these results somewhere.
VolumeProperties globalVolumeProperties = (globalVolume != null) ? globalVolume.volumeParameters.GetProperties()
: VolumeProperties.GetNeutralVolumeProperties();
DensityVolumeProperties globalVolumeProperties = (globalVolume != null) ? globalVolume.parameters.GetProperties()
: DensityVolumeProperties.GetNeutralProperties();
float asymmetry = globalVolume != null ? globalVolume.volumeParameters.asymmetry : 0;
float asymmetry = globalVolume != null ? globalVolume.parameters.asymmetry : 0;
int w = 0, h = 0, d = 0;
ComputeVBufferResolutionAndScale(preset, (int)camera.screenSize.x, (int)camera.screenSize.y, ref w, ref h, ref d);
int w = 0, h = 0, d = 0;
vBuffer.GetResolution(ref w, ref h, ref d);
SetPreconvolvedAmbientLightProbe(cmd, asymmetry);
cmd.SetGlobalVector( HDShaderIDs._VBufferResolution, new Vector4(w, h, 1.0f / w, 1.0f / h));
cmd.SetGlobalVector( HDShaderIDs._VBufferSliceCount, new Vector4(d, 1.0f / d));

}
public void VoxelizeDensityVolumes(HDCamera camera, CommandBuffer cmd)
public DensityVolumeList PrepareVisibleDensityVolumeList(HDCamera camera, CommandBuffer cmd)
if (preset == VolumetricLightingPreset.Off) return;
DensityVolumeList densityVolumes = new DensityVolumeList();
Vector3 camPosition = camera.camera.transform.position;
Vector3 camOffset = Vector3.zero; // World-origin-relative
if (preset == VolumetricLightingPreset.Off) return densityVolumes;
if (ShaderConfig.s_CameraRelativeRendering != 0)
using (new ProfilingSample(cmd, "Prepare Visible Density Volume List"))
camOffset = -camPosition; // Camera-relative
}
Vector3 camPosition = camera.camera.transform.position;
Vector3 camOffset = Vector3.zero; // World-origin-relative
m_VisibleVolumes.Clear();
m_VisibleVolumeProperties.Clear();
if (ShaderConfig.s_CameraRelativeRendering != 0)
{
camOffset = camPosition; // Camera-relative
}
// Collect all the visible volume data, and upload it to the GPU.
HomogeneousDensityVolume[] volumes = Object.FindObjectsOfType(typeof(HomogeneousDensityVolume)) as HomogeneousDensityVolume[];
m_VisibleVolumeBounds.Clear();
m_VisibleVolumeProperties.Clear();
foreach (HomogeneousDensityVolume volume in volumes)
{
// Only test active finite volumes.
if (volume.enabled && volume.volumeParameters.IsLocalVolume())
// Collect all visible finite volume data, and upload it to the GPU.
HomogeneousDensityVolume[] volumes = Object.FindObjectsOfType(typeof(HomogeneousDensityVolume)) as HomogeneousDensityVolume[];
for (int i = 0; i < Math.Min(volumes.Length, k_MaxVisibleVolumeCount); i++)
// TODO: cache these?
var obb = OrientedBBox.Create(volume.transform);
HomogeneousDensityVolume volume = volumes[i];
// Frustum cull on the CPU for now. TODO: do it on the GPU.
if (GeometryUtils.Overlap(obb, camOffset, camera.frustum, 6, 8))
// Only test active finite volumes.
if (volume.enabled && volume.parameters.IsLocalVolume())
var properties = volume.volumeParameters.GetProperties();
var obb = OrientedBBox.Create(volume.transform);
m_VisibleVolumes.Add(obb);
m_VisibleVolumeProperties.Add(properties);
// Handle camera-relative rendering.
obb.center -= camOffset;
// Frustum cull on the CPU for now. TODO: do it on the GPU.
if (GeometryUtils.Overlap(obb, camera.frustum, 6, 8))
{
// TODO: cache these?
var properties = volume.parameters.GetProperties();
m_VisibleVolumeBounds.Add(obb);
m_VisibleVolumeProperties.Add(properties);
}
s_VisibleVolumeBoundsBuffer.SetData(m_VisibleVolumeBounds);
s_VisibleVolumePropertiesBuffer.SetData(m_VisibleVolumeProperties);
// Fill the struct with pointers in order to share the data with the light loop.
densityVolumes.bounds = m_VisibleVolumeBounds;
densityVolumes.properties = m_VisibleVolumeProperties;
return densityVolumes;
}
s_VisibleVolumesBuffer.SetData(m_VisibleVolumes);
s_VisibleVolumePropertiesBuffer.SetData(m_VisibleVolumeProperties);
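
PrepareVisibleDensityVolumeList caps the scan at k_MaxVisibleVolumeCount, keeps only enabled local volumes, frustum-culls them on the CPU, and fills two parallel lists (bounds and properties) that are then uploaded with SetData. A simplified, engine-free C# sketch of that gathering pattern; Volume and Overlaps are hypothetical stand-ins, not HDRP types:

using System;
using System.Collections.Generic;

class Volume { public bool Enabled; public bool IsLocal; public float[] Bounds; public float Density; }

static class VisibleVolumeExample
{
    const int MaxVisibleVolumeCount = 512;

    // Hypothetical frustum test; the real code tests an oriented bounding box
    // against the camera frustum.
    static bool Overlaps(float[] bounds) => true;

    // Returns parallel lists: entry i of 'properties' describes entry i of 'bounds'.
    static (List<float[]> bounds, List<float> properties) Gather(Volume[] volumes)
    {
        var bounds = new List<float[]>();
        var properties = new List<float>();

        for (int i = 0; i < Math.Min(volumes.Length, MaxVisibleVolumeCount); i++)
        {
            Volume v = volumes[i];
            if (!v.Enabled || !v.IsLocal) continue; // only active, finite volumes
            if (!Overlaps(v.Bounds)) continue;      // CPU frustum cull

            bounds.Add(v.Bounds);
            properties.Add(v.Density);
        }
        return (bounds, properties); // both lists are uploaded to GPU buffers afterwards
    }
}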
public void VolumeVoxelizationPass(DensityVolumeList densityVolumes, HDCamera camera, CommandBuffer cmd, FrameSettings settings)
{
if (preset == VolumetricLightingPreset.Off) return;
using (new ProfilingSample(cmd, "Volume Voxelization"))
{
int numVisibleVolumes = m_VisibleVolumeBounds.Count;
if (numVisibleVolumes == 0)
{
// Clear the render target instead of running the shader.
// CoreUtils.SetRenderTarget(cmd, vBuffer.GetDensityBuffer(), ClearFlag.Color, CoreUtils.clearColorAllBlack);
// return;
// Clearing 3D textures does not seem to work!
// Use the workaround by running the full shader with 0 density.
}
VBuffer vBuffer = FindVBuffer(camera.GetViewID());
Debug.Assert(vBuffer != null);
int w = 0, h = 0, d = 0;
vBuffer.GetResolution(ref w, ref h, ref d);
bool enableClustered = settings.lightLoopSettings.enableTileAndCluster;
int kernel = m_VolumeVoxelizationCS.FindKernel(enableClustered ? "VolumeVoxelizationClustered"
: "VolumeVoxelizationBruteforce");
float vFoV = camera.camera.fieldOfView * Mathf.Deg2Rad;
Vector4 resolution = new Vector4(w, h, 1.0f / w, 1.0f / h);
Matrix4x4 transform = HDUtils.ComputePixelCoordToWorldSpaceViewDirectionMatrix(vFoV, resolution, camera.viewMatrix, false);
camera.SetupComputeShader( m_VolumeVoxelizationCS, cmd);
cmd.SetComputeTextureParam(m_VolumeVoxelizationCS, kernel, HDShaderIDs._VBufferDensity, vBuffer.GetDensityBuffer());
cmd.SetComputeBufferParam( m_VolumeVoxelizationCS, kernel, HDShaderIDs._VolumeBounds, s_VisibleVolumeBoundsBuffer);
cmd.SetComputeBufferParam( m_VolumeVoxelizationCS, kernel, HDShaderIDs._VolumeProperties, s_VisibleVolumePropertiesBuffer);
// TODO: set the constant buffer data only once.
cmd.SetComputeMatrixParam( m_VolumeVoxelizationCS, HDShaderIDs._VBufferCoordToViewDirWS, transform);
cmd.SetComputeIntParam( m_VolumeVoxelizationCS, HDShaderIDs._NumVisibleDensityVolumes, numVisibleVolumes);
// The shader defines GROUP_SIZE_1D = 8.
cmd.DispatchCompute(m_VolumeVoxelizationCS, kernel, (w + 7) / 8, (h + 7) / 8, 1);
}
}
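
The dispatch above issues (w + 7) / 8 by (h + 7) / 8 thread groups because the shader uses an 8x8 group size; the integer expression is a ceiling division, so partially covered groups at the right and bottom edges are still dispatched. A minimal sketch of that calculation:

using System;

static class DispatchExample
{
    // Ceiling division: number of N-wide groups needed to cover 'size' items.
    static int GroupCount(int size, int groupSize) => (size + groupSize - 1) / groupSize;

    static void Main()
    {
        // A 1920x1080 target with 8x8 groups.
        Console.WriteLine(GroupCount(1920, 8)); // 240
        Console.WriteLine(GroupCount(1080, 8)); // 135
        // 1081 pixels would still need 136 groups, the last one partially filled.
        Console.WriteLine(GroupCount(1081, 8)); // 136
    }
}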
// Ref: https://en.wikipedia.org/wiki/Close-packing_of_equal_spheres

return coords;
}
public void VolumetricLightingPass(HDCamera camera, CommandBuffer cmd, FrameSettings frameSettings)
public void VolumetricLightingPass(HDCamera camera, CommandBuffer cmd, FrameSettings settings)
{
if (preset == VolumetricLightingPreset.Off) return;

Debug.Assert(vBuffer != null);
HomogeneousDensityVolume globalVolume = HomogeneousDensityVolume.GetGlobalHomogeneousDensityVolume();
float asymmetry = globalVolume != null ? globalVolume.volumeParameters.asymmetry : 0;
float asymmetry = globalVolume != null ? globalVolume.parameters.asymmetry : 0;
// CoreUtils.SetRenderTarget(cmd, GetVBufferLightingIntegral(viewOffset), ClearFlag.Color, CoreUtils.clearColorAllBlack);
// CoreUtils.SetRenderTarget(cmd, vBuffer.GetLightingIntegralBuffer(), ClearFlag.Color, CoreUtils.clearColorAllBlack);
// CoreUtils.SetRenderTarget(cmd, vBuffer.GetLightingFeedbackBuffer(), ClearFlag.Color, CoreUtils.clearColorAllBlack);
// Use the workaround by running the full shader with no volume.
// Use the workaround by running the full shader with 0 density.
bool enableClustered = frameSettings.lightLoopSettings.enableTileAndCluster;
// Only available in the Play Mode because all the frame counters in the Edit Mode are broken.
bool enableClustered = settings.lightLoopSettings.enableTileAndCluster;
bool enableReprojection = Application.isPlaying && camera.camera.cameraType == CameraType.Game;
int kernel;

// Only available in the Play Mode because all the frame counters in the Edit Mode are broken.
: "VolumetricLightingAllLightsReproj");
: "VolumetricLightingBruteforceReproj");
: "VolumetricLightingAllLights");
: "VolumetricLightingBruteforce");
ComputeVBufferResolutionAndScale(preset, (int)camera.screenSize.x, (int)camera.screenSize.y, ref w, ref h, ref d);
vBuffer.GetResolution(ref w, ref h, ref d);
// Compose the matrix which allows us to compute the world space view direction.
float vFoV = camera.camera.fieldOfView * Mathf.Deg2Rad;

Vector4 offset = new Vector4(xySeq[sampleIndex].x, xySeq[sampleIndex].y, zSeq[sampleIndex], rfc);
// TODO: set 'm_VolumetricLightingPreset'.
cmd.SetComputeFloatParam( m_VolumetricLightingCS, HDShaderIDs._CornetteShanksConstant, CornetteShanksPhasePartConstant(asymmetry));
cmd.SetComputeVectorParam( m_VolumetricLightingCS, HDShaderIDs._VBufferSampleOffset, offset);
// TODO: set the constant buffer data only once.
cmd.SetComputeVectorParam( m_VolumetricLightingCS, HDShaderIDs._VBufferSampleOffset, offset);
cmd.SetComputeFloatParam( m_VolumetricLightingCS, HDShaderIDs._CornetteShanksConstant, CornetteShanksPhasePartConstant(asymmetry));
cmd.SetComputeTextureParam(m_VolumetricLightingCS, kernel, HDShaderIDs._VBufferDensity, vBuffer.GetDensityBuffer()); // Read
cmd.SetComputeTextureParam(m_VolumetricLightingCS, kernel, HDShaderIDs._VBufferLightingIntegral, vBuffer.GetLightingIntegralBuffer()); // Write
if (enableReprojection)
{

// The shader defines GROUP_SIZE_1D = 16.
cmd.DispatchCompute(m_VolumetricLightingCS, kernel, (w + 15) / 16, (h + 15) / 16, 1);
// The shader defines GROUP_SIZE_1D = 8.
cmd.DispatchCompute(m_VolumetricLightingCS, kernel, (w + 7) / 8, (h + 7) / 8, 1);
}
}
} // class VolumetricLightingModule

10
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/VolumetricLighting.cs.hlsl


#ifndef VOLUMETRICLIGHTING_CS_HLSL
#define VOLUMETRICLIGHTING_CS_HLSL
// Generated from UnityEngine.Experimental.Rendering.HDPipeline.VolumeProperties
// Generated from UnityEngine.Experimental.Rendering.HDPipeline.DensityVolumeProperties
struct VolumeProperties
struct DensityVolumeProperties
{
float3 scattering;
float extinction;

// Accessors for UnityEngine.Experimental.Rendering.HDPipeline.VolumeProperties
// Accessors for UnityEngine.Experimental.Rendering.HDPipeline.DensityVolumeProperties
float3 GetScattering(VolumeProperties value)
float3 GetScattering(DensityVolumeProperties value)
float GetExtinction(VolumeProperties value)
float GetExtinction(DensityVolumeProperties value)
{
return value.extinction;
}

1
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/Decal/DBufferManager.cs


if (frameSettings.enableDBuffer)
{
cmd.SetGlobalInt(HDShaderIDs._EnableDBuffer, vsibleDecalCount > 0 ? 1 : 0);
cmd.SetGlobalVector(HDShaderIDs._DecalAtlasResolution, new Vector2(DecalSystem.kDecalAtlasSize, DecalSystem.kDecalAtlasSize));
BindBufferAsTextures(cmd);
}
else

1
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/Decal/Decal.hlsl


CBUFFER_START(UnityDecalParameters)
uint _EnableDBuffer;
float2 _DecalAtlasResolution;
CBUFFER_END
UNITY_INSTANCING_BUFFER_START(Decal)

1
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/Decal/DecalData.hlsl


#if _COLORMAP
surfaceData.baseColor = SAMPLE_TEXTURE2D(_BaseColorMap, sampler_BaseColorMap, texCoordDS.xy);
surfaceData.baseColor.w *= totalBlend;
totalBlend = surfaceData.baseColor.w; // base alpha affects all other channels
surfaceData.HTileMask |= DBUFFERHTILEBIT_DIFFUSE;
#endif
#if _NORMALMAP

48
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/Decal/DecalUtilities.hlsl


// Caution: We can't compute the LOD inside a dynamic loop. The gradients are not accessible.
// We need to find a way to calculate mips. For now, just fetch the first mip of the decals.
void ApplyBlendNormal(inout float4 dst, inout int matMask, float2 texCoords, int sliceIndex, int mapMask, float3x3 decalToWorld, float blend)
void ApplyBlendNormal(inout float4 dst, inout int matMask, float2 texCoords, int sliceIndex, int mapMask, float3x3 decalToWorld, float blend, float lod)
src.xyz = mul(decalToWorld, UnpackNormalmapRGorAG(SAMPLE_TEXTURE2D_ARRAY_LOD(_DecalAtlas, sampler_DecalAtlas, texCoords, sliceIndex, 0 /* ComputeTextureLOD(texCoords) */))) * 0.5f + 0.5f;
src.xyz = mul(decalToWorld, UnpackNormalmapRGorAG(SAMPLE_TEXTURE2D_ARRAY_LOD(_DecalAtlas, sampler_DecalAtlas, texCoords, sliceIndex, lod))) * 0.5f + 0.5f;
src.w = blend;
dst.xyz = src.xyz * src.w + dst.xyz * (1.0f - src.w);
dst.w = dst.w * (1.0f - src.w);

void ApplyBlendDiffuse(inout float4 dst, inout int matMask, float2 texCoords, int sliceIndex, int mapMask, float blend)
void ApplyBlendDiffuse(inout float4 dst, inout int matMask, float2 texCoords, int sliceIndex, int mapMask, inout float blend, float lod)
float4 src = SAMPLE_TEXTURE2D_ARRAY_LOD(_DecalAtlas, sampler_DecalAtlas, texCoords, sliceIndex, 0 /* ComputeTextureLOD(texCoords) */);
float4 src = SAMPLE_TEXTURE2D_ARRAY_LOD(_DecalAtlas, sampler_DecalAtlas, texCoords, sliceIndex, lod);
blend = src.w; // diffuse texture alpha affects all other channels
void ApplyBlendMask(inout float4 dst, inout int matMask, float2 texCoords, int sliceIndex, int mapMask, float blend)
void ApplyBlendMask(inout float4 dst, inout int matMask, float2 texCoords, int sliceIndex, int mapMask, float blend, float lod)
float4 src = SAMPLE_TEXTURE2D_ARRAY_LOD(_DecalAtlas, sampler_DecalAtlas, texCoords, sliceIndex, 0 /* ComputeTextureLOD(texCoords) */);
float4 src = SAMPLE_TEXTURE2D_ARRAY_LOD(_DecalAtlas, sampler_DecalAtlas, texCoords, sliceIndex, lod);
src.z = src.w;
src.w = blend;
dst.xyz = src.xyz * src.w + dst.xyz * (1.0f - src.w);

void AddDecalContribution(PositionInputs posInput, inout SurfaceData surfaceData)
void AddDecalContribution(PositionInputs posInput, inout SurfaceData surfaceData, inout float alpha)
{
if(_EnableDBuffer)
{

#endif
float3 positionWS = GetAbsolutePositionWS(posInput.positionWS);
uint i = 0;
UNITY_LOOP
for (i = 0; i < decalCount; i++)
{
DecalData decalData = FetchDecal(decalStart, i);

int diffuseIndex = decalData.normalToWorld[1][3];
int normalIndex = decalData.normalToWorld[2][3];
int maskIndex = decalData.normalToWorld[3][3];
if((all(positionDS.xyz > 0.0f) && all(1.0f - positionDS.xyz > 0.0f))) // clip to decal space
float lod = ComputeTextureLOD(positionDS.xz, _DecalAtlasResolution);
decalBlend = ((all(positionDS.xyz > 0.0f) && all(1.0f - positionDS.xyz > 0.0f))) ? decalBlend : 0; // use a blend of 0 instead of an 'if' because the compiler moves the lod calculation inside the 'if', which causes incorrect values
// if any of the pixels in the 2x2 quad gets rejected.
// Verified that the lod calculation works with a test texture; looking at the shader code in Razor, the lod calculation is outside the dynamic branches where the texture fetch happens.
// However, the compiler was placing it inside the branch that was rejecting the pixel, causing an incorrect lod for any 2x2 quad where any of the pixels were rejected,
// so an alpha blend of 0 had to be used instead of branching to solve that issue.
if(diffuseIndex != -1)
if(diffuseIndex != -1)
{
ApplyBlendDiffuse(DBuffer0, mask, positionDS.xz, diffuseIndex, DBUFFERHTILEBIT_DIFFUSE, decalBlend);
}
ApplyBlendDiffuse(DBuffer0, mask, positionDS.xz, diffuseIndex, DBUFFERHTILEBIT_DIFFUSE, decalBlend, lod);
alpha = alpha < decalBlend ? decalBlend : alpha; // use the decal alpha if it is higher than the transparent alpha
}
if(normalIndex != -1)
{
ApplyBlendNormal(DBuffer1, mask, positionDS.xz, normalIndex, DBUFFERHTILEBIT_NORMAL, (float3x3)decalData.normalToWorld, decalBlend);
}
if(normalIndex != -1)
{
ApplyBlendNormal(DBuffer1, mask, positionDS.xz, normalIndex, DBUFFERHTILEBIT_NORMAL, (float3x3)decalData.normalToWorld, decalBlend, lod);
}
if(maskIndex != -1)
{
ApplyBlendMask(DBuffer2, mask, positionDS.xz, maskIndex, DBUFFERHTILEBIT_MASK, decalBlend);
}
if(maskIndex != -1)
{
ApplyBlendMask(DBuffer2, mask, positionDS.xz, maskIndex, DBUFFERHTILEBIT_MASK, decalBlend, lod);
}
}
#else
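
The change above computes the decal atlas LOD once, outside the per-decal branches, and passes it into every SAMPLE_TEXTURE2D_ARRAY_LOD call; as the comments explain, letting the gradient-based LOD computation move inside a divergent branch corrupted it for any 2x2 quad with a rejected pixel. For reference, a mip level can be estimated from the UV footprint in texels; the following C# sketch shows that standard estimate and is not the exact ComputeTextureLOD implementation:

using System;

static class MipEstimateExample
{
    // Approximate mip level from the per-pixel UV derivatives and the texture
    // resolution: lod = log2(largest texel footprint of one pixel).
    static float EstimateLod(float duvDx, float duvDy, float textureSize)
    {
        float footprint = Math.Max(Math.Abs(duvDx), Math.Abs(duvDy)) * textureSize;
        return Math.Max(0.0f, (float)Math.Log(footprint, 2.0));
    }

    static void Main()
    {
        // UV changes by 1/256 per pixel on a 4096-wide atlas -> 16 texels per pixel -> mip 4.
        Console.WriteLine(EstimateLod(1.0f / 256.0f, 1.0f / 512.0f, 4096.0f)); // 4
    }
}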

3
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/DiffusionProfile/DiffusionProfileSettings.cs


ValidateArray(ref halfRcpVariancesAndWeights, DiffusionProfileConstants.DIFFUSION_PROFILE_COUNT * 2);
ValidateArray(ref filterKernelsBasic, DiffusionProfileConstants.DIFFUSION_PROFILE_COUNT * DiffusionProfileConstants.SSS_BASIC_N_SAMPLES);
Debug.Assert(DiffusionProfileConstants.DIFFUSION_PROFILE_NEUTRAL_ID <= 32, "Transmission and Texture flags (32-bit integer) cannot support more than 32 profiles.");
Debug.Assert(DiffusionProfileConstants.DIFFUSION_PROFILE_COUNT <= 32, "Transmission and Texture flags (32-bit integer) cannot support more than 32 profiles.");
UpdateCache();
}

worldScales[neutralId] = Vector4.one;
shapeParams[neutralId] = Vector4.zero;
transmissionTintsAndFresnel0[neutralId].w = 0.04f; // Match DEFAULT_SPECULAR_VALUE defined in Lit.hlsl
for (int j = 0, n = DiffusionProfileConstants.SSS_N_SAMPLES_NEAR_FIELD; j < n; j++)
{

79
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/GGXConvolution/RuntimeFilterIBL.cs


using UnityEngine.Rendering;
using System;
using System.Collections.Generic;
namespace UnityEngine.Experimental.Rendering.HDPipeline
{

RenderPipelineResources m_RenderPipelineResources;
BufferPyramidProcessor m_BufferPyramidProcessor;
List<RenderTexture> m_PlanarColorMips = new List<RenderTexture>();
public IBLFilterGGX(RenderPipelineResources renderPipelineResources)
public IBLFilterGGX(RenderPipelineResources renderPipelineResources, BufferPyramidProcessor processor)
m_BufferPyramidProcessor = processor;
}
public bool IsInitialized()

{
CoreUtils.Destroy(m_GgxConvolveMaterial);
CoreUtils.Destroy(m_GgxIblSampleData);
for (var i = 0; i < m_PlanarColorMips.Count; ++i)
m_PlanarColorMips[i].Release();
m_PlanarColorMips.Clear();
}
void FilterCubemapCommon( CommandBuffer cmd,

public void FilterPlanarTexture(CommandBuffer cmd, Texture source, RenderTexture target)
{
// TODO: planar convolution
cmd.CopyTexture(source, 0, 0, target, 0, 0);
var lodCount = Mathf.Max(Mathf.FloorToInt(Mathf.Log(Mathf.Min(source.width, source.height), 2f)), 0);
for (var i = 0 ; i < lodCount - 0; ++i)
{
var width = target.width >> (i + 1);
var height = target.height >> (i + 1);
var rtHash = HashRenderTextureProperties(
width,
height,
target.depth,
target.format,
target.sRGB ? RenderTextureReadWrite.sRGB : RenderTextureReadWrite.Linear
);
var lodIsMissing = i >= m_PlanarColorMips.Count;
RenderTexture rt = null;
var createRT = lodIsMissing
|| (rt = m_PlanarColorMips[i]) == null
|| rtHash != HashRenderTextureProperties(
rt.width, rt.height, rt.depth, rt.format, rt.sRGB
? RenderTextureReadWrite.sRGB
: RenderTextureReadWrite.Linear
);
if (createRT && rt)
rt.Release();
if (createRT)
{
rt = new RenderTexture(
width,
height,
target.depth,
target.format,
target.sRGB ? RenderTextureReadWrite.sRGB : RenderTextureReadWrite.Linear
);
rt.enableRandomWrite = true;
rt.name = "Planar Convolution Tmp RT";
rt.hideFlags = HideFlags.HideAndDontSave;
rt.Create();
}
if (lodIsMissing)
m_PlanarColorMips.Add(rt);
else if (createRT)
m_PlanarColorMips[i] = rt;
}
m_BufferPyramidProcessor.RenderColorPyramid(
new RectInt(0, 0, source.width, source.height),
cmd,
source,
target,
m_PlanarColorMips,
lodCount
);
}
// Filters MIP map levels (other than 0) with GGX using multiple importance sampling.

m_GgxConvolveMaterial.SetTexture("_MarginalRowDensities", marginalRowCdf);
FilterCubemapCommon(cmd, source, target, m_faceWorldToViewMatrixMatrices);
}
int HashRenderTextureProperties(
int width,
int height,
int depth,
RenderTextureFormat format,
RenderTextureReadWrite sRGB)
{
return width.GetHashCode()
^ height.GetHashCode()
^ depth.GetHashCode()
^ format.GetHashCode()
^ sRGB.GetHashCode();
}
}
}

2
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/LayeredLit/LayeredLit.shader


// Include
//-------------------------------------------------------------------------------------
#include "CoreRP/ShaderLibrary/common.hlsl"
#include "CoreRP/ShaderLibrary/Common.hlsl"
#include "CoreRP/ShaderLibrary/Wind.hlsl"
#include "../../ShaderPass/FragInputs.hlsl"
#include "../../ShaderPass/ShaderPass.cs.hlsl"

17
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/LayeredLit/LayeredLitData.hlsl


#endif
#if defined(_SUBSURFACE_MASK_MAP0)
#define _SUBSURFACE_MASK_MAP_IDX sampler_SubsurfaceMaskMap0
#define SAMPLER_SUBSURFACE_MASK_MAP_IDX sampler_SubsurfaceMaskMap0
#define _SUBSURFACE_MASK_MAP_IDX sampler_SubsurfaceMaskMap1
#define SAMPLER_SUBSURFACE_MASK_MAP_IDX sampler_SubsurfaceMaskMap1
#define _SUBSURFACE_MASK_MAP_IDX sampler_SubsurfaceMaskMap2
#define SAMPLER_SUBSURFACE_MASK_MAP_IDX sampler_SubsurfaceMaskMap2
#define _SUBSURFACE_MASK_MAP_IDX sampler_SubsurfaceMaskMap3
#define SAMPLER_SUBSURFACE_MASK_MAP_IDX sampler_SubsurfaceMaskMap3
#endif
#if defined(_THICKNESSMAP0)

void ComputeMaskWeights(float4 inputMasks, out float outWeights[_MAX_LAYER])
{
ZERO_INITIALIZE_ARRAY(float, outWeights, _MAX_LAYER);
float masks[_MAX_LAYER];
masks[0] = inputMasks.a;

// This function handles triplanar
void ComputeLayerWeights(FragInputs input, LayerTexCoord layerTexCoord, float4 inputAlphaMask, float4 blendMasks, out float outWeights[_MAX_LAYER])
{
for (int i = 0; i < _MAX_LAYER; ++i)
{
outWeights[i] = 0.0f;
}
#if defined(_DENSITY_MODE)
// Note: blendMasks.argb because a is main layer
float4 opacityAsDensity = saturate((inputAlphaMask - (float4(1.0, 1.0, 1.0, 1.0) - blendMasks.argb)) * 20.0); // 20.0 is the number of steps in inputAlphaMask (Density mask. We decided 20 empirically)

#endif
#ifndef _DISABLE_DBUFFER
AddDecalContribution(posInput, surfaceData);
AddDecalContribution(posInput, surfaceData, alpha);
#endif
#if defined(DEBUG_DISPLAY)

4
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/LayeredLit/LayeredLitTessellation.shader


// Include
//-------------------------------------------------------------------------------------
#include "CoreRP/ShaderLibrary/common.hlsl"
#include "CoreRP/ShaderLibrary/Common.hlsl"
#include "CoreRP/ShaderLibrary/tessellation.hlsl"
#include "CoreRP/ShaderLibrary/Tessellation.hlsl"
#include "../../ShaderPass/FragInputs.hlsl"
#include "../../ShaderPass/ShaderPass.cs.hlsl"

19
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/Lit/Lit.cs


LitClearCoat = 1 << 6
};
[GenerateHLSL(PackingRules.Exact)]
public enum RefractionMode
public enum RefractionModel
[GenerateHLSL]
public enum RefractionSSRayModel
{
None = 0,
Proxy = 1,
HiZ = 2
};
[GenerateHLSL]
public enum HiZIntersectionKind
{
None,
Cell,
Depth
}
//-----------------------------------------------------------------------------
// SurfaceData

15
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/Lit/Lit.cs.hlsl


#define MATERIALFEATUREFLAGS_LIT_CLEAR_COAT (64)
//
// UnityEngine.Experimental.Rendering.HDPipeline.Lit+RefractionMode: static fields
// UnityEngine.Experimental.Rendering.HDPipeline.Lit+RefractionSSRayModel: static fields
//
#define REFRACTIONSSRAYMODEL_NONE (0)
#define REFRACTIONSSRAYMODEL_PROXY (1)
#define REFRACTIONSSRAYMODEL_HI_Z (2)
//
// UnityEngine.Experimental.Rendering.HDPipeline.Lit+HiZIntersectionKind: static fields
#define REFRACTIONMODE_NONE (0)
#define REFRACTIONMODE_PLANE (1)
#define REFRACTIONMODE_SPHERE (2)
#define HIZINTERSECTIONKIND_NONE (0)
#define HIZINTERSECTIONKIND_CELL (1)
#define HIZINTERSECTIONKIND_DEPTH (2)
//
// UnityEngine.Experimental.Rendering.HDPipeline.Lit+SurfaceData: static fields

177
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/Lit/Lit.hlsl


// Rough refraction texture
// Color pyramid (width, height, lodcount, Unused)
TEXTURE2D(_GaussianPyramidColorTexture);
TEXTURE2D(_ColorPyramidTexture);
TEXTURE2D(_PyramidDepthTexture);
TEXTURE2D(_DepthPyramidTexture);
CBUFFER_START(UnityLightingParameters)
// Buffer pyramid
float4 _ColorPyramidSize; // (x,y) = Actual Pixel Size, (z,w) = 1 / Actual Pixel Size
float4 _DepthPyramidSize; // (x,y) = Actual Pixel Size, (z,w) = 1 / Actual Pixel Size
float4 _ColorPyramidScale; // (x,y) = Screen Scale, z = lod count, w = unused
float4 _DepthPyramidScale; // (x,y) = Screen Scale, z = lod count, w = unused
// Screen space lighting
float _SSRefractionInvScreenWeightDistance; // Distance for screen space smoothstep with fallback
CBUFFER_START(UnityGaussianPyramidParameters)
float4 _GaussianPyramidColorMipSize; // (x,y) = PyramidToScreenScale, z = lodCount
float4 _PyramidDepthMipSize;
// Ambient occlusion
float4 _AmbientOcclusionParam; // xyz occlusion color, w directLightStrength
CBUFFER_START(UnityAmbientOcclusionParameters)
float4 _AmbientOcclusionParam; // xyz occlusion color, w directLightStrength
CBUFFER_END
// Area light textures
// TODO: This one should be set in a constant buffer at pass frequency (with _Screensize)

#define GBufferType2 float4
#define GBufferType3 float4
#define HAS_REFRACTION (defined(_REFRACTION_PLANE) || defined(_REFRACTION_SPHERE))
#define HAS_REFRACTION (defined(_REFRACTION_PLANE) || defined(_REFRACTION_SPHERE)) && (defined(_REFRACTION_SSRAY_PROXY) || defined(_REFRACTION_SSRAY_HIZ))
#define DEFAULT_SPECULAR_VALUE 0.04

#if HAS_REFRACTION
# include "CoreRP/ShaderLibrary/Refraction.hlsl"
# include "HDRP/Lighting/Reflection/VolumeProjection.hlsl"
# include "HDRP/Lighting/LightDefinition.cs.hlsl"
# define SSRTID Refraction
# include "HDRP/Lighting/Reflection/ScreenSpaceTracing.hlsl"
# undef SSRTID
# if defined(_REFRACTION_PLANE)
# define REFRACTION_MODEL(V, posInputs, bsdfData) RefractionModelPlane(V, posInputs.positionWS, bsdfData.normalWS, bsdfData.ior, bsdfData.thickness)

#endif
float3 EstimateRaycast(float3 V, PositionInputs posInputs, float3 positionWS, float3 rayWS)
{
// For all refraction approximations, to calculate the refracted point in world space,
// we approximate the scene as a plane (back plane) with normal -V at the depth hit point.
// (This avoids ray marching the depth texture to get the refracted point.)
uint2 depthSize = uint2(_PyramidDepthMipSize.xy);
// Get the depth of the approximated back plane
float pyramidDepth = LOAD_TEXTURE2D_LOD(_PyramidDepthTexture, posInputs.positionNDC * (depthSize >> 2), 2).r;
float depth = LinearEyeDepth(pyramidDepth, _DepthBufferParam);
// Distance from point to the back plane
float depthFromPositionInput = depth - posInputs.linearDepth;
float offset = dot(-V, positionWS - posInputs.positionWS);
float depthFromPosition = depthFromPositionInput - offset;
float hitDistanceFromPosition = depthFromPosition / dot(-V, rayWS);
return positionWS + rayWS * hitDistanceFromPosition;
}
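
EstimateRaycast above avoids ray marching by treating the scene behind the refractive surface as a plane with normal -V placed at the sampled depth, then intersecting the refracted ray with that plane. The same intersection written as standalone C# over System.Numerics, using the quantities named in the shader:

using System;
using System.Numerics;

static class RefractionEstimateExample
{
    // positionWS        : refraction ray origin
    // surfacePositionWS : the shaded surface point (posInputs.positionWS)
    // V                 : view vector (towards the camera)
    // rayWS             : refracted ray direction
    // backPlaneDepth / surfaceDepth : linear eye depths of the back plane and the surface
    static Vector3 EstimateRaycast(
        Vector3 V, Vector3 rayWS,
        Vector3 surfacePositionWS, Vector3 positionWS,
        float backPlaneDepth, float surfaceDepth)
    {
        // Distance from the shaded point to the approximated back plane, along -V.
        float depthFromSurface = backPlaneDepth - surfaceDepth;

        // The ray origin may not be the shaded point; project the offset onto -V.
        float offset = Vector3.Dot(-V, positionWS - surfacePositionWS);
        float depthFromPosition = depthFromSurface - offset;

        // Intersect the ray with the plane of normal -V at that depth.
        float hitDistance = depthFromPosition / Vector3.Dot(-V, rayWS);
        return positionWS + rayWS * hitDistance;
    }
}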
# if defined(_REFRACTION_SSRAY_PROXY)
# define REFRACTION_SSRAY_IN ScreenSpaceProxyRaycastInput
# define REFRACTION_SSRAY_QUERY(input, hit) ScreenSpaceProxyRaycastRefraction(input, hit)
# elif defined(_REFRACTION_SSRAY_HIZ)
# define REFRACTION_SSRAY_IN ScreenSpaceHiZRaymarchInput
# define REFRACTION_SSRAY_QUERY(input, hit) ScreenSpaceHiZRaymarchRefraction(input, hit)
# endif
#endif
// This method allows us to know at compile time which material features should be removed from the code per tile (independently of the value of the material feature flag per pixel).
// This is only useful for classification during lighting, so it's not needed in EncodeIntoGBuffer and ConvertSurfaceDataToBSDFData (where we always know exactly what the material feature is)

preLightData.transparentTransmittance = exp(-bsdfData.absorptionCoefficient * refraction.dist);
// Empirical remap to roughly match the refraction probe blurring for the fallback
// Use IblPerceptualRoughness so we can handle approx of clear coat.
preLightData.transparentSSMipLevel = sqrt(preLightData.iblPerceptualRoughness) * uint(_GaussianPyramidColorMipSize.z);
preLightData.transparentSSMipLevel = sqrt(preLightData.iblPerceptualRoughness) * uint(_ColorPyramidScale.z);
#endif
return preLightData;

IndirectLighting EvaluateBSDF_SSLighting(LightLoopContext lightLoopContext,
float3 V, PositionInputs posInput,
PreLightData preLightData, BSDFData bsdfData,
EnvLightData envLightData,
int GPUImageBasedLightingType,
inout float hierarchyWeight)
{

case GPUIMAGEBASEDLIGHTINGTYPE_REFRACTION:
{
#if HAS_REFRACTION
// Refraction process:
// 1. Depending on the shape model, we calculate the refracted point in world space and the optical depth
// 2. We calculate the screen space position of the refracted point

float3 refractedBackPointWS = EstimateRaycast(V, posInput, preLightData.transparentPositionWS, preLightData.transparentRefractV);
float3 rayOriginWS = preLightData.transparentPositionWS;
float3 rayDirWS = preLightData.transparentRefractV;
#if DEBUG_DISPLAY
int debugMode = DEBUGLIGHTINGMODE_SCREEN_SPACE_TRACING_REFRACTION;
bool debug = _DebugLightingMode == debugMode
&& !any(int2(_MouseClickPixelCoord.xy) - int2(posInput.positionSS));
#endif
// Initialize screen space tracing
REFRACTION_SSRAY_IN ssRayInput;
ZERO_INITIALIZE(REFRACTION_SSRAY_IN, ssRayInput);
// Common initialization
ssRayInput.rayOriginWS = rayOriginWS;
ssRayInput.rayDirWS = rayDirWS;
#if DEBUG_DISPLAY
ssRayInput.debug = debug;
#endif
// Algorithm specific initialization
#ifdef _REFRACTION_SSRAY_HIZ
ssRayInput.maxIterations = uint(-1);
#elif _REFRACTION_SSRAY_PROXY
ssRayInput.proxyData = envLightData;
#endif
// Calculate screen space coordinates of refracted point in back plane
float2 refractedBackPointNDC = ComputeNormalizedDeviceCoordinates(refractedBackPointWS, UNITY_MATRIX_VP);
uint2 depthSize = uint2(_PyramidDepthMipSize.xy);
float refractedBackPointDepth = LinearEyeDepth(LOAD_TEXTURE2D_LOD(_PyramidDepthTexture, refractedBackPointNDC * depthSize, 0).r, _DepthBufferParam);
// Perform ray query
ScreenSpaceRayHit hit;
ZERO_INITIALIZE(ScreenSpaceRayHit, hit);
bool hitSuccessful = REFRACTION_SSRAY_QUERY(ssRayInput, hit);
// Debug screen space tracing
#ifdef DEBUG_DISPLAY
if (_DebugLightingMode == debugMode
&& _DebugLightingSubMode != DEBUGSCREENSPACETRACING_COLOR)
{
float weight = 1.0;
UpdateLightingHierarchyWeights(hierarchyWeight, weight);
lighting.specularTransmitted = hit.debugOutput;
return lighting;
}
#endif
if (!hitSuccessful)
return lighting;
float2 weightNDC = clamp(min(hit.positionNDC, 1 - hit.positionNDC) * _SSRefractionInvScreenWeightDistance, 0, 1);
weightNDC = weightNDC * weightNDC * (3 - 2 * weightNDC);
float weight = weightNDC.x * weightNDC.y;
float hitDeviceDepth = LOAD_TEXTURE2D_LOD(_DepthPyramidTexture, hit.positionSS, 0).r;
float hitLinearDepth = LinearEyeDepth(hitDeviceDepth, _ZBufferParams);
if (refractedBackPointDepth < posInput.linearDepth
|| any(refractedBackPointNDC < 0.0)
|| any(refractedBackPointNDC > 1.0))
if (hitLinearDepth < posInput.linearDepth
|| weight == 0)
// Map the roughness to the correct mip map level of the color pyramid
lighting.specularTransmitted = SAMPLE_TEXTURE2D_LOD(_GaussianPyramidColorTexture, s_trilinear_clamp_sampler, refractedBackPointNDC * _GaussianPyramidColorMipSize.xy, preLightData.transparentSSMipLevel).rgb;
UpdateLightingHierarchyWeights(hierarchyWeight, weight); // Shouldn't be needed, but safer in case we decide to change hierarchy priority
// Beer-Lambert law for absorption
lighting.specularTransmitted *= preLightData.transparentTransmittance;
float3 preLD = SAMPLE_TEXTURE2D_LOD(
_ColorPyramidTexture,
s_trilinear_clamp_sampler,
hit.positionNDC * _ColorPyramidScale.xy,
preLightData.transparentSSMipLevel
).rgb;
float weight = 1.0;
UpdateLightingHierarchyWeights(hierarchyWeight, weight); // Shouldn't be needed, but safer in case we decide to change hierarchy priority
lighting.specularTransmitted *= (1.0 - preLightData.specularFGD) * weight;
float3 F = preLightData.specularFGD;
lighting.specularTransmitted = (1.0 - F) * preLD.rgb * preLightData.transparentTransmittance * weight;
#else
// No refraction, no need to go further
hierarchyWeight = 1.0;

#ifdef DEBUG_DISPLAY
if (_DebugLightingMode == DEBUGLIGHTINGMODE_LUX_METER)
switch(_DebugLightingMode)
case DEBUGLIGHTINGMODE_LUX_METER:
}
else if (_DebugLightingMode == DEBUGLIGHTINGMODE_INDIRECT_DIFFUSE_OCCLUSION_FROM_SSAO)
{
break;
case DEBUGLIGHTINGMODE_INDIRECT_DIFFUSE_OCCLUSION_FROM_SSAO:
}
else if (_DebugLightingMode == DEBUGLIGHTINGMODE_INDIRECT_SPECULAR_OCCLUSION_FROM_SSAO)
{
break;
case DEBUGLIGHTINGMODE_INDIRECT_SPECULAR_OCCLUSION_FROM_SSAO:
}
break;
else if (_DebugLightingMode == DEBUGLIGHTINGMODE_INDIRECT_DIFFUSE_GTAO_FROM_SSAO)
{
case DEBUGLIGHTINGMODE_INDIRECT_DIFFUSE_GTAO_FROM_SSAO:
}
else if (_DebugLightingMode == DEBUGLIGHTINGMODE_INDIRECT_SPECULAR_GTAO_FROM_SSAO)
{
break;
case DEBUGLIGHTINGMODE_INDIRECT_SPECULAR_GTAO_FROM_SSAO:
}
break;
else if (_DebugMipMapMode != DEBUGMIPMAPMODE_NONE)
case DEBUGMIPMAPMODE_NONE:
diffuseLighting = bsdfData.diffuseColor;
specularLighting = float3(0.0, 0.0, 0.0); // Disable specular lighting
break;
case DEBUGLIGHTINGMODE_SCREEN_SPACE_TRACING_REFRACTION:
if (_DebugLightingSubMode != DEBUGSCREENSPACETRACING_COLOR)
diffuseLighting = bsdfData.diffuseColor;
diffuseLighting = lighting.indirect.specularTransmitted;
}
break;
}
#endif
}

4
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/Lit/Lit.shader


_TransparentSortPriority("_TransparentSortPriority", Float) = 0
// Transparency
[Enum(None, 0, Plane, 1, Sphere, 2)]_RefractionMode("Refraction Mode", Int) = 0
[Enum(None, 0, Plane, 1, Sphere, 2)]_RefractionModel("Refraction Model", Int) = 0
[Enum(Proxy, 1, HiZ, 2)]_RefractionSSRayModel("Refraction SSRay Model", Int) = 0
_Ior("Index Of Refraction", Range(1.0, 2.5)) = 1.0
_ThicknessMultiplier("Thickness Multiplier", Float) = 1.0
_TransmittanceColor("Transmittance Color", Color) = (1.0, 1.0, 1.0)

#pragma shader_feature _PIXEL_DISPLACEMENT_LOCK_OBJECT_SCALE
#pragma shader_feature _VERTEX_WIND
#pragma shader_feature _ _REFRACTION_PLANE _REFRACTION_SPHERE
#pragma shader_feature _ _REFRACTION_SSRAY_PROXY _REFRACTION_SSRAY_HIZ
#pragma shader_feature _ _EMISSIVE_MAPPING_PLANAR _EMISSIVE_MAPPING_TRIPLANAR
#pragma shader_feature _ _MAPPING_PLANAR _MAPPING_TRIPLANAR

4
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/Lit/LitData.hlsl


#define SAMPLER_MASKMAP_IDX sampler_MaskMap
#define SAMPLER_HEIGHTMAP_IDX sampler_HeightMap
#define SAMPLER_SUBSURFACE_MASKMAP_IDX sampler_SubsurfaceMaskMap
#define SAMPLER_SUBSURFACE_MASK_MAP_IDX sampler_SubsurfaceMaskMap
#define SAMPLER_THICKNESSMAP_IDX sampler_ThicknessMap
// include LitDataIndividualLayer to define GetSurfaceData

surfaceData.tangentWS = Orthonormalize(surfaceData.tangentWS, surfaceData.normalWS);
#ifndef _DISABLE_DBUFFER
AddDecalContribution(posInput, surfaceData);
AddDecalContribution(posInput, surfaceData, alpha);
#endif
#if defined(DEBUG_DISPLAY)

2
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/Lit/LitDataIndividualLayer.hlsl


surfaceData.subsurfaceMask = ADD_IDX(_SubsurfaceMask);
#ifdef _SUBSURFACE_MASK_MAP_IDX
surfaceData.subsurfaceMask *= SAMPLE_UVMAPPING_TEXTURE2D(ADD_IDX(_SubsurfaceMaskMap), SAMPLER_SUBSURFACE_MASKMAP_IDX, ADD_IDX(layerTexCoord.base)).r;
surfaceData.subsurfaceMask *= SAMPLE_UVMAPPING_TEXTURE2D(ADD_IDX(_SubsurfaceMaskMap), SAMPLER_SUBSURFACE_MASK_MAP_IDX, ADD_IDX(layerTexCoord.base)).r;
#endif
#ifdef _THICKNESSMAP_IDX

8
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/Lit/LitTessellation.shader


_TransparentSortPriority("_TransparentSortPriority", Float) = 0
// Transparency
[Enum(None, 0, Plane, 1, Sphere, 2)]_RefractionMode("Refraction Mode", Int) = 0
[Enum(None, 0, Plane, 1, Sphere, 2)]_RefractionModel("Refraction Model", Int) = 0
[Enum(Proxy, 1, HiZ, 2)]_RefractionSSRayModel("Refraction SSRay Model", Int) = 0
_Ior("Index Of Refraction", Range(1.0, 2.5)) = 1.0
_ThicknessMultiplier("Thickness Multiplier", Float) = 1.0
_TransmittanceColor("Transmittance Color", Color) = (1.0, 1.0, 1.0)

#pragma shader_feature _VERTEX_WIND
#pragma shader_feature _ _TESSELLATION_PHONG
#pragma shader_feature _ _REFRACTION_PLANE _REFRACTION_SPHERE
#pragma shader_feature _ _REFRACTION_SSRAY_PROXY _REFRACTION_SSRAY_HIZ
#pragma shader_feature _ _EMISSIVE_MAPPING_PLANAR _EMISSIVE_MAPPING_TRIPLANAR
#pragma shader_feature _ _MAPPING_PLANAR _MAPPING_TRIPLANAR

// Include
//-------------------------------------------------------------------------------------
#include "CoreRP/ShaderLibrary/common.hlsl"
#include "CoreRP/ShaderLibrary/Common.hlsl"
#include "CoreRP/ShaderLibrary/tessellation.hlsl"
#include "CoreRP/ShaderLibrary/Tessellation.hlsl"
#include "../../ShaderPass/FragInputs.hlsl"
#include "../../ShaderPass/ShaderPass.cs.hlsl"

6
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/Lit/ShaderPass/LitDepthPass.hlsl


// Attributes
#define REQUIRE_TANGENT_TO_WORLD defined(_PIXEL_DISPLACEMENT)
#define REQUIRE_NORMAL defined(TESSELLATION_ON) || REQUIRE_TANGENT_TO_WORLD || defined(_VERTEX_WIND) || defined(_VERTEX_DISPLACEMENT)
#define REQUIRE_VERTEX_COLOR ((defined(_VERTEX_DISPLACEMENT) || defined(_TESSELLATION_DISPLACEMENT)) && defined(LAYERED_LIT_SHADER) && (defined(_LAYER_MASK_VERTEX_COLOR_MUL) || defined(_LAYER_MASK_VERTEX_COLOR_ADD))) || defined(_VERTEX_WIND)
#define REQUIRE_VERTEX_COLOR (defined(_VERTEX_DISPLACEMENT) || defined(_TESSELLATION_DISPLACEMENT) || (defined(LAYERED_LIT_SHADER) && (defined(_LAYER_MASK_VERTEX_COLOR_MUL) || defined(_LAYER_MASK_VERTEX_COLOR_ADD))) || defined(_VERTEX_WIND))
// This first set of defines specifies which attributes the mesh will use in the vertex and domain shaders (for tessellation)

#define VARYINGS_NEED_TEXCOORD3
#endif
#endif
#endif
#if REQUIRE_VERTEX_COLOR
#define VARYINGS_NEED_COLOR
#endif
// This include will define the various Attributes/Varyings structure

6
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/Lit/ShaderPass/LitVelocityPass.hlsl


// Attributes
#define REQUIRE_TANGENT_TO_WORLD defined(_PIXEL_DISPLACEMENT)
#define REQUIRE_NORMAL defined(TESSELLATION_ON) || REQUIRE_TANGENT_TO_WORLD || defined(_VERTEX_WIND) || defined(_VERTEX_DISPLACEMENT)
#define REQUIRE_VERTEX_COLOR ((defined(_VERTEX_DISPLACEMENT) || defined(_TESSELLATION_DISPLACEMENT)) && defined(LAYERED_LIT_SHADER) && (defined(_LAYER_MASK_VERTEX_COLOR_MUL) || defined(_LAYER_MASK_VERTEX_COLOR_ADD))) || defined(_VERTEX_WIND)
#define REQUIRE_VERTEX_COLOR (defined(_VERTEX_DISPLACEMENT) || defined(_TESSELLATION_DISPLACEMENT) || (defined(LAYERED_LIT_SHADER) && (defined(_LAYER_MASK_VERTEX_COLOR_MUL) || defined(_LAYER_MASK_VERTEX_COLOR_ADD))) || defined(_VERTEX_WIND))
// This first set of defines specifies which attributes the mesh will use in the vertex and domain shaders (for tessellation)

#define VARYINGS_NEED_TEXCOORD3
#endif
#endif
#endif
#if REQUIRE_VERTEX_COLOR
#define VARYINGS_NEED_COLOR
#endif
// This include will define the various Attributes/Varyings structure

4
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/SubsurfaceScattering/SubsurfaceScatteringManager.cs


{
// Currently, Unity does not offer a way to access the GCN HTile even on PS4 and Xbox One.
// Therefore, it's computed in a pixel shader, and optimized to only contain the SSS bit.
// Clear the HTile texture. TODO: move this to ClearBuffers(). Clear operations must be batched!
HDUtils.SetRenderTarget(cmd, hdCamera, depthStencilBufferRT); // No need for color buffer here
HDUtils.SetRenderTarget(cmd, hdCamera, depthStencilBufferRT); // No need for color buffer here
CoreUtils.DrawFullScreen(cmd, m_CopyStencilForSplitLighting, null, 2);
cmd.ClearRandomWriteTargets();
}

2
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/Unlit/Unlit.shader


// Include
//-------------------------------------------------------------------------------------
#include "CoreRP/ShaderLibrary/common.hlsl"
#include "CoreRP/ShaderLibrary/Common.hlsl"
#include "../../ShaderVariables.hlsl"
#include "../../ShaderPass/FragInputs.hlsl"
#include "../../ShaderPass/ShaderPass.cs.hlsl"

10
ScriptableRenderPipeline/HDRenderPipeline/HDRP/RenderPipelineResources/ApplyDistorsion.compute


#pragma only_renderers d3d11 ps4 xboxone vulkan metal
TEXTURE2D(_DistortionTexture);
TEXTURE2D(_GaussianPyramidColorTexture);
TEXTURE2D(_ColorPyramidTexture);
SamplerState sampler_GaussianPyramidColorTexture;
SamplerState sampler_ColorPyramidTexture;
float4 _GaussianPyramidColorMipSize;
float4 _ColorPyramidScale;
CBUFFER_END
#pragma kernel KMain

// Get source pixel for distortion
float2 distordedUV = float2(dispatchThreadId + int2(distortion * _FetchBias)) * _Size.zw;
float mip = (_GaussianPyramidColorMipSize.z - 1) * clamp(distortionBlur, 0.0, 1.0);
float4 sampled = SAMPLE_TEXTURE2D_LOD(_GaussianPyramidColorTexture, sampler_GaussianPyramidColorTexture, distordedUV, mip);
float mip = (_ColorPyramidScale.z - 1) * clamp(distortionBlur, 0.0, 1.0);
float4 sampled = SAMPLE_TEXTURE2D_LOD(_ColorPyramidTexture, sampler_ColorPyramidTexture, distordedUV, mip);
_CameraColorTexture[dispatchThreadId] = sampled;
}

128
ScriptableRenderPipeline/HDRenderPipeline/HDRP/RenderPipelineResources/BufferPyramid.cs


{
class BufferPyramid
{
static readonly int _Size = Shader.PropertyToID("_Size");
static readonly int _Source = Shader.PropertyToID("_Source");
static readonly int _Result = Shader.PropertyToID("_Result");
static readonly int _SrcSize = Shader.PropertyToID("_SrcSize");
const int k_DepthBlockSize = 4;
GPUCopy m_GPUCopy;
ComputeShader m_ColorPyramidCS;
int m_ColorPyramidKernel;
ComputeShader m_DepthPyramidCS;
int m_DepthPyramidKernel_8;
int m_DepthPyramidKernel_1;
public BufferPyramid(
ComputeShader colorPyramidCS,
ComputeShader depthPyramidCS, GPUCopy gpuCopy)
{
m_ColorPyramidCS = colorPyramidCS;
m_ColorPyramidKernel = m_ColorPyramidCS.FindKernel("KMain");
BufferPyramidProcessor m_Processor;
m_DepthPyramidCS = depthPyramidCS;
m_GPUCopy = gpuCopy;
m_DepthPyramidKernel_8 = m_DepthPyramidCS.FindKernel("KMain_8");
m_DepthPyramidKernel_1 = m_DepthPyramidCS.FindKernel("KMain_1");
public BufferPyramid(BufferPyramidProcessor processor)
{
m_Processor = processor;
}
float GetXRscale()

public void CreateBuffers()
{
m_ColorPyramidBuffer = RTHandle.Alloc(size => CalculatePyramidSize(size), filterMode: FilterMode.Trilinear, colorFormat: RenderTextureFormat.ARGBHalf, sRGB: false, useMipMap: true, autoGenerateMips: false, name: "ColorPymarid");
m_DepthPyramidBuffer = RTHandle.Alloc(size => CalculatePyramidSize(size), filterMode: FilterMode.Trilinear, colorFormat: RenderTextureFormat.RFloat, sRGB: false, useMipMap: true, autoGenerateMips: false, enableRandomWrite: true, name: "DepthPyramid"); // Need randomReadWrite because we downsample the first mip with a compute shader.
m_ColorPyramidBuffer = RTHandle.Alloc(size => CalculatePyramidSize(size), filterMode: FilterMode.Trilinear, colorFormat: RenderTextureFormat.ARGBHalf, sRGB: false, useMipMap: true, autoGenerateMips: false, enableRandomWrite: true, name: "ColorPyramid");
m_DepthPyramidBuffer = RTHandle.Alloc(size => CalculatePyramidSize(size), filterMode: FilterMode.Trilinear, colorFormat: RenderTextureFormat.RGFloat, sRGB: false, useMipMap: true, autoGenerateMips: false, enableRandomWrite: true, name: "DepthPyramid"); // Need randomReadWrite because we downsample the first mip with a compute shader.
}
public void DestroyBuffers()

int lodCount = GetPyramidLodCount(hdCamera);
UpdatePyramidMips(hdCamera, m_DepthPyramidBuffer.rt.format, m_DepthPyramidMips, lodCount);
cmd.SetGlobalVector(HDShaderIDs._DepthPyramidMipSize, new Vector4(hdCamera.actualWidth, hdCamera.actualHeight, lodCount, 0.0f));
m_GPUCopy.SampleCopyChannel_xyzw2x(cmd, depthTexture, m_DepthPyramidBuffer, new Vector2(hdCamera.actualWidth, hdCamera.actualHeight));
RTHandle src = m_DepthPyramidBuffer;
for (var i = 0; i < lodCount; i++)
{
RTHandle dest = m_DepthPyramidMips[i];
var srcMipWidth = hdCamera.actualWidth >> i;
var srcMipHeight = hdCamera.actualHeight >> i;
var dstMipWidth = srcMipWidth >> 1;
var dstMipHeight = srcMipHeight >> 1;
var kernel = m_DepthPyramidKernel_8;
var kernelBlockSize = 8f;
if (dstMipWidth < 4 * k_DepthBlockSize
|| dstMipHeight < 4 * k_DepthBlockSize)
{
kernel = m_DepthPyramidKernel_1;
kernelBlockSize = 1;
}
cmd.SetComputeTextureParam(m_DepthPyramidCS, kernel, _Source, src);
cmd.SetComputeTextureParam(m_DepthPyramidCS, kernel, _Result, dest);
cmd.SetComputeVectorParam(m_DepthPyramidCS, _SrcSize, new Vector4(srcMipWidth, srcMipHeight, (1.0f / srcMipWidth) * scale.x, (1.0f / srcMipHeight) * scale.y));
cmd.DispatchCompute(
m_DepthPyramidCS,
kernel,
Mathf.CeilToInt(dstMipWidth / kernelBlockSize),
Mathf.CeilToInt(dstMipHeight / kernelBlockSize),
1);
cmd.SetGlobalVector(HDShaderIDs._DepthPyramidSize, new Vector4(hdCamera.actualWidth, hdCamera.actualHeight, 1f / hdCamera.actualWidth, 1f / hdCamera.actualHeight));
cmd.SetGlobalVector(HDShaderIDs._DepthPyramidScale, new Vector4(scale.x, scale.y, lodCount, 0.0f));
// If we could bind texture mips as UAVs we could avoid this copy... (it also copies more than the needed viewport when not fullscreen)
cmd.CopyTexture(m_DepthPyramidMips[i], 0, 0, 0, 0, dstMipWidth, dstMipHeight, m_DepthPyramidBuffer, 0, i + 1, 0, 0);
src = dest;
}
m_Processor.RenderDepthPyramid(
hdCamera.actualWidth, hdCamera.actualHeight,
cmd,
depthTexture,
m_DepthPyramidBuffer,
m_DepthPyramidMips,
lodCount,
scale
);
cmd.SetGlobalTexture(HDShaderIDs._PyramidDepthTexture, m_DepthPyramidBuffer);
cmd.SetGlobalTexture(HDShaderIDs._DepthPyramidTexture, m_DepthPyramidBuffer);
}
public void RenderColorPyramid(

UpdatePyramidMips(hdCamera, m_ColorPyramidBuffer.rt.format, m_ColorPyramidMips, lodCount);
Vector2 scale = GetPyramidToScreenScale(hdCamera);
cmd.SetGlobalVector(HDShaderIDs._GaussianPyramidColorMipSize, new Vector4(scale.x, scale.y, lodCount, 0.0f));
// Copy mip 0
// Here we blit a "camera space" texture into a square texture but we want to keep the original viewport.
// The other BlitCameraTexture overload sets up the viewport based on the destination RT scale (square here), so we need to override it here.
HDUtils.BlitCameraTexture(cmd, hdCamera, colorTexture, m_ColorPyramidBuffer, new Rect(0.0f, 0.0f, hdCamera.actualWidth, hdCamera.actualHeight));
RTHandle src = m_ColorPyramidBuffer;
for (var i = 0; i < lodCount; i++)
{
RTHandle dest = m_ColorPyramidMips[i];
var srcMipWidth = hdCamera.actualWidth >> i;
var srcMipHeight = hdCamera.actualHeight >> i;
var dstMipWidth = srcMipWidth >> 1;
var dstMipHeight = srcMipHeight >> 1;
// TODO: Add proper stereo support to the compute job
cmd.SetComputeTextureParam(m_ColorPyramidCS, m_ColorPyramidKernel, _Source, src);
cmd.SetComputeTextureParam(m_ColorPyramidCS, m_ColorPyramidKernel, _Result, dest);
// _Size is used as a scale inside the whole render target, so here we need to keep the full size (not the camera-dependent scaled size).
cmd.SetComputeVectorParam(m_ColorPyramidCS, _Size, new Vector4(dest.rt.width, dest.rt.height, 1f / dest.rt.width, 1f / dest.rt.height));
cmd.DispatchCompute(
m_ColorPyramidCS,
m_ColorPyramidKernel,
Mathf.CeilToInt(dstMipWidth / 8f),
Mathf.CeilToInt(dstMipHeight / 8f),
1);
// If we could bind texture mips as UAVs we could avoid this copy (which also copies more than the needed viewport when not rendering fullscreen).
cmd.CopyTexture(m_ColorPyramidMips[i], 0, 0, 0, 0, dstMipWidth, dstMipHeight, m_ColorPyramidBuffer, 0, i + 1, 0, 0);
cmd.SetGlobalVector(HDShaderIDs._ColorPyramidSize, new Vector4(hdCamera.actualWidth, hdCamera.actualHeight, 1f / hdCamera.actualWidth, 1f / hdCamera.actualHeight));
cmd.SetGlobalVector(HDShaderIDs._ColorPyramidScale, new Vector4(scale.x, scale.y, lodCount, 0.0f));
src = dest;
}
m_Processor.RenderColorPyramid(
hdCamera,
cmd,
colorTexture,
m_ColorPyramidBuffer,
m_ColorPyramidMips,
lodCount,
scale
);
cmd.SetGlobalTexture(HDShaderIDs._GaussianPyramidColorTexture, m_ColorPyramidBuffer);
cmd.SetGlobalTexture(HDShaderIDs._ColorPyramidTexture, m_ColorPyramidBuffer);
}
}
}
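For reference, the mip-chain arithmetic driving the loops above (per-mip sizes, the 8-thread versus 1-thread kernel selection, and the compute dispatch group counts) can be reproduced on the CPU. The following is a minimal, self-contained C# sketch under the same assumptions; k_DepthBlockSize, the camera dimensions and the logging are illustrative stand-ins, not the pipeline's actual fields.

using UnityEngine;

// Minimal sketch: reproduces the per-mip size and dispatch arithmetic of the pyramid loops above.
// k_DepthBlockSize and the logging are illustrative; this is not HDRP code.
static class PyramidMathSketch
{
    const int k_DepthBlockSize = 4; // assumption: stand-in for the pipeline's block-size constant

    public static void Describe(int cameraWidth, int cameraHeight, int lodCount)
    {
        for (int i = 0; i < lodCount; i++)
        {
            int srcMipWidth  = cameraWidth  >> i;
            int srcMipHeight = cameraHeight >> i;
            int dstMipWidth  = srcMipWidth  >> 1;
            int dstMipHeight = srcMipHeight >> 1;

            // Small destination mips fall back to the 1-thread kernel, mirroring the KMain_8 / KMain_1 selection above.
            float kernelBlockSize = (dstMipWidth < 4 * k_DepthBlockSize || dstMipHeight < 4 * k_DepthBlockSize) ? 1f : 8f;

            int groupsX = Mathf.CeilToInt(dstMipWidth  / kernelBlockSize);
            int groupsY = Mathf.CeilToInt(dstMipHeight / kernelBlockSize);
            Debug.Log($"mip {i + 1}: {dstMipWidth}x{dstMipHeight}, dispatched as {groupsX}x{groupsY}x1 groups");
        }
    }
}

For a 1920x1080 camera, for example, the first iteration yields a 960x540 mip dispatched as 120x68 groups of 8x8 threads.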

1
ScriptableRenderPipeline/HDRenderPipeline/HDRP/RenderPipelineResources/CopyDepthBuffer.shader


ZTest Always
ZWrite On
Blend Off
ColorMask 0
HLSLPROGRAM
#pragma target 4.5

58
ScriptableRenderPipeline/HDRenderPipeline/HDRP/RenderPipelineResources/DepthPyramid.compute


#include "CoreRP/ShaderLibrary/Common.hlsl"
#pragma kernel KMain_8 KERNEL_SIZE=8 KERNEL_NAME=KMain_8
#pragma kernel KMain_1 KERNEL_SIZE=1 KERNEL_NAME=KMain_1
// ------------------------------------------------
// Algorithm
// ------------------------------------------------
// Downsample a depth texture by taking min value of sampled pixels
// ------------------------------------------------
// Variants
// ------------------------------------------------
#pragma kernel KDepthDownSample8 KERNEL_SIZE=8 KERNEL_NAME=KDepthDownSample8
#pragma kernel KDepthDownSample1 KERNEL_SIZE=1 KERNEL_NAME=KDepthDownSample1
Texture2D<float> _Source;
RWTexture2D<float> _Result;
// ------------------------------------------------
// Texture buffers
// ------------------------------------------------
Texture2D<float2> _Source;
RW_TEXTURE2D(float2, _Result);
// ------------------------------------------------
// Constant buffers
// ------------------------------------------------
int2 _RectOffset; // Offset in source texture
// ------------------------------------------------
// Kernel
// ------------------------------------------------
#if UNITY_REVERSED_Z
# define MIN_DEPTH(l, r) max(l, r)
# define MAX_DEPTH(l, r) min(l, r)
#else
# define MIN_DEPTH(l, r) min(l, r)
# define MAX_DEPTH(l, r) max(l, r)
#endif
int2 threadUL = dispatchThreadId;
uint2 srcPixelUL = _RectOffset + (dispatchThreadId << 1);
// Offset by 0.5 so sampling gets the proper pixels
float2 offset = float2(srcPixelUL) + 0.5;
// Downsample the block
float2 offset = float2(threadUL) * 2.0f + 1.0f;
float4 depths = GATHER_RED_TEXTURE2D(_Source, sampler_PointClamp, offset * _SrcSize.zw, 0.0);
float4 depths = GATHER_RED_TEXTURE2D(_Source, sampler_PointClamp, offset * _SrcSize.zw, 0.0).wzxy;
// Downsample the block
float p00 = SAMPLE_TEXTURE2D_LOD(_Source, sampler_PointClamp, (offset) * _SrcSize.zw, 0.0).x;
float p10 = SAMPLE_TEXTURE2D_LOD(_Source, sampler_PointClamp, (offset + float2(1.0, 0.0)) * _SrcSize.zw, 0.0).x;
float p01 = SAMPLE_TEXTURE2D_LOD(_Source, sampler_PointClamp, (offset + float2(0.0, 1.0)) * _SrcSize.zw, 0.0).x;

// Select the nearest sample
#if UNITY_REVERSED_Z
float minDepth = max(max(depths.x, depths.y), max(depths.z, depths.w));
#else
float minDepth = min(min(depths.x, depths.y), min(depths.z, depths.w));
#endif
float minDepth = MIN_DEPTH(MIN_DEPTH(depths.x, depths.y), MIN_DEPTH(depths.z, depths.w));
float maxDepth = MAX_DEPTH(MAX_DEPTH(depths.x, depths.y), MAX_DEPTH(depths.z, depths.w));
_Result[dispatchThreadId] = minDepth;
uint2 dstPixel = (_RectOffset >> 1) + dispatchThreadId;
_Result[dstPixel] = float2(minDepth, maxDepth);
#undef MIN_DEPTH
#undef MAX_DEPTH
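The kernel gathers a 2x2 block of depths and keeps the nearest value (and, with the new RG output, also the farthest), flipping the comparison direction under UNITY_REVERSED_Z. As a CPU-side reference for that reduction, here is a small C# sketch; the reversedZ flag plays the role of UNITY_REVERSED_Z and the array layout is purely illustrative.

using System;

// CPU reference for the 2x2 depth reduction above. With reversed-Z, "nearest" is the larger
// stored value, so the min/max roles swap exactly as MIN_DEPTH/MAX_DEPTH do.
static class DepthDownsampleSketch
{
    public static void Downsample(float[,] src, float[,] dstNear, float[,] dstFar, bool reversedZ)
    {
        int dstW = src.GetLength(0) / 2;
        int dstH = src.GetLength(1) / 2;

        for (int x = 0; x < dstW; x++)
        for (int y = 0; y < dstH; y++)
        {
            float d00 = src[2 * x,     2 * y];
            float d10 = src[2 * x + 1, 2 * y];
            float d01 = src[2 * x,     2 * y + 1];
            float d11 = src[2 * x + 1, 2 * y + 1];

            float lo = Math.Min(Math.Min(d00, d10), Math.Min(d01, d11));
            float hi = Math.Max(Math.Max(d00, d10), Math.Max(d01, d11));

            dstNear[x, y] = reversedZ ? hi : lo; // .x of the float2 result: nearest sample
            dstFar[x, y]  = reversedZ ? lo : hi; // .y of the float2 result: farthest sample
        }
    }
}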

5
ScriptableRenderPipeline/HDRenderPipeline/HDRP/RenderPipelineResources/HDRenderPipelineResources.asset


colorPyramidCS: {fileID: 7200000, guid: 4e3267a1135742441a14298d8dcac04a, type: 3}
depthPyramidCS: {fileID: 7200000, guid: 64a553bb564274041906f78ffba955e4, type: 3}
copyChannelCS: {fileID: 7200000, guid: a4d45eda75e8e474dbe24a31f741f3b4, type: 3}
texturePaddingCS: {fileID: 7200000, guid: 6736f53014c69f84aa2130aeae99730b, type: 3}
applyDistortionCS: {fileID: 7200000, guid: 2fa6c0e3fe6dc3145a4156f21913fe5c, type: 3}
clearDispatchIndirectShader: {fileID: 7200000, guid: fc1f553acb80a6446a32d33e403d0656,
type: 3}

type: 3}
deferredDirectionalShadowComputeShader: {fileID: 7200000, guid: fbde6fae193b2a94e9fd97c163c204f4,
type: 3}
volumetricLightingCS: {fileID: 7200000, guid: 799166e2ee6a4b041bba9e74f6942097,
volumeVoxelizationCS: {fileID: 7200000, guid: c20b371db720da244b73830ec74a343a,
type: 3}
volumetricLightingCS: {fileID: 7200000, guid: b4901a10df2d1e24282725e9fbc77c97,
type: 3}
subsurfaceScatteringCS: {fileID: 7200000, guid: b06a7993621def248addd55d0fe931b1,
type: 3}

2
ScriptableRenderPipeline/HDRenderPipeline/HDRP/RenderPipelineResources/RenderPipelineResources.cs


public ComputeShader colorPyramidCS;
public ComputeShader depthPyramidCS;
public ComputeShader copyChannelCS;
public ComputeShader texturePaddingCS;
public ComputeShader applyDistortionCS;
// Lighting tile pass resources

public ComputeShader buildMaterialFlagsShader;
public ComputeShader deferredComputeShader;
public ComputeShader deferredDirectionalShadowComputeShader;
public ComputeShader volumeVoxelizationCS;
public ComputeShader volumetricLightingCS;
public ComputeShader subsurfaceScatteringCS; // Disney SSS

1
ScriptableRenderPipeline/HDRenderPipeline/HDRP/ShaderPass/ShaderPass.cs


LightTransport,
Shadows,
SubsurfaceScattering,
VolumeVoxelization,
VolumetricLighting,
DBuffer
}

5
ScriptableRenderPipeline/HDRenderPipeline/HDRP/ShaderPass/ShaderPass.cs.hlsl


#define SHADERPASS_LIGHT_TRANSPORT (7)
#define SHADERPASS_SHADOWS (8)
#define SHADERPASS_SUBSURFACE_SCATTERING (9)
#define SHADERPASS_VOLUMETRIC_LIGHTING (10)
#define SHADERPASS_DBUFFER (11)
#define SHADERPASS_VOLUME_VOXELIZATION (10)
#define SHADERPASS_VOLUMETRIC_LIGHTING (11)
#define SHADERPASS_DBUFFER (12)
#endif
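These defines are generated from the C# ShaderPass enum, which is why inserting VolumeVoxelization before VolumetricLighting shifts the numeric value of every entry that follows. The sketch below illustrates that kind of enum-to-define generation; it is a hypothetical stand-in, not the pipeline's actual code generator.

using System;
using System.Text;
using System.Text.RegularExpressions;

// Hypothetical stand-in for the enum-to-define generation; the real header is produced
// by the pipeline's code generator from the ShaderPass enum.
enum ShaderPassSketch { LightTransport = 7, Shadows, SubsurfaceScattering, VolumeVoxelization, VolumetricLighting, DBuffer }

static class ShaderPassDefineSketch
{
    public static string Generate()
    {
        var sb = new StringBuilder();
        foreach (ShaderPassSketch pass in Enum.GetValues(typeof(ShaderPassSketch)))
        {
            // CamelCase -> UPPER_SNAKE_CASE, e.g. VolumeVoxelization -> VOLUME_VOXELIZATION.
            string name = Regex.Replace(pass.ToString(), "(?<=[a-z0-9])([A-Z])", "_$1").ToUpperInvariant();
            sb.AppendLine($"#define SHADERPASS_{name} ({(int)pass})");
        }
        return sb.ToString();
    }
}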

2
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Sky/HDRISky/HDRISkyRenderer.cs


public override void RenderSky(BuiltinSkyParameters builtinParams, bool renderForCubemap)
{
m_SkyHDRIMaterial.SetTexture(HDShaderIDs._Cubemap, m_HdriSkyParams.hdriSky);
m_SkyHDRIMaterial.SetVector(HDShaderIDs._SkyParam, new Vector4(m_HdriSkyParams.exposure, m_HdriSkyParams.multiplier, -m_HdriSkyParams.rotation, 0.0f)); // -rotation to match Legacy...
m_SkyHDRIMaterial.SetVector(HDShaderIDs._SkyParam, new Vector4(GetExposure(m_HdriSkyParams, builtinParams.debugSettings), m_HdriSkyParams.multiplier, -m_HdriSkyParams.rotation, 0.0f)); // -rotation to match Legacy...
// This matrix needs to be updated at the draw call frequency.
m_PropertyBlock.SetMatrix(HDShaderIDs._PixelCoordToViewDirWS, builtinParams.pixelCoordToViewDirMatrix);

2
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Sky/ProceduralSky/ProceduralSkyRenderer.cs


sunDirection = -builtinParams.sunLight.transform.forward;
}
m_ProceduralSkyMaterial.SetVector(HDShaderIDs._SkyParam, new Vector4(m_ProceduralSkyParams.exposure, m_ProceduralSkyParams.multiplier, 0.0f, 0.0f));
m_ProceduralSkyMaterial.SetVector(HDShaderIDs._SkyParam, new Vector4(GetExposure(m_ProceduralSkyParams, builtinParams.debugSettings), m_ProceduralSkyParams.multiplier, 0.0f, 0.0f));
m_ProceduralSkyMaterial.SetFloat(_SunSizeParam, m_ProceduralSkyParams.sunSize);
m_ProceduralSkyMaterial.SetFloat(_SunSizeConvergenceParam, m_ProceduralSkyParams.sunSizeConvergence);
m_ProceduralSkyMaterial.SetFloat(_AtmoshpereThicknessParam, m_ProceduralSkyParams.atmosphereThickness);

14
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Sky/SkyManager.cs


public RTHandle depthBuffer;
public HDCamera hdCamera;
public DebugDisplaySettings debugSettings;
public static RenderTargetIdentifier nullRT = -1;
}

m_SkyRenderingContext.Cleanup();
}
public bool IsSkyValid()
public bool IsLightingSkyValid()
public bool IsVisualSkyValid()
{
return m_VisualSky.IsValid();
}
void BlitCubemap(CommandBuffer cmd, Cubemap source, RenderTexture dest)
{

m_UpdateRequired = false;
SetGlobalSkyTexture(cmd);
if (IsSkyValid())
if (IsLightingSkyValid())
{
cmd.SetGlobalInt(HDShaderIDs._EnvLightSkyEnabled, 1);
}

}
}
public void RenderSky(HDCamera camera, Light sunLight, RTHandle colorBuffer, RTHandle depthBuffer, CommandBuffer cmd)
public void RenderSky(HDCamera camera, Light sunLight, RTHandle colorBuffer, RTHandle depthBuffer, DebugDisplaySettings debugSettings, CommandBuffer cmd)
m_SkyRenderingContext.RenderSky(m_VisualSky, camera, sunLight, colorBuffer, depthBuffer, cmd);
m_SkyRenderingContext.RenderSky(m_VisualSky, camera, sunLight, colorBuffer, depthBuffer, debugSettings, cmd);
}
public void RenderOpaqueAtmosphericScattering(CommandBuffer cmd)

10
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Sky/SkyRenderer.cs


// renderForCubemap: When rendering into a cube map, no depth buffer is available, so the user has to make sure not to use depth testing or the depth texture.
public abstract void RenderSky(BuiltinSkyParameters builtinParams, bool renderForCubemap);
public abstract bool IsValid();
protected float GetExposure(SkySettings skySettings, DebugDisplaySettings debugSettings)
{
float debugExposure = 0.0f;
if(debugSettings != null && debugSettings.DebugNeedsExposure())
{
debugExposure = debugSettings.lightingDebugSettings.debugExposure;
}
return skySettings.exposure + debugExposure;
}
}
}
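GetExposure simply adds the lighting-debug exposure override, when one is active, on top of the sky's own exposure before the value is sent to the sky material. The sketch below shows the same additive combination in isolation; the exp2 conversion to a linear multiplier is an assumption about how an EV-style exposure is typically consumed and is not taken from this change.

using UnityEngine;

// Hypothetical sketch: only the additive combination mirrors GetExposure above;
// the EV-to-linear conversion is an assumption, not pipeline code.
static class SkyExposureSketch
{
    public static float Combine(float skyExposure, float? debugExposure)
    {
        return skyExposure + (debugExposure ?? 0.0f);
    }

    public static float ToLinearMultiplier(float exposureEV)
    {
        // Assumption: exposure is expressed in EV, so the linear scale is 2^EV.
        return Mathf.Pow(2.0f, exposureEV);
    }
}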

4
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Sky/SkyRenderingContext.cs


m_BuiltinParameters.screenSize = m_CubemapScreenSize;
m_BuiltinParameters.cameraPosWS = camera.camera.transform.position;
m_BuiltinParameters.hdCamera = null;
m_BuiltinParameters.debugSettings = null; // We don't want any debug when updating the environment.
int sunHash = 0;
if (sunLight != null)

return result;
}
public void RenderSky(SkyUpdateContext skyContext, HDCamera hdCamera, Light sunLight, RTHandle colorBuffer, RTHandle depthBuffer, CommandBuffer cmd)
public void RenderSky(SkyUpdateContext skyContext, HDCamera hdCamera, Light sunLight, RTHandle colorBuffer, RTHandle depthBuffer, DebugDisplaySettings debugSettings, CommandBuffer cmd)
{
if (skyContext.IsValid() && hdCamera.clearColorMode == HDAdditionalCameraData.ClearColorMode.Sky)
{

m_BuiltinParameters.colorBuffer = colorBuffer;
m_BuiltinParameters.depthBuffer = depthBuffer;
m_BuiltinParameters.hdCamera = hdCamera;
m_BuiltinParameters.debugSettings = debugSettings;
skyContext.renderer.SetRenderTargets(m_BuiltinParameters);
skyContext.renderer.RenderSky(m_BuiltinParameters, false);

39
ScriptableRenderPipeline/LightweightPipeline/LWRP/LightweightPipeline.cs


private int m_ScreenSpaceShadowMapRTID;
private Matrix4x4[] m_ShadowMatrices = new Matrix4x4[kMaxCascades + 1];
private RenderTargetIdentifier m_CurrCameraColorRT;
private RenderTargetIdentifier m_ShadowMapRT;
private RenderTexture m_ShadowMapRT;
private RenderTargetIdentifier m_ScreenSpaceShadowMapRT;
private RenderTargetIdentifier m_ColorRT;
private RenderTargetIdentifier m_CopyColorRT;

CameraRenderTargetID.depth = Shader.PropertyToID("_CameraDepthTexture");
CameraRenderTargetID.depthCopy = Shader.PropertyToID("_CameraCopyDepthTexture");
m_ShadowMapRT = new RenderTargetIdentifier(m_ShadowMapRTID);
m_ScreenSpaceShadowMapRT = new RenderTargetIdentifier(m_ScreenSpaceShadowMapRTID);
m_ColorRT = new RenderTargetIdentifier(CameraRenderTargetID.color);

if (shadows && m_ShadowSettings.screenSpace)
ShadowCollectPass(visibleLights, ref context, ref lightData, frameRenderingConfiguration);
if (!shadows)
{
var setRT = CommandBufferPool.Get("Generate Small Shadow Buffer");
if (m_ShadowSettings.screenSpace)
setRT.GetTemporaryRT(m_ScreenSpaceShadowMapRTID, 4, 4, 0, FilterMode.Bilinear, m_ShadowSettings.screenspaceShadowmapTextureFormat);
else
setRT.GetTemporaryRT(m_ShadowMapRTID, 4, 4, 0, FilterMode.Bilinear, m_ShadowSettings.shadowmapTextureFormat);
setRT.Blit(Texture2D.whiteTexture, m_ScreenSpaceShadowMapRT);
context.ExecuteCommandBuffer(setRT);
}
ForwardPass(visibleLights, frameRenderingConfiguration, ref context, ref lightData, stereoEnabled);

CopyTexture(cmd, CameraRenderTargetID.depth, BuiltinRenderTextureType.CameraTarget, m_CopyDepthMaterial, true);
#endif
cmd.ReleaseTemporaryRT(m_ShadowMapRTID);
cmd.ReleaseTemporaryRT(m_ScreenSpaceShadowMapRTID);
cmd.ReleaseTemporaryRT(CameraRenderTargetID.depthCopy);
cmd.ReleaseTemporaryRT(CameraRenderTargetID.depth);

CommandBufferPool.Release(cmd);
context.Submit();
if (m_ShadowMapRT)
{
RenderTexture.ReleaseTemporary(m_ShadowMapRT);
m_ShadowMapRT = null;
}
m_ShadowMapRT = null;
if (m_Asset.AreShadowsEnabled() && lightData.mainLightIndex != -1)
{
VisibleLight mainLight = visibleLights[lightData.mainLightIndex];

float invShadowResolution = 1.0f / m_Asset.ShadowAtlasResolution;
float invHalfShadowResolution = 0.5f * invShadowResolution;
cmd.Clear();
cmd.SetGlobalTexture(m_ShadowMapRTID, m_ShadowMapRT);
cmd.SetGlobalMatrixArray(ShadowConstantBuffer._WorldToShadow, m_ShadowMatrices);
cmd.SetGlobalVector(ShadowConstantBuffer._ShadowData, new Vector4(light.shadowStrength, 0.0f, 0.0f, 0.0f));
cmd.SetGlobalVectorArray(ShadowConstantBuffer._DirShadowSplitSpheres, m_DirectionalShadowSplitDistances);

bool success = false;
var cmd = CommandBufferPool.Get("Prepare Shadowmap");
cmd.GetTemporaryRT(m_ShadowMapRTID, m_ShadowSettings.shadowAtlasWidth,
m_ShadowSettings.shadowAtlasHeight, kShadowBufferBits, FilterMode.Bilinear, m_ShadowSettings.shadowmapTextureFormat);
RenderTextureDescriptor shadowmapDescriptor = new RenderTextureDescriptor(m_ShadowSettings.shadowAtlasWidth,
m_ShadowSettings.shadowAtlasHeight, m_ShadowSettings.shadowmapTextureFormat, kShadowBufferBits);
shadowmapDescriptor.shadowSamplingMode = ShadowSamplingMode.CompareDepths;
m_ShadowMapRT = RenderTexture.GetTemporary(shadowmapDescriptor);
m_ShadowMapRT.filterMode = FilterMode.Bilinear;
m_ShadowMapRT.wrapMode = TextureWrapMode.Clamp;
// LightweightPipeline.SetRenderTarget is meant to be used with camera targets, not shadowmaps
CoreUtils.SetRenderTarget(cmd, m_ShadowMapRT, ClearFlag.Depth, CoreUtils.ConvertSRGBToActiveColorSpace(m_CurrCamera.backgroundColor));

{
cmd.SetViewport(new Rect(m_ShadowSlices[cascadeIndex].atlasX, m_ShadowSlices[cascadeIndex].atlasY,
m_ShadowSlices[cascadeIndex].shadowResolution, m_ShadowSlices[cascadeIndex].shadowResolution));
cmd.EnableScissorRect(new Rect(m_ShadowSlices[cascadeIndex].atlasX + 4, m_ShadowSlices[cascadeIndex].atlasY + 4,
m_ShadowSlices[cascadeIndex].shadowResolution - 8, m_ShadowSlices[cascadeIndex].shadowResolution - 8));
cmd.Clear();
cmd.DisableScissorRect();
context.ExecuteCommandBuffer(cmd);
cmd.Clear();
}

m_PostProcessRenderContext.sourceFormat = m_ColorFormat;
m_PostProcessRenderContext.destination = dest;
m_PostProcessRenderContext.command = cmd;
m_PostProcessRenderContext.flip = true;
m_PostProcessRenderContext.flip = m_CurrCamera.targetTexture == null;
if (opaqueOnly)
{

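The shadow-map change above replaces the command-buffer temporary RT with an explicitly allocated RenderTexture so that a RenderTextureDescriptor with shadowSamplingMode set to CompareDepths can be used, releasing the texture once the frame has been submitted. A minimal sketch of that allocate/release pattern follows; the sizes, format and depth bits are placeholders rather than the pipeline's settings.

using UnityEngine;
using UnityEngine.Rendering;

// Minimal sketch of the temporary shadow-map lifetime used above.
// Sizes, format and depth bits are placeholders, not the pipeline's settings.
class ShadowMapSketch
{
    RenderTexture m_ShadowMapRT;

    public RenderTexture Acquire(int atlasWidth, int atlasHeight)
    {
        var desc = new RenderTextureDescriptor(atlasWidth, atlasHeight, RenderTextureFormat.Shadowmap, 16)
        {
            shadowSamplingMode = ShadowSamplingMode.CompareDepths // enables hardware depth comparison
        };
        m_ShadowMapRT = RenderTexture.GetTemporary(desc);
        m_ShadowMapRT.filterMode = FilterMode.Bilinear;
        m_ShadowMapRT.wrapMode = TextureWrapMode.Clamp;
        return m_ShadowMapRT;
    }

    public void Release()
    {
        if (m_ShadowMapRT)
        {
            RenderTexture.ReleaseTemporary(m_ShadowMapRT);
            m_ShadowMapRT = null;
        }
    }
}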
7
ScriptableRenderPipeline/LightweightPipeline/LWRP/ShaderLibrary/Core.hlsl


#endif
#endif
#ifndef BUMP_SCALE_NOT_SUPPORTED
#define BUMP_SCALE_NOT_SUPPORTED !SHADER_HINT_NICE_QUALITY
#endif
///////////////////////////////////////////////////////////////////////////////
#ifdef _NORMALMAP
#define OUTPUT_NORMAL(IN, OUT) OutputTangentToWorld(IN.tangent, IN.normal, OUT.tangent.xyz, OUT.binormal.xyz, OUT.normal.xyz)

void OutputTangentToWorld(half4 vertexTangent, half3 vertexNormal, out half3 tangentWS, out half3 binormalWS, out half3 normalWS)
{
// MikkTSpace compliant. Only normalize when extracting the normal in the fragment shader.
tangentWS = normalize(mul((half3x3)UNITY_MATRIX_M, vertexTangent.xyz));
tangentWS = TransformObjectToWorldDir(vertexTangent.xyz);
binormalWS = cross(normalWS, tangentWS) * sign;
}

53
ScriptableRenderPipeline/LightweightPipeline/LWRP/ShaderLibrary/InputBuiltin.hlsl


float4x4 unity_ObjectToWorld;
float4x4 unity_WorldToObject;
float4 unity_LODFade; // x is the fade value ranging within [0,1]. y is x quantized into 16 levels
real4 unity_LODFade; // x is the fade value ranging within [0,1]. y is x quantized into 16 levels
float4 unity_DynamicLightmapST;
float4 unity_SHAr;
float4 unity_SHAg;
float4 unity_SHAb;
float4 unity_SHBr;
float4 unity_SHBg;
float4 unity_SHBb;
float4 unity_SHC;
// x = Disabled(0)/Enabled(1)
// y = Computations are done in global space (0) or local space (1)
// z = Texel size on U texture coordinate
float4 unity_ProbeVolumeParams;
float4x4 unity_ProbeVolumeWorldToObject;
float3 unity_ProbeVolumeSizeInv;
float3 unity_ProbeVolumeMin;
real4 unity_SHAr;
real4 unity_SHAg;
real4 unity_SHAb;
real4 unity_SHBr;
real4 unity_SHBg;
real4 unity_SHBb;
real4 unity_SHC;
float4 unity_ProbesOcclusion;
real4 unity_ProbesOcclusion;
half4 unity_SpecCube0_HDR;
real4 unity_SpecCube0_HDR;
half4 unity_Lightmap_HDR;
real4 unity_Lightmap_HDR;
half4 unity_LightIndicesOffsetAndCount;
half4 unity_4LightIndices0;
half4 unity_4LightIndices1;
real4 unity_LightIndicesOffsetAndCount;
real4 unity_4LightIndices0;
real4 unity_4LightIndices1;
CBUFFER_END

#if defined(USING_STEREO_MATRICES) && defined(UNITY_STEREO_MULTIVIEW_ENABLED)
CBUFFER_START(UnityStereoEyeIndices)
float4 unity_StereoEyeIndices[2];
real4 unity_StereoEyeIndices[2];
CBUFFER_END
#endif

// ----------------------------------------------------------------------------
CBUFFER_START(UnityPerFrame)
float4 glstate_lightmodel_ambient;
float4 unity_AmbientSky;
float4 unity_AmbientEquator;
float4 unity_AmbientGround;
float4 unity_IndirectSpecColor;
real4 glstate_lightmodel_ambient;
real4 unity_AmbientSky;
real4 unity_AmbientEquator;
real4 unity_AmbientGround;
real4 unity_IndirectSpecColor;
half4 unity_FogColor;
real4 unity_FogColor;
#if !defined(USING_STEREO_MATRICES)
float4x4 glstate_matrix_projection;

int unity_StereoEyeIndex;
#endif
float4 unity_ShadowColor;
real4 unity_ShadowColor;
CBUFFER_END
// ----------------------------------------------------------------------------

19
ScriptableRenderPipeline/LightweightPipeline/LWRP/ShaderLibrary/InputSurface.hlsl


#endif
CBUFFER_START(UnityPerMaterial)
half4 _MainTex_ST;
float4 _MainTex_ST;
half4 _SpecColor;
half4 _EmissionColor;
half _SmoothnessTextureChannel;
half4 _SpecColor;
half4 _EmissionColor;
TEXTURE2D(_MainTex); SAMPLER(sampler_MainTex);
TEXTURE2D(_MetallicGlossMap); SAMPLER(sampler_MetallicGlossMap);
TEXTURE2D(_SpecGlossMap); SAMPLER(sampler_SpecGlossMap);
TEXTURE2D(_BumpMap); SAMPLER(sampler_BumpMap);
TEXTURE2D(_OcclusionMap); SAMPLER(sampler_OcclusionMap);
TEXTURE2D(_EmissionMap); SAMPLER(sampler_EmissionMap);
TEXTURE2D(_MainTex); SAMPLER(sampler_MainTex);
TEXTURE2D(_MetallicGlossMap); SAMPLER(sampler_MetallicGlossMap);
TEXTURE2D(_SpecGlossMap); SAMPLER(sampler_SpecGlossMap);
TEXTURE2D(_BumpMap); SAMPLER(sampler_BumpMap);
TEXTURE2D(_OcclusionMap); SAMPLER(sampler_OcclusionMap);
TEXTURE2D(_EmissionMap); SAMPLER(sampler_EmissionMap);
// Must match Lightweight ShaderGraph master node
struct SurfaceData

19
ScriptableRenderPipeline/LightweightPipeline/LWRP/ShaderLibrary/Lighting.hlsl


half roughness;
half roughness2;
half grazingTerm;
// We save some light invariant BRDF terms so we don't have to recompute
// them in the light loop. Take a look at the DirectBRDF function for a detailed explanation.
half normalizationTerm; // roughness * 4.0 + 2.0
half roughness2MinusOne; // roughness² - 1.0
};
half ReflectivitySpecular(half3 specular)

outBRDFData.roughness = PerceptualRoughnessToRoughness(outBRDFData.perceptualRoughness);
outBRDFData.roughness2 = outBRDFData.roughness * outBRDFData.roughness;
outBRDFData.normalizationTerm = outBRDFData.roughness * 4.0h + 2.0h;
outBRDFData.roughness2MinusOne = outBRDFData.roughness2 - 1.0h;
#ifdef _ALPHAPREMULTIPLY_ON
outBRDFData.diffuse *= alpha;
alpha = alpha * oneMinusReflectivity + reflectivity;

half LoH = saturate(dot(lightDirectionWS, halfDir));
// GGX Distribution multiplied by combined approximation of Visibility and Fresnel
// BRDFspec = (D * V * F) / 4.0
// D = roughness² / ( NoH² * (roughness² - 1) + 1 )²
// V * F = 1.0 / ( LoH² * (roughness + 0.5) )
half d = NoH * NoH * (brdfData.roughness2 - 1.h) + 1.00001h;
// Final BRDFspec = roughness² / ( NoH² * (roughness² - 1) + 1 )² * (LoH² * (roughness + 0.5) * 4.0)
// We further optimize a few light invariant terms
// brdfData.normalizationTerm = (roughness + 0.5) * 4.0 rewritten as roughness * 4.0 + 2.0 to fit a MAD.
half d = NoH * NoH * brdfData.roughness2MinusOne + 1.00001h;
half specularTerm = brdfData.roughness2 / ((d * d) * max(0.1h, LoH2) * (brdfData.roughness + 0.5h) * 4);
half specularTerm = brdfData.roughness2 / ((d * d) * max(0.1h, LoH2) * brdfData.normalizationTerm);
// On mobiles (where half actually means something) the denominator has a risk of overflow.
// clamp below was added specifically to "fix" that, but dx compiler (we convert bytecode to metal/gles)

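The BRDF refactor above precomputes two light-invariant terms per material, roughness2 - 1 and (roughness + 0.5) * 4.0 rewritten as roughness * 4.0 + 2.0 so it fits a single MAD, and reuses them in the light loop. The standalone C# check below (not pipeline code) confirms the factored denominator matches the original expression.

using System;

// Standalone check: the factored specular denominator used after the refactor matches
// the original expression for any roughness, NoH and LoH2 inputs.
static class BrdfRefactorCheck
{
    static float SpecOriginal(float roughness, float roughness2, float NoH, float LoH2)
    {
        float d = NoH * NoH * (roughness2 - 1.0f) + 1.00001f;
        return roughness2 / ((d * d) * Math.Max(0.1f, LoH2) * (roughness + 0.5f) * 4.0f);
    }

    static float SpecRefactored(float roughness, float roughness2, float NoH, float LoH2)
    {
        float roughness2MinusOne = roughness2 - 1.0f;         // precomputed per material
        float normalizationTerm  = roughness * 4.0f + 2.0f;   // (roughness + 0.5) * 4, expressed as a MAD
        float d = NoH * NoH * roughness2MinusOne + 1.00001f;
        return roughness2 / ((d * d) * Math.Max(0.1f, LoH2) * normalizationTerm);
    }

    static void Main()
    {
        float roughness = 0.3f, roughness2 = roughness * roughness;
        float NoH = 0.9f, LoH2 = 0.64f;
        Console.WriteLine(SpecOriginal(roughness, roughness2, NoH, LoH2));
        Console.WriteLine(SpecRefactored(roughness, roughness2, NoH, LoH2)); // identical up to float rounding
    }
}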
66
ScriptableRenderPipeline/LightweightPipeline/LWRP/ShaderLibrary/LightweightPassLit.hlsl


half4 fogFactorAndVertexLight : TEXCOORD6; // x: fogFactor, yzw: vertex light
#ifdef _SHADOWS_ENABLED
#endif
float4 clipPos : SV_POSITION;
UNITY_VERTEX_INPUT_INSTANCE_ID

#endif
inputData.viewDirectionWS = FragmentViewDirWS(viewDir);
#ifdef _SHADOWS_ENABLED
#else
inputData.shadowCoord = float4(0, 0, 0, 0);
#endif
inputData.fogCoord = IN.fogFactorAndVertexLight.x;
inputData.vertexLighting = IN.fogFactorAndVertexLight.yzw;
inputData.bakedGI = SAMPLE_GI(IN.lightmapUV, IN.vertexSH, inputData.normalWS);

// Vertex and Fragment functions //
///////////////////////////////////////////////////////////////////////////////
// Vertex: Used for Standard and StandardSimpleLighting shaders
// Used in Standard (Physically Based) shader
LightweightVertexOutput LitPassVertex(LightweightVertexInput v)
{
LightweightVertexOutput o = (LightweightVertexOutput)0;

o.uv = TransformMainTextureCoord(v.texcoord);
o.posWSShininess.xyz = TransformObjectToWorld(v.vertex.xyz);
o.posWSShininess.w = _Shininess * 128.0;
o.clipPos = TransformWorldToHClip(o.posWSShininess.xyz);
half3 viewDir = VertexViewDirWS(GetCameraPositionWS() - o.posWSShininess.xyz);

half fogFactor = ComputeFogFactor(o.clipPos.z);
o.fogFactorAndVertexLight = half4(fogFactor, vertexLight);
#ifdef _SHADOWS_ENABLED
#endif
// Used for Standard shader
// Used in Standard (Physically Based) shader
half4 LitPassFragment(LightweightVertexOutput IN) : SV_Target
{
UNITY_SETUP_INSTANCE_ID(IN);

ApplyFog(color.rgb, inputData.fogCoord);
return color;
}
// Used in Standard (Simple Lighting) shader
// TODO: we only need to specialise this because of _Shininess prop
// Once we refactor SimpleLighting shader we will be able to share vertex
// between PBS and Simple
LightweightVertexOutput LitPassVertexSimple(LightweightVertexInput v)
{
LightweightVertexOutput o = (LightweightVertexOutput)0;
UNITY_SETUP_INSTANCE_ID(v);
UNITY_TRANSFER_INSTANCE_ID(v, o);
UNITY_INITIALIZE_VERTEX_OUTPUT_STEREO(o);
o.uv = TransformMainTextureCoord(v.texcoord);
o.posWSShininess.xyz = TransformObjectToWorld(v.vertex.xyz);
o.posWSShininess.w = _Shininess * 128.0;
o.clipPos = TransformWorldToHClip(o.posWSShininess.xyz);
half3 viewDir = VertexViewDirWS(GetCameraPositionWS() - o.posWSShininess.xyz);
#ifdef _NORMALMAP
o.normal.w = viewDir.x;
o.tangent.w = viewDir.y;
o.binormal.w = viewDir.z;
#else
o.viewDir = viewDir;
#endif
// initializes o.normal and if _NORMALMAP also o.tangent and o.binormal
OUTPUT_NORMAL(v, o);
// We either sample GI from lightmap or SH.
// Lightmap UV and vertex SH coefficients use the same interpolator ("float2 lightmapUV" for lightmap or "half3 vertexSH" for SH)
// see DECLARE_LIGHTMAP_OR_SH macro.
// The following functions initialize the correct variable with the correct data
OUTPUT_LIGHTMAP_UV(v.lightmapUV, unity_LightmapST, o.lightmapUV);
OUTPUT_SH(o.normal.xyz, o.vertexSH);
half3 vertexLight = VertexLighting(o.posWSShininess.xyz, o.normal.xyz);
half fogFactor = ComputeFogFactor(o.clipPos.z);
o.fogFactorAndVertexLight = half4(fogFactor, vertexLight);
#ifdef _SHADOWS_ENABLED
#if SHADOWS_SCREEN
o.shadowCoord = ComputeShadowCoord(o.clipPos);
#else
o.shadowCoord = TransformWorldToShadowCoord(o.posWSShininess.xyz);
#endif
#endif
return o;
}
// Used for StandardSimpleLighting shader

15
ScriptableRenderPipeline/LightweightPipeline/LWRP/ShaderLibrary/LightweightPassMeta.hlsl


float2 uv : TEXCOORD0;
};
float4 MetaVertexPosition(float4 vertex, float2 uv1, float2 uv2, float4 lightmapST, float4 dynlightmapST)
float4 MetaVertexPosition(float4 vertex, float2 uv1, float2 uv2, float4 lightmapST)
{
if (unity_MetaVertexControl.x)
{

vertex.z = vertex.z > 0 ? 1.0e-4f : 0.0f;
}
if (unity_MetaVertexControl.y)
{
vertex.xy = uv2 * dynlightmapST.xy + dynlightmapST.zw;
// OpenGL right now needs to actually use incoming vertex position,
// so use it in a very dummy way
vertex.z = vertex.z > 0 ? 1.0e-4f : 0.0f;
vertex.z = vertex.z > 0 ? REAL_MIN : 0.0f;
}
return TransformWorldToHClip(vertex.xyz); // Need to transfer from world to clip compared to legacy
}

MetaVertexOuput LightweightVertexMeta(MetaVertexInput v)
{
MetaVertexOuput o;
o.pos = MetaVertexPosition(v.vertex, v.uv1.xy, v.uv2.xy, unity_LightmapST, unity_DynamicLightmapST);
o.uv = TRANSFORM_TEX(v.uv0, _MainTex);
o.pos = MetaVertexPosition(v.vertex, v.uv1.xy, v.uv2.xy, unity_LightmapST);
o.uv = TransformMainTextureCoord(v.uv0);
return o;
}

10
ScriptableRenderPipeline/LightweightPipeline/LWRP/ShaderLibrary/Shadows.hlsl


#define BEYOND_SHADOW_FAR(shadowCoord) shadowCoord.z >= UNITY_RAW_FAR_CLIP_VALUE
#endif
#define OUTSIDE_SHADOW_BOUNDS(shadowCoord) shadowCoord.x <= 0 || shadowCoord.x >= 1 || shadowCoord.y <= 0 || shadowCoord.y >= 1 || BEYOND_SHADOW_FAR(shadowCoord)
half GetShadowStrength()
{
return _ShadowData.x;

#endif
// Shadow coords that fall out of the light frustum volume must always return attenuation 1.0
return (OUTSIDE_SHADOW_BOUNDS(shadowCoord)) ? 1.0 : attenuation;
return BEYOND_SHADOW_FAR(shadowCoord) ? 1.0 : attenuation;
}
inline half ComputeCascadeIndex(float3 positionWS)

half RealtimeShadowAttenuation(float4 shadowCoord)
{
#if NO_SHADOWS
#ifndef _SHADOWS_ENABLED
return 1.0h;
#endif
#if defined(NO_SHADOWS)
return 1.0h;
#elif SHADOWS_SCREEN
return SampleScreenSpaceShadowMap(shadowCoord);

3
ScriptableRenderPipeline/LightweightPipeline/LWRP/Shaders/LightweightStandard.shader


#pragma multi_compile _ _ADDITIONAL_LIGHTS
#pragma multi_compile _ _VERTEX_LIGHTS
#pragma multi_compile _ _MIXED_LIGHTING_SUBTRACTIVE
#pragma multi_compile_fog
#pragma multi_compile _ _SHADOWS_ENABLED
#pragma multi_compile_fog
//--------------------------------------
// GPU Instancing

Some files were not shown because the number of changed files in this diff is too large.
