
Merge branch 'master'

/main
Evgenii Golubev, 7 years ago
Current commit 070349c8
214 files changed, 3437 insertions(+), 3848 deletions(-)
  1. 23 - .gitignore
  2. 987 - ImageTemplates/HDRenderPipeline/Scenes/1xxx_Materials/1202_Lit_DoubleSideNormalMode.unity.png
  3. 999 - ImageTemplates/HDRenderPipeline/Scenes/1xxx_Materials/1206_Lit_Transparent_Distortion.unity.png
  4. 45 - ImageTemplates/HDRenderPipeline/Scenes/1xxx_Materials/1206_Lit_Transparent_Distortion.unity.png.meta
  5. 999 - ImageTemplates/HDRenderPipeline/Scenes/2xxx_Lighting/2401_Light_on_Tesselation.unity.png
  6. 244 - SampleScenes/HDTest/HDRenderLoopTest.unity
  7. 95 - ScriptableRenderPipeline/Core/CoreRP/CoreUtils.cs
  8. 90 - ScriptableRenderPipeline/Core/CoreRP/Editor/CoreEditorDrawers.cs
  9. 28 - ScriptableRenderPipeline/Core/CoreRP/Editor/ShaderGenerator/ShaderTypeGeneration.cs
  10. 494 - ScriptableRenderPipeline/Core/CoreRP/RTHandle.cs
  11. 8 - ScriptableRenderPipeline/Core/CoreRP/Resources/GPUCopy.compute
  12. 3 - ScriptableRenderPipeline/Core/CoreRP/Resources/GPUCopy.cs
  13. 11 - ScriptableRenderPipeline/Core/CoreRP/Resources/GPUCopyAsset.cs
  14. 15 - ScriptableRenderPipeline/Core/CoreRP/ShaderGenerator/ShaderGeneratorAttributes.cs
  15. 58 - ScriptableRenderPipeline/Core/CoreRP/ShaderLibrary/Common.hlsl
  16. 6 - ScriptableRenderPipeline/Core/CoreRP/ShaderLibrary/CommonLighting.hlsl
  17. 6 - ScriptableRenderPipeline/Core/CoreRP/ShaderLibrary/Debug.hlsl
  18. 2 - ScriptableRenderPipeline/Core/CoreRP/ShaderLibrary/ImageBasedLighting.hlsl
  19. 1 - ScriptableRenderPipeline/Core/CoreRP/Shadow/Shadow.cs
  20. 2 - ScriptableRenderPipeline/Core/CoreRP/TextureCache.cs
  21. 2 - ScriptableRenderPipeline/Core/package.json
  22. 108 - ScriptableRenderPipeline/HDRenderPipeline/HDRP/Camera/HDCamera.cs
  23. 4 - ScriptableRenderPipeline/HDRenderPipeline/HDRP/Debug/DebugColorPicker.shader
  24. 10 - ScriptableRenderPipeline/HDRenderPipeline/HDRP/Debug/DebugDisplay.cs
  25. 34 - ScriptableRenderPipeline/HDRenderPipeline/HDRP/Debug/DebugDisplay.hlsl
  26. 2 - ScriptableRenderPipeline/HDRenderPipeline/HDRP/Debug/DebugDisplayLatlong.shader
  27. 10 - ScriptableRenderPipeline/HDRenderPipeline/HDRP/Debug/DebugFullScreen.shader
  28. 2 - ScriptableRenderPipeline/HDRenderPipeline/HDRP/Debug/LightingDebugPanel.cs
  29. 117 - ScriptableRenderPipeline/HDRenderPipeline/HDRP/Debug/MaterialDebug.cs
  30. 8 - ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/Camera/HDCameraEditor.cs
  31. 10 - ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/Camera/HDCameraUI.cs
  32. 2 - ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/Decal/DecalMenuItems.cs
  33. 94 - ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/EditorRenderPipelineResources/ReflectionProbesPreview.shader
  34. 5 - ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/HDAssetFactory.cs
  35. 62 - ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/HDRenderPipelineMenuItems.cs
  36. 14 - ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/Lighting/HDLightEditor.cs
  37. 16 - ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/Lighting/Reflection/HDReflectionProbeUI.Drawers.cs
  38. 8 - ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/Lighting/Reflection/PlanarReflectionProbeUI.Drawers.cs
  39. 6 - ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/Lighting/Reflection/Volume/InfluenceVolumeUI.Drawers.cs
  40. 2 - ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/Lighting/Reflection/Volume/ProxyVolumeUI.cs
  41. 69 - ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/Material/LayeredLit/LayeredLitUI.cs
  42. 43 - ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/Material/Lit/BaseLitUI.cs
  43. 123 - ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/Material/Lit/LitUI.cs
  44. 6 - ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/Material/Unlit/BaseUnlitUI.cs
  45. 14 - ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/RenderLoopSettings/FrameSettingsUI.cs
  46. 3 - ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/RenderLoopSettings/LightLoopSettingsUI.cs
  47. 6 - ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/RenderLoopSettings/RenderPipelineSettingsUI.cs
  48. 12 - ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/RenderLoopSettings/SerializedRenderPipelineSettings.cs
  49. 5 - ScriptableRenderPipeline/HDRenderPipeline/HDRP/HDCustomSamplerId.cs
  50. 725 - ScriptableRenderPipeline/HDRenderPipeline/HDRP/HDRenderPipeline.cs
  51. 8 - ScriptableRenderPipeline/HDRenderPipeline/HDRP/HDRenderPipelineAsset.asset
  52. 19 - ScriptableRenderPipeline/HDRenderPipeline/HDRP/HDRenderPipelineAsset.cs
  53. 55 - ScriptableRenderPipeline/HDRenderPipeline/HDRP/HDStringConstants.cs
  54. 197 - ScriptableRenderPipeline/HDRenderPipeline/HDRP/HDUtils.cs
  55. 2 - ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Deferred.shader
  56. 2 - ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/DeferredDirectionalShadow.compute
  57. 4 - ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightLoop/GlobalLightLoopSettings.cs
  58. 105 - ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightLoop/LightLoop.cs
  59. 1 - ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightLoop/LightLoop.cs.hlsl
  60. 4 - ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightLoop/LightLoopDef.hlsl
  61. 6 - ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightLoop/LightLoopSettings.cs
  62. 28 - ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightLoop/lightlistbuild-clustered.compute
  63. 19 - ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Reflection/PlanarReflectionProbeCache.cs
  64. 4 - ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Reflection/ProbeWrapper.cs
  65. 20 - ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Reflection/ReflectionProbeCache.cs
  66. 16 - ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Reflection/ReflectionSystemInternal.cs
  67. 13 - ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/Resources/VolumetricLighting.compute
  68. 3 - ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/VolumetricLighting.cs
  69. 12 - ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/Builtin/BuiltinData.cs
  70. 16 - ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/Builtin/BuiltinData.cs.hlsl
  71. 6 - ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/Decal/Decal.cs
  72. 6 - ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/Decal/Decal.cs.hlsl
  73. 1 - ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/Decal/Decal.hlsl
  74. 4 - ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/Decal/DecalUtilities.hlsl
  75. 1 - ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/GGXConvolution/RuntimeFilterIBL.cs
  76. 92 - ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/LayeredLit/LayeredLit.shader
  77. 3 - ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/LayeredLit/LayeredLitData.hlsl
  78. 2 - ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/LayeredLit/LayeredLitDataDisplacement.hlsl
  79. 98 - ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/LayeredLit/LayeredLitTessellation.shader
  80. 13 - ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/Lit/Lit.cs
  81. 84 - ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/Lit/Lit.cs.hlsl
  82. 92 - ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/Lit/Lit.hlsl
  83. 91 - ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/Lit/Lit.shader
  84. 6 - ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/Lit/LitData.hlsl
  85. 2 - ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/Lit/LitDataDisplacement.hlsl
  86. 4 - ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/Lit/LitReference.hlsl
  87. 97 - ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/Lit/LitTessellation.shader
  88. 7 - ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/MaterialUtilities.hlsl
  89. 4 - ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/RenderPipelineMaterial.cs
  90. 105 - ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/SubsurfaceScattering/SubsurfaceScatteringManager.cs
  91. 25 - ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/Unlit/Unlit.shader
  92. 39 - ScriptableRenderPipeline/HDRenderPipeline/HDRP/RenderPipeline/FrameSettings.cs
  93. 24 - ScriptableRenderPipeline/HDRenderPipeline/HDRP/RenderPipeline/RenderPipelineSettings.cs
  94. 8 - ScriptableRenderPipeline/HDRenderPipeline/HDRP/RenderPipelineResources/Blit.shader
  95. 251 - ScriptableRenderPipeline/HDRenderPipeline/HDRP/RenderPipelineResources/BufferPyramid.cs
  96. 2 - ScriptableRenderPipeline/HDRenderPipeline/HDRP/RenderPipelineResources/HDRenderPipelineResources.asset
  97. 1 - ScriptableRenderPipeline/HDRenderPipeline/HDRP/RenderPipelineResources/RenderPipelineResources.cs
  98. 14 - ScriptableRenderPipeline/HDRenderPipeline/HDRP/ShaderVariables.hlsl
  99. 12 - ScriptableRenderPipeline/HDRenderPipeline/HDRP/ShaderVariablesFunctions.hlsl
  100. 19 - ScriptableRenderPipeline/HDRenderPipeline/HDRP/ShaderVariablesMatrixDefsHDCamera.hlsl

23
.gitignore


artifacts/**
build/**
DataSource/*
obj/*
obj/*
*.aspx
*.browser
*.sln
*.suo
DataSource/*
*.exe
*.ini
*.map
*.browser
*.map
*.ini
*.npmrc
*.pyc
*.exe
*.aspx
*.sln
*.suo
*.npmrc

987
ImageTemplates/HDRenderPipeline/Scenes/1xxx_Materials/1202_Lit_DoubleSideNormalMode.unity.png
File diff is too large to display.

999
ImageTemplates/HDRenderPipeline/Scenes/1xxx_Materials/1206_Lit_Transparent_Distortion.unity.png
File diff is too large to display.

45
ImageTemplates/HDRenderPipeline/Scenes/1xxx_Materials/1206_Lit_Transparent_Distortion.unity.png.meta


serializedVersion: 5
mipmaps:
mipMapMode: 0
enableMipMap: 0
enableMipMap: 1
sRGBTexture: 1
linearTexture: 0
fadeOut: 0

filterMode: -1
aniso: -1
mipBias: -1
wrapU: 1
wrapV: 1
wrapW: 1
nPOTScale: 0
wrapU: -1
wrapV: -1
wrapW: -1
nPOTScale: 1
lightmap: 0
compressionQuality: 50
spriteMode: 0

spritePivot: {x: 0.5, y: 0.5}
spriteBorder: {x: 0, y: 0, z: 0, w: 0}
spritePixelsToUnits: 100
alphaUsage: 0
alphaUsage: 1
alphaIsTransparency: 0
spriteTessellationDetail: -1
textureType: 0

platformSettings:
- serializedVersion: 2
buildTarget: DefaultTexturePlatform
maxTextureSize: 2048
resizeAlgorithm: 0
textureFormat: -1
textureCompression: 0
compressionQuality: 50
crunchedCompression: 0
allowsAlphaSplitting: 0
overridden: 0
androidETC2FallbackOverride: 0
- serializedVersion: 2
buildTarget: Standalone
maxTextureSize: 2048
resizeAlgorithm: 0
textureFormat: -1
textureCompression: 0
compressionQuality: 50
crunchedCompression: 0
allowsAlphaSplitting: 0
overridden: 0
androidETC2FallbackOverride: 0
- serializedVersion: 2
buildTarget: iPhone
maxTextureSize: 2048
resizeAlgorithm: 0
textureFormat: -1
textureCompression: 0
compressionQuality: 50
crunchedCompression: 0
allowsAlphaSplitting: 0
overridden: 0
androidETC2FallbackOverride: 0
- serializedVersion: 2
buildTarget: Windows Store Apps
maxTextureSize: 2048
resizeAlgorithm: 0
textureFormat: -1

999
ImageTemplates/HDRenderPipeline/Scenes/2xxx_Lighting/2401_Light_on_Tesselation.unity.png
File diff is too large to display.

244
SampleScenes/HDTest/HDRenderLoopTest.unity


shadowResolution: 512
shadowDimmer: 1
shadowFadeDistance: 10000
enableContactShadows: 0
contactShadowLength: 0
contactShadowDistanceScaleFactor: 0.5
contactShadowMaxDistance: 50
contactShadowFadeDistance: 5
contactShadowSampleCount: 8
shadowCascadeCount: 4
shadowCascadeRatios:
- 0.05

(The diff repeats this identical block of shadow settings 33 times, presumably once per light in the scene; only one copy is kept here.)

m_LookSpeedController: 120
m_LookSpeedMouse: 10
m_MoveSpeed: 50
m_MoveSpeedIncrement: 2.5
m_Turbo: 10
--- !u!114 &1828470167
MonoBehaviour:

type: 2}
--- !u!114 &1828470169
MonoBehaviour:
m_ObjectHideFlags: 0
m_ObjectHideFlags: 2
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 1828470159}

m_Bits: 4294967295
m_FrameSettings:
enableShadow: 1
enableContactShadows: 1
enableSSR: 1
enableSSAO: 1
enableSubsurfaceScattering: 1

(Seven more copies of the same shadow-settings block follow here; they are identical except that one uses shadowResolution: 1024 instead of 512.)

95
ScriptableRenderPipeline/Core/CoreRP/CoreUtils.cs


}
}
static RenderTexture m_EmptyUAV;
public static RenderTexture emptyUAV
{
get
{
if(m_EmptyUAV == null)
{
m_EmptyUAV = new RenderTexture(1, 1, 0);
m_EmptyUAV.enableRandomWrite = true;
m_EmptyUAV.Create();
}
return m_EmptyUAV;
}
}
public static void ClearRenderTarget(CommandBuffer cmd, ClearFlag clearFlag, Color clearColor)
{
if (clearFlag != ClearFlag.None)
cmd.ClearRenderTarget((clearFlag & ClearFlag.Depth) != 0, (clearFlag & ClearFlag.Color) != 0, clearColor);
}
if (clearFlag != ClearFlag.None)
cmd.ClearRenderTarget((clearFlag & ClearFlag.Depth) != 0, (clearFlag & ClearFlag.Color) != 0, clearColor);
ClearRenderTarget(cmd, clearFlag, clearColor);
}
public static void SetRenderTarget(CommandBuffer cmd, RenderTargetIdentifier buffer, ClearFlag clearFlag = ClearFlag.None, int miplevel = 0, CubemapFace cubemapFace = CubemapFace.Unknown, int depthSlice = 0)

public static void SetRenderTarget(CommandBuffer cmd, RenderTargetIdentifier colorBuffer, RenderTargetIdentifier depthBuffer, ClearFlag clearFlag, Color clearColor, int miplevel = 0, CubemapFace cubemapFace = CubemapFace.Unknown, int depthSlice = 0)
{
cmd.SetRenderTarget(colorBuffer, depthBuffer, miplevel, cubemapFace, depthSlice);
if (clearFlag != ClearFlag.None)
cmd.ClearRenderTarget((clearFlag & ClearFlag.Depth) != 0, (clearFlag & ClearFlag.Color) != 0, clearColor);
ClearRenderTarget(cmd, clearFlag, clearColor);
}
public static void SetRenderTarget(CommandBuffer cmd, RenderTargetIdentifier[] colorBuffers, RenderTargetIdentifier depthBuffer)

public static void SetRenderTarget(CommandBuffer cmd, RenderTargetIdentifier[] colorBuffers, RenderTargetIdentifier depthBuffer, ClearFlag clearFlag, Color clearColor)
{
cmd.SetRenderTarget(colorBuffers, depthBuffer);
if (clearFlag != ClearFlag.None)
cmd.ClearRenderTarget((clearFlag & ClearFlag.Depth) != 0, (clearFlag & ClearFlag.Color) != 0, clearColor);
ClearRenderTarget(cmd, clearFlag, clearColor);
public static RenderTexture CreateRenderTexture(RenderTextureDescriptor baseDesc, int depthBufferBits, RenderTextureFormat format,
RenderTextureReadWrite readWrite = RenderTextureReadWrite.Default)
public static string GetRenderTargetAutoName(int width, int height, RenderTextureFormat format, string name = "", bool mips = false, bool enableMSAA = false, MSAASamples msaaSamples = MSAASamples.None)
baseDesc.depthBufferBits = depthBufferBits;
baseDesc.colorFormat = format;
baseDesc.sRGB = (readWrite != RenderTextureReadWrite.Linear);
// Depth-only needs bindMS in order to use with CopyTexture
if ((format == RenderTextureFormat.Depth) && (baseDesc.msaaSamples > 1))
baseDesc.bindMS = true;
return new RenderTexture(baseDesc);
}
public static void CreateCmdTemporaryRT(CommandBuffer cmd, int nameID, RenderTextureDescriptor baseDesc,
int depthBufferBits, FilterMode filter, RenderTextureFormat format,
RenderTextureReadWrite readWrite = RenderTextureReadWrite.Default, int msaaSamplesOverride = 0, bool enableRandomWrite = false)
{
if (msaaSamplesOverride > 0)
UpdateRenderTextureDescriptor(ref baseDesc, depthBufferBits, format, readWrite, msaaSamplesOverride, enableRandomWrite);
string temp;
if (enableMSAA)
temp = string.Format("{0}x{1}_{2}{3}_{4}", width, height, format, mips ? "_Mips" : "", msaaSamples.ToString());
UpdateRenderTextureDescriptor(ref baseDesc, depthBufferBits, format, readWrite, baseDesc.msaaSamples, enableRandomWrite);
cmd.GetTemporaryRT(nameID, baseDesc, filter);
}
public static void UpdateRenderTextureDescriptor(ref RenderTextureDescriptor baseDesc, int depthBufferBits, RenderTextureFormat format, RenderTextureReadWrite readWrite, int msaaSamples, bool enableRandomWrite)
{
baseDesc.depthBufferBits = depthBufferBits;
baseDesc.colorFormat = format;
baseDesc.sRGB = (readWrite != RenderTextureReadWrite.Linear);
temp = string.Format("{0}x{1}_{2}{3}", width, height, format, mips ? "_Mips" : "");
Debug.Assert(!enableRandomWrite || (msaaSamples == 1));
baseDesc.msaaSamples = msaaSamples;
baseDesc.enableRandomWrite = enableRandomWrite;
}
temp = String.Format("{0}_{1}", name == "" ? "RenderTarget" : name, temp);
public static void ClearCubemap(CommandBuffer cmd, RenderTargetIdentifier buffer, Color clearColor)
{
// We should have the option to clear mipmaps here, but since we only get a RenderTargetIdentifier, we can't know how many there are to clear...
// So for now, we won't do it.
for (int i = 0; i < 6; ++i)
SetRenderTarget(cmd, buffer, ClearFlag.Color, clearColor, 0, (CubemapFace)i);
return temp;
}
public static void ClearCubemap(CommandBuffer cmd, RenderTexture renderTexture, Color clearColor, bool clearMips = false)

mesh.triangles = triangles;
return mesh;
}
public static void ResizeHTile(ref RenderTexture hTile, ref RenderTargetIdentifier hTileRT, RenderTextureDescriptor desc)
{
// We must use a RenderTexture and not GetTemporaryRT(), as Unity currently only allows binding a RenderTexture as a UAV in a pixel shader
// We use 8x8 tiles in order to match the native GCN HTile as closely as possible.
desc.width = (desc.width + 7) / 8;
desc.height = (desc.height + 7) / 8;
// TODO: This fails allocation with MSAA enabled?
hTile = CreateRenderTexture(desc, 0, RenderTextureFormat.R8, RenderTextureReadWrite.Linear); // DXGI_FORMAT_R8_UINT is not supported by Unity
hTile.filterMode = FilterMode.Point;
hTile.enableRandomWrite = true;
hTile.Create();
hTileRT = new RenderTargetIdentifier(hTile);
}
}
}
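The change above folds the clear logic that was duplicated across the SetRenderTarget overloads into a single ClearRenderTarget helper. A minimal sketch of the resulting pattern, assuming the CoreRP ClearFlag enum and Unity's CommandBuffer API are in scope; the class name here is hypothetical, and only one of the many overloads is shown.

using UnityEngine;
using UnityEngine.Rendering;
using UnityEngine.Experimental.Rendering; // assumed location of the ClearFlag enum

public static class RenderTargetHelperSketch
{
    // Shared clear logic, mirroring CoreUtils.ClearRenderTarget in the diff above.
    public static void ClearRenderTarget(CommandBuffer cmd, ClearFlag clearFlag, Color clearColor)
    {
        if (clearFlag != ClearFlag.None)
            cmd.ClearRenderTarget((clearFlag & ClearFlag.Depth) != 0, (clearFlag & ClearFlag.Color) != 0, clearColor);
    }

    // Each SetRenderTarget overload now binds the target(s) and delegates the clear,
    // instead of repeating the flag test inline.
    public static void SetRenderTarget(CommandBuffer cmd, RenderTargetIdentifier colorBuffer, RenderTargetIdentifier depthBuffer,
        ClearFlag clearFlag, Color clearColor, int miplevel = 0, CubemapFace cubemapFace = CubemapFace.Unknown, int depthSlice = 0)
    {
        cmd.SetRenderTarget(colorBuffer, depthBuffer, miplevel, cubemapFace, depthSlice);
        ClearRenderTarget(cmd, clearFlag, clearColor);
    }
}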

90
ScriptableRenderPipeline/Core/CoreRP/Editor/CoreEditorDrawers.cs


using System;
using System.Collections.Generic;
using UnityEditor.AnimatedValues;
using UnityEngine;

[Flags]
public enum FoldoutOption
{
None = 0,
Indent = 1 << 0,
Animate = 1 << 1
}
[Flags]
public enum FadeOption
{
None = 0,
Indent = 1 << 0,
Animate = 1 << 1
}
public static class CoreEditorDrawer<TUIState, TData>
{
public interface IDrawer

return new ActionDrawerInternal(drawers);
}
public static IDrawer FadeGroup(AnimBoolItemGetter fadeGetter, bool indent, params IDrawer[] groupDrawers)
public static IDrawer FadeGroup(AnimBoolItemGetter fadeGetter, FadeOption options, params IDrawer[] groupDrawers)
return new FadeGroupsDrawerInternal(fadeGetter, indent, groupDrawers);
return new FadeGroupsDrawerInternal(fadeGetter, options, groupDrawers);
public static IDrawer FoldoutGroup(string title, AnimBoolGetter root, bool indent, params IDrawer[] bodies)
public static IDrawer FoldoutGroup(string title, AnimBoolGetter root, FoldoutOption options, params IDrawer[] bodies)
return new FoldoutDrawerInternal(title, root, indent, bodies);
return new FoldoutDrawerInternal(title, root, options, bodies);
}
public static IDrawer Select<T2UIState, T2Data>(

class FadeGroupsDrawerInternal : IDrawer
{
IDrawer[] groupDrawers;
AnimBoolItemGetter getter;
bool indent;
IDrawer[] m_GroupDrawers;
AnimBoolItemGetter m_Getter;
FadeOption m_Options;
public FadeGroupsDrawerInternal(AnimBoolItemGetter getter, bool indent, params IDrawer[] groupDrawers)
bool indent { get { return (m_Options & FadeOption.Indent) != 0; } }
bool animate { get { return (m_Options & FadeOption.Animate) != 0; } }
public FadeGroupsDrawerInternal(AnimBoolItemGetter getter, FadeOption options, params IDrawer[] groupDrawers)
this.groupDrawers = groupDrawers;
this.getter = getter;
this.indent = indent;
m_GroupDrawers = groupDrawers;
m_Getter = getter;
m_Options = options;
}
void IDrawer.Draw(TUIState s, TData p, Editor owner)

GUILayout.BeginVertical();
for (var i = 0; i < groupDrawers.Length; ++i)
for (var i = 0; i < m_GroupDrawers.Length; ++i)
var b = getter(s, p, owner, i);
if (EditorGUILayout.BeginFadeGroup(b.faded))
var b = m_Getter(s, p, owner, i);
if (animate && EditorGUILayout.BeginFadeGroup(b.faded)
|| !animate && b.target)
groupDrawers[i].Draw(s, p, owner);
m_GroupDrawers[i].Draw(s, p, owner);
EditorGUILayout.EndFadeGroup();
if (animate)
EditorGUILayout.EndFadeGroup();
}
GUILayout.EndVertical();
}

{
IDrawer[] bodies;
AnimBoolGetter isExpanded;
string title;
bool indent;
IDrawer[] m_Bodies;
AnimBoolGetter m_IsExpanded;
string m_Title;
FoldoutOption m_Options;
bool m_Animate;
bool animate { get { return (m_Options & FoldoutOption.Animate) != 0; } }
bool indent { get { return (m_Options & FoldoutOption.Indent) != 0; } }
public FoldoutDrawerInternal(string title, AnimBoolGetter isExpanded, bool indent, params IDrawer[] bodies)
public FoldoutDrawerInternal(string title, AnimBoolGetter isExpanded, FoldoutOption options, params IDrawer[] bodies)
this.title = title;
this.isExpanded = isExpanded;
this.bodies = bodies;
this.indent = indent;
m_Title = title;
m_IsExpanded = isExpanded;
m_Bodies = bodies;
m_Options = options;
var r = isExpanded(s, p, owner);
var r = m_IsExpanded(s, p, owner);
r.target = CoreEditorUtils.DrawHeaderFoldout(title, r.target);
r.target = CoreEditorUtils.DrawHeaderFoldout(m_Title, r.target);
if (EditorGUILayout.BeginFadeGroup(r.faded))
if (animate && EditorGUILayout.BeginFadeGroup(r.faded)
|| !animate && r.target)
for (var i = 0; i < bodies.Length; i++)
bodies[i].Draw(s, p, owner);
for (var i = 0; i < m_Bodies.Length; i++)
m_Bodies[i].Draw(s, p, owner);
EditorGUILayout.EndFadeGroup();
if (animate)
EditorGUILayout.EndFadeGroup();
GUILayout.EndVertical();
}
}
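The FadeGroup and FoldoutGroup entry points above replace their bool indent parameter with the [Flags] enums FadeOption and FoldoutOption, so a call site can request indentation and animation independently; the private indent/animate properties then test the bits. A minimal, self-contained sketch of that flags pattern; the enum matches the diff, the demo class is made up.

using System;

[Flags]
enum FoldoutOption
{
    None    = 0,
    Indent  = 1 << 0,
    Animate = 1 << 1
}

static class FoldoutOptionDemo
{
    // Same bit tests as the indent/animate properties in FoldoutDrawerInternal above.
    static bool Indent(FoldoutOption o)  { return (o & FoldoutOption.Indent)  != 0; }
    static bool Animate(FoldoutOption o) { return (o & FoldoutOption.Animate) != 0; }

    static void Main()
    {
        // Callers now combine flags instead of passing a single bool,
        // e.g. FoldoutGroup("Title", getter, FoldoutOption.Indent | FoldoutOption.Animate, ...).
        var options = FoldoutOption.Indent | FoldoutOption.Animate;
        Console.WriteLine("indent=" + Indent(options) + " animate=" + Animate(options));
    }
}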

28
ScriptableRenderPipeline/Core/CoreRP/Editor/ShaderGenerator/ShaderTypeGeneration.cs


if (attr.needParamDebug)
{
string className = type.FullName.Substring(type.FullName.LastIndexOf((".")) + 1); // ClassName includes the nested class name
className = className.Replace('+', '_'); // FullName is "Class+NestedClass"; replace with "Class_NestedClass"
string name = InsertUnderscore(field.Name);
string defineName = ("DEBUGVIEW_" + className + "_" + name).ToUpper();
m_Statics[defineName] = Convert.ToString(attr.paramDefinesStart + debugCounter++);
List<string> displayNames = new List<string>();
displayNames.Add(field.Name);
bool isDirection = false;
bool sRGBDisplay = false;

{
var propertyAttr = (SurfaceDataAttributes[])field.GetCustomAttributes(typeof(SurfaceDataAttributes), false);
// The user may want to override only isDirection and sRGBDisplay;
// in that case an empty string can be specified
if (propertyAttr[0].displayNames[0] != "")
{
displayNames.Clear();
displayNames.AddRange(propertyAttr[0].displayNames);
}
m_DebugFields.Add(new DebugFieldInfo(defineName, field.Name, field.FieldType, isDirection, sRGBDisplay));
string className = type.FullName.Substring(type.FullName.LastIndexOf((".")) + 1); // ClassName includes the nested class name
className = className.Replace('+', '_'); // FullName is "Class+NestedClass"; replace with "Class_NestedClass"
foreach (string it in displayNames)
{
string fieldName = it.Replace(' ', '_');
string name = InsertUnderscore(fieldName);
string defineName = ("DEBUGVIEW_" + className + "_" + name).ToUpper();
m_Statics[defineName] = Convert.ToString(attr.paramDefinesStart + debugCounter++);
m_DebugFields.Add(new DebugFieldInfo(defineName, field.Name, field.FieldType, isDirection, sRGBDisplay));
}
}
if (field.FieldType.IsPrimitive)
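The generator now loops over every display name supplied by SurfaceDataAttributes and emits one DEBUGVIEW_* define (and one DebugFieldInfo) per name, instead of a single define per field. The following self-contained sketch reproduces only the name mangling visible above; the InsertUnderscore helper is my approximation of the real one, and the class name, field names, and start index are illustrative.

using System;
using System.Text;

static class DebugViewDefineDemo
{
    // Rough stand-in for the generator's InsertUnderscore helper (assumed behavior:
    // insert '_' before interior upper-case letters, so "NormalWS" becomes "Normal_WS").
    static string InsertUnderscore(string s)
    {
        var sb = new StringBuilder();
        for (int i = 0; i < s.Length; i++)
        {
            if (i > 0 && char.IsUpper(s[i]) && !char.IsUpper(s[i - 1]) && s[i - 1] != '_')
                sb.Append('_');
            sb.Append(s[i]);
        }
        return sb.ToString();
    }

    static void Main()
    {
        string className = "Lit+SurfaceData".Replace('+', '_');   // FullName "Class+NestedClass" -> "Class_NestedClass"
        string[] displayNames = { "Normal", "Normal View Space" }; // several names declared for one field
        int nextIndex = 1000;                                      // stands in for attr.paramDefinesStart + debugCounter

        foreach (string it in displayNames)
        {
            string fieldName  = it.Replace(' ', '_');
            string defineName = ("DEBUGVIEW_" + className + "_" + InsertUnderscore(fieldName)).ToUpper();
            Console.WriteLine("#define " + defineName + " " + nextIndex++);
        }
        // Prints:
        //   #define DEBUGVIEW_LIT_SURFACEDATA_NORMAL 1000
        //   #define DEBUGVIEW_LIT_SURFACEDATA_NORMAL_VIEW_SPACE 1001
    }
}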

494
ScriptableRenderPipeline/Core/CoreRP/RTHandle.cs


public class RTHandle
{
public static int s_MaxWidth { get; private set; }
public static int s_MaxHeight { get; private set; }
enum RTCategory
{
Regular = 0,
MSAA = 1,
Count
}
// Static management.
public static int s_MaxWidth { get { return s_MaxWidths[(int)RTCategory.Regular]; } }
public static int s_MaxHeight { get { return s_MaxHeights[(int)RTCategory.Regular]; } }
public static int s_MaxWidthMSAAA { get { return s_MaxWidths[(int)RTCategory.MSAA]; } }
public static int s_MaxHeightMSAA { get { return s_MaxHeights[(int)RTCategory.MSAA]; } }
private static int GetMaxWidth(RTCategory category) { return s_MaxWidths[(int)category]; }
private static int GetMaxHeight(RTCategory category) { return s_MaxHeights[(int)category]; }
static List<RTHandle> s_AutoSizedRTs;
public RenderTexture rt { get; private set; }
public RenderTargetIdentifier nameID;
// Parameters for auto-scaled Render Textures
static bool s_ScaledRTSupportsMSAA = false;
static MSAASamples s_ScaledRTCurrentMSAASamples = MSAASamples.None;
static List<RTHandle> s_AutoSizedRTs;
static RTCategory s_ScaledRTCurrentCategory = RTCategory.Regular;
Vector2 scaleFactor = Vector2.one;
ScaleFunc scaleFunc;
static int[] s_MaxWidths = new int[(int)RTCategory.Count];
static int[] s_MaxHeights = new int[(int)RTCategory.Count];
public static int maxWidth { get { return GetMaxWidth(s_ScaledRTCurrentCategory); } }
public static int maxHeight { get { return GetMaxHeight(s_ScaledRTCurrentCategory); } }
s_MaxWidth = 1;
s_MaxHeight = 1;
for (int i = 0; i < (int)RTCategory.Count; ++i)
{
s_MaxWidths[i] = 1;
s_MaxHeights[i] = 1;
}
RTHandle(RenderTexture rt)
// Call this once to set the initial size and allow msaa targets or not.
public static void Initialize(int width, int height, bool scaledRTsupportsMSAA, MSAASamples scaledRTMSAASamples)
this.rt = rt;
nameID = new RenderTargetIdentifier(rt);
}
Debug.Assert(s_AutoSizedRTs.Count == 0, "RTHandle.Initialize should only be called once before allocating any Render Texture.");
for (int i = 0; i < (int)RTCategory.Count; ++i)
{
s_MaxWidths[i] = width;
s_MaxHeights[i] = height;
}
public void Release()
{
s_AutoSizedRTs.Remove(this);
CoreUtils.Destroy(rt);
rt = null;
nameID = BuiltinRenderTextureType.None;
s_ScaledRTSupportsMSAA = scaledRTsupportsMSAA;
s_ScaledRTCurrentMSAASamples = scaledRTMSAASamples;
rth.Release();
if(rth != null)
rth.Release();
public static void SetReferenceSize(int width, int height)
public static void SetReferenceSize(int width, int height, bool msaa, MSAASamples msaaSamples)
// Technically, the enum could be passed as argument directly but let's not pollute public API with unnecessary complexity for now.
RTCategory category = msaa ? RTCategory.MSAA : RTCategory.Regular;
if (width > s_MaxWidth || height > s_MaxHeight)
Resize(width, height);
bool msaaSamplesChanged = msaa && (msaaSamples != s_ScaledRTCurrentMSAASamples);
if (width > GetMaxWidth(category) || height > GetMaxHeight(category) || msaaSamplesChanged)
Resize(width, height, category, msaaSamples);
public static void ResetReferenceSize(int width, int height)
public static void ResetReferenceSize(int width, int height, bool msaa, MSAASamples msaaSamples)
// Technically, the enum could be passed as argument directly but let's not pollute public API with unnecessary complexity for now.
RTCategory category = msaa ? RTCategory.MSAA : RTCategory.Regular;
if (width != s_MaxWidth || height != s_MaxHeight)
Resize(width, height);
bool msaaSamplesChanged = msaa && (msaaSamples != s_ScaledRTCurrentMSAASamples);
if (width != GetMaxWidth(category) || height != GetMaxHeight(category) || msaaSamplesChanged)
Resize(width, height, category, msaaSamples);
static void Resize(int width, int height)
static void Resize(int width, int height, RTCategory category, MSAASamples msaaSamples)
s_MaxWidth = width;
s_MaxHeight = height;
var maxSize = new Vector2Int(s_MaxWidth, s_MaxHeight);
s_MaxWidths[(int)category] = width;
s_MaxHeights[(int)category] = height;
s_ScaledRTCurrentMSAASamples = msaaSamples;
var maxSize = new Vector2Int(width, height);
s_ScaledRTCurrentCategory = category;
var rt = rth.rt;
rt.Release();
var rt = rth.m_RTs[(int)category];
// This can happen if you create a RTH for MSAA. By default we only create the MSAA version of the target.
// Missing version will be created when needed in the getter.
if (rt != null)
{
rt.Release();
Vector2Int scaledSize = rth.GetScaledSize(maxSize);
rt.width = Mathf.Max(scaledSize.x, 1);
rt.height = Mathf.Max(scaledSize.y, 1);
Vector2Int scaledSize;
if (category == RTCategory.MSAA)
rt.antiAliasing = (int)s_ScaledRTCurrentMSAASamples;
if (rth.scaleFunc != null)
{
scaledSize = rth.scaleFunc(maxSize);
}
else
{
scaledSize = new Vector2Int(
x: Mathf.RoundToInt(rth.scaleFactor.x * s_MaxWidth),
y: Mathf.RoundToInt(rth.scaleFactor.y * s_MaxHeight)
);
rt.name = CoreUtils.GetRenderTargetAutoName(rt.width, rt.height, rt.format, rth.m_Name, mips: rt.useMipMap, enableMSAA : category == RTCategory.MSAA, msaaSamples: s_ScaledRTCurrentMSAASamples);
rt.Create();
rt.width = Mathf.Max(scaledSize.x, 1);
rt.height = Mathf.Max(scaledSize.y, 1);
rt.Create();
// This method wraps around regular RenderTexture creation.
// There is no specific logic applied to RenderTextures created this way.
int slices = 1,
DepthBits depthBufferBits = DepthBits.None,
RenderTextureFormat colorFormat = RenderTextureFormat.Default,
FilterMode filterMode = FilterMode.Point,
TextureWrapMode wrapMode = TextureWrapMode.Repeat,
TextureDimension dimension = TextureDimension.Tex2D,
bool sRGB = true,
bool enableRandomWrite = false,
bool useMipMap = false,
bool autoGenerateMips = true,
int anisoLevel = 1,
float mipMapBias = 0f,
MSAASamples msaaSamples = MSAASamples.None,
bool bindTextureMS = false,
bool useDynamicScale = false,
VRTextureUsage vrUsage = VRTextureUsage.None,
RenderTextureMemoryless memoryless = RenderTextureMemoryless.None
int slices = 1,
DepthBits depthBufferBits = DepthBits.None,
RenderTextureFormat colorFormat = RenderTextureFormat.Default,
FilterMode filterMode = FilterMode.Point,
TextureWrapMode wrapMode = TextureWrapMode.Repeat,
TextureDimension dimension = TextureDimension.Tex2D,
bool sRGB = true,
bool enableRandomWrite = false,
bool useMipMap = false,
bool autoGenerateMips = true,
int anisoLevel = 1,
float mipMapBias = 0f,
MSAASamples msaaSamples = MSAASamples.None,
bool bindTextureMS = false,
bool useDynamicScale = false,
VRTextureUsage vrUsage = VRTextureUsage.None,
RenderTextureMemoryless memoryless = RenderTextureMemoryless.None,
string name = ""
bool enableMSAA = msaaSamples != MSAASamples.None;
if (!enableMSAA && bindTextureMS == true)
{
Debug.LogWarning("RTHandle allocated without MSAA but with bindMS set to true, forcing bindMS to false.");
bindTextureMS = false;
}
var rt = new RenderTexture(width, height, (int)depthBufferBits, colorFormat, sRGB ? RenderTextureReadWrite.sRGB : RenderTextureReadWrite.Linear)
{
hideFlags = HideFlags.HideAndDontSave,

bindTextureMS = bindTextureMS,
useDynamicScale = useDynamicScale,
vrUsage = vrUsage,
memorylessMode = memoryless
memorylessMode = memoryless,
name = CoreUtils.GetRenderTargetAutoName(width, height, colorFormat, name, mips: useMipMap, enableMSAA: enableMSAA, msaaSamples: msaaSamples)
return new RTHandle(rt);
RTCategory category = enableMSAA ? RTCategory.MSAA : RTCategory.Regular;
var newRT = new RTHandle();
newRT.SetRenderTexture(rt, category);
newRT.useScaling = false;
newRT.m_EnableRandomWrite = enableRandomWrite;
newRT.m_EnableMSAA = enableMSAA;
newRT.m_Name = name;
return newRT;
// Next two methods are used to allocate RenderTexture that depend on the frame settings (resolution and msaa for now)
// RenderTextures allocated this way are meant to be defined by a scale of camera resolution (full/half/quarter resolution for example).
// The idea is that internally the system will scale up the size of all render texture so that it amortizes with time and not reallocate when a smaller size is required (which is what happens with TemporaryRTs).
// Since MSAA cannot be changed on the fly for a given RenderTexture, a separate instance will be created if the user requires it. This instance will be the one used after the next call of SetReferenceSize if MSAA is required.
DepthBits depthBufferBits = DepthBits.None,
RenderTextureFormat colorFormat = RenderTextureFormat.Default,
FilterMode filterMode = FilterMode.Point,
TextureWrapMode wrapMode = TextureWrapMode.Repeat,
TextureDimension dimension = TextureDimension.Tex2D,
bool sRGB = true,
bool enableRandomWrite = false,
bool useMipMap = false,
bool autoGenerateMips = true,
int anisoLevel = 1,
float mipMapBias = 0f,
MSAASamples msaaSamples = MSAASamples.None,
bool bindTextureMS = false,
bool useDynamicScale = false,
VRTextureUsage vrUsage = VRTextureUsage.None,
RenderTextureMemoryless memoryless = RenderTextureMemoryless.None
DepthBits depthBufferBits = DepthBits.None,
RenderTextureFormat colorFormat = RenderTextureFormat.Default,
FilterMode filterMode = FilterMode.Point,
TextureWrapMode wrapMode = TextureWrapMode.Repeat,
TextureDimension dimension = TextureDimension.Tex2D,
bool sRGB = true,
bool enableRandomWrite = false,
bool useMipMap = false,
bool autoGenerateMips = true,
int anisoLevel = 1,
float mipMapBias = 0f,
bool enableMSAA = false,
bool bindTextureMS = false,
bool useDynamicScale = false,
VRTextureUsage vrUsage = VRTextureUsage.None,
RenderTextureMemoryless memoryless = RenderTextureMemoryless.None,
string name = ""
int width = Mathf.Max(Mathf.RoundToInt(scaleFactor.x * s_MaxWidth), 1);
int height = Mathf.Max(Mathf.RoundToInt(scaleFactor.y * s_MaxHeight), 1);
bool allocForMSAA = s_ScaledRTSupportsMSAA ? enableMSAA : false;
RTCategory category = allocForMSAA ? RTCategory.MSAA : RTCategory.Regular;
int width = Mathf.Max(Mathf.RoundToInt(scaleFactor.x * GetMaxWidth(category)), 1);
int height = Mathf.Max(Mathf.RoundToInt(scaleFactor.y * GetMaxHeight(category)), 1);
var rth = Alloc(width,
var rth = AllocAutoSizedRenderTexture(width,
height,
1,
depthBufferBits,

autoGenerateMips,
anisoLevel,
mipMapBias,
msaaSamples,
enableMSAA,
memoryless
memoryless,
name
s_AutoSizedRTs.Add(rth);
return rth;
}

//
public static RTHandle Alloc(
ScaleFunc scaleFunc,
DepthBits depthBufferBits = DepthBits.None,
RenderTextureFormat colorFormat = RenderTextureFormat.Default,
FilterMode filterMode = FilterMode.Point,
TextureWrapMode wrapMode = TextureWrapMode.Repeat,
TextureDimension dimension = TextureDimension.Tex2D,
bool sRGB = true,
bool enableRandomWrite = false,
bool useMipMap = false,
bool autoGenerateMips = true,
int anisoLevel = 1,
float mipMapBias = 0f,
MSAASamples msaaSamples = MSAASamples.None,
bool bindTextureMS = false,
bool useDynamicScale = false,
VRTextureUsage vrUsage = VRTextureUsage.None,
RenderTextureMemoryless memoryless = RenderTextureMemoryless.None
DepthBits depthBufferBits = DepthBits.None,
RenderTextureFormat colorFormat = RenderTextureFormat.Default,
FilterMode filterMode = FilterMode.Point,
TextureWrapMode wrapMode = TextureWrapMode.Repeat,
TextureDimension dimension = TextureDimension.Tex2D,
bool sRGB = true,
bool enableRandomWrite = false,
bool useMipMap = false,
bool autoGenerateMips = true,
int anisoLevel = 1,
float mipMapBias = 0f,
bool enableMSAA = false,
bool bindTextureMS = false,
bool useDynamicScale = false,
VRTextureUsage vrUsage = VRTextureUsage.None,
RenderTextureMemoryless memoryless = RenderTextureMemoryless.None,
string name = ""
var scaleFactor = scaleFunc(new Vector2Int(s_MaxWidth, s_MaxHeight));
bool allocForMSAA = s_ScaledRTSupportsMSAA ? enableMSAA : false;
RTCategory category = allocForMSAA ? RTCategory.MSAA : RTCategory.Regular;
var scaleFactor = scaleFunc(new Vector2Int(GetMaxWidth(category), GetMaxHeight(category)));
var rth = Alloc(width,
var rth = AllocAutoSizedRenderTexture(width,
height,
1,
depthBufferBits,

autoGenerateMips,
anisoLevel,
mipMapBias,
msaaSamples,
enableMSAA,
memoryless
memoryless,
name
return rth;
}
// Internal function
static RTHandle AllocAutoSizedRenderTexture(
int width,
int height,
int slices,
DepthBits depthBufferBits,
RenderTextureFormat colorFormat,
FilterMode filterMode,
TextureWrapMode wrapMode,
TextureDimension dimension,
bool sRGB,
bool enableRandomWrite,
bool useMipMap,
bool autoGenerateMips,
int anisoLevel,
float mipMapBias,
bool enableMSAA,
bool bindTextureMS,
bool useDynamicScale,
VRTextureUsage vrUsage,
RenderTextureMemoryless memoryless,
string name
)
{
// Here user made a mistake in setting up msaa/bindMS, hence the warning
if (!enableMSAA && bindTextureMS == true)
{
Debug.LogWarning("RTHandle allocated without MSAA but with bindMS set to true, forcing bindMS to false.");
bindTextureMS = false;
}
bool allocForMSAA = s_ScaledRTSupportsMSAA ? enableMSAA : false;
// Here we purposefully disable MSAA so we just force the bindMS param to false.
if (!allocForMSAA)
{
bindTextureMS = false;
}
// MSAA Does not support random read/write.
bool UAV = enableRandomWrite;
if (allocForMSAA && (UAV == true))
{
Debug.LogWarning("RTHandle that is MSAA-enabled cannot allocate MSAA RT with 'enableRandomWrite = true'.");
UAV = false;
}
int msaaSamples = allocForMSAA ? (int)s_ScaledRTCurrentMSAASamples : 1;
RTCategory category = allocForMSAA ? RTCategory.MSAA : RTCategory.Regular;
var rt = new RenderTexture(width, height, (int)depthBufferBits, colorFormat, sRGB ? RenderTextureReadWrite.sRGB : RenderTextureReadWrite.Linear)
{
hideFlags = HideFlags.HideAndDontSave,
volumeDepth = slices,
filterMode = filterMode,
wrapMode = wrapMode,
dimension = dimension,
enableRandomWrite = UAV,
useMipMap = useMipMap,
autoGenerateMips = autoGenerateMips,
anisoLevel = anisoLevel,
mipMapBias = mipMapBias,
antiAliasing = msaaSamples,
bindTextureMS = bindTextureMS,
useDynamicScale = useDynamicScale,
vrUsage = vrUsage,
memorylessMode = memoryless,
name = CoreUtils.GetRenderTargetAutoName(width, height, colorFormat, name, mips : useMipMap, enableMSAA: allocForMSAA, msaaSamples : s_ScaledRTCurrentMSAASamples)
};
rt.Create();
RTHandle rth = new RTHandle();
rth.SetRenderTexture(rt, category);
rth.m_EnableMSAA = enableMSAA;
rth.m_EnableRandomWrite = enableRandomWrite;
rth.useScaling = true;
rth.m_Name = name;
s_AutoSizedRTs.Add(rth);
return rth;
}

public static implicit operator RenderTargetIdentifier(RTHandle handle)
{
return handle.nameID;
}
public static string DumpRTInfo()
{
string result = "";
for (int i = 0; i < s_AutoSizedRTs.Count; ++i)
{
RenderTexture rt = s_AutoSizedRTs[i].rt;
result = string.Format("{0}\nRT ({1})\t Format: {2} W: {3} H {4}\n", result, i, rt.format, rt.width, rt.height );
}
return result;
}
// Instance data
RenderTexture[] m_RTs = new RenderTexture[2];
RenderTargetIdentifier[] m_NameIDs = new RenderTargetIdentifier[2];
bool m_EnableMSAA = false;
bool m_EnableRandomWrite = false;
string m_Name;
Vector2 scaleFactor = Vector2.one;
ScaleFunc scaleFunc;
public bool useScaling { get; private set; }
public RenderTexture rt
{
get
{
if(!useScaling)
{
return m_EnableMSAA ? m_RTs[(int)RTCategory.MSAA] : m_RTs[(int)RTCategory.Regular];
}
else
{
RTCategory category = (m_EnableMSAA && s_ScaledRTCurrentCategory == RTCategory.MSAA) ? RTCategory.MSAA : RTCategory.Regular;
CreateIfNeeded(category);
return m_RTs[(int)category];
}
}
}
public RenderTargetIdentifier nameID
{
get
{
if (!useScaling)
{
return m_EnableMSAA ? m_NameIDs[(int)RTCategory.MSAA] : m_RTs[(int)RTCategory.Regular];
}
else
{
RTCategory category = (m_EnableMSAA && s_ScaledRTCurrentCategory == RTCategory.MSAA) ? RTCategory.MSAA : RTCategory.Regular;
CreateIfNeeded(category);
return m_NameIDs[(int)category];
}
}
}
// Keep constructor private
RTHandle()
{
}
void SetRenderTexture(RenderTexture rt, RTCategory category)
{
m_RTs[(int)category] = rt;
m_NameIDs[(int)category] = new RenderTargetIdentifier(rt);
}
void CreateIfNeeded(RTCategory category)
{
// If a RT was first created for MSAA then the regular one might be null, in this case we create it.
// That's why we never test the MSAA version: It should always be there if RT was declared correctly.
if(category == RTCategory.Regular && m_RTs[(int)RTCategory.Regular] == null)
{
RenderTexture refRT = m_RTs[(int)RTCategory.MSAA];
Debug.Assert(refRT != null);
Vector2Int scaledSize = GetScaledSize(new Vector2Int(s_MaxWidth, s_MaxHeight));
RenderTexture newRT = new RenderTexture(scaledSize.x, scaledSize.y, refRT.depth, refRT.format, refRT.sRGB ? RenderTextureReadWrite.sRGB : RenderTextureReadWrite.Linear)
{
hideFlags = HideFlags.HideAndDontSave,
volumeDepth = refRT.volumeDepth,
filterMode = refRT.filterMode,
wrapMode = refRT.wrapMode,
dimension = refRT.dimension,
enableRandomWrite = m_EnableRandomWrite, // We cannot take the info from the msaa rt since we force it to 1
useMipMap = refRT.useMipMap,
autoGenerateMips = refRT.autoGenerateMips,
anisoLevel = refRT.anisoLevel,
mipMapBias = refRT.mipMapBias,
antiAliasing = 1, // No MSAA for the regular version of the texture.
bindTextureMS = false, // Somehow, this can be true even if antiAliasing == 1. Leads to Unity-internal binding errors.
useDynamicScale = refRT.useDynamicScale,
vrUsage = refRT.vrUsage,
memorylessMode = refRT.memorylessMode,
name = CoreUtils.GetRenderTargetAutoName(refRT.width, refRT.height, refRT.format, m_Name, mips : refRT.useMipMap)
};
newRT.Create();
m_RTs[(int)RTCategory.Regular] = newRT;
m_NameIDs[(int)RTCategory.Regular] = new RenderTargetIdentifier(newRT);
}
}
public void Release()
{
s_AutoSizedRTs.Remove(this);
for (int i = 0; i < (int)RTCategory.Count; ++i)
{
CoreUtils.Destroy(m_RTs[i]);
m_NameIDs[i] = BuiltinRenderTextureType.None;
m_RTs[i] = null;
}
}
public Vector2Int GetScaledSize(Vector2Int refSize)
{
if (scaleFunc != null)
{
return scaleFunc(refSize);
}
else
{
return new Vector2Int(
x: Mathf.RoundToInt(scaleFactor.x * refSize.x),
y: Mathf.RoundToInt(scaleFactor.y * refSize.y)
);
}
}
}
}
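As the diff shows, each RTHandle now carries up to two RenderTextures (a regular and an MSAA variant, indexed by RTCategory), and the static reference size is tracked per category so MSAA and non-MSAA targets can grow independently. A hedged sketch of driving this API from pipeline code, using only the static methods visible above; the namespace, the exact enum members (MSAASamples.MSAA4x, DepthBits.Depth24), and the surrounding class are assumptions.

using UnityEngine;
using UnityEngine.Experimental.Rendering; // assumed namespace for RTHandle, MSAASamples, DepthBits

public class RTHandleUsageSketch
{
    RTHandle m_ColorBuffer;
    RTHandle m_DepthBuffer;

    public void AllocateBuffers()
    {
        // Called once, before any allocation: sets the initial reference size and the MSAA policy.
        RTHandle.Initialize(1920, 1080, scaledRTsupportsMSAA: true, scaledRTMSAASamples: MSAASamples.MSAA4x);

        // Auto-scaled targets: the scaled Alloc overload takes a scale factor relative to the reference size.
        m_ColorBuffer = RTHandle.Alloc(Vector2.one, colorFormat: RenderTextureFormat.ARGBHalf,
                                       sRGB: false, enableMSAA: true, name: "CameraColor");
        m_DepthBuffer = RTHandle.Alloc(Vector2.one, depthBufferBits: DepthBits.Depth24,
                                       colorFormat: RenderTextureFormat.Depth, enableMSAA: true, name: "CameraDepth");
    }

    public void BeginFrame(Camera camera, bool msaa, MSAASamples samples)
    {
        // Grows the internal targets if this camera is larger than anything seen so far
        // (or if the MSAA sample count changed); smaller cameras reuse the existing memory.
        RTHandle.SetReferenceSize(camera.pixelWidth, camera.pixelHeight, msaa, samples);
    }

    public void Cleanup()
    {
        m_ColorBuffer.Release();
        m_DepthBuffer.Release();
    }
}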

8
ScriptableRenderPipeline/Core/CoreRP/Resources/GPUCopy.compute


#include "../ShaderLibrary/Common.hlsl"
SamplerState sampler_LinearClamp;
CBUFFER_START(cb)
float4 _Size;
CBUFFER_END
RWTexture2D<float1> _Result1;
Texture2D<float4> _Source4;

[numthreads(8, 8, 1)]
void KSampleCopy4_1_x(uint2 dispatchThreadId : SV_DispatchThreadID)
{
_Result1[dispatchThreadId] = SAMPLE_TEXTURE2D_LOD(_Source4, sampler_LinearClamp, float2(dispatchThreadId) * _Size.zw, 0.0).x;
_Result1[dispatchThreadId] = LOAD_TEXTURE2D(_Source4, dispatchThreadId).x;
}

3
ScriptableRenderPipeline/Core/CoreRP/Resources/GPUCopy.cs


static readonly int _Result1 = Shader.PropertyToID("_Result1");
static readonly int _Source4 = Shader.PropertyToID("_Source4");
static readonly int _Size = Shader.PropertyToID("_Size");
var s = new Vector4(size.x, size.y, 1f / size.x, 1f / size.y);
cmd.SetComputeVectorParam(m_Shader, _Size, s);
cmd.SetComputeTextureParam(m_Shader, k_SampleKernel_xyzw2x, _Source4, source);
cmd.SetComputeTextureParam(m_Shader, k_SampleKernel_xyzw2x, _Result1, target);
cmd.DispatchCompute(m_Shader, k_SampleKernel_xyzw2x, (int)Mathf.Max((size.x) / 8, 1), (int)Mathf.Max((size.y) / 8, 1), 1);
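GPUCopy.compute now fetches the source texel directly with LOAD_TEXTURE2D instead of a bilinear SAMPLE_TEXTURE2D_LOD, and the C# wrapper dispatches one 8x8 thread group per 8x8 pixel tile. A hedged sketch of calling the generated wrapper from command-buffer code; following the generation pattern in GPUCopyAsset.cs below, the 4-channel-to-x kernel should be exposed as SampleCopyChannel_xyzw2x, but the namespace and the surrounding helper are assumptions.

using UnityEngine;
using UnityEngine.Rendering;
using UnityEngine.Experimental.Rendering; // assumed namespace of the generated GPUCopy wrapper

public static class DepthCopyExample
{
    // Copies the .x channel of a float4 source into a single-channel UAV target,
    // using the generated SampleCopyChannel_xyzw2x method described above.
    public static void CopyDepthChannel(GPUCopy gpuCopy, CommandBuffer cmd,
                                        RenderTargetIdentifier source, RenderTargetIdentifier target,
                                        int width, int height)
    {
        cmd.BeginSample("CopyDepthChannel");
        gpuCopy.SampleCopyChannel_xyzw2x(cmd, source, target, new Vector2(width, height));
        cmd.EndSample("CopyDepthChannel");
    }
}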

11
ScriptableRenderPipeline/Core/CoreRP/Resources/GPUCopyAsset.cs


csc.AppendLine(" public GPUCopy(ComputeShader shader)");
csc.AppendLine(" {");
csc.AppendLine(" m_Shader = shader;");
csm.AppendLine(" static readonly int _Size = Shader.PropertyToID(\"_Size\");");
for (var i = 0; i < operations.Length; i++)
{
var o = operations[i];

k_KernelSize.ToString(), k_KernelSize.ToString()));
cck.AppendLine(string.Format(@"void {0}(uint2 dispatchThreadId : SV_DispatchThreadID)", kernelName));
cck.AppendLine("{");
cck.AppendLine(string.Format(" _Result{0}[dispatchThreadId] = SAMPLE_TEXTURE2D_LOD(_Source{1}, sampler_LinearClamp, float2(dispatchThreadId) * _Size.zw, 0.0).{2};",
cck.AppendLine(string.Format(" _Result{0}[dispatchThreadId] = LOAD_TEXTURE2D(_Source{1}, dispatchThreadId).{2};",
o.targetChannel.ToString(), o.sourceChannel.ToString(), o.subscript));
cck.AppendLine("}");
cck.AppendLine();

// CSharp method
csm.AppendLine(string.Format(@" public void SampleCopyChannel_{0}2{1}(CommandBuffer cmd, RenderTargetIdentifier source, RenderTargetIdentifier target, Vector2 size)", channelName, o.subscript));
csm.AppendLine(" {");
csm.AppendLine(" var s = new Vector4(size.x, size.y, 1f / size.x, 1f / size.y);");
csm.AppendLine(" cmd.SetComputeVectorParam(m_Shader, _Size, s);");
csm.AppendLine(string.Format(" cmd.SetComputeTextureParam(m_Shader, {0}, _Source{1}, source);", kernelIndexName, o.sourceChannel.ToString()));
csm.AppendLine(string.Format(" cmd.SetComputeTextureParam(m_Shader, {0}, _Result{1}, target);", kernelIndexName, o.targetChannel.ToString()));
csm.AppendLine(string.Format(" cmd.DispatchCompute(m_Shader, {0}, (int)Mathf.Max((size.x) / {1}, 1), (int)Mathf.Max((size.y) / {1}, 1), 1);", kernelIndexName, k_KernelSize.ToString()));

cc.AppendLine(@"// Autogenerated file. Do not edit by hand");
cc.AppendLine();
cc.AppendLine(@"#include ""../ShaderLibrary/Common.hlsl""");
cc.AppendLine();
cc.AppendLine(@"SamplerState sampler_LinearClamp;");
cc.AppendLine();
cc.AppendLine(@"CBUFFER_START(cb)");
cc.AppendLine(@" float4 _Size;");
cc.AppendLine(@"CBUFFER_END");
cc.AppendLine(ccp.ToString()); // Properties
cc.AppendLine(cck.ToString()); // Kernels

15
ScriptableRenderPipeline/Core/CoreRP/ShaderGenerator/ShaderGeneratorAttributes.cs


using System;
using System.Collections.Generic;
namespace UnityEngine.Experimental.Rendering
{

[AttributeUsage(AttributeTargets.Field)]
public class SurfaceDataAttributes : System.Attribute
{
public string displayName;
public string[] displayNames;
this.displayName = displayName;
displayNames = new string[1];
displayNames[0] = displayName;
this.isDirection = isDirection;
this.sRGBDisplay = sRGBDisplay;
}
// We allow users to add several names for one field, so they can override the automatic behavior and do something else with the same data.
// A typical example is a normal that you may want to display in view space or world space: the user can override the view-space case and apply the transform.
public SurfaceDataAttributes(string[] displayName, bool isDirection = false, bool sRGBDisplay = false)
{
displayNames = displayName;
this.isDirection = isDirection;
this.sRGBDisplay = sRGBDisplay;
}
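With this change, SurfaceDataAttributes stores an array of display names and gains a string[] constructor, so a single field can appear under several debug-view entries (the generator loop in ShaderTypeGeneration.cs above emits one define per name). A hedged sketch of how the attribute might be applied; the struct and field names are illustrative, not taken from the diff.

using UnityEngine;
using UnityEngine.Experimental.Rendering; // namespace of SurfaceDataAttributes, per the diff above

// Illustrative struct; the real HDRP surface-data structs live elsewhere.
struct ExampleSurfaceData
{
    // Single name: same behavior as before the change (isDirection = false, sRGBDisplay = true).
    [SurfaceDataAttributes("Base Color", false, true)]
    public Vector3 baseColor;

    // Several names for one field: the user overrides the automatic debug view,
    // e.g. to expose a normal both as-is and transformed into view space.
    [SurfaceDataAttributes(new string[] { "Normal", "Normal View Space" }, true)]
    public Vector3 normalWS;
}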

58
ScriptableRenderPipeline/Core/CoreRP/ShaderLibrary/Common.hlsl


0, 0, 1, 0,
0, 0, 0, 1};
// Use case examples:
// (position = positionCS) => (clipSpaceTransform = use default)
// (position = positionVS) => (clipSpaceTransform = UNITY_MATRIX_P)
// (position = positionWS) => (clipSpaceTransform = UNITY_MATRIX_VP)
float4 ComputeClipSpaceCoordinates(float3 position, float4x4 clipSpaceTransform = k_identity4x4)
float4 ComputeClipSpacePosition(float2 positionNDC, float deviceDepth)
float4 positionCS = mul(clipSpaceTransform, float4(position, 1.0));
float4 positionCS = float4(positionNDC * 2.0 - 1.0, deviceDepth, 1.0);
// Our clip space is correct, but the NDC is flipped.
// Conceptually, it should be (positionNDC.y = 1.0 - positionNDC.y), but this is more efficient.
// Our world space, view space, screen space and NDC space are Y-up.
// Our clip space is flipped upside-down due to poor legacy Unity design.
// The flip is baked into the projection matrix, so we only have to flip
// manually when going from CS to NDC and back.
positionCS.y = -positionCS.y;
#endif

// (position = positionCS) => (clipSpaceTransform = use default)
// (position = positionVS) => (clipSpaceTransform = UNITY_MATRIX_P)
// (position = positionWS) => (clipSpaceTransform = UNITY_MATRIX_VP)
float2 ComputeNormalizedDeviceCoordinates(float3 position, float4x4 clipSpaceTransform = k_identity4x4)
float4 ComputeClipSpacePosition(float3 position, float4x4 clipSpaceTransform = k_identity4x4)
float4 positionCS = ComputeClipSpaceCoordinates(position, clipSpaceTransform);
return positionCS.xy * (rcp(positionCS.w) * 0.5) + 0.5;
return mul(clipSpaceTransform, float4(position, 1.0));
float4 ComputeClipSpacePosition(float2 positionNDC, float deviceDepth)
// Use case examples:
// (position = positionCS) => (clipSpaceTransform = use default)
// (position = positionVS) => (clipSpaceTransform = UNITY_MATRIX_P)
// (position = positionWS) => (clipSpaceTransform = UNITY_MATRIX_VP)
float3 ComputeNormalizedDeviceCoordinatesWithZ(float3 position, float4x4 clipSpaceTransform = k_identity4x4)
float4 positionCS = float4(positionNDC * 2.0 - 1.0, deviceDepth, 1.0);
float4 positionCS = ComputeClipSpacePosition(position, clipSpaceTransform);
// Our clip space is correct, but the NDC is flipped.
// Conceptually, it should be (positionNDC.y = 1.0 - positionNDC.y), but this is more efficient.
// Our world space, view space, screen space and NDC space are Y-up.
// Our clip space is flipped upside-down due to poor legacy Unity design.
// The flip is baked into the projection matrix, so we only have to flip
// manually when going from CS to NDC and back.
return positionCS;
positionCS *= rcp(positionCS.w);
positionCS.xy = positionCS.xy * 0.5 + 0.5;
return positionCS.xyz;
}
// Use case examples:
// (position = positionCS) => (clipSpaceTransform = use default)
// (position = positionVS) => (clipSpaceTransform = UNITY_MATRIX_P)
// (position = positionWS) => (clipSpaceTransform = UNITY_MATRIX_VP)
float2 ComputeNormalizedDeviceCoordinates(float3 position, float4x4 clipSpaceTransform = k_identity4x4)
{
return ComputeNormalizedDeviceCoordinatesWithZ(position, clipSpaceTransform).xy;
}
float3 ComputeViewSpacePosition(float2 positionNDC, float deviceDepth, float4x4 invProjMatrix)

struct PositionInputs
{
float3 positionWS; // World space position (could be camera-relative)
float2 positionNDC; // Normalized screen UVs : [0, 1) (with the half-pixel offset)
uint2 positionSS; // Screen space pixel coordinates : [0, NumPixels)
uint2 tileCoord; // Screen tile coordinates : [0, NumTiles)
float deviceDepth; // Depth from the depth buffer : [0, 1] (typically reversed)
float linearDepth; // View space Z coordinate : [Near, Far]
float2 positionNDC; // Normalized screen coordinates within the viewport : [0, 1) (with the half-pixel offset)
uint2 positionSS; // Screen space pixel coordinates : [0, NumPixels)
uint2 tileCoord; // Screen tile coordinates : [0, NumTiles)
float deviceDepth; // Depth from the depth buffer : [0, 1] (typically reversed)
float linearDepth; // View space Z coordinate : [Near, Far]
};
// This function is used to provide an easy way to sample into a screen texture, either from a pixel or a compute shader.
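
(For reference, the mapping implemented by ComputeNormalizedDeviceCoordinatesWithZ above is just the perspective divide followed by a half scale/offset on xy; this is a restatement of the code, not an addition to it:)

\[ \mathrm{positionNDC}.xy = \frac{\mathrm{positionCS}.xy}{2\,\mathrm{positionCS}.w} + 0.5, \qquad \mathrm{positionNDC}.z = \frac{\mathrm{positionCS}.z}{\mathrm{positionCS}.w} \]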

6
ScriptableRenderPipeline/Core/CoreRP/ShaderLibrary/CommonLighting.hlsl


// Helper functions
//-----------------------------------------------------------------------------
// Ref: "Crafting a Next-Gen Material Pipeline for The Order: 1886".
float ClampNdotV(float NdotV)
{
return max(NdotV, 0.0001);
}
// Inputs: normalized normal and view vectors.
// Outputs: front-facing normal, and the new non-negative value of the cosine of the view angle.
// Important: call Orthonormalize() on the tangent and recompute the bitangent afterwards.

6
ScriptableRenderPipeline/Core/CoreRP/ShaderLibrary/Debug.hlsl


}
return GetDebugMipColor(originalColor, tex, texelSize, uv);
}
// mipInfo :
// x = quality settings minStreamingMipLevel
// y = original mip count for texture

{
float3 outColor = float3(1.0, 0.0, 1.0); // Can't calculate without original mip count - return magenta
{
float3 outColor = float3(1.0, 0.0, 1.0); // Can't calculate without original mip count - return magenta
uint originalTextureMipCount = uint(mipInfo.y);
if (originalTextureMipCount != 0)

2
ScriptableRenderPipeline/Core/CoreRP/ShaderLibrary/ImageBasedLighting.hlsl


// Ref: Listing 18 in "Moving Frostbite to PBR" + https://knarkowicz.wordpress.com/2014/12/27/analytical-dfg-term-for-ibl/
real4 IntegrateGGXAndDisneyFGD(real3 V, real3 N, real roughness, uint sampleCount = 8192)
{
real NdotV = saturate(dot(N, V));
real NdotV = ClampNdotV(dot(N, V));
real4 acc = real4(0.0, 0.0, 0.0, 0.0);
// Add some jittering on Hammersley2d
real2 randNum = InitRandom(V.xy * 0.5 + 0.5);

1
ScriptableRenderPipeline/Core/CoreRP/Shadow/Shadow.cs


m_Shadowmap.hideFlags = HideFlags.DontSaveInEditor | HideFlags.DontSaveInBuild;
m_Shadowmap.dimension = TextureDimension.Tex2DArray;
m_Shadowmap.volumeDepth = (int) m_Slices;
m_Shadowmap.name = CoreUtils.GetRenderTargetAutoName(shadowmap.width, shadowmap.height, shadowmap.format, "Shadow", mips : shadowmap.useMipMap);
m_ShadowmapId = new RenderTargetIdentifier( m_Shadowmap );
}

2
ScriptableRenderPipeline/Core/CoreRP/TextureCache.cs


using System;
using UnityEngine;
using System.Collections.Generic;
using UnityEngine.Rendering;

for (int m = 0; m < m_NumPanoMipLevels; m++)
{
m_StagingRTs[m] = new RenderTexture(Mathf.Max(1, panoWidthTop >> m), Mathf.Max(1, panoHeightTop >> m), 0, RenderTextureFormat.ARGBHalf) { hideFlags = HideFlags.HideAndDontSave };
m_StagingRTs[m].name = CoreUtils.GetRenderTargetAutoName(Mathf.Max(1, panoWidthTop >> m), Mathf.Max(1, panoHeightTop >> m), RenderTextureFormat.ARGBHalf, String.Format("PanaCache{0}", m));
}
if (m_CubeBlitMaterial)

2
ScriptableRenderPipeline/Core/package.json


{
"name": "com.unity.render-pipelines.core",
"description": "Core library for Unity render pipelines.",
"version": "0.1.28",
"version": "0.1.29",
"unity": "2018.1",
"dependencies": {
"com.unity.postprocessing": "0.1.8"

108
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Camera/HDCamera.cs


public Vector4 viewParam;
public PostProcessRenderContext postprocessRenderContext;
public Matrix4x4[] viewMatrixStereo;
public Matrix4x4[] projMatrixStereo;
// Non oblique projection matrix (RHS)
public Matrix4x4 nonObliqueProjMatrix
{

}
}
// This is the size actually used for this camera (as it can be altered by VR for example)
int m_ActualWidth;
int m_ActualHeight;
// This is the scale and bias of the camera viewport compared to the reference size of our Render Targets (RTHandle.maxSize)
Vector2 m_CameraScaleBias;
// Current MSAA sample count
MSAASamples m_msaaSamples;
public int actualWidth { get { return m_ActualWidth; } }
public int actualHeight { get { return m_ActualHeight; } }
public Vector2 scaleBias { get { return m_CameraScaleBias; } }
public MSAASamples msaaSamples { get { return m_msaaSamples; } }
public Matrix4x4 viewProjMatrix
{
get { return projMatrix * viewMatrix; }

get { return nonJitteredProjMatrix * viewMatrix; }
}
public RenderTextureDescriptor renderTextureDesc { get; private set; }
public Matrix4x4 GetViewProjMatrixStereo(uint eyeIndex)
{
return (projMatrixStereo[eyeIndex] * viewMatrixStereo[eyeIndex]);
}
// Always true for cameras that just got added to the pool - needed for previous matrices to
// avoid one-frame jumps/hiccups with temporal effects (motion blur, TAA...)

public HDCamera(Camera cam)
{
camera = cam;
m_AdditionalCameraData = cam.GetComponent<HDAdditionalCameraData>();
frustum = new Frustum();
frustumPlaneEquations = new Vector4[6];
camera = cam;
frustum = new Frustum();
frustumPlaneEquations = new Vector4[6];
viewMatrixStereo = new Matrix4x4[2];
projMatrixStereo = new Matrix4x4[2];
m_AdditionalCameraData = cam.GetComponent<HDAdditionalCameraData>();
Reset();
}

m_LastFrameActive = Time.frameCount;
RenderTextureDescriptor tempDesc;
m_ActualWidth = camera.pixelWidth;
m_ActualHeight = camera.pixelHeight;
var screenWidth = m_ActualWidth;
var screenHeight = m_ActualHeight;
screenSize = new Vector4(XRSettings.eyeTextureWidth, XRSettings.eyeTextureHeight, 1.0f / XRSettings.eyeTextureWidth, 1.0f / XRSettings.eyeTextureHeight);
tempDesc = XRSettings.eyeTextureDesc;
}
else
{
screenSize = new Vector4(camera.pixelWidth, camera.pixelHeight, 1.0f / camera.pixelWidth, 1.0f / camera.pixelHeight);
tempDesc = new RenderTextureDescriptor(camera.pixelWidth, camera.pixelHeight);
}
screenWidth = XRSettings.eyeTextureWidth;
screenHeight = XRSettings.eyeTextureHeight;
if (frameSettings.enableMSAA)
{
// this is already pre-validated to be a valid sample count by InitializeFrameSettings
var sampleCount = (int)frameSettings.msaaSampleCount;
tempDesc.msaaSamples = sampleCount;
var xrDesc = XRSettings.eyeTextureDesc;
m_ActualWidth = xrDesc.width;
m_ActualHeight = xrDesc.height;
ConfigureStereoMatrices();
else
// Unfortunately, sometimes (like in the HDCameraEditor) HDUtils.hdrpSettings can be null because of scripts that change the current pipeline...
m_msaaSamples = HDUtils.hdrpSettings != null ? HDUtils.hdrpSettings.msaaSampleCount : MSAASamples.None;
RTHandle.SetReferenceSize(m_ActualWidth, m_ActualHeight, frameSettings.enableMSAA, m_msaaSamples);
int maxWidth = RTHandle.maxWidth;
int maxHeight = RTHandle.maxHeight;
m_CameraScaleBias.x = (float)m_ActualWidth / maxWidth;
m_CameraScaleBias.y = (float)m_ActualHeight / maxHeight;
screenSize = new Vector4(screenWidth, screenHeight, 1.0f / screenWidth, 1.0f / screenHeight);
}
void ConfigureStereoMatrices()
{
for (uint eyeIndex = 0; eyeIndex < 2; eyeIndex++)
tempDesc.msaaSamples = 1;
viewMatrixStereo[eyeIndex] = camera.GetStereoViewMatrix((Camera.StereoscopicEye)eyeIndex);
projMatrixStereo[eyeIndex] = camera.GetStereoProjectionMatrix((Camera.StereoscopicEye)eyeIndex);
projMatrixStereo[eyeIndex] = GL.GetGPUProjectionMatrix(projMatrixStereo[eyeIndex], true);
tempDesc.depthBufferBits = 0;
tempDesc.autoGenerateMips = false;
tempDesc.useMipMap = false;
tempDesc.enableRandomWrite = false;
tempDesc.memoryless = RenderTextureMemoryless.None;
renderTextureDesc = tempDesc;
// TODO: Fetch the single cull matrix stuff
}
// Warning: different views can use the same camera!

cmd.SetGlobalVector(HDShaderIDs._ViewParam, viewParam);
cmd.SetGlobalVector(HDShaderIDs._InvProjParam, invProjParam);
cmd.SetGlobalVector(HDShaderIDs._ScreenSize, screenSize);
cmd.SetGlobalVector(HDShaderIDs._ScreenToTargetScale, scaleBias);
}
public void SetupGlobalStereoParams(CommandBuffer cmd)
{
var invProjStereo = new Matrix4x4[2];
var invViewProjStereo = new Matrix4x4[2];
for (uint eyeIndex = 0; eyeIndex < 2; eyeIndex++)
{
var proj = projMatrixStereo[eyeIndex];
invProjStereo[eyeIndex] = proj.inverse;
var vp = proj * viewMatrixStereo[eyeIndex];
invViewProjStereo[eyeIndex] = vp.inverse;
}
// corresponds to UnityPerPassStereo
// TODO: Migrate the other stereo matrices to HDRP-managed UnityPerPassStereo?
cmd.SetGlobalMatrixArray(HDShaderIDs._InvProjMatrixStereo, invProjStereo);
cmd.SetGlobalMatrixArray(HDShaderIDs._InvViewProjMatrixStereo, invViewProjStereo);
}
// TODO: We should set all the values below globally and not leave them under the control of Unity,
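
(A minimal sketch restating the scale computation above: the per-camera _ScreenToTargetScale is the ratio of the camera's actual resolution to the shared RTHandle reference size, so shaders can rescale UVs into the allocated portion of the render targets.)

Vector2 ComputeScreenToTargetScale(int actualWidth, int actualHeight)
{
    // RTHandle.maxWidth/maxHeight is the reference size all RTHandles are allocated against
    return new Vector2((float)actualWidth / RTHandle.maxWidth,
                       (float)actualHeight / RTHandle.maxHeight);
}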

4
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Debug/DebugColorPicker.shader


{
Varyings output;
output.positionCS = GetFullScreenTriangleVertexPosition(input.vertexID);
output.texcoord = GetFullScreenTriangleTexCoord(input.vertexID);
output.texcoord = GetNormalizedFullScreenTriangleTexCoord(input.vertexID);
return output;
}

{
if (_RequireToFlipInputTexture > 0.0)
{
input.texcoord.y = 1.0 - input.texcoord.y;
input.texcoord.y = 1.0 * _ScreenToTargetScale.y - input.texcoord.y;
}
float4 result = SAMPLE_TEXTURE2D(_DebugColorPickerTexture, sampler_DebugColorPickerTexture, input.texcoord);

10
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Debug/DebugDisplay.cs


public float debugOverlayRatio = 0.33f;
public FullScreenDebugMode fullScreenDebugMode = FullScreenDebugMode.None;
public float fullscreenDebugMip = 0;
public float fullscreenDebugMip = 0.0f;
public MaterialDebugSettings materialDebugSettings = new MaterialDebugSettings();
public LightingDebugSettings lightingDebugSettings = new LightingDebugSettings();

DebugMenuManager.instance.AddDebugItem<LightingDebugPanel, float>(kShadowMinValueDebug, () => lightingDebugSettings.shadowMinValue, (value) => lightingDebugSettings.shadowMinValue = (float)value);
DebugMenuManager.instance.AddDebugItem<LightingDebugPanel, float>(kShadowMaxValueDebug, () => lightingDebugSettings.shadowMaxValue, (value) => lightingDebugSettings.shadowMaxValue = (float)value);
DebugMenuManager.instance.AddDebugItem<LightingDebugPanel, int>(kFullScreenDebugMode, () => (int)fullScreenDebugMode, (value) => fullScreenDebugMode = (FullScreenDebugMode)value, DebugItemFlag.None, new DebugItemHandlerIntEnum(DebugDisplaySettings.lightingFullScreenDebugStrings, DebugDisplaySettings.lightingFullScreenDebugValues));
DebugMenuManager.instance.AddDebugItem<LightingDebugPanel, float>(
DebugMenuManager.instance.AddDebugItem<LightingDebugPanel, uint>(
kFullScreenDebugMip,
() =>
{

break;
}
var size = Shader.GetGlobalVector(id);
var lodCount = Mathf.FloorToInt(Mathf.Log(Mathf.Min(size.x, size.y), 2f));
var lodCount = size.z;
return (uint)(fullscreenDebugMip * lodCount);
},

break;
}
var size = Shader.GetGlobalVector(id);
var lodCount = Mathf.Floor(Mathf.Log(Mathf.Min(size.x, size.y), 2f));
var lodCount = size.z;
fullscreenDebugMip = (float)Convert.ChangeType(value, typeof(Single)) / lodCount;
},
DebugItemFlag.None,

break;
}
var size = Shader.GetGlobalVector(id);
var lodCount = Mathf.FloorToInt(Mathf.Log(Mathf.Min(size.x, size.y), 2f));
var lodCount = size.z;
return (uint)lodCount;
})
);
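
(A small sketch of the slider mapping used above, assuming lodCount is published in the .z component of the global size vector: fullscreenDebugMip is stored normalized in [0, 1] and converted to and from an integer mip index.)

uint MipFromNormalized(float fullscreenDebugMip, float lodCount)
{
    return (uint)(fullscreenDebugMip * lodCount);
}

float NormalizedFromMip(uint mip, float lodCount)
{
    // Guard against a zero LOD count; illustrative only
    return lodCount > 0 ? mip / lodCount : 0f;
}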

34
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Debug/DebugDisplay.hlsl


// Draw a signed integer
// Can't display more than 16 digits
void DrawInteger(int intValue, float3 fontColor, uint2 currentUnormCoord, inout uint2 fixedUnormCoord, bool flipY, inout float3 color)
// The two following parameters are for the float representation:
// leading0 is used when drawing the fractional part of a float to draw the leading zeros (the caller is in charge of it)
// forceNegativeSign is used to force display of a negative sign, as -0 is not recognized
void DrawInteger(int intValue, float3 fontColor, uint2 currentUnormCoord, inout uint2 fixedUnormCoord, bool flipY, inout float3 color, int leading0, bool forceNegativeSign)
{
const uint maxStringSize = 16;

int numEntries = min((intValue == 0 ? 0 : log10(absIntValue)) + (intValue < 0 ? 1 : 0), maxStringSize);
int numEntries = min((intValue == 0 ? 0 : log10(absIntValue)) + ((intValue < 0 || forceNegativeSign) ? 1 : 0) + leading0, maxStringSize);
for (uint i = 0; i < maxStringSize; ++i)
for (uint j = 0; j < maxStringSize; ++j)
{
// Numeric values in the current font start on the second row at 0
DrawCharacter((absIntValue % 10) + '0', fontColor, currentUnormCoord, fixedUnormCoord, flipY, color, -1);

}
// 4. Display sign
if (intValue < 0)
// 4. Display leading 0
for (int i = 0; i < leading0; ++i)
{
DrawCharacter('0', fontColor, currentUnormCoord, fixedUnormCoord, flipY, color, -1);
}
// 5. Display sign
if (intValue < 0 || forceNegativeSign)
// 5. Reset cursor at end location
// 6. Reset cursor at end location
}
void DrawInteger(int intValue, float3 fontColor, uint2 currentUnormCoord, inout uint2 fixedUnormCoord, bool flipY, inout float3 color)
{
DrawInteger(intValue, fontColor, currentUnormCoord, fixedUnormCoord, flipY, color, 0, false);
}
void DrawFloat(float floatValue, float3 fontColor, uint2 currentUnormCoord, inout uint2 fixedUnormCoord, bool flipY, inout float3 color)

else
{
int intValue = int(floatValue);
DrawInteger(intValue, fontColor, currentUnormCoord, fixedUnormCoord, flipY, color);
bool forceNegativeSign = floatValue >= 0.0f ? false : true;
DrawInteger(intValue, fontColor, currentUnormCoord, fixedUnormCoord, flipY, color, 0, forceNegativeSign);
int fracValue = int(frac(floatValue) * 1e6); // 6 digits
DrawInteger(fracValue, fontColor, currentUnormCoord, fixedUnormCoord, flipY, color);
int fracValue = int(frac(abs(floatValue)) * 1e6); // 6 digits
int leading0 = 6 - (int(log10(fracValue)) + 1); // Count the leading zeros to add in front of the fractional part
DrawInteger(fracValue, fontColor, currentUnormCoord, fixedUnormCoord, flipY, color, leading0, false);
}
}
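
(An illustrative C# restatement of the decomposition DrawFloat performs above: integer part, six-digit fraction, the leading zeros of that fraction, and a forced sign so values in (-1, 0), whose integer part truncates to 0, still print as negative.)

void DecomposeFloat(float value, out int intPart, out int fracPart, out int leadingZeros, out bool forceNegativeSign)
{
    intPart = (int)value;                     // truncates toward zero
    forceNegativeSign = value < 0.0f;         // -0 carries no sign as an int, so it must be forced
    fracPart = (int)((Mathf.Abs(value) - Mathf.Abs(intPart)) * 1e6f); // 6 digits of the fraction
    leadingZeros = fracPart > 0 ? 6 - ((int)Mathf.Log10(fracPart) + 1) : 6; // zeros lost when printing fracPart as an int
}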

2
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Debug/DebugDisplayLatlong.shader


{
Pass
{
ZWrite Off
ZWrite On
ZTest Always
Blend Off
Cull Off

10
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Debug/DebugFullScreen.shader


{
Varyings output;
output.positionCS = GetFullScreenTriangleVertexPosition(input.vertexID);
output.texcoord = GetFullScreenTriangleTexCoord(input.vertexID);
output.texcoord = GetNormalizedFullScreenTriangleTexCoord(input.vertexID);
return output;
}

{
if (_RequireToFlipInputTexture > 0.0)
{
input.texcoord.y = 1.0 - input.texcoord.y;
// Texcoords are already scaled by _ScreenToTargetScale, but we need to account for the flip here anyway.
input.texcoord.y = 1.0 * _ScreenToTargetScale.y - input.texcoord.y;
}
// SSAO

if (_FullScreenDebugMode == FULLSCREENDEBUGMODE_NAN_TRACKER)
{
float4 color = SAMPLE_TEXTURE2D(_DebugFullScreenTexture, s_point_clamp_sampler, input.texcoord);
if (AnyIsNan(color) || any(isinf(color)))
{
color = float4(1.0, 0.0, 0.0, 1.0);

{
arrow_coord.y = 1.0 - arrow_coord.y;
}
arrow_coord *= _ScreenToTargetScale.xy;
float2 mv_arrow = SampleMotionVectors(arrow_coord);

if (_FullScreenDebugMode == FULLSCREENDEBUGMODE_DEFERRED_SHADOWS)
{
float4 color = SAMPLE_TEXTURE2D(_DebugFullScreenTexture, s_point_clamp_sampler, input.texcoord);
return float4(color.rgb, 0.0);
return float4(color.rrr, 0.0);
}
if (_FullScreenDebugMode == FULLSCREENDEBUGMODE_PRE_REFRACTION_COLOR_PYRAMID
|| _FullScreenDebugMode == FULLSCREENDEBUGMODE_FINAL_COLOR_PYRAMID)

2
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Debug/LightingDebugPanel.cs


break;
}
default:
fullScreenDebugMipHandler.SetValue(0f);
fullScreenDebugMipHandler.SetValue(0);
break;
}

117
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Debug/MaterialDebug.cs


}
// className includes the additional "/"
void FillWithProperties(Type type, GUIContent[] debugViewMaterialStrings, int[] debugViewMaterialValues, string className, ref int index)
void FillWithProperties(Type type, ref List<GUIContent> debugViewMaterialStringsList, ref List<int> debugViewMaterialValuesList, string className)
{
var attributes = type.GetCustomAttributes(true);
// Get attribute to get the start number of the value for the enum

var localIndex = 0;
foreach (var field in fields)
{
var fieldName = field.Name;
// Note: One field can have multiple names. This allows different debug view modes for the same field,
// for example displaying the normal in world space or in view space. Same field, but two different modes.
List<String> displayNames = new List<string>();
displayNames.Add(field.Name);
if (propertyAttr[0].displayName != "")
if (propertyAttr[0].displayNames.Length > 0 && propertyAttr[0].displayNames[0] != "")
fieldName = propertyAttr[0].displayName;
displayNames.Clear();
displayNames.AddRange(propertyAttr[0].displayNames);
fieldName = className + fieldName;
debugViewMaterialStrings[index] = new GUIContent(fieldName);
debugViewMaterialValues[index] = attr.paramDefinesStart + (int)localIndex;
index++;
localIndex++;
foreach (string fieldName in displayNames)
{
debugViewMaterialStringsList.Add(new GUIContent(className + fieldName));
debugViewMaterialValuesList.Add(attr.paramDefinesStart + (int)localIndex);
localIndex++;
}
void FillWithPropertiesEnum(Type type, GUIContent[] debugViewMaterialStrings, int[] debugViewMaterialValues, string prefix, ref int index)
void FillWithPropertiesEnum(Type type, ref List<GUIContent> debugViewMaterialStringsList, ref List<int> debugViewMaterialValuesList, string prefix)
{
var names = Enum.GetNames(type);

var valueName = prefix + names[localIndex];
debugViewMaterialStrings[index] = new GUIContent(valueName);
debugViewMaterialValues[index] = (int)value;
index++;
debugViewMaterialStringsList.Add(new GUIContent(valueName));
debugViewMaterialValuesList.Add((int)value);
localIndex++;
}
}

materialItems.Add(item);
}
// Material properties debug
var num = typeof(Builtin.BuiltinData).GetFields().Length * materialList.Count // BuiltinData is duplicated for each material
+ numSurfaceDataFields + 1; // +1 for None case
// Init list
List<GUIContent> debugViewMaterialStringsList = new List<GUIContent>();
List<int> debugViewMaterialValuesList = new List<int>();
List<GUIContent> debugViewEngineStringsList = new List<GUIContent>();
List<int> debugViewEngineValuesList = new List<int>();
List<GUIContent> debugViewMaterialVaryingStringsList = new List<GUIContent>();
List<int> debugViewMaterialVaryingValuesList = new List<int>();
List<GUIContent> debugViewMaterialPropertiesStringsList = new List<GUIContent>();
List<int> debugViewMaterialPropertiesValuesList = new List<int>();
List<GUIContent> debugViewMaterialTextureStringsList = new List<GUIContent>();
List<int> debugViewMaterialTextureValuesList = new List<int>();
List<GUIContent> debugViewMaterialGBufferStringsList = new List<GUIContent>();
List<int> debugViewMaterialGBufferValuesList = new List<int>();
debugViewMaterialStrings = new GUIContent[num];
debugViewMaterialValues = new int[num];
// First element is a reserved location and should not be used (allow to track error)
debugViewMaterialStrings[0] = new GUIContent("None");
debugViewMaterialValues[0] = 0;
var index = 1;
// 0 is a reserved number and should not be used (allow to track error)
debugViewMaterialStringsList.Add(new GUIContent("None"));
debugViewMaterialValuesList.Add(0);
FillWithProperties(typeof(Builtin.BuiltinData), debugViewMaterialStrings, debugViewMaterialValues, item.className, ref index);
FillWithProperties(item.surfaceDataType, debugViewMaterialStrings, debugViewMaterialValues, item.className, ref index);
FillWithProperties(typeof(Builtin.BuiltinData), ref debugViewMaterialStringsList, ref debugViewMaterialValuesList, item.className);
FillWithProperties(item.surfaceDataType, ref debugViewMaterialStringsList, ref debugViewMaterialValuesList, item.className);
num = numBSDFDataFields + 1; // +1 for None case
debugViewEngineStrings = new GUIContent[num];
debugViewEngineValues = new int[num];
// 0 is a reserved number and should not be used (allow to track error)
debugViewEngineStrings[0] = new GUIContent("None");
debugViewEngineValues[0] = 0;
index = 1;
// First element is a reserved location and should not be used (allow to track error)
// Special case for None since it cannot be inferred from SurfaceData/BuiltinData
debugViewEngineStringsList.Add(new GUIContent("None"));
debugViewEngineValuesList.Add(0);
FillWithProperties(item.bsdfDataType, debugViewEngineStrings, debugViewEngineValues, item.className, ref index);
FillWithProperties(item.bsdfDataType, ref debugViewEngineStringsList, ref debugViewEngineValuesList, item.className);
// For the following, no need to reserve the 0 case as it is handled in the Enum
var varyingNames = Enum.GetNames(typeof(Attributes.DebugViewVarying));
debugViewMaterialVaryingStrings = new GUIContent[varyingNames.Length];
debugViewMaterialVaryingValues = new int[varyingNames.Length];
index = 0;
FillWithPropertiesEnum(typeof(Attributes.DebugViewVarying), debugViewMaterialVaryingStrings, debugViewMaterialVaryingValues, "", ref index);
FillWithPropertiesEnum(typeof(Attributes.DebugViewVarying), ref debugViewMaterialVaryingStringsList, ref debugViewMaterialVaryingValuesList, "");
var propertiesNames = Enum.GetNames(typeof(Attributes.DebugViewProperties));
debugViewMaterialPropertiesStrings = new GUIContent[propertiesNames.Length];
debugViewMaterialPropertiesValues = new int[propertiesNames.Length];
index = 0;
FillWithPropertiesEnum(typeof(Attributes.DebugViewProperties), debugViewMaterialPropertiesStrings, debugViewMaterialPropertiesValues, "", ref index);
FillWithPropertiesEnum(typeof(Attributes.DebugViewProperties), ref debugViewMaterialPropertiesStringsList, ref debugViewMaterialPropertiesValuesList, "");
var gbufferNames = Enum.GetNames(typeof(Attributes.DebugViewGbuffer));
debugViewMaterialGBufferStrings = new GUIContent[gbufferNames.Length + bsdfDataDeferredType.GetFields().Length];
debugViewMaterialGBufferValues = new int[gbufferNames.Length + bsdfDataDeferredType.GetFields().Length];
index = 0;
FillWithPropertiesEnum(typeof(Attributes.DebugViewGbuffer), debugViewMaterialGBufferStrings, debugViewMaterialGBufferValues, "", ref index);
FillWithProperties(typeof(Lit.BSDFData), debugViewMaterialGBufferStrings, debugViewMaterialGBufferValues, "", ref index);
FillWithPropertiesEnum(typeof(Attributes.DebugViewGbuffer), ref debugViewMaterialGBufferStringsList, ref debugViewMaterialGBufferValuesList, "");
FillWithProperties(typeof(Lit.BSDFData), ref debugViewMaterialGBufferStringsList, ref debugViewMaterialGBufferValuesList, "");
// Convert to array for UI
debugViewMaterialStrings = debugViewMaterialStringsList.ToArray();
debugViewMaterialValues = debugViewMaterialValuesList.ToArray();
debugViewEngineStrings = debugViewEngineStringsList.ToArray();
debugViewEngineValues = debugViewEngineValuesList.ToArray();
debugViewMaterialVaryingStrings = debugViewMaterialVaryingStringsList.ToArray();
debugViewMaterialVaryingValues = debugViewMaterialVaryingValuesList.ToArray();
debugViewMaterialPropertiesStrings = debugViewMaterialPropertiesStringsList.ToArray();
debugViewMaterialPropertiesValues = debugViewMaterialPropertiesValuesList.ToArray();
debugViewMaterialTextureStrings = debugViewMaterialTextureStringsList.ToArray();
debugViewMaterialTextureValues = debugViewMaterialTextureValuesList.ToArray();
debugViewMaterialGBufferStrings = debugViewMaterialGBufferStringsList.ToArray();
debugViewMaterialGBufferValues = debugViewMaterialGBufferValuesList.ToArray();
isDebugViewMaterialInit = true;
}
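
(A simplified sketch of the list-based fill used above, not the exact repo code: one (GUIContent, value) pair is appended per display name declared on each field, so a single field can expose several debug view modes; paramDefinesStart and the trimmed-down attribute lookup are assumptions for illustration.)

void FillWithPropertiesSketch(Type type, List<GUIContent> strings, List<int> values, string className, int paramDefinesStart)
{
    var localIndex = 0;
    foreach (var field in type.GetFields())
    {
        var attrs = field.GetCustomAttributes(typeof(SurfaceDataAttributes), false);
        var surfaceAttr = attrs.Length > 0 ? (SurfaceDataAttributes)attrs[0] : null;
        // Fall back to the field name when no display names are declared
        var displayNames = (surfaceAttr != null && surfaceAttr.displayNames.Length > 0 && surfaceAttr.displayNames[0] != "")
            ? surfaceAttr.displayNames
            : new[] { field.Name };
        foreach (var fieldName in displayNames)
        {
            strings.Add(new GUIContent(className + fieldName));
            values.Add(paramDefinesStart + localIndex);
            localIndex++;
        }
    }
}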

8
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/Camera/HDCameraEditor.cs


if (m_PreviewTexture != null)
m_PreviewTexture.Release();
var baseDesc = m_PreviewHDCamera.renderTextureDesc;
baseDesc.width = width;
baseDesc.height = height;
CoreUtils.UpdateRenderTextureDescriptor(ref baseDesc, 0, RenderTextureFormat.ARGBHalf, RenderTextureReadWrite.Linear, 1, true);
m_PreviewTexture = new RenderTexture(baseDesc);
m_PreviewTexture = new RenderTexture(width, height, 0, RenderTextureFormat.ARGBHalf, RenderTextureReadWrite.Linear);
m_PreviewTexture.enableRandomWrite = true;
m_PreviewTexture.Create();
}
return m_PreviewTexture;
}

10
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/Camera/HDCameraUI.cs


public static readonly CED.IDrawer SectionCaptureSettings = CED.FoldoutGroup(
"Capture Settings",
(s, p, o) => s.isSectionExpandedCaptureSettings,
true,
FoldoutOption.Indent,
CED.Action(Drawer_FieldOcclusionCulling),
CED.Action(Drawer_FieldNormalizedViewPort));

true,
FoldoutOption.Indent,
#if ENABLE_MULTIPLE_DISPLAYS
CED.Action(Drawer_SectionMultiDisplay),
#endif

public static readonly CED.IDrawer SectionXRSettings = CED.FadeGroup(
(s, d, o, i) => s.isSectionAvailableXRSettings,
false,
FadeOption.Animate,
true,
FoldoutOption.Indent,
false,
FadeOption.Animate,
CED.Select(
(s, d, o) => s.frameSettingsUI,
(s, d, o) => d.frameSettings,

2
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/Decal/DecalMenuItems.cs


{
public class DecalMenuItems
{
[MenuItem("GameObject/Graphics/DecalProjector", priority = CoreUtils.gameObjectMenuPriority)]
[MenuItem("GameObject/Rendering/DecalProjector", priority = CoreUtils.gameObjectMenuPriority)]
static void CreateDecal(MenuCommand menuCommand)
{
// Create a custom game object

94
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/EditorRenderPipelineResources/ReflectionProbesPreview.shader


// Upgrade NOTE: replaced '_Object2World' with 'unity_ObjectToWorld'
Shader "Debug/ReflectionProbePreview"
Shader "Debug/ReflectionProbePreview"
{
Properties
{

SubShader
{
Tags{ "RenderType" = "Opaque" "Queue" = "Transparent" }
LOD 100
LOD 100
{
Name "ForwardUnlit"
Tags{ "LightMode" = "Forward" }
{
Name "ForwardUnlit"
Tags{ "LightMode" = "Forward" }
CGPROGRAM
#pragma vertex vert
#pragma fragment frag
HLSLPROGRAM
#pragma vertex vert
#pragma fragment frag
#include "UnityCG.cginc"
#include "CoreRP/ShaderLibrary/common.hlsl"
#include "HDRP/ShaderVariables.hlsl"
struct appdata
{
float4 positionOS : POSITION;
float3 normalOS : NORMAL;
};
struct appdata
{
float4 vertex : POSITION;
float3 normal : NORMAL;
};
struct v2f
{
float4 positionCS : SV_POSITION;
float3 normalWS : NORMAL;
float3 positionWS : TEXCOORD0;
};
struct v2f
{
float4 vertex : SV_POSITION;
float3 normal : NORMAL;
float3 worldpos : TEXCOORD0;
};
TEXTURECUBE(_Cubemap);
SAMPLER(sampler_Cubemap);
samplerCUBE _Cubemap;
float3 _CameraWorldPosition;
float _MipLevel;
float _Exposure;
float3 _CameraWorldPosition;
float _MipLevel;
float _Exposure;
v2f vert(appdata v)
{
v2f o;
o.vertex = UnityObjectToClipPos(v.vertex);
o.worldpos = mul(unity_ObjectToWorld, v.vertex);
o.normal = mul(unity_ObjectToWorld, float4(v.normal, 0)).xyz;
return o;
}
v2f vert(appdata v)
{
v2f o;
// Transform local to world before custom vertex code
o.positionWS = TransformObjectToWorld(v.positionOS.xyz);
o.positionWS = GetCameraRelativePositionWS(o.positionWS);
o.positionCS = TransformWorldToHClip(o.positionWS);
o.normalWS = TransformObjectToWorldNormal(v.normalOS);
return o;
}
float4 frag(v2f i) : SV_Target
{
//float3 view = normalize(i.worldpos - _CameraWorldPosition);
float3 V = normalize(i.positionWS - GetPrimaryCameraPosition());
float3 R = reflect(V, i.normalWS);
float4 color = SAMPLE_TEXTURECUBE_LOD(_Cubemap, sampler_Cubemap, R, _MipLevel).rgba;
color = color * exp2(_Exposure);
float4 frag(v2f i) : SV_Target
{
//float3 view = normalize(i.worldpos - _CameraWorldPosition);
float3 view = normalize(i.worldpos - _WorldSpaceCameraPos);
float3 reflected = reflect(view, i.normal);
float4 col = texCUBElod(_Cubemap,float4(reflected,_MipLevel));
col = col*exp2(_Exposure);
return col;
}
ENDCG
}
return float4(color);
}
ENDHLSL
}
}
}

5
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/HDAssetFactory.cs


}
}
[MenuItem("Assets/Create/Graphics/High Definition Render Pipeline Asset", priority = CoreUtils.assetCreateMenuPriority1)]
[MenuItem("Assets/Create/Rendering/High Definition Render Pipeline Asset", priority = CoreUtils.assetCreateMenuPriority1)]
static void CreateHDRenderPipeline()
{
var icon = EditorGUIUtility.FindTexture("ScriptableObject Icon");

// General
newAsset.cameraMotionVectors = Load<Shader>(HDRenderPipelinePath + "RenderPipelineResources/CameraMotionVectors.shader");
newAsset.copyStencilBuffer = Load<Shader>(HDRenderPipelinePath + "RenderPipelineResources/CopyStencilBuffer.shader");
newAsset.copyDepthBuffer = Load<Shader>(HDRenderPipelinePath + "RenderPipelineResources/CopyDepthBuffer.shader");
newAsset.blit = Load<Shader>(HDRenderPipelinePath + "RenderPipelineResources/Blit.shader");
// Sky

}
}
[MenuItem("Assets/Create/Graphics/High Definition Render Pipeline Resources", priority = CoreUtils.assetCreateMenuPriority1)]
[MenuItem("Assets/Create/Rendering/High Definition Render Pipeline Resources", priority = CoreUtils.assetCreateMenuPriority1)]
static void CreateRenderPipelineResources()
{
var icon = EditorGUIUtility.FindTexture("ScriptableObject Icon");

62
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/HDRenderPipelineMenuItems.cs


}
}
[MenuItem("Internal/HDRenderPipeline/Update/Update material for subsurface")]
static void UpdateMaterialForSubsurface()
{
try
{
var matIds = AssetDatabase.FindAssets("t:Material");
for (int i = 0, length = matIds.Length; i < length; i++)
{
var path = AssetDatabase.GUIDToAssetPath(matIds[i]);
var mat = AssetDatabase.LoadAssetAtPath<Material>(path);
EditorUtility.DisplayProgressBar(
"Setup materials Keywords...",
string.Format("{0} / {1} materials subsurface updated.", i, length),
i / (float)(length - 1));
bool VSCEnabled = (UnityEditor.VersionControl.Provider.enabled && UnityEditor.VersionControl.Provider.isActive);
if (mat.shader.name == "HDRenderPipeline/LitTessellation" ||
mat.shader.name == "HDRenderPipeline/Lit" ||
mat.shader.name == "HDRenderPipeline/LayeredLit" ||
mat.shader.name == "HDRenderPipeline/LayeredLitTessellation")
{
float materialID = mat.GetInt("_MaterialID");
if (materialID != 0.0)
continue;
if (mat.HasProperty("_SSSAndTransmissionType"))
{
CheckOutFile(VSCEnabled, mat);
int materialSSSAndTransmissionID = mat.GetInt("_SSSAndTransmissionType");
// Both, SSS only, Transmission only
if (materialSSSAndTransmissionID == 2.0)
{
mat.SetInt("_MaterialID", 5);
}
else
{
if (materialSSSAndTransmissionID == 0.0)
mat.SetFloat("_TransmissionEnable", 1.0f);
else
mat.SetFloat("_TransmissionEnable", 0.0f);
}
EditorUtility.SetDirty(mat);
}
}
}
}
finally
{
EditorUtility.ClearProgressBar();
}
}
//
[MenuItem("Internal/HDRenderPipeline/Update/Update Height Maps parametrization")]
static void UpdateHeightMapParametrization()

}
}
[MenuItem("GameObject/Graphics/Scene Settings", priority = CoreUtils.gameObjectMenuPriority)]
[MenuItem("GameObject/Rendering/Scene Settings", priority = CoreUtils.gameObjectMenuPriority)]
static void CreateCustomGameObject(MenuCommand menuCommand)
{
var sceneSettings = new GameObject("Scene Settings");

class DoCreateNewAssetDiffusionProfileSettings : DoCreateNewAsset<DiffusionProfileSettings> {}
[MenuItem("Assets/Create/Graphics/Diffusion profile Settings", priority = CoreUtils.assetCreateMenuPriority2)]
[MenuItem("Assets/Create/Rendering/Diffusion profile Settings", priority = CoreUtils.assetCreateMenuPriority2)]
static void MenuCreateDiffusionProfile()
{
var icon = EditorGUIUtility.FindTexture("ScriptableObject Icon");

14
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/Lighting/HDLightEditor.cs


if (m_LightShape != LightShape.Directional)
settings.DrawRange(false);
EditorGUI.BeginChangeCheck(); // For GI we need to detect any change on additional data and call SetLightDirty
// LightShape is HD specific; it needs to drive LightType from the original LightType
// when it makes sense, so the GI stays in sync with the light shape
switch (m_LightShape)

Debug.Assert(false, "Not implemented light type");
break;
}
if (EditorGUI.EndChangeCheck())
{
((Light)target).SetLightDirty(); // Should only apply to parameters that affect GI, but this keeps the code cleaner
}
}
void DrawLightSettings()

settings.DrawBounceIntensity();
settings.DrawLightmapping();
EditorGUI.BeginChangeCheck(); // For GI we need to detect any change on additional data and call SetLightDirty
// No cookie with area lights (maybe textured area lights in the future?)
if (m_LightShape != LightShape.Rectangle && m_LightShape != LightShape.Line)

EditorGUILayout.PropertyField(m_AdditionalLightData.lightDimmer, s_Styles.lightDimmer);
EditorGUILayout.PropertyField(m_AdditionalLightData.applyRangeAttenuation, s_Styles.applyRangeAttenuation);
EditorGUI.indentLevel--;
}
if (EditorGUI.EndChangeCheck())
{
((Light)target).SetLightDirty(); // Should only apply to parameters that affect GI, but this keeps the code cleaner
}
}

16
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/Lighting/Reflection/HDReflectionProbeUI.Drawers.cs


CED.Action(Drawer_ReflectionProbeMode),
CED.FadeGroup((s, p, o, i) => s.IsSectionExpandedMode((ReflectionProbeMode)i),
true,
FadeOption.Indent | FadeOption.Animate,
CED.noop, // Baked
CED.Action(Drawer_ModeSettingsRealtime), // Realtime
CED.Action(Drawer_ModeSettingsCustom) // Custom

public static readonly CED.IDrawer SectionInfluenceVolumeSettings = CED.FoldoutGroup(
"Influence volume settings",
(s, p, o) => s.isSectionExpandedInfluenceVolume,
true,
FoldoutOption.Indent,
false,
FadeOption.Animate,
CED.Action(Drawer_InfluenceBoxSettings), // Box
CED.Action(Drawer_InfluenceSphereSettings) // Sphere
)/*,

public static readonly CED.IDrawer SectionSeparateProjectionVolumeSettings = CED.FadeGroup(
(s, p, o, i) => s.isSectionExpandedSeparateProjection,
false,
FadeOption.Animate,
true,
FoldoutOption.Indent,
false,
FadeOption.Animate,
CED.Action(Drawer_ProjectionBoxSettings), // Box
CED.Action(Drawer_ProjectionSphereSettings) // Sphere
)

public static readonly CED.IDrawer SectionCaptureSettings = CED.FoldoutGroup(
"Capture settings",
(s, p, o) => s.isSectionExpandedCaptureSettings,
true,
FoldoutOption.Indent,
CED.Action(Drawer_CaptureSettings)
);

true,
FoldoutOption.Indent,
CED.Action(Drawer_AdditionalSettings)
);

8
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/Lighting/Reflection/PlanarReflectionProbeUI.Drawers.cs


public static readonly CED.IDrawer SectionFoldoutInfluenceSettings = CED.FoldoutGroup(
"Influence Settings",
(s, d, o) => s.isSectionExpandedInfluenceSettings,
true,
FoldoutOption.Indent,
CED.Action(Drawer_SectionInfluenceSettings)
);

SectionFoldoutCaptureSettings = CED.FoldoutGroup(
"Capture Settings",
(s, d, o) => s.isSectionExpandedCaptureSettings,
true,
FoldoutOption.Indent,
CED.Action(Drawer_SectionCaptureSettings),
CED.FadeGroup(
(s, d, o, i) =>

case 1: return s.isSectionExpandedCaptureStaticSettings;
}
},
false,
FadeOption.Animate,
SectionCaptureMirrorSettings,
SectionCaptureStaticSettings)
);

CED.FadeGroup(
(s, d, o, i) => s.IsSectionExpandedReflectionProbeMode((ReflectionProbeMode)i),
true,
FadeOption.Animate | FadeOption.Indent,
SectionProbeModeBakedSettings,
SectionProbeModeRealtimeSettings,
SectionProbeModeCustomSettings

6
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/Lighting/Reflection/Volume/InfluenceVolumeUI.Drawers.cs


CED.Action(Drawer_FieldShapeType),
CED.FadeGroup(
(s, d, o, i) => s.IsSectionExpanded_Shape((ShapeType)i),
true,
FadeOption.Animate | FadeOption.Indent,
SectionShapeBox,
SectionShapeSphere
)

CED.FoldoutGroup(
"Influence Volume",
(s, d, o) => s.isSectionExpandedShape,
true,
FoldoutOption.Indent,
false,
FadeOption.Animate,
SectionShapeBox,
SectionShapeSphere
)

2
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/Lighting/Reflection/Volume/ProxyVolumeUI.cs


CED.Action(Drawer_FieldShapeType),
CED.FadeGroup(
(s, d, o, i) => s.IsSectionExpanded_Shape((ShapeType)i),
true,
FadeOption.Animate | FadeOption.Indent,
SectionShapeBox,
SectionShapeSphere
)

69
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/Material/LayeredLit/LayeredLitUI.cs


public readonly GUIContent inheritBaseNormalText = new GUIContent("Normal influence", "Inherit the normal from the base layer.");
public readonly GUIContent inheritBaseHeightText = new GUIContent("Heightmap influence", "Inherit the height from the base layer.");
public readonly GUIContent inheritBaseColorText = new GUIContent("BaseColor influence", "Inherit the base color from the base layer.");
public readonly GUIContent heightOffset = new GUIContent("Height Offset", "Offset applied to the height before layering.");
public readonly GUIContent heightTransition = new GUIContent("Height Transition", "Size in world units of the smooth transition between layers.");
public readonly GUIContent perPixelDisplacementLayersWarning = new GUIContent("For pixel displacement to work correctly, all layers with a heightmap must use the same UV mapping");

const string kInheritBaseColor = "_InheritBaseColor";
// Height blend
MaterialProperty[] heightBlendOffset = new MaterialProperty[kMaxLayerCount];
const string kHeightBlendOffset = "_HeightOffset";
MaterialProperty heightTransition = null;
const string kHeightTransition = "_HeightTransition";

{
// Density/opacity mode
opacityAsDensity[i] = FindProperty(string.Format("{0}{1}", kOpacityAsDensity, i), props);
heightBlendOffset[i] = FindProperty(string.Format("{0}{1}", kHeightBlendOffset, i), props);
showLayer[i] = FindProperty(string.Format("{0}{1}", kShowLayer, i), props);
if (i != 0)

Material material = m_MaterialEditor.target as Material;
bool mainLayerInfluenceEnable = useMainLayerInfluence.floatValue > 0.0f;
bool heightBasedBlend = useHeightBasedBlend.floatValue > 0.0f;
EditorGUILayout.LabelField(styles.layeringOptionText, EditorStyles.boldLabel);
EditorGUI.indentLevel++;
// The main layer does not have any options but height-based blend.
if (layerIndex > 0)
if(mainLayerInfluenceEnable) // This is the only case where we need this sub category.
int paramIndex = layerIndex - 1;
EditorGUILayout.LabelField(styles.layeringOptionText, EditorStyles.boldLabel);
EditorGUI.indentLevel++;
m_MaterialEditor.ShaderProperty(opacityAsDensity[layerIndex], styles.opacityAsDensityText);
// The main layer does not have any options but height-based blend.
if (layerIndex > 0)
{
int paramIndex = layerIndex - 1;
if (mainLayerInfluenceEnable)
{
m_MaterialEditor.ShaderProperty(inheritBaseColor[paramIndex], styles.inheritBaseColorText);
m_MaterialEditor.ShaderProperty(inheritBaseNormal[paramIndex], styles.inheritBaseNormalText);
// Main height influence is only available if the shader uses the heightmap for displacement (per vertex or per level)
// We always display it as it can be tricky to know when per pixel displacement is enabled or not
m_MaterialEditor.ShaderProperty(inheritBaseHeight[paramIndex], styles.inheritBaseHeightText);
m_MaterialEditor.ShaderProperty(opacityAsDensity[layerIndex], styles.opacityAsDensityText);
if (mainLayerInfluenceEnable)
{
m_MaterialEditor.ShaderProperty(inheritBaseColor[paramIndex], styles.inheritBaseColorText);
m_MaterialEditor.ShaderProperty(inheritBaseNormal[paramIndex], styles.inheritBaseNormalText);
// Main height influence is only available if the shader uses the heightmap for displacement (per vertex or per level)
// We always display it as it can be tricky to know when per pixel displacement is enabled or not
m_MaterialEditor.ShaderProperty(inheritBaseHeight[paramIndex], styles.inheritBaseHeightText);
}
}
else
{
if (!useMainLayerInfluence.hasMixedValue && useMainLayerInfluence.floatValue != 0.0f)
else
m_MaterialEditor.TexturePropertySingleLine(styles.layerInfluenceMapMaskText, layerInfluenceMaskMap);
if (!useMainLayerInfluence.hasMixedValue && useMainLayerInfluence.floatValue != 0.0f)
{
m_MaterialEditor.TexturePropertySingleLine(styles.layerInfluenceMapMaskText, layerInfluenceMaskMap);
}
}
if (heightBasedBlend)
{
m_MaterialEditor.ShaderProperty(heightBlendOffset[layerIndex], styles.heightOffset);
EditorGUI.indentLevel--;
EditorGUILayout.Space();
EditorGUI.indentLevel--;
EditorGUILayout.Space();
DoLayerGUI(material, layerIndex, true);

CoreUtils.SetKeyword(material, "_DENSITY_MODE", useDensityModeEnable);
BaseLitGUI.MaterialId materialId = (BaseLitGUI.MaterialId)material.GetFloat(kMaterialID);
BaseLitGUI.SSSAndTransmissionType sssAndTransmissionType = (BaseLitGUI.SSSAndTransmissionType)material.GetFloat(kSSSAndTransmissionType);
if (materialId == BaseLitGUI.MaterialId.LitSSSAndTransmission)
{
CoreUtils.SetKeyword(material, "_MATERIAL_FEATURE_SUBSURFACE_SCATTERING", sssAndTransmissionType == BaseLitGUI.SSSAndTransmissionType.LitSSSAndTransmission || sssAndTransmissionType == BaseLitGUI.SSSAndTransmissionType.LitSSSOnly);
CoreUtils.SetKeyword(material, "_MATERIAL_FEATURE_TRANSMISSION", sssAndTransmissionType == BaseLitGUI.SSSAndTransmissionType.LitSSSAndTransmission || sssAndTransmissionType == BaseLitGUI.SSSAndTransmissionType.LitTransmissionOnly);
}
else
{
CoreUtils.SetKeyword(material, "_MATERIAL_FEATURE_SUBSURFACE_SCATTERING", false);
CoreUtils.SetKeyword(material, "_MATERIAL_FEATURE_TRANSMISSION", false);
}
CoreUtils.SetKeyword(material, "_MATERIAL_FEATURE_SUBSURFACE_SCATTERING", materialId == BaseLitGUI.MaterialId.LitSSS);
CoreUtils.SetKeyword(material, "_MATERIAL_FEATURE_TRANSMISSION", materialId == BaseLitGUI.MaterialId.LitTranslucent || (materialId == BaseLitGUI.MaterialId.LitSSS && material.GetFloat(kTransmissionEnable) > 0.0f));
}
public override void OnGUI(MaterialEditor materialEditor, MaterialProperty[] props)

43
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/Material/Lit/BaseLitUI.cs


// Material ID
public static GUIContent materialIDText = new GUIContent("Material type", "Select a material feature to enable on top of regular material");
public static GUIContent SSSAndTransmissionTypeText = new GUIContent("SSS and Transmission type", "Subsurface Scattering for translucent materials such as skin, vegetation, fruit, marble, wax and milk; Transmission for back lighting");
public static GUIContent transmissionEnableText = new GUIContent("Enable Transmission", "Enable Transmission to get back lighting");
// Per pixel displacement
public static GUIContent ppdMinSamplesText = new GUIContent("Minimum steps", "Minimum steps (texture sample) to use with per pixel displacement mapping");

public enum DoubleSidedNormalMode
{
Flip,
Mirror
Mirror,
None
}
public enum TessellationMode

public enum MaterialId
{
LitSSSAndTransmission = 0,
LitSSS = 0,
LitSpecular = 4
};
public enum SSSAndTransmissionType
{
LitSSSAndTransmission = 0,
LitSSSOnly = 1,
LitTransmissionOnly = 2,
LitSpecular = 4,
LitTranslucent = 5
};
public enum HeightmapParametrization

// Material ID
protected MaterialProperty materialID = null;
protected const string kMaterialID = "_MaterialID";
protected MaterialProperty sssAndTransmissionType = null;
protected const string kSSSAndTransmissionType = "_SSSAndTransmissionType";
protected MaterialProperty transmissionEnable = null;
protected const string kTransmissionEnable = "_TransmissionEnable";
protected const string kStencilRef = "_StencilRef";
protected const string kStencilWriteMask = "_StencilWriteMask";

// MaterialID
materialID = FindProperty(kMaterialID, props);
sssAndTransmissionType = FindProperty(kSSSAndTransmissionType, props);
transmissionEnable = FindProperty(kTransmissionEnable, props);
displacementMode = FindProperty(kDisplacementMode, props);
displacementLockObjectScale = FindProperty(kDisplacementLockObjectScale, props);

m_MaterialEditor.ShaderProperty(materialID, StylesBaseLit.materialIDText);
if ((int)materialID.floatValue == (int)BaseLitGUI.MaterialId.LitSSSAndTransmission)
if ((int)materialID.floatValue == (int)BaseLitGUI.MaterialId.LitSSS)
m_MaterialEditor.ShaderProperty(sssAndTransmissionType, StylesBaseLit.SSSAndTransmissionTypeText);
}
EditorGUI.indentLevel++;
m_MaterialEditor.ShaderProperty(transmissionEnable, StylesBaseLit.transmissionEnableText);
EditorGUI.indentLevel--;
}
m_MaterialEditor.ShaderProperty(supportDBuffer, StylesBaseLit.supportDBufferText);

case DoubleSidedNormalMode.Flip: // Flip mode (in tangent space)
material.SetVector("_DoubleSidedConstants", new Vector4(-1.0f, -1.0f, -1.0f, 0.0f));
break;
case DoubleSidedNormalMode.None: // None mode (in tangent space)
material.SetVector("_DoubleSidedConstants", new Vector4(1.0f, 1.0f, 1.0f, 0.0f));
break;
if ((int)material.GetFloat(kMaterialID) == (int)BaseLitGUI.MaterialId.LitSSSAndTransmission)
if ((int)material.GetFloat(kMaterialID) == (int)BaseLitGUI.MaterialId.LitSSS)
if ((int)material.GetFloat(kSSSAndTransmissionType) == (int)BaseLitGUI.SSSAndTransmissionType.LitSSSAndTransmission ||
(int)material.GetFloat(kSSSAndTransmissionType) == (int)BaseLitGUI.SSSAndTransmissionType.LitSSSOnly)
{
stencilRef = (int)StencilLightingUsage.SplitLighting;
}
stencilRef = (int)StencilLightingUsage.SplitLighting;
}
// As we tag both during velocity pass and Gbuffer pass we need a separate state and we need to use the write mask
material.SetInt(kStencilRef, stencilRef);

123
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/Material/Lit/LitUI.cs


// clear coat
coatMask = FindProperty(kCoatMask, props);
coatMaskMap = FindProperty(kCoatMaskMap, props);
coatMaskMap = FindProperty(kCoatMaskMap, props);
// Transparency
refractionMode = FindProperty(kRefractionMode, props, false);

diffusionProfileID[layerIndex].floatValue = profileID;
}
if ((int)sssAndTransmissionType.floatValue == (int)BaseLitGUI.SSSAndTransmissionType.LitSSSAndTransmission || (int)sssAndTransmissionType.floatValue == (int)BaseLitGUI.SSSAndTransmissionType.LitSSSOnly)
if ((int)materialID.floatValue == (int)BaseLitGUI.MaterialId.LitSSS)
if ((int)sssAndTransmissionType.floatValue == (int)BaseLitGUI.SSSAndTransmissionType.LitSSSAndTransmission || (int)sssAndTransmissionType.floatValue == (int)BaseLitGUI.SSSAndTransmissionType.LitTransmissionOnly)
if ((int)materialID.floatValue == (int)BaseLitGUI.MaterialId.LitTranslucent ||
((int)materialID.floatValue == (int)BaseLitGUI.MaterialId.LitSSS && transmissionEnable.floatValue > 0.0f))
{
m_MaterialEditor.TexturePropertySingleLine(Styles.thicknessMapText, thicknessMap[layerIndex]);
if (thicknessMap[layerIndex].textureValue != null)

switch ((BaseLitGUI.MaterialId)materialID.floatValue)
{
case BaseLitGUI.MaterialId.LitSSSAndTransmission:
case BaseLitGUI.MaterialId.LitSSS:
case BaseLitGUI.MaterialId.LitTranslucent:
ShaderSSSAndTransmissionInputGUI(material, layerIndex);
break;
case BaseLitGUI.MaterialId.LitStandard:

if (!isLayeredLit)
{
ShaderClearCoatInputGUI();
}
}
EditorGUILayout.Space();

EditorGUI.indentLevel++;
m_MaterialEditor.TexturePropertySingleLine(Styles.detailMapNormalText, detailMap[layerIndex]);
// When Planar or Triplanar is enabled, the UVDetail uses the same mode, so we disable the choice on UVDetail
if (uvBaseMapping == UVBaseMapping.Planar)
if (material.GetTexture(isLayeredLit ? kDetailMap + layerIndex : kDetailMap))
EditorGUILayout.LabelField(Styles.UVDetailMappingText.text + ": Planar");
}
else if (uvBaseMapping == UVBaseMapping.Triplanar)
{
EditorGUILayout.LabelField(Styles.UVDetailMappingText.text + ": Triplanar");
}
else
{
m_MaterialEditor.ShaderProperty(UVDetail[layerIndex], Styles.UVDetailMappingText);
}
EditorGUI.indentLevel++;
// When Planar or Triplanar is enabled, the UVDetail uses the same mode, so we disable the choice on UVDetail
if (uvBaseMapping == UVBaseMapping.Planar)
{
EditorGUILayout.LabelField(Styles.UVDetailMappingText.text + ": Planar");
}
else if (uvBaseMapping == UVBaseMapping.Triplanar)
{
EditorGUILayout.LabelField(Styles.UVDetailMappingText.text + ": Triplanar");
}
else
{
m_MaterialEditor.ShaderProperty(UVDetail[layerIndex], Styles.UVDetailMappingText);
}
// Setup the UVSet for detail; if planar/triplanar is used for the base, it will override the mapping of detail (see shader code)
X = ((UVDetailMapping)UVDetail[layerIndex].floatValue == UVDetailMapping.UV0) ? 1.0f : 0.0f;
Y = ((UVDetailMapping)UVDetail[layerIndex].floatValue == UVDetailMapping.UV1) ? 1.0f : 0.0f;
Z = ((UVDetailMapping)UVDetail[layerIndex].floatValue == UVDetailMapping.UV2) ? 1.0f : 0.0f;
W = ((UVDetailMapping)UVDetail[layerIndex].floatValue == UVDetailMapping.UV3) ? 1.0f : 0.0f;
UVDetailsMappingMask[layerIndex].colorValue = new Color(X, Y, Z, W);
// Setup the UVSet for detail; if planar/triplanar is used for the base, it will override the mapping of detail (see shader code)
X = ((UVDetailMapping)UVDetail[layerIndex].floatValue == UVDetailMapping.UV0) ? 1.0f : 0.0f;
Y = ((UVDetailMapping)UVDetail[layerIndex].floatValue == UVDetailMapping.UV1) ? 1.0f : 0.0f;
Z = ((UVDetailMapping)UVDetail[layerIndex].floatValue == UVDetailMapping.UV2) ? 1.0f : 0.0f;
W = ((UVDetailMapping)UVDetail[layerIndex].floatValue == UVDetailMapping.UV3) ? 1.0f : 0.0f;
UVDetailsMappingMask[layerIndex].colorValue = new Color(X, Y, Z, W);
EditorGUI.indentLevel++;
m_MaterialEditor.ShaderProperty(linkDetailsWithBase[layerIndex], Styles.linkDetailsWithBaseText);
EditorGUI.indentLevel--;
EditorGUI.indentLevel++;
m_MaterialEditor.ShaderProperty(linkDetailsWithBase[layerIndex], Styles.linkDetailsWithBaseText);
EditorGUI.indentLevel--;
m_MaterialEditor.TextureScaleOffsetProperty(detailMap[layerIndex]);
if ((DisplacementMode)displacementMode.floatValue == DisplacementMode.Pixel && (UVDetail[layerIndex].floatValue != UVBase[layerIndex].floatValue))
{
if (material.GetTexture(kDetailMap + m_PropertySuffixes[layerIndex]))
EditorGUILayout.HelpBox(Styles.perPixelDisplacementDetailsWarning.text, MessageType.Warning);
m_MaterialEditor.TextureScaleOffsetProperty(detailMap[layerIndex]);
if ((DisplacementMode)displacementMode.floatValue == DisplacementMode.Pixel && (UVDetail[layerIndex].floatValue != UVBase[layerIndex].floatValue))
{
if (material.GetTexture(kDetailMap + m_PropertySuffixes[layerIndex]))
EditorGUILayout.HelpBox(Styles.perPixelDisplacementDetailsWarning.text, MessageType.Warning);
}
m_MaterialEditor.ShaderProperty(detailAlbedoScale[layerIndex], Styles.detailAlbedoScaleText);
m_MaterialEditor.ShaderProperty(detailNormalScale[layerIndex], Styles.detailNormalScaleText);
m_MaterialEditor.ShaderProperty(detailSmoothnessScale[layerIndex], Styles.detailSmoothnessScaleText);
EditorGUI.indentLevel--;
m_MaterialEditor.ShaderProperty(detailAlbedoScale[layerIndex], Styles.detailAlbedoScaleText);
m_MaterialEditor.ShaderProperty(detailNormalScale[layerIndex], Styles.detailNormalScaleText);
m_MaterialEditor.ShaderProperty(detailSmoothnessScale[layerIndex], Styles.detailSmoothnessScaleText);
EditorGUI.indentLevel--;
var surfaceTypeValue = (SurfaceType)surfaceType.floatValue;

}
m_MaterialEditor.TexturePropertySingleLine(Styles.emissiveText, emissiveColorMap, emissiveColor);
m_MaterialEditor.ShaderProperty(UVEmissive, Styles.UVBaseMappingText);
UVBaseMapping uvEmissiveMapping = (UVBaseMapping)UVEmissive.floatValue;
if (material.GetTexture(kEmissiveColorMap))
{
EditorGUI.indentLevel++;
m_MaterialEditor.ShaderProperty(UVEmissive, Styles.UVBaseMappingText);
UVBaseMapping uvEmissiveMapping = (UVBaseMapping)UVEmissive.floatValue;
float X, Y, Z, W;
X = (uvEmissiveMapping == UVBaseMapping.UV0) ? 1.0f : 0.0f;
Y = (uvEmissiveMapping == UVBaseMapping.UV1) ? 1.0f : 0.0f;
Z = (uvEmissiveMapping == UVBaseMapping.UV2) ? 1.0f : 0.0f;
W = (uvEmissiveMapping == UVBaseMapping.UV3) ? 1.0f : 0.0f;
float X, Y, Z, W;
X = (uvEmissiveMapping == UVBaseMapping.UV0) ? 1.0f : 0.0f;
Y = (uvEmissiveMapping == UVBaseMapping.UV1) ? 1.0f : 0.0f;
Z = (uvEmissiveMapping == UVBaseMapping.UV2) ? 1.0f : 0.0f;
W = (uvEmissiveMapping == UVBaseMapping.UV3) ? 1.0f : 0.0f;
UVMappingMaskEmissive.colorValue = new Color(X, Y, Z, W);
UVMappingMaskEmissive.colorValue = new Color(X, Y, Z, W);
if ((uvEmissiveMapping == UVBaseMapping.Planar) || (uvEmissiveMapping == UVBaseMapping.Triplanar))
{
m_MaterialEditor.ShaderProperty(TexWorldScaleEmissive, Styles.texWorldScaleText);
}
if ((uvEmissiveMapping == UVBaseMapping.Planar) || (uvEmissiveMapping == UVBaseMapping.Triplanar))
{
m_MaterialEditor.ShaderProperty(TexWorldScaleEmissive, Styles.texWorldScaleText);
m_MaterialEditor.TextureScaleOffsetProperty(emissiveColorMap);
EditorGUI.indentLevel--;
m_MaterialEditor.TextureScaleOffsetProperty(emissiveColorMap);
m_MaterialEditor.ShaderProperty(emissiveIntensity, Styles.emissiveIntensityText);
m_MaterialEditor.ShaderProperty(albedoAffectEmissive, Styles.albedoAffectEmissiveText);

}
BaseLitGUI.MaterialId materialId = (BaseLitGUI.MaterialId)material.GetFloat(kMaterialID);
BaseLitGUI.SSSAndTransmissionType sssAndTransmissionType = (BaseLitGUI.SSSAndTransmissionType)material.GetFloat(kSSSAndTransmissionType);
if (materialId == BaseLitGUI.MaterialId.LitSSSAndTransmission)
{
CoreUtils.SetKeyword(material, "_MATERIAL_FEATURE_SUBSURFACE_SCATTERING", sssAndTransmissionType == BaseLitGUI.SSSAndTransmissionType.LitSSSAndTransmission || sssAndTransmissionType == BaseLitGUI.SSSAndTransmissionType.LitSSSOnly);
CoreUtils.SetKeyword(material, "_MATERIAL_FEATURE_TRANSMISSION", sssAndTransmissionType == BaseLitGUI.SSSAndTransmissionType.LitSSSAndTransmission || sssAndTransmissionType == BaseLitGUI.SSSAndTransmissionType.LitTransmissionOnly);
}
else
{
CoreUtils.SetKeyword(material, "_MATERIAL_FEATURE_SUBSURFACE_SCATTERING", false);
CoreUtils.SetKeyword(material, "_MATERIAL_FEATURE_TRANSMISSION", false);
}
CoreUtils.SetKeyword(material, "_MATERIAL_FEATURE_SUBSURFACE_SCATTERING", materialId == BaseLitGUI.MaterialId.LitSSS);
CoreUtils.SetKeyword(material, "_MATERIAL_FEATURE_TRANSMISSION", materialId == BaseLitGUI.MaterialId.LitTranslucent || (materialId == BaseLitGUI.MaterialId.LitSSS && material.GetFloat(kTransmissionEnable) > 0.0f));
CoreUtils.SetKeyword(material, "_MATERIAL_FEATURE_ANISOTROPY", materialId == BaseLitGUI.MaterialId.LitAniso);
// No material Id for clear coat, just test the attribute

6
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/Material/Unlit/BaseUnlitUI.cs


// Disable all passes except distortion
// Distortion is setup in code above
material.SetShaderPassEnabled(HDShaderPassNames.s_ForwardStr, enablePass);
material.SetShaderPassEnabled(HDShaderPassNames.s_ForwardDebugDisplayStr, true);
material.SetShaderPassEnabled(HDShaderPassNames.s_ForwardOnlyDebugDisplayStr, true);
material.SetShaderPassEnabled(HDShaderPassNames.s_GBufferDebugDisplayStr, true);
material.SetShaderPassEnabled(HDShaderPassNames.s_TransparentBackfaceDebugDisplayStr, enablePass);
material.SetShaderPassEnabled(HDShaderPassNames.s_TransparentDepthPostpassStr, enablePass);
material.SetShaderPassEnabled(HDShaderPassNames.s_MetaStr, enablePass);
material.SetShaderPassEnabled(HDShaderPassNames.s_ShadowCasterStr, enablePass);

if (backFaceEnable)
{
material.SetShaderPassEnabled(HDShaderPassNames.s_TransparentBackfaceStr, true);
material.SetShaderPassEnabled(HDShaderPassNames.s_TransparentBackfaceDebugDisplayStr, true);
material.SetShaderPassEnabled(HDShaderPassNames.s_TransparentBackfaceDebugDisplayStr, false);
}
}

14
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/RenderLoopSettings/FrameSettingsUI.cs


public static CED.IDrawer SectionRenderingPasses = CED.FoldoutGroup(
"Rendering Passes",
(s, p, o) => s.isSectionExpandedRenderingPasses,
true,
FoldoutOption.Indent,
CED.LabelWidth(200, CED.Action(Drawer_SectionRenderingPasses))
);

true,
FoldoutOption.Indent,
false,
FadeOption.Animate,
true,
FadeOption.Animate | FadeOption.Indent,
CED.Action(Drawer_FieldRenderAlphaTestOnlyInDeferredPrepass))),
CED.Action(Drawer_SectionOtherRenderingSettings)
)

(s, d, o, i) => s.isSectionExpandedXRSupported,
false,
FadeOption.Animate,
true,
FoldoutOption.Indent,
true,
FoldoutOption.Indent,
CED.LabelWidth(250, CED.Action(Drawer_SectionLightingSettings)));
public AnimBool isSectionExpandedRenderingPasses { get { return m_AnimBools[0]; } }

3
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/RenderLoopSettings/LightLoopSettingsUI.cs


public static CED.IDrawer SectionLightLoopSettings = CED.FoldoutGroup(
"Light Loop Settings",
(s, p, o) => s.isSectionExpandedLightLoopSettings,
true,
FoldoutOption.Indent,
CED.LabelWidth(250, CED.Action(Drawer_SectionLightLoopSettings)));
public AnimBool isSectionExpandedLightLoopSettings { get { return m_AnimBools[0]; } }

if (EditorGUILayout.BeginFadeGroup(s.isSectionExpandedEnableTileAndCluster.faded))
{
EditorGUI.indentLevel++;
EditorGUILayout.PropertyField(p.isFptlEnabled, _.GetContent("Enable FPTL"));
EditorGUILayout.PropertyField(p.enableFptlForForwardOpaque, _.GetContent("Enable FPTL For Forward Opaque"));
EditorGUILayout.PropertyField(p.enableBigTilePrepass, _.GetContent("Enable Big Tile Prepass"));
EditorGUILayout.PropertyField(p.enableComputeLightEvaluation, _.GetContent("Enable Compute Light Evaluation"));

6
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/RenderLoopSettings/RenderPipelineSettingsUI.cs


EditorGUILayout.PropertyField(d.supportSSR, _.GetContent("Support SSR"));
EditorGUILayout.PropertyField(d.supportSSAO, _.GetContent("Support SSAO"));
EditorGUILayout.PropertyField(d.supportDBuffer, _.GetContent("Support Decal Buffer"));
EditorGUILayout.PropertyField(d.msaaSampleCount, _.GetContent("MSAA Sample Count"));
EditorGUILayout.PropertyField(d.supportMSAA, _.GetContent("Support Multi Sampling Anti-Aliasing"));
EditorGUILayout.PropertyField(d.MSAASampleCount, _.GetContent("MSAA Sample Count"));
EditorGUILayout.PropertyField(d.supportForwardOnly, _.GetContent("Support Forward Only"));
EditorGUILayout.PropertyField(d.supportMotionVectors, _.GetContent("Support Motion Vectors"));
EditorGUILayout.PropertyField(d.supportStereo, _.GetContent("Support Stereo Rendering"));
--EditorGUI.indentLevel;
}
}

12
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/RenderLoopSettings/SerializedRenderPipelineSettings.cs


public SerializedProperty supportSSR;
public SerializedProperty supportSSAO;
public SerializedProperty supportDBuffer;
public SerializedProperty msaaSampleCount;
public SerializedProperty supportMSAA;
public SerializedProperty MSAASampleCount;
public SerializedProperty supportForwardOnly;
public SerializedProperty supportMotionVectors;
public SerializedProperty supportStereo;
public SerializedGlobalLightLoopSettings lightLoopSettings;
public SerializedShadowInitParameters shadowInitParams;

supportSSR = root.Find((RenderPipelineSettings s) => s.supportSSR);
supportSSAO = root.Find((RenderPipelineSettings s) => s.supportSSAO);
supportDBuffer = root.Find((RenderPipelineSettings s) => s.supportDBuffer);
msaaSampleCount = root.Find((RenderPipelineSettings s) => s.msaaSampleCount);
supportMSAA = root.Find((RenderPipelineSettings s) => s.supportMSAA);
MSAASampleCount = root.Find((RenderPipelineSettings s) => s.msaaSampleCount);
supportForwardOnly = root.Find((RenderPipelineSettings s) => s.supportForwardOnly);
supportMotionVectors = root.Find((RenderPipelineSettings s) => s.supportMotionVectors);
supportStereo = root.Find((RenderPipelineSettings s) => s.supportStereo);
lightLoopSettings = new SerializedGlobalLightLoopSettings(root.Find((RenderPipelineSettings s) => s.lightLoopSettings));
shadowInitParams = new SerializedShadowInitParameters(root.Find((RenderPipelineSettings s) => s.shadowInitParams));

5
ScriptableRenderPipeline/HDRenderPipeline/HDRP/HDCustomSamplerId.cs


PyramidDepth,
PostProcessing,
RenderDebug,
InitAndClearBuffer,
InitGBuffersAndClearDepthStencil,
ClearBuffers,
ClearDepthStencil,
ClearSSSDiffuseTarget,
ClearSSSFilteringTarget,
ClearAndCopyStencilTexture,

HDRenderPipelineRender,
CullResultsCull,
CopyDepthForSceneView,
// Profile sampler for tile pass
TPPrepareLightsForGPU,

725
ScriptableRenderPipeline/HDRenderPipeline/HDRP/HDRenderPipeline.cs
The file diff is too large to display.

8
ScriptableRenderPipeline/HDRenderPipeline/HDRP/HDRenderPipelineAsset.asset


enableBigTilePrepass: 1
isFptlEnabled: 1
renderPipelineSettings:
supportShadowMask: 1
supportShadowMask: 0
supportsForwardOnly: 0
supportMSAA: 0
supportMSAAAntiAliasing: 0
msaaSampleCount: 1
supportsMotionVectors: 1
supportsStereo: 0
lightLoopSettings:
spotCookieSize: 128
cookieTexArraySize: 16

19
ScriptableRenderPipeline/HDRenderPipeline/HDRP/HDRenderPipelineAsset.cs


m_FrameSettings.CopyTo(m_FrameSettingsRuntime);
m_frameSettingsIsDirty = false;
// In the Editor we can have many cameras that are not rendered at the same time as the SceneView,
// and it is tricky to keep them all in sync. To keep a coherent state, when a change is made to the
// HDRenderPipelineAsset we tag every camera as dirty, so each one is guaranteed to pick up the correct
// default FrameSettings the next time it goes through HDRenderPipeline.Render(). Otherwise, because
// SceneView and Game cameras are not rendered in the same Render() call, Game cameras that use the
// defaults would not be updated correctly.
#if UNITY_EDITOR
Camera[] cameras = Camera.allCameras;
foreach (Camera camera in cameras)
{
var additionalCameraData = camera.GetComponent<HDAdditionalCameraData>();
if (additionalCameraData)
{
// Call OnAfterDeserialize that set dirty on FrameSettings
additionalCameraData.OnAfterDeserialize();
}
}
#endif
}
}
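// A minimal sketch, with assumed names, of the dirty-tagging pattern described in the comment above:
// the per-camera data marks its FrameSettings as stale, and the asset defaults are re-copied on next use.
class ExampleCameraFrameSettingsHolder
{
    bool m_IsDirty = true;
    FrameSettings m_Settings = new FrameSettings();

    public void MarkDirty() { m_IsDirty = true; } // called whenever the HDRenderPipelineAsset changes

    public FrameSettings Resolve(FrameSettings pipelineDefaults)
    {
        if (m_IsDirty)
        {
            pipelineDefaults.CopyTo(m_Settings); // re-sync with the asset's defaults
            m_IsDirty = false;
        }
        return m_Settings;
    }
}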

55
ScriptableRenderPipeline/HDRenderPipeline/HDRP/HDStringConstants.cs


// ShaderPass string - use to have consistent name through the code
public static readonly string s_EmptyStr = "";
public static readonly string s_ForwardStr = "Forward";
public static readonly string s_ForwardDebugDisplayStr = "ForwardDebugDisplay";
public static readonly string s_ForwardOnlyDebugDisplayStr = "ForwardOnlyDebugDisplay";
public static readonly string s_GBufferDebugDisplayStr = "GBufferDebugDisplay";
public static readonly string s_TransparentBackfaceDebugDisplayStr = "TransparentBackfaceDebugDisplay";
public static readonly string s_TransparentDepthPostpassStr = "TransparentDepthPostpass";
public static readonly string s_MetaStr = "Meta";
public static readonly string s_ShadowCasterStr = "ShadowCaster";

public static readonly ShaderPassName s_ForwardName = new ShaderPassName(s_ForwardStr);
public static readonly ShaderPassName s_ForwardDebugDisplayName = new ShaderPassName(s_ForwardDebugDisplayStr);
public static readonly ShaderPassName s_ForwardOnlyDebugDisplayName = new ShaderPassName(s_ForwardOnlyDebugDisplayStr);
public static readonly ShaderPassName s_GBufferDebugDisplayName = new ShaderPassName(s_GBufferDebugDisplayStr);
public static readonly ShaderPassName s_TransparentBackfaceDebugDisplayName = new ShaderPassName(s_TransparentBackfaceDebugDisplayStr);
public static readonly ShaderPassName s_TransparentDepthPostpassName = new ShaderPassName(s_TransparentDepthPostpassStr);
// Legacy name

public static readonly int g_mScrProjection = Shader.PropertyToID("g_mScrProjection");
public static readonly int g_mInvScrProjection = Shader.PropertyToID("g_mInvScrProjection");
public static readonly int g_iLog2NumClusters = Shader.PropertyToID("g_iLog2NumClusters");
public static readonly int g_screenSize = Shader.PropertyToID("g_screenSize");
public static readonly int g_iNumSamplesMSAA = Shader.PropertyToID("g_iNumSamplesMSAA");
public static readonly int g_fNearPlane = Shader.PropertyToID("g_fNearPlane");
public static readonly int g_fFarPlane = Shader.PropertyToID("g_fFarPlane");
public static readonly int g_fClustScale = Shader.PropertyToID("g_fClustScale");

public static readonly int _StencilRef = Shader.PropertyToID("_StencilRef");
public static readonly int _StencilCmp = Shader.PropertyToID("_StencilCmp");
public static readonly int _InputDepth = Shader.PropertyToID("_InputDepthTexture");
public static readonly int _SrcBlend = Shader.PropertyToID("_SrcBlend");
public static readonly int _DstBlend = Shader.PropertyToID("_DstBlend");

public static readonly int _DecalHTileTexture = Shader.PropertyToID("_DecalHTileTexture");
public static readonly int _ViewMatrix = Shader.PropertyToID("_ViewMatrix");
public static readonly int _InvViewMatrix = Shader.PropertyToID("_InvViewMatrix");

public static readonly int _ViewParam = Shader.PropertyToID("_ViewParam");
public static readonly int _InvProjParam = Shader.PropertyToID("_InvProjParam");
public static readonly int _ScreenSize = Shader.PropertyToID("_ScreenSize");
public static readonly int _ScreenToTargetScale = Shader.PropertyToID("_ScreenToTargetScale");
public static readonly int _InvProjMatrixStereo = Shader.PropertyToID("_InvProjMatrixStereo");
public static readonly int _InvViewProjMatrixStereo = Shader.PropertyToID("_InvViewProjMatrixStereo");
public static readonly int _DepthTexture = Shader.PropertyToID("_DepthTexture");
public static readonly int _CameraColorTexture = Shader.PropertyToID("_CameraColorTexture");
public static readonly int _CameraSssDiffuseLightingBuffer = Shader.PropertyToID("_CameraSssDiffuseLightingTexture");

public static readonly int _GaussianPyramidColorTexture = Shader.PropertyToID("_GaussianPyramidColorTexture");
public static readonly int _PyramidDepthTexture = Shader.PropertyToID("_PyramidDepthTexture");
public static readonly int _GaussianPyramidColorMipSize = Shader.PropertyToID("_GaussianPyramidColorMipSize");
public static readonly int[] _GaussianPyramidColorMips =
{
Shader.PropertyToID("_GaussianColorMip0"),
Shader.PropertyToID("_GaussianColorMip1"),
Shader.PropertyToID("_GaussianColorMip2"),
Shader.PropertyToID("_GaussianColorMip3"),
Shader.PropertyToID("_GaussianColorMip4"),
Shader.PropertyToID("_GaussianColorMip5"),
Shader.PropertyToID("_GaussianColorMip6"),
Shader.PropertyToID("_GaussianColorMip7"),
Shader.PropertyToID("_GaussianColorMip8"),
Shader.PropertyToID("_GaussianColorMip9"),
Shader.PropertyToID("_GaussianColorMip10"),
Shader.PropertyToID("_GaussianColorMip11"),
Shader.PropertyToID("_GaussianColorMip12"),
Shader.PropertyToID("_GaussianColorMip13"),
Shader.PropertyToID("_GaussianColorMip14"),
};
public static readonly int[] _DepthPyramidMips =
{
Shader.PropertyToID("_DepthPyramidMip0"),
Shader.PropertyToID("_DepthPyramidMip1"),
Shader.PropertyToID("_DepthPyramidMip2"),
Shader.PropertyToID("_DepthPyramidMip3"),
Shader.PropertyToID("_DepthPyramidMip4"),
Shader.PropertyToID("_DepthPyramidMip5"),
Shader.PropertyToID("_DepthPyramidMip6"),
Shader.PropertyToID("_DepthPyramidMip7"),
Shader.PropertyToID("_DepthPyramidMip8"),
Shader.PropertyToID("_DepthPyramidMip9"),
Shader.PropertyToID("_DepthPyramidMip10"),
Shader.PropertyToID("_DepthPyramidMip11"),
Shader.PropertyToID("_DepthPyramidMip12"),
Shader.PropertyToID("_DepthPyramidMip13"),
Shader.PropertyToID("_DepthPyramidMip14"),
};
public static readonly int _DebugColorPickerTexture = Shader.PropertyToID("_DebugColorPickerTexture");
public static readonly int _ColorPickerParam = Shader.PropertyToID("_ColorPickerParam");

public static readonly int _DebugFullScreenTexture = Shader.PropertyToID("_DebugFullScreenTexture");
public static readonly int _BlitTexture = Shader.PropertyToID("_BlitTexture");
public static readonly int _BlitScaleBias = Shader.PropertyToID("_BlitScaleBias");
public static readonly int _BlitMipLevel = Shader.PropertyToID("_BlitMipLevel");
public static readonly int _WorldScales = Shader.PropertyToID("_WorldScales");
public static readonly int _FilterKernels = Shader.PropertyToID("_FilterKernels");

197
ScriptableRenderPipeline/HDRenderPipeline/HDRP/HDUtils.cs


using System;
using System.Collections.Generic;
using System.Linq;
using UnityEngine.Rendering;
namespace UnityEngine.Experimental.Rendering.HDPipeline
{

public const RendererConfiguration k_RendererConfigurationBakedLightingWithShadowMask = k_RendererConfigurationBakedLighting | RendererConfiguration.PerObjectOcclusionProbe | RendererConfiguration.PerObjectOcclusionProbeProxyVolume | RendererConfiguration.PerObjectShadowMask;
static public HDAdditionalReflectionData s_DefaultHDAdditionalReflectionData { get { return ComponentSingleton<HDAdditionalReflectionData>.instance; } }
static public HDAdditionalLightData s_DefaultHDAdditionalLightData { get { return ComponentSingleton<HDAdditionalLightData>.instance; } }
static public HDAdditionalCameraData s_DefaultHDAdditionalCameraData { get { return ComponentSingleton<HDAdditionalCameraData>.instance; } }
public static Material GetBlitMaterial()
{
HDRenderPipeline hdPipeline = RenderPipelineManager.currentPipeline as HDRenderPipeline;
if (hdPipeline != null)
{
return hdPipeline.GetBlitMaterial();
}
return null;
}
public static RenderPipelineSettings hdrpSettings
{
get
{
HDRenderPipeline hdPipeline = RenderPipelineManager.currentPipeline as HDRenderPipeline;
if (hdPipeline != null)
{
return hdPipeline.renderPipelineSettings;
}
return null;
}
}
static MaterialPropertyBlock s_PropertyBlock = new MaterialPropertyBlock();
public static List<RenderPipelineMaterial> GetRenderPipelineMaterialList()
{

// Transpose for HLSL.
return Matrix4x4.Transpose(worldToViewMatrix.transpose * viewSpaceRasterTransform);
}
// This set of RenderTarget management methods is supposed to be used when rendering into a camera dependent render texture.
// This will automatically set the viewport based on the camera size and the RTHandle scaling info.
public static void SetRenderTarget(CommandBuffer cmd, HDCamera camera, RTHandle buffer, ClearFlag clearFlag, Color clearColor, int miplevel = 0, CubemapFace cubemapFace = CubemapFace.Unknown, int depthSlice = 0)
{
cmd.SetRenderTarget(buffer, miplevel, cubemapFace, depthSlice);
SetViewport(cmd, camera, buffer);
CoreUtils.ClearRenderTarget(cmd, clearFlag, clearColor);
}
public static void SetRenderTarget(CommandBuffer cmd, HDCamera camera, RTHandle buffer, ClearFlag clearFlag = ClearFlag.None, int miplevel = 0, CubemapFace cubemapFace = CubemapFace.Unknown, int depthSlice = 0)
{
SetRenderTarget(cmd, camera, buffer, clearFlag, CoreUtils.clearColorAllBlack, miplevel, cubemapFace, depthSlice);
}
public static void SetRenderTarget(CommandBuffer cmd, HDCamera camera, RTHandle colorBuffer, RTHandle depthBuffer, int miplevel = 0, CubemapFace cubemapFace = CubemapFace.Unknown, int depthSlice = 0)
{
SetRenderTarget(cmd, camera, colorBuffer, depthBuffer, ClearFlag.None, CoreUtils.clearColorAllBlack, miplevel, cubemapFace, depthSlice);
}
public static void SetRenderTarget(CommandBuffer cmd, HDCamera camera, RTHandle colorBuffer, RTHandle depthBuffer, ClearFlag clearFlag, int miplevel = 0, CubemapFace cubemapFace = CubemapFace.Unknown, int depthSlice = 0)
{
SetRenderTarget(cmd, camera, colorBuffer, depthBuffer, clearFlag, CoreUtils.clearColorAllBlack, miplevel, cubemapFace, depthSlice);
}
public static void SetRenderTarget(CommandBuffer cmd, HDCamera camera, RTHandle colorBuffer, RTHandle depthBuffer, ClearFlag clearFlag, Color clearColor, int miplevel = 0, CubemapFace cubemapFace = CubemapFace.Unknown, int depthSlice = 0)
{
CoreUtils.SetRenderTarget(cmd, colorBuffer, depthBuffer, miplevel, cubemapFace, depthSlice);
SetViewport(cmd, camera, colorBuffer);
CoreUtils.ClearRenderTarget(cmd, clearFlag, clearColor);
}
public static void SetRenderTarget(CommandBuffer cmd, HDCamera camera, RenderTargetIdentifier[] colorBuffers, RTHandle depthBuffer)
{
CoreUtils.SetRenderTarget(cmd, colorBuffers, depthBuffer, ClearFlag.None, CoreUtils.clearColorAllBlack);
SetViewport(cmd, camera, depthBuffer);
}
public static void SetRenderTarget(CommandBuffer cmd, HDCamera camera, RenderTargetIdentifier[] colorBuffers, RTHandle depthBuffer, ClearFlag clearFlag = ClearFlag.None)
{
CoreUtils.SetRenderTarget(cmd, colorBuffers, depthBuffer); // Don't clear here, viewport needs to be set before we do.
SetViewport(cmd, camera, depthBuffer);
CoreUtils.ClearRenderTarget(cmd, clearFlag, CoreUtils.clearColorAllBlack);
}
public static void SetRenderTarget(CommandBuffer cmd, HDCamera camera, RenderTargetIdentifier[] colorBuffers, RTHandle depthBuffer, ClearFlag clearFlag, Color clearColor)
{
cmd.SetRenderTarget(colorBuffers, depthBuffer);
SetViewport(cmd, camera, depthBuffer);
CoreUtils.ClearRenderTarget(cmd, clearFlag, clearColor);
}
// Scaling the viewport is done for auto-scaled render targets.
// In the context of HDRP, every auto-scaled RT is scaled against the maximum RTHandles reference size (which can only grow).
// When we render using a camera whose viewport is smaller than the RTHandles reference size (and thus smaller than the actual RT size), we need to set the viewport explicitly (otherwise, native code will set the viewport to the size of the RT).
// For auto-scaled RTs (for example, a half-resolution RT), we need to scale this viewport accordingly.
// For non-scaled RTs we do nothing; the native code will set the viewport to the size of the RT anyway.
public static void SetViewport(CommandBuffer cmd, HDCamera camera, RTHandle target)
{
if (target.useScaling)
{
Debug.Assert(camera != null, "Missing HDCamera when setting up Render Target with auto-scale and Viewport.");
Vector2Int scaledViewportSize = target.GetScaledSize(new Vector2Int(camera.actualWidth, camera.actualHeight));
cmd.SetViewport(new Rect(0.0f, 0.0f, scaledViewportSize.x, scaledViewportSize.y));
}
}
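// A worked example of the scaling above (hypothetical helper, not part of the diff): for an RTHandle
// allocated with a 0.5 scale factor and a camera whose actual size is 1280x720, GetScaledSize returns
// (640, 360), so the viewport covers only that portion of the (larger) allocated texture.
static void SetHalfResViewportExample(CommandBuffer cmd, RTHandle halfResTarget)
{
    Vector2Int scaled = halfResTarget.GetScaledSize(new Vector2Int(1280, 720)); // -> (640, 360) at 0.5 scale
    cmd.SetViewport(new Rect(0.0f, 0.0f, scaled.x, scaled.y));
}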
public static void BlitTexture(CommandBuffer cmd, RTHandle source, RTHandle destination, Vector4 scaleBias, float mipLevel, bool bilinear)
{
s_PropertyBlock.SetTexture(HDShaderIDs._BlitTexture, source);
s_PropertyBlock.SetVector(HDShaderIDs._BlitScaleBias, scaleBias);
s_PropertyBlock.SetFloat(HDShaderIDs._BlitMipLevel, mipLevel);
cmd.DrawProcedural(Matrix4x4.identity, GetBlitMaterial(), bilinear ? 1 : 0, MeshTopology.Triangles, 3, 1, s_PropertyBlock);
}
// In the context of HDRP, the internal render targets used during the render loop are the same for all cameras, no matter the camera size.
// This means we can end up rendering inside a partial viewport for one of these "camera space" renderings.
// In that case, we need to make sure that when we blit from one such camera texture to another, we only blit the portion corresponding to the camera viewport.
// Here, both source and destination are camera-scaled.
public static void BlitCameraTexture(CommandBuffer cmd, HDCamera camera, RTHandle source, RTHandle destination, float mipLevel = 0.0f, bool bilinear = false)
{
// Will set the correct camera viewport as well.
SetRenderTarget(cmd, camera, destination);
BlitTexture(cmd, source, destination, camera.scaleBias, mipLevel, bilinear);
}
// This case, both source and destination are camera-scaled but we want to override the scale/bias parameter.
public static void BlitCameraTexture(CommandBuffer cmd, HDCamera camera, RTHandle source, RTHandle destination, Vector4 scaleBias, float mipLevel = 0.0f, bool bilinear = false)
{
// Will set the correct camera viewport as well.
SetRenderTarget(cmd, camera, destination);
BlitTexture(cmd, source, destination, scaleBias, mipLevel, bilinear);
}
public static void BlitCameraTexture(CommandBuffer cmd, HDCamera camera, RTHandle source, RTHandle destination, Rect destViewport, float mipLevel = 0.0f, bool bilinear = false)
{
SetRenderTarget(cmd, camera, destination);
cmd.SetViewport(destViewport);
BlitTexture(cmd, source, destination, camera.scaleBias, mipLevel, bilinear);
}
// This particular case is for blitting a camera-scaled texture into a non-scaling texture, so we set up the full viewport (implicit in cmd.Blit) but have to scale the input UVs.
public static void BlitCameraTexture(CommandBuffer cmd, HDCamera camera, RTHandle source, RenderTargetIdentifier destination)
{
cmd.Blit(source, destination, new Vector2(camera.scaleBias.x, camera.scaleBias.y), Vector2.zero);
}
// This case is the opposite: blitting a non-scaled texture into a camera-scaled texture, so we set the camera viewport on the destination and use a full-texture scale/bias for the source UVs.
public static void BlitCameraTexture(CommandBuffer cmd, HDCamera camera, RenderTargetIdentifier source, RTHandle destination)
{
// Will set the correct camera viewport as well.
SetRenderTarget(cmd, camera, destination);
cmd.SetGlobalTexture(HDShaderIDs._BlitTexture, source);
cmd.SetGlobalVector(HDShaderIDs._BlitScaleBias, new Vector4(1.0f, 1.0f, 0.0f, 0.0f));
cmd.SetGlobalFloat(HDShaderIDs._BlitMipLevel, 0.0f);
// Wanted to make things clean and not use SetGlobalXXX APIs but can't use MaterialPropertyBlock with RenderTargetIdentifier so YEY
//s_PropertyBlock.SetTexture(HDShaderIDs._BlitTexture, source);
//s_PropertyBlock.SetVector(HDShaderIDs._BlitScaleBias, camera.scaleBias);
cmd.DrawProcedural(Matrix4x4.identity, GetBlitMaterial(), 0, MeshTopology.Triangles, 3, 1);
}
// These method should be used to render full screen triangles sampling auto-scaling RTs.
// This will set the proper viewport and UV scale.
public static void DrawFullScreen( CommandBuffer commandBuffer, HDCamera camera, Material material,
RTHandle colorBuffer,
MaterialPropertyBlock properties = null, int shaderPassId = 0)
{
HDUtils.SetRenderTarget(commandBuffer, camera, colorBuffer);
commandBuffer.SetGlobalVector(HDShaderIDs._ScreenToTargetScale, camera.scaleBias);
commandBuffer.DrawProcedural(Matrix4x4.identity, material, shaderPassId, MeshTopology.Triangles, 3, 1, properties);
}
public static void DrawFullScreen( CommandBuffer commandBuffer, HDCamera camera, Material material,
RTHandle colorBuffer, RTHandle depthStencilBuffer,
MaterialPropertyBlock properties = null, int shaderPassId = 0)
{
HDUtils.SetRenderTarget(commandBuffer, camera, colorBuffer, depthStencilBuffer);
commandBuffer.SetGlobalVector(HDShaderIDs._ScreenToTargetScale, camera.scaleBias);
commandBuffer.DrawProcedural(Matrix4x4.identity, material, shaderPassId, MeshTopology.Triangles, 3, 1, properties);
}
public static void DrawFullScreen( CommandBuffer commandBuffer, HDCamera camera, Material material,
RenderTargetIdentifier[] colorBuffers, RTHandle depthStencilBuffer,
MaterialPropertyBlock properties = null, int shaderPassId = 0)
{
HDUtils.SetRenderTarget(commandBuffer, camera, colorBuffers, depthStencilBuffer);
commandBuffer.SetGlobalVector(HDShaderIDs._ScreenToTargetScale, camera.scaleBias);
commandBuffer.DrawProcedural(Matrix4x4.identity, material, shaderPassId, MeshTopology.Triangles, 3, 1, properties);
}
public static void DrawFullScreen( CommandBuffer commandBuffer, HDCamera camera, Material material,
RenderTargetIdentifier colorBuffer,
MaterialPropertyBlock properties = null, int shaderPassId = 0)
{
CoreUtils.SetRenderTarget(commandBuffer, colorBuffer);
commandBuffer.SetGlobalVector(HDShaderIDs._ScreenToTargetScale, camera.scaleBias);
commandBuffer.DrawProcedural(Matrix4x4.identity, material, shaderPassId, MeshTopology.Triangles, 3, 1, properties);
}
// Returns mouse coordinates: (x,y) in pixels and (z,w) normalized inside the render target (not the viewport)
public static Vector4 GetMouseCoordinates(HDCamera camera)
{
Vector2 mousePixelCoord = MousePositionDebug.instance.GetMousePosition(camera.screenSize.y);
return new Vector4(mousePixelCoord.x, mousePixelCoord.y, camera.scaleBias.x * mousePixelCoord.x / camera.screenSize.x, camera.scaleBias.y * mousePixelCoord.y / camera.screenSize.y);
}
}
}
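// Example usage of the camera-scaled helpers above (a sketch; the class and method names are hypothetical):
// both source and destination are camera-scaled RTHandles, so the helper restricts the blit to the camera
// viewport instead of the full allocated texture.
static class HDUtilsUsageExample
{
    public static void CopyColor(CommandBuffer cmd, HDCamera hdCamera, RTHandle source, RTHandle destination)
    {
        HDUtils.BlitCameraTexture(cmd, hdCamera, source, destination, mipLevel: 0.0f, bilinear: true);
    }
}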

2
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Deferred.shader


// the deferred shader will require the use of multicompile.
#define UNITY_MATERIAL_LIT // Need to be define before including Material.hlsl
#include "../ShaderVariables.hlsl"
#ifdef DEBUG_DISPLAY
#endif
#include "../Lighting/Lighting.hlsl" // This include Material.hlsl
//-------------------------------------------------------------------------------------

2
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/DeferredDirectionalShadow.compute


float sampleStep = ((i + 1) * step + step * dither);
// UVs for the current sample
float2 sampleUV = startUV + rayUV * sampleStep;
float2 sampleUV = (startUV + rayUV * sampleStep) * _ScreenToTargetScale.xy;
// Ray depth for this sample
float raySampleDepth = startDepth + rayDepth * sampleStep;

4
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightLoop/GlobalLightLoopSettings.cs


public int pointCookieSize = 512;
public int cubeCookieTexArraySize = 16;
public int reflectionProbeCacheSize = 128;
public int planarReflectionProbeCacheSize = 128;
public int reflectionProbeCacheSize = 4;
public int planarReflectionProbeCacheSize = 1024;
public int reflectionCubemapSize = 128;
public int planarReflectionTextureSize = 128;
public bool reflectionCacheCompressed = false;

105
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightLoop/LightLoop.cs


using System.Collections.Generic;
using UnityEngine.Experimental.Rendering.HDPipeline.Internal;
using UnityEngine.Rendering;
using UnityEngine.Experimental.Rendering;
using UnityEngine.Rendering.PostProcessing;
namespace UnityEngine.Experimental.Rendering.HDPipeline
{

{
public static int s_MaxNrBigTileLightsPlusOne = 512; // may be overkill but the footprint is 2 bits per pixel using uint16.
public static float s_ViewportScaleZ = 1.0f;
public static int s_UseLeftHandCameraSpace = 1;
public static int s_TileSizeFptl = 16;
public static int s_TileSizeClustered = 32;

static Texture2DArray s_DefaultTexture2DArray;
static Cubemap s_DefaultTextureCube;
static HDAdditionalReflectionData defaultHDAdditionalReflectionData { get { return ComponentSingleton<HDAdditionalReflectionData>.instance; } }
static HDAdditionalLightData defaultHDAdditionalLightData { get { return ComponentSingleton<HDAdditionalLightData>.instance; } }
static HDAdditionalCameraData defaultHDAdditionalCameraData { get { return ComponentSingleton<HDAdditionalCameraData>.instance; } }
PlanarReflectionProbeCache m_ReflectionPlanarProbeCache;
ReflectionProbeCache m_ReflectionProbeCache;

}
}
int GetNumTileFtplX(Camera camera)
int GetNumTileFtplX(HDCamera hdCamera)
return (camera.pixelWidth + (LightDefinitions.s_TileSizeFptl - 1)) / LightDefinitions.s_TileSizeFptl;
return (hdCamera.actualWidth + (LightDefinitions.s_TileSizeFptl - 1)) / LightDefinitions.s_TileSizeFptl;
int GetNumTileFtplY(Camera camera)
int GetNumTileFtplY(HDCamera hdCamera)
return (camera.pixelHeight + (LightDefinitions.s_TileSizeFptl - 1)) / LightDefinitions.s_TileSizeFptl;
return (hdCamera.actualHeight + (LightDefinitions.s_TileSizeFptl - 1)) / LightDefinitions.s_TileSizeFptl;
int GetNumTileClusteredX(Camera camera)
int GetNumTileClusteredX(HDCamera hdCamera)
return (camera.pixelWidth + (LightDefinitions.s_TileSizeClustered - 1)) / LightDefinitions.s_TileSizeClustered;
return (hdCamera.actualWidth + (LightDefinitions.s_TileSizeClustered - 1)) / LightDefinitions.s_TileSizeClustered;
int GetNumTileClusteredY(Camera camera)
int GetNumTileClusteredY(HDCamera hdCamera)
return (camera.pixelHeight + (LightDefinitions.s_TileSizeClustered - 1)) / LightDefinitions.s_TileSizeClustered;
return (hdCamera.actualHeight + (LightDefinitions.s_TileSizeClustered - 1)) / LightDefinitions.s_TileSizeClustered;
}
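// A quick sanity check of the ceiling division above (hypothetical helper; example numbers assume a
// 1920x1080 camera and 16-pixel FPTL tiles):
static int ExampleTileCount(int pixels, int tileSize)
{
    return (pixels + (tileSize - 1)) / tileSize; // (1920 + 15) / 16 = 120, (1080 + 15) / 16 = 68 (67.5 rounded up)
}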
public bool GetFeatureVariantsEnabled()

directionalLightData.positionWS = light.light.transform.position;
directionalLightData.color = GetLightColor(light);
// Caution: this is a hack. If additionalData == HDUtils.s_DefaultHDAdditionalLightData, we are promoting a legacy light
// (which is the case for the preview, for example), so we must multiply by PI because legacy Unity implicitly divides
// direct intensity by PI. We therefore expect that every light whose additionalData == HDUtils.s_DefaultHDAdditionalLightData
// comes from the preview; lights in the scene MUST have additionalData.
directionalLightData.diffuseScale = additionalData.affectDiffuse ? diffuseDimmer : 0.0f;
directionalLightData.specularScale = additionalData.affectSpecular ? specularDimmer : 0.0f;

Vector3 camPosWS = camera.transform.position;
// We need to properly reset this here otherwise if we go from 1 light to no visible light we would keep the old reference active.
m_CurrentSunLight = null;
m_CurrentSunLightShadowIndex = -1;
// Note: Lights with zero intensity/color are culled on the C++ side, no need to test for that here
if (cullResults.visibleLights.Count != 0 || cullResults.visibleReflectionProbes.Count != 0)
{

{
var light = cullResults.visibleLights[lightIndex];
// Lights should always have additional data; however, preview lights currently don't, so we must handle that case by assigning HDUtils.s_DefaultHDAdditionalLightData
var additionalData = GetHDAdditionalLightData(light);
LightCategory lightCategory = LightCategory.Count;

// will be use...)
// The lightLoop is in charge, not the shadow pass.
// For now we will still apply the maximum of shadow here but we don't apply the sorting by priority + slot allocation yet
m_CurrentSunLight = null;
m_CurrentSunLightShadowIndex = -1;
// 2. Go through all lights, convert them to GPU format.
// Create simultaneously data for culling (LigthVolumeData and rendering)

m_enableBakeShadowMask = m_enableBakeShadowMask || IsBakedShadowMaskLight(light.light);
// Lights should always have additional data; however, preview lights currently don't, so we must handle that case by assigning HDUtils.s_DefaultHDAdditionalLightData
var additionalLightData = GetHDAdditionalLightData(light);
var additionalShadowData = light.light.GetComponent<AdditionalShadowData>(); // Can be null

return (uint)logVolume << 20 | (uint)lightVolumeType << 17 | listType << 16 | ((uint)probeIndex & 0xFFFF);
}
void VoxelLightListGeneration(CommandBuffer cmd, Camera camera, Matrix4x4 projscr, Matrix4x4 invProjscr, RenderTargetIdentifier cameraDepthBufferRT)
void VoxelLightListGeneration(CommandBuffer cmd, HDCamera hdCamera, Matrix4x4 projscr, Matrix4x4 invProjscr, RenderTargetIdentifier cameraDepthBufferRT)
Camera camera = hdCamera.camera;
// clear atomic offset index
cmd.SetComputeBufferParam(buildPerVoxelLightListShader, s_ClearVoxelAtomicKernel, HDShaderIDs.g_LayeredSingleIdxBuffer, s_GlobalLightListAtomic);
cmd.DispatchCompute(buildPerVoxelLightListShader, s_ClearVoxelAtomicKernel, 1, 1, 1);

cmd.SetComputeMatrixParam(buildPerVoxelLightListShader, HDShaderIDs.g_mInvScrProjection, invProjscr);
cmd.SetComputeIntParam(buildPerVoxelLightListShader, HDShaderIDs.g_iLog2NumClusters, k_Log2NumClusters);
cmd.SetComputeVectorParam(buildPerVoxelLightListShader, HDShaderIDs.g_screenSize, hdCamera.screenSize);
cmd.SetComputeIntParam(buildPerVoxelLightListShader, HDShaderIDs.g_iNumSamplesMSAA, (int)hdCamera.msaaSamples);
//Vector4 v2_near = invProjscr * new Vector4(0.0f, 0.0f, 0.0f, 1.0f);
//Vector4 v2_far = invProjscr * new Vector4(0.0f, 0.0f, 1.0f, 1.0f);

cmd.SetComputeBufferParam(buildPerVoxelLightListShader, s_GenListPerVoxelKernel, HDShaderIDs._LightVolumeData, s_LightVolumeDataBuffer);
cmd.SetComputeBufferParam(buildPerVoxelLightListShader, s_GenListPerVoxelKernel, HDShaderIDs.g_data, s_ConvexBoundsBuffer);
var numTilesX = GetNumTileClusteredX(camera);
var numTilesY = GetNumTileClusteredY(camera);
var numTilesX = GetNumTileClusteredX(hdCamera);
var numTilesY = GetNumTileClusteredY(hdCamera);
cmd.DispatchCompute(buildPerVoxelLightListShader, s_GenListPerVoxelKernel, numTilesX, numTilesY, 1);
}

cmd.DispatchCompute(buildPerBigTileLightListShader, s_GenListPerBigTileKernel, numBigTilesX, numBigTilesY, 1);
}
var numTilesX = GetNumTileFtplX(camera);
var numTilesY = GetNumTileFtplY(camera);
var numTilesX = GetNumTileFtplX(hdCamera);
var numTilesY = GetNumTileFtplY(hdCamera);
var numTiles = numTilesX * numTilesY;
bool enableFeatureVariants = GetFeatureVariantsEnabled();

}
// Cluster
VoxelLightListGeneration(cmd, camera, projscr, invProjscr, cameraDepthBufferRT);
VoxelLightListGeneration(cmd, hdCamera, projscr, invProjscr, cameraDepthBufferRT);
if (enableFeatureVariants)
{

cmd.SetRenderTarget(BuiltinRenderTextureType.None);
BuildGPULightListsCommon(hdCamera, cmd, cameraDepthBufferRT, stencilTextureRT, skyEnabled);
PushGlobalParams(hdCamera.camera, cmd);
PushGlobalParams(hdCamera, cmd);
}
public GPUFence BuildGPULightListsAsyncBegin(HDCamera hdCamera, ScriptableRenderContext renderContext, RenderTargetIdentifier cameraDepthBufferRT, RenderTargetIdentifier stencilTextureRT, GPUFence startFence, bool skyEnabled)

return completeFence;
}
public void BuildGPULightListAsyncEnd(Camera camera, CommandBuffer cmd, GPUFence doneFence)
public void BuildGPULightListAsyncEnd(HDCamera hdCamera, CommandBuffer cmd, GPUFence doneFence)
PushGlobalParams(camera, cmd);
PushGlobalParams(hdCamera, cmd);
}
private void UpdateDataBuffers()

var add = probe.probe.GetComponent<HDAdditionalReflectionData>();
if (add == null)
{
add = defaultHDAdditionalReflectionData;
add = HDUtils.s_DefaultHDAdditionalReflectionData;
add.blendDistancePositive = Vector3.one * probe.blendDistance;
add.blendDistanceNegative = add.blendDistancePositive;
add.influenceShape = ShapeType.Box;

var add = light.light.GetComponent<HDAdditionalLightData>();
if (add == null)
{
add = defaultHDAdditionalLightData;
add = HDUtils.s_DefaultHDAdditionalLightData;
void PushGlobalParams(Camera camera, CommandBuffer cmd)
void PushGlobalParams(HDCamera hdCamera, CommandBuffer cmd)
Camera camera = hdCamera.camera;
// Shadows
m_ShadowMgr.SyncData();
m_ShadowMgr.BindResources(cmd, null, 0);

cmd.SetGlobalInt(HDShaderIDs._EnvLightCount, m_lightList.envLights.Count);
cmd.SetGlobalBuffer(HDShaderIDs._ShadowDatas, s_shadowDatas);
cmd.SetGlobalInt(HDShaderIDs._NumTileFtplX, GetNumTileFtplX(camera));
cmd.SetGlobalInt(HDShaderIDs._NumTileFtplY, GetNumTileFtplY(camera));
cmd.SetGlobalInt(HDShaderIDs._NumTileFtplX, GetNumTileFtplX(hdCamera));
cmd.SetGlobalInt(HDShaderIDs._NumTileFtplY, GetNumTileFtplY(hdCamera));
cmd.SetGlobalInt(HDShaderIDs._NumTileClusteredX, GetNumTileClusteredX(camera));
cmd.SetGlobalInt(HDShaderIDs._NumTileClusteredY, GetNumTileClusteredY(camera));
cmd.SetGlobalInt(HDShaderIDs._NumTileClusteredX, GetNumTileClusteredX(hdCamera));
cmd.SetGlobalInt(HDShaderIDs._NumTileClusteredY, GetNumTileClusteredY(hdCamera));
if (m_FrameSettings.lightLoopSettings.enableBigTilePrepass)
cmd.SetGlobalBuffer(HDShaderIDs.g_vBigTileLightList, s_BigTileLightList);

cmd.SetGlobalFloat(HDShaderIDs.g_fNearPlane, camera.nearClipPlane);
cmd.SetGlobalFloat(HDShaderIDs.g_fFarPlane, camera.farClipPlane);
cmd.SetGlobalInt(HDShaderIDs.g_iLog2NumClusters, k_Log2NumClusters);
cmd.SetGlobalInt(HDShaderIDs.g_isLogBaseBufferEnabled, k_UseDepthBuffer ? 1 : 0);
cmd.SetGlobalBuffer(HDShaderIDs.g_vLayeredOffsetsBuffer, s_PerVoxelOffset);

public bool outputSplitLighting;
}
public void RenderDeferredDirectionalShadow(HDCamera hdCamera, RenderTargetIdentifier deferredShadowRT, RenderTargetIdentifier depthTexture, CommandBuffer cmd)
public void RenderDeferredDirectionalShadow(HDCamera hdCamera, RTHandle deferredShadowRT, RenderTargetIdentifier depthTexture, CommandBuffer cmd)
{
cmd.SetGlobalTexture(HDShaderIDs._DeferredShadowTexture, RuntimeUtilities.blackTexture);
}
using (new ProfilingSample(cmd, "Deferred Directional Shadow", CustomSamplerId.TPDeferredDirectionalShadow.GetSampler()))
{

cmd.SetComputeTextureParam(deferredDirectionalShadowComputeShader, kernel, HDShaderIDs._MainDepthTexture, depthTexture);
int deferredShadowTileSize = 16; // Must match DeferreDirectionalShadow.compute
int numTilesX = (hdCamera.camera.pixelWidth + (deferredShadowTileSize - 1)) / deferredShadowTileSize;
int numTilesY = (hdCamera.camera.pixelHeight + (deferredShadowTileSize - 1)) / deferredShadowTileSize;
int numTilesX = (hdCamera.actualWidth + (deferredShadowTileSize - 1)) / deferredShadowTileSize;
int numTilesY = (hdCamera.actualHeight + (deferredShadowTileSize - 1)) / deferredShadowTileSize;
cmd.SetGlobalTexture(HDShaderIDs._DeferredShadowTexture, deferredShadowRT);
}
}

}
cmd.SetComputeTextureParam(deferredComputeShader, kernel, HDShaderIDs._MainDepthTexture, depthTexture);
// TODO: Should remove this setting but can't remove it else get error: Property (_AmbientOcclusionTexture) at kernel index (32) is not set. Check why
cmd.SetComputeTextureParam(deferredComputeShader, kernel, HDShaderIDs._AmbientOcclusionTexture, HDShaderIDs._AmbientOcclusionTexture);
// TODO: Is it possible to set this up outside the loop? Can't figure out how; otherwise we get: Property (specularLightingUAV) at kernel index (21) is not set
cmd.SetComputeTextureParam(deferredComputeShader, kernel, HDShaderIDs.specularLightingUAV, colorBuffers[0]);

if (lightingDebug.tileClusterDebug != LightLoop.TileClusterDebug.None)
{
int w = hdCamera.camera.pixelWidth;
int h = hdCamera.camera.pixelHeight;
int w = hdCamera.actualWidth;
int h = hdCamera.actualHeight;
Vector2 mousePixelCoord = MousePositionDebug.instance.GetMousePosition(hdCamera.screenSize.y);
// Debug tiles
if (lightingDebug.tileClusterDebug == LightLoop.TileClusterDebug.MaterialFeatureVariants)

// featureVariants
m_DebugViewTilesMaterial.SetInt(HDShaderIDs._NumTiles, numTiles);
m_DebugViewTilesMaterial.SetInt(HDShaderIDs._ViewTilesFlags, (int)lightingDebug.tileClusterDebugByCategory);
m_DebugViewTilesMaterial.SetVector(HDShaderIDs._MousePixelCoord, mousePixelCoord);
m_DebugViewTilesMaterial.SetVector(HDShaderIDs._MousePixelCoord, HDUtils.GetMouseCoordinates(hdCamera));
m_DebugViewTilesMaterial.SetBuffer(HDShaderIDs.g_TileList, s_TileList);
m_DebugViewTilesMaterial.SetBuffer(HDShaderIDs.g_DispatchIndirectBuffer, s_DispatchIndirectBuffer);
m_DebugViewTilesMaterial.EnableKeyword("USE_FPTL_LIGHTLIST");

// lightCategories
m_DebugViewTilesMaterial.SetInt(HDShaderIDs._ViewTilesFlags, (int)lightingDebug.tileClusterDebugByCategory);
m_DebugViewTilesMaterial.SetVector(HDShaderIDs._MousePixelCoord, mousePixelCoord);
m_DebugViewTilesMaterial.SetVector(HDShaderIDs._MousePixelCoord, HDUtils.GetMouseCoordinates(hdCamera));
m_DebugViewTilesMaterial.SetBuffer(HDShaderIDs.g_vLightListGlobal, bUseClustered ? s_PerVoxelLightLists : s_LightList);
m_DebugViewTilesMaterial.EnableKeyword(bUseClustered ? "USE_CLUSTERED_LIGHTLIST" : "USE_FPTL_LIGHTLIST");
m_DebugViewTilesMaterial.DisableKeyword(!bUseClustered ? "USE_CLUSTERED_LIGHTLIST" : "USE_FPTL_LIGHTLIST");

for (uint i = 0; i < faceCount; ++i)
{
m_ShadowMgr.DisplayShadow(cmd, index, i, x, y, overlaySize, overlaySize, lightingDebug.shadowMinValue, lightingDebug.shadowMaxValue);
HDUtils.NextOverlayCoord(ref x, ref y, overlaySize, overlaySize, hdCamera.camera.pixelWidth);
HDUtils.NextOverlayCoord(ref x, ref y, overlaySize, overlaySize, hdCamera.actualWidth);
}
}
}

HDUtils.NextOverlayCoord(ref x, ref y, overlaySize, overlaySize, hdCamera.camera.pixelWidth);
HDUtils.NextOverlayCoord(ref x, ref y, overlaySize, overlaySize, hdCamera.actualWidth);
}
}
}

1
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightLoop/LightLoop.cs.hlsl


//
// UnityEngine.Experimental.Rendering.HDPipeline.LightDefinitions: static fields
//
#define MAX_NR_LIGHTS_PER_CAMERA (1024)
#define MAX_NR_BIG_TILE_LIGHTS_PLUS_ONE (512)
#define VIEWPORT_SCALE_Z (1)
#define USE_LEFT_HAND_CAMERA_SPACE (1)

4
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightLoop/LightLoopDef.hlsl


if (cacheType == ENVCACHETYPE_TEXTURE2D)
{
//_Env2DCaptureVP is in capture space
float4 ndc = ComputeClipSpaceCoordinates(texCoord, _Env2DCaptureVP[index]);
ndc *= rcp(ndc.w);
ndc.xy = ndc.xy * 0.5 + 0.5;
float3 ndc = ComputeNormalizedDeviceCoordinatesWithZ(texCoord, _Env2DCaptureVP[index]);
color.rgb = SAMPLE_TEXTURE2D_ARRAY_LOD(_Env2DTextures, s_trilinear_clamp_sampler, ndc.xy, index, 0).rgb;
color.a = any(ndc.xyz < 0) || any(ndc.xyz > 1) ? 0.0 : 1.0;

6
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightLoop/LightLoopSettings.cs


[Serializable]
public class LightLoopSettings
{
public static string kEnableFptlForForwardOpaque = "Enable Fptl for Forward Opaque";
public static string kEnableTileCluster = "Enable Tile/Cluster";
public static string kEnableBigTile = "Enable Big Tile";
public static string kEnableComputeLighting = "Enable Compute Lighting";

// When MSAA is enabled we disable FPTL, as it becomes expensive compared to clustered lighting.
// In HD, MSAA is only supported for forward-only rendering; there is no MSAA in deferred mode (for code complexity reasons).
aggregate.enableFptlForForwardOpaque = aggregate.enableFptlForForwardOpaque && !aggregateFrameSettings.enableMSAA;
// disable FPTL for stereo for now
aggregate.enableFptlForForwardOpaque = aggregate.enableFptlForForwardOpaque && !aggregateFrameSettings.enableStereo;
// If Deferred, enable Fptl. If we are forward renderer only and not using Fptl for forward opaque, disable Fptl
aggregate.isFptlEnabled = !aggregateFrameSettings.enableForwardRenderingOnly || aggregate.enableFptlForForwardOpaque;
}
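// A condensed sketch of the aggregation above (parameter names are assumptions mirroring the settings fields):
// FPTL for forward opaque is dropped under MSAA or stereo, and FPTL overall stays enabled for deferred
// rendering or when forward opaque still uses it.
static bool IsFptlEnabledExample(bool deferredRendering, bool fptlForForwardOpaque, bool msaa, bool stereo)
{
    bool forwardOpaqueFptl = fptlForForwardOpaque && !msaa && !stereo;
    return deferredRendering || forwardOpaqueFptl;
}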

DebugMenuManager.instance.AddDebugItem<bool>(menuName, kEnableFptlForForwardOpaque, () => lightLoopSettings.enableFptlForForwardOpaque, (value) => lightLoopSettings.enableFptlForForwardOpaque = (bool)value);
DebugMenuManager.instance.AddDebugItem<bool>(menuName, kEnableTileCluster, () => lightLoopSettings.enableTileAndCluster, (value) => lightLoopSettings.enableTileAndCluster = (bool)value);
DebugMenuManager.instance.AddDebugItem<bool>(menuName, kEnableBigTile, () => lightLoopSettings.enableBigTilePrepass, (value) => lightLoopSettings.enableBigTilePrepass = (bool)value);
DebugMenuManager.instance.AddDebugItem<bool>(menuName, kEnableComputeLighting, () => lightLoopSettings.enableComputeLightEvaluation, (value) => lightLoopSettings.enableComputeLightEvaluation = (bool)value);

28
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightLoop/lightlistbuild-clustered.compute


float g_fNearPlane;
float g_fFarPlane;
int g_iLog2NumClusters; // numClusters = (1<<g_iLog2NumClusters)
float4 g_screenSize;
int g_iNumSamplesMSAA;
CBUFFER_END
#include "ClusteredUtils.hlsl"

uint2 tileIDX = u3GroupID.xy;
uint t=threadID;
uint iWidth;
uint iHeight;
#ifdef MSAA_ENABLED
uint iNumSamplesMSAA;
g_depth_tex.GetDimensions(iWidth, iHeight, iNumSamplesMSAA);
#else
g_depth_tex.GetDimensions(iWidth, iHeight);
#endif
uint nrTilesX = (iWidth+(TILE_SIZE_CLUSTERED-1))>>log2TileSize;
uint nrTilesY = (iHeight+(TILE_SIZE_CLUSTERED-1))>>log2TileSize;
uint nrTilesX = ((uint)g_screenSize.x +(TILE_SIZE_CLUSTERED-1))>>log2TileSize;
uint nrTilesY = ((uint)g_screenSize.y +(TILE_SIZE_CLUSTERED-1))>>log2TileSize;
uint2 viTilUR = min( viTilLL+uint2(TILE_SIZE_CLUSTERED,TILE_SIZE_CLUSTERED), uint2(iWidth, iHeight) ); // not width and height minus 1 since viTilUR represents the end of the tile corner.
uint2 viTilUR = min( viTilLL+uint2(TILE_SIZE_CLUSTERED,TILE_SIZE_CLUSTERED), uint2(g_screenSize.x, g_screenSize.y) ); // not width and height minus 1 since viTilUR represents the end of the tile corner.
if(t==0)
{

for(int idx=t; idx<(TILE_SIZE_CLUSTERED*TILE_SIZE_CLUSTERED); idx+=NR_THREADS)
{
uint2 uPixCrd = min( uint2(viTilLL.x+(idx&(TILE_SIZE_CLUSTERED-1)), viTilLL.y+(idx>>log2TileSize)), uint2(iWidth-1, iHeight-1) );
uint2 uPixCrd = min( uint2(viTilLL.x+(idx&(TILE_SIZE_CLUSTERED-1)), viTilLL.y+(idx>>log2TileSize)), uint2(g_screenSize.x-1, g_screenSize.y-1) );
for(uint i=0; i<iNumSamplesMSAA; i++)
for(int i=0; i<g_iNumSamplesMSAA; i++)
{
const float fDpth = FetchDepthMSAA(g_depth_tex, uPixCrd, i);
#else

if(dpt_ma<=0.0) dpt_ma = VIEWPORT_SCALE_Z; // assume sky pixel
#endif
float2 vTileLL = float2(viTilLL.x/(float) iWidth, viTilLL.y/(float) iHeight);
float2 vTileUR = float2(viTilUR.x/(float) iWidth, viTilUR.y/(float) iHeight);
float2 vTileLL = float2(viTilLL.x/g_screenSize.x, viTilLL.y/g_screenSize.y);
float2 vTileUR = float2(viTilUR.x/g_screenSize.x, viTilUR.y/g_screenSize.y);
// build coarse list using AABB
#ifdef USE_TWO_PASS_TILED_LIGHTING

int iNrCoarseLights = min(lightOffs,MAX_NR_COARSE_ENTRIES);
#ifdef PERFORM_SPHERICAL_INTERSECTION_TESTS
iNrCoarseLights = SphericalIntersectionTests( t, iNrCoarseLights, float2(min(viTilLL.xy+uint2(TILE_SIZE_CLUSTERED/2,TILE_SIZE_CLUSTERED/2), uint2(iWidth-1, iHeight-1))) );
iNrCoarseLights = SphericalIntersectionTests( t, iNrCoarseLights, float2(min(viTilLL.xy+uint2(TILE_SIZE_CLUSTERED/2,TILE_SIZE_CLUSTERED/2), uint2(g_screenSize.x-1, g_screenSize.y-1))) );
#endif
#ifdef ENABLE_DEPTH_TEXTURE_BACKPLANE

19
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Reflection/PlanarReflectionProbeCache.cs


void Initialize()
{
if(m_TempRenderTexture == null)
if (m_TempRenderTexture == null)
m_TempRenderTexture.hideFlags = HideFlags.HideAndDontSave;
m_TempRenderTexture.name = CoreUtils.GetRenderTargetAutoName(m_ProbeSize, m_ProbeSize, RenderTextureFormat.ARGBHalf, "PlanarReflection", mips : true);
m_ConvolutionTargetTexture.hideFlags = HideFlags.HideAndDontSave;
m_ConvolutionTargetTexture.name = CoreUtils.GetRenderTargetAutoName(m_ProbeSize, m_ProbeSize, RenderTextureFormat.ARGBHalf, "PlanarReflectionConvolution", mips: true);
m_ConvolutionTargetTexture.Create();
m_ConvertTextureMaterial = CoreUtils.CreateEngineMaterial("Hidden/SRP/BlitCubeTextureFace");

public void Release()
{
if(m_TextureCache != null)
if (m_TextureCache != null)
if(m_TempRenderTexture != null)
if (m_TempRenderTexture != null)
if (m_ConvolutionTargetTexture != null)
{
m_ConvolutionTargetTexture.Release();
m_ConvolutionTargetTexture = null;
}
m_ProbeBakingState = null;
}

RenderTexture convolutionSourceTexture = null;
if (texture2D != null)
{
// If the size is different from the cache probe size, or if the input texture format is compressed, we need to convert it:
// 1) to a format for which we can generate mip maps
// 2) to the proper reflection probe cache size
var sizeMismatch = texture2D.width != m_ProbeSize || texture2D.height != m_ProbeSize;
var formatMismatch = texture2D.format != TextureFormat.RGBAHalf; // Temporary RT for convolution is always FP16
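// A standalone sketch of the conversion decision described above (hypothetical helper, not part of this
// method; the temporary convolution RT is FP16, hence the RGBAHalf check):
static bool NeedsConversionExample(Texture2D texture, int probeSize)
{
    bool sizeMismatch = texture.width != probeSize || texture.height != probeSize;
    bool formatMismatch = texture.format != TextureFormat.RGBAHalf;
    return sizeMismatch || formatMismatch;
}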

4
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Reflection/ProbeWrapper.cs


class VisibleReflectionProbeWrapper : ProbeWrapper
{
static HDAdditionalReflectionData defaultHDAdditionalReflectionData { get { return ComponentSingleton<HDAdditionalReflectionData>.instance; } }
VisibleReflectionProbe probe;
HDAdditionalReflectionData additional;

var add = probe.probe.GetComponent<HDAdditionalReflectionData>();
if (add == null)
{
add = defaultHDAdditionalReflectionData;
add = HDUtils.s_DefaultHDAdditionalReflectionData;
add.blendDistancePositive = Vector3.one * probe.blendDistance;
add.blendDistanceNegative = add.blendDistancePositive;
add.influenceShape = ShapeType.Box;

20
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Reflection/ReflectionProbeCache.cs


using System;
using UnityEngine.Rendering;
namespace UnityEngine.Experimental.Rendering.HDPipeline
{

{
// Temporary RT used for convolution and compression
m_TempRenderTexture = new RenderTexture(m_ProbeSize, m_ProbeSize, 1, RenderTextureFormat.ARGBHalf);
m_TempRenderTexture.hideFlags = HideFlags.HideAndDontSave;
m_TempRenderTexture.name = CoreUtils.GetRenderTargetAutoName(m_ProbeSize, m_ProbeSize, RenderTextureFormat.ARGBHalf, "PlanarReflection", mips : true);
m_ConvolutionTargetTexture.hideFlags = HideFlags.HideAndDontSave;
m_ConvolutionTargetTexture.name = CoreUtils.GetRenderTargetAutoName(m_ProbeSize, m_ProbeSize, RenderTextureFormat.ARGBHalf, "PlanarReflection", mips : true);
m_ConvolutionTargetTexture.Create();
m_ConvertTextureMaterial = CoreUtils.CreateEngineMaterial("Hidden/SRP/BlitCubeTextureFace");

public void Release()
{
if(m_TextureCache != null)
if (m_TextureCache != null)
if(m_TempRenderTexture != null)
if (m_TempRenderTexture != null)
}
if (m_ConvolutionTargetTexture != null)
{
m_ConvolutionTargetTexture.Release();
m_ConvolutionTargetTexture = null;
}
m_ProbeBakingState = null;
}

RenderTexture convolutionSourceTexture = null;
if (cubeTexture != null)
{
// If the size is different from the cache probe size, or if the input texture format is compressed, we need to convert it:
// 1) to a format for which we can generate mip maps
// 2) to the proper reflection probe cache size
bool sizeMismatch = cubeTexture.width != m_ProbeSize || cubeTexture.height != m_ProbeSize;
bool formatMismatch = cubeTexture.format != TextureFormat.RGBAHalf; // Temporary RT for convolution is always FP16

16
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Reflection/ReflectionSystemInternal.cs


using System;
using System.Collections.Generic;
using UnityEngine.Rendering;
namespace UnityEngine.Experimental.Rendering.HDPipeline.Internal

public RenderTexture NewRenderTarget(PlanarReflectionProbe probe)
{
var desc = GetRenderHDCamera(probe).renderTextureDesc;
desc.width = m_Parameters.planarReflectionProbeSize;
desc.height = m_Parameters.planarReflectionProbeSize;
desc.colorFormat = RenderTextureFormat.ARGBHalf;
desc.useMipMap = true;
var rt = new RenderTexture(desc);
rt.name = "PlanarProbeRT " + probe.name;
var rt = new RenderTexture(m_Parameters.planarReflectionProbeSize, m_Parameters.planarReflectionProbeSize, 0, RenderTextureFormat.ARGBHalf);
// No hide and don't save for this one
rt.useMipMap = true;
rt.autoGenerateMips = false;
rt.name = CoreUtils.GetRenderTargetAutoName(m_Parameters.planarReflectionProbeSize, m_Parameters.planarReflectionProbeSize, RenderTextureFormat.ARGBHalf, "PlanarProbeRT");
rt.Create();
return rt;
}

13
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/Resources/VolumetricLighting.compute


float3 totalRadiance = 0;
float opticalDepth = 0;
uint sliceCountHack = max(VBUFFER_SLICE_COUNT, (uint)_VBufferDepthEncodingParams.w); // Prevent unrolling...
#ifdef LIGHTLOOP_TILE_PASS
// Our voxel is not necessarily completely inside a single light cluster.
// Note that Z-binning can solve this problem, as we can iterate over all Z-bins

clusterDepths[0] = GetLightClusterMinLinearDepth(posInput.tileCoord, clusterIndices[0]);
#endif // LIGHTLOOP_TILE_PASS
#if defined(SHADER_API_METAL)
[fastopt]
for (uint slice = 0; slice < VBUFFER_SLICE_COUNT; slice++)
#else
uint sliceCountHack = max(VBUFFER_SLICE_COUNT, (uint)_VBufferDepthEncodingParams.w); // Prevent unrolling...
#endif
#if defined(SHADER_API_METAL)
float z1 = DecodeLogarithmicDepth(e1, _VBufferDepthDecodingParams);
#else
#endif
float t1 = ray.strataDirInvViewZ * z1; // Convert view space Z to distance along the stratified ray
float dt = t1 - t0;

3
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/VolumetricLighting.cs


for (int i = 0; i < n; i++)
{
this.lightingRTEX[i] = new RenderTexture(w, h, 0, RenderTextureFormat.ARGBHalf, RenderTextureReadWrite.Linear);
this.lightingRTEX[i].hideFlags = HideFlags.HideAndDontSave;
this.lightingRTEX[i].name = CoreUtils.GetRenderTargetAutoName(w, h, RenderTextureFormat.ARGBHalf, String.Format("Volumetric{0}", i));
this.lightingRTID[i] = new RenderTargetIdentifier(this.lightingRTEX[i]);
}
}

12
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/Builtin/BuiltinData.cs


return RenderTextureFormat.ARGB32;
}
public static RenderTextureReadWrite GetShadowMaskBufferReadWrite()
public static bool GetShadowMaskSRGBFlag()
return RenderTextureReadWrite.Linear;
return false;
}
public static RenderTextureFormat GetVelocityBufferFormat()

public static RenderTextureReadWrite GetVelocityBufferReadWrite()
public static bool GetVelocityBufferSRGBFlag()
return RenderTextureReadWrite.Linear;
return false;
}
public static RenderTextureFormat GetDistortionBufferFormat()

}
public static RenderTextureReadWrite GetDistortionBufferReadWrite()
public static bool GetDistortionBufferSRGBFlag()
return RenderTextureReadWrite.Linear;
return false;
}
}
}
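// A minimal sketch (hypothetical helper, not part of the diff) of how the new bool sRGB flags above map
// onto the old RenderTextureReadWrite values when creating render textures:
static class SRGBFlagExample
{
    public static RenderTextureReadWrite ToReadWrite(bool srgb)
    {
        return srgb ? RenderTextureReadWrite.sRGB : RenderTextureReadWrite.Linear;
    }
}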

16
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/Builtin/BuiltinData.cs.hlsl


//
#define DEBUGVIEW_BUILTIN_BUILTINDATA_OPACITY (100)
#define DEBUGVIEW_BUILTIN_BUILTINDATA_BAKE_DIFFUSE_LIGHTING (101)
#define DEBUGVIEW_BUILTIN_BUILTINDATA_SHADOW_MASK0 (102)
#define DEBUGVIEW_BUILTIN_BUILTINDATA_SHADOW_MASK1 (103)
#define DEBUGVIEW_BUILTIN_BUILTINDATA_SHADOW_MASK2 (104)
#define DEBUGVIEW_BUILTIN_BUILTINDATA_SHADOW_MASK3 (105)
#define DEBUGVIEW_BUILTIN_BUILTINDATA_SHADOW_MASK_0 (102)
#define DEBUGVIEW_BUILTIN_BUILTINDATA_SHADOW_MASK_1 (103)
#define DEBUGVIEW_BUILTIN_BUILTINDATA_SHADOW_MASK_2 (104)
#define DEBUGVIEW_BUILTIN_BUILTINDATA_SHADOW_MASK_3 (105)
#define DEBUGVIEW_BUILTIN_BUILTINDATA_EMISSIVE_COLOR (106)
#define DEBUGVIEW_BUILTIN_BUILTINDATA_EMISSIVE_INTENSITY (107)
#define DEBUGVIEW_BUILTIN_BUILTINDATA_VELOCITY (108)

result = builtindata.bakeDiffuseLighting;
needLinearToSRGB = true;
break;
case DEBUGVIEW_BUILTIN_BUILTINDATA_SHADOW_MASK0:
case DEBUGVIEW_BUILTIN_BUILTINDATA_SHADOW_MASK_0:
case DEBUGVIEW_BUILTIN_BUILTINDATA_SHADOW_MASK1:
case DEBUGVIEW_BUILTIN_BUILTINDATA_SHADOW_MASK_1:
case DEBUGVIEW_BUILTIN_BUILTINDATA_SHADOW_MASK2:
case DEBUGVIEW_BUILTIN_BUILTINDATA_SHADOW_MASK_2:
case DEBUGVIEW_BUILTIN_BUILTINDATA_SHADOW_MASK3:
case DEBUGVIEW_BUILTIN_BUILTINDATA_SHADOW_MASK_3:
result = builtindata.shadowMask3.xxx;
break;
case DEBUGVIEW_BUILTIN_BUILTINDATA_EMISSIVE_COLOR:

6
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/Decal/Decal.cs


static public int GetMaterialDBufferCount() { return (int)DBufferMaterial.Count; }
static RenderTextureFormat[] m_RTFormat = { RenderTextureFormat.ARGB32, RenderTextureFormat.ARGB32, RenderTextureFormat.ARGB32 };
static RenderTextureReadWrite[] m_RTReadWrite = { RenderTextureReadWrite.sRGB, RenderTextureReadWrite.Linear, RenderTextureReadWrite.Linear };
static bool[] m_sRGBFlags= { true, false, false };
static public void GetMaterialDBufferDescription(out RenderTextureFormat[] RTFormat, out RenderTextureReadWrite[] RTReadWrite)
static public void GetMaterialDBufferDescription(out RenderTextureFormat[] RTFormat, out bool[] sRGBFlags)
RTReadWrite = m_RTReadWrite;
sRGBFlags = m_sRGBFlags;
}
}
}

6
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/Decal/Decal.cs.hlsl


// UnityEngine.Experimental.Rendering.HDPipeline.Decal+DecalSurfaceData: static fields
//
#define DEBUGVIEW_DECAL_DECALSURFACEDATA_BASE_COLOR (10000)
#define DEBUGVIEW_DECAL_DECALSURFACEDATA_NORMAL_WS (10001)
#define DEBUGVIEW_DECAL_DECALSURFACEDATA_NORMAL (10001)
#define DEBUGVIEW_DECAL_DECALSURFACEDATA_MASK (10002)
#define DEBUGVIEW_DECAL_DECALSURFACEDATA_HTILE_MASK (10003)

float4 baseColor;
float4 normalWS;
float4 mask;
int HTileMask;
uint HTileMask;
};
//

result = decalsurfacedata.baseColor.xyz;
needLinearToSRGB = true;
break;
case DEBUGVIEW_DECAL_DECALSURFACEDATA_NORMAL_WS:
case DEBUGVIEW_DECAL_DECALSURFACEDATA_NORMAL:
result = decalsurfacedata.normalWS.xyz;
break;
case DEBUGVIEW_DECAL_DECALSURFACEDATA_MASK:

1
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/Decal/Decal.hlsl


UNITY_INSTANCING_BUFFER_END(matrix)
RW_TEXTURE2D(float, _DecalHTile); // DXGI_FORMAT_R8_UINT is not supported by Unity
TEXTURE2D(_DecalHTileTexture);
// Must be in sync with RT declared in HDRenderPipeline.cs ::Rebuild
void EncodeIntoDBuffer( DecalSurfaceData surfaceData,

4
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/Decal/DecalUtilities.hlsl


#include "Decal.hlsl"
DECLARE_DBUFFER_TEXTURE(_DBufferTexture);
void AddDecalContribution(uint2 unPositionSS, inout SurfaceData surfaceData)
{

FETCH_DBUFFER(DBuffer, _DBufferTexture, unPositionSS);
DecalSurfaceData decalSurfaceData;
DECODE_FROM_DBUFFER(DBuffer, decalSurfaceData);
uint mask = UnpackByte(_DecalHTile[unPositionSS / 8]);
uint mask = UnpackByte(LOAD_TEXTURE2D(_DecalHTileTexture, unPositionSS / 8).x);
// using alpha compositing https://developer.nvidia.com/gpugems/GPUGems3/gpugems3_ch23.html
if(mask & DBUFFERHTILEBIT_DIFFUSE)
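Note on the hunk above: the HTile texture stores one value per 8x8 pixel tile (hence unPositionSS / 8), and each bit of the unpacked byte flags whether a DBuffer channel was written, so untouched channels can be skipped. As a hedged sketch of the alpha-compositing step the GPU Gems link refers to (straight-alpha "over"; the exact HDRP blend and field layout may differ, and decalSurfaceData.baseColor.w is assumed to hold the decal opacity):
if (mask & DBUFFERHTILEBIT_DIFFUSE)
{
    float decalOpacity = decalSurfaceData.baseColor.w;                    // assumed opacity channel
    surfaceData.baseColor = decalSurfaceData.baseColor.xyz * decalOpacity
                          + surfaceData.baseColor * (1.0 - decalOpacity); // "over" compositing
}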

1
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/GGXConvolution/RuntimeFilterIBL.cs


m_GgxIblSampleData.autoGenerateMips = false;
m_GgxIblSampleData.enableRandomWrite = true;
m_GgxIblSampleData.filterMode = FilterMode.Point;
m_GgxIblSampleData.name = CoreUtils.GetRenderTargetAutoName(m_GgxIblMaxSampleCount, k_GgxIblMipCountMinusOne, RenderTextureFormat.ARGBHalf, "GGXIblSampleData");
m_GgxIblSampleData.Create();
m_ComputeGgxIblSampleDataCS.SetTexture(m_ComputeGgxIblSampleDataKernel, "output", m_GgxIblSampleData);

92
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/LayeredLit/LayeredLit.shader


_LayerInfluenceMaskMap("LayerInfluenceMaskMap", 2D) = "white" {}
[ToggleUI] _UseHeightBasedBlend("UseHeightBasedBlend", Float) = 0.0
_HeightOffset0("Height Offset0", Float) = 0
_HeightOffset1("Height Offset1", Float) = 0
_HeightOffset2("Height Offset2", Float) = 0
_HeightOffset3("Height Offset3", Float) = 0
_HeightTransition("Height Transition", Range(0, 1.0)) = 0.0
[ToggleUI] _UseDensityMode("Use Density mode", Float) = 0.0

[ToggleUI] _EnableBlendModePreserveSpecularLighting("Enable Blend Mode Preserve Specular Lighting", Float) = 1.0
[ToggleUI] _DoubleSidedEnable("Double sided enable", Float) = 0.0
[Enum(Flip, 0, Mirror, 1)] _DoubleSidedNormalMode("Double sided normal mode", Float) = 1
[Enum(Flip, 0, Mirror, 1, None, 2)] _DoubleSidedNormalMode("Double sided normal mode", Float) = 1
[Enum(Subsurface Scattering and Transmission, 0, Standard, 1)] _MaterialID("MaterialId", Int) = 1 // MaterialId.Standard
[Enum(Both, 0, SSS only, 1, Transmission only, 2)] _SSSAndTransmissionType("SSSandTransmissionType", Int) = 0 // SSSandTransmissionType.Both
[Enum(Subsurface Scattering, 0, Standard, 1, Translucent, 5)] _MaterialID("MaterialId", Int) = 1 // MaterialId.Standard
[ToggleUI] _TransmissionEnable("_TransmissionEnable", Float) = 1.0
[Enum(None, 0, Vertex displacement, 1, Pixel displacement, 2)] _DisplacementMode("DisplacementMode", Int) = 0
[ToggleUI] _DisplacementLockObjectScale("displacement lock object scale", Float) = 1.0

HLSLPROGRAM
#pragma multi_compile _ DEBUG_DISPLAY
#pragma multi_compile _ LIGHTMAP_ON
#pragma multi_compile _ DIRLIGHTMAP_COMBINED
#pragma multi_compile _ DYNAMICLIGHTMAP_ON

#include "../../ShaderVariables.hlsl"
#ifdef DEBUG_DISPLAY
#include "../../Debug/DebugDisplay.hlsl"
#endif
#include "../../Material/Material.hlsl"
#include "../Lit/ShaderPass/LitSharePass.hlsl"
#include "LayeredLitData.hlsl"

HLSLPROGRAM
#pragma multi_compile _ DEBUG_DISPLAY
#pragma multi_compile _ LIGHTMAP_ON
#pragma multi_compile _ DIRLIGHTMAP_COMBINED
#pragma multi_compile _ DYNAMICLIGHTMAP_ON

#define SHADERPASS_GBUFFER_BYPASS_ALPHA_TEST
#include "../../ShaderVariables.hlsl"
#include "../../Material/Material.hlsl"
#include "../Lit/ShaderPass/LitSharePass.hlsl"
#include "LayeredLitData.hlsl"
#include "../../ShaderPass/ShaderPassGBuffer.hlsl"
ENDHLSL
}
Pass
{
Name "GBufferDebugDisplay" // Name is not used
Tags{ "LightMode" = "GBufferDebugDisplay" } // This will be only for opaque object based on the RenderQueue index
Cull [_CullMode]
Stencil
{
WriteMask [_StencilWriteMask]
Ref [_StencilRef]
Comp Always
Pass Replace
}
HLSLPROGRAM
#pragma multi_compile _ LIGHTMAP_ON
#pragma multi_compile _ DIRLIGHTMAP_COMBINED
#pragma multi_compile _ DYNAMICLIGHTMAP_ON
#pragma multi_compile _ SHADOWS_SHADOWMASK
#define DEBUG_DISPLAY
#define SHADERPASS SHADERPASS_GBUFFER
#include "../../ShaderVariables.hlsl"
#ifdef DEBUG_DISPLAY
#endif
#include "../../Material/Material.hlsl"
#include "../Lit/ShaderPass/LitSharePass.hlsl"
#include "LayeredLitData.hlsl"

HLSLPROGRAM
#pragma multi_compile _ DEBUG_DISPLAY
#pragma multi_compile _ LIGHTMAP_ON
#pragma multi_compile _ DIRLIGHTMAP_COMBINED
#pragma multi_compile _ DYNAMICLIGHTMAP_ON

#define SHADERPASS SHADERPASS_FORWARD
#include "../../ShaderVariables.hlsl"
#include "../../Lighting/Lighting.hlsl"
#include "../Lit/ShaderPass/LitSharePass.hlsl"
#include "LayeredLitData.hlsl"
#include "../../ShaderPass/ShaderPassForward.hlsl"
ENDHLSL
}
Pass
{
Name "ForwardDebugDisplay" // Name is not used
Tags{ "LightMode" = "ForwardDebugDisplay" } // This will be only for transparent object based on the RenderQueue index
Stencil
{
WriteMask [_StencilWriteMask]
Ref [_StencilRef]
Comp Always
Pass Replace
}
Blend[_SrcBlend][_DstBlend]
ZWrite[_ZWrite]
Cull[_CullMode]
HLSLPROGRAM
#pragma multi_compile _ LIGHTMAP_ON
#pragma multi_compile _ DIRLIGHTMAP_COMBINED
#pragma multi_compile _ DYNAMICLIGHTMAP_ON
#pragma multi_compile _ SHADOWS_SHADOWMASK
// #include "../../Lighting/Forward.hlsl"
#pragma multi_compile LIGHTLOOP_SINGLE_PASS LIGHTLOOP_TILE_PASS
#pragma multi_compile USE_FPTL_LIGHTLIST USE_CLUSTERED_LIGHTLIST
#define DEBUG_DISPLAY
#define SHADERPASS SHADERPASS_FORWARD
#include "../../ShaderVariables.hlsl"
#ifdef DEBUG_DISPLAY
#endif
#include "../../Lighting/Lighting.hlsl"
#include "../Lit/ShaderPass/LitSharePass.hlsl"
#include "LayeredLitData.hlsl"

3
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/LayeredLit/LayeredLitData.hlsl


// Returns the layering blend mask after applying height-based blending.
float4 ApplyHeightBlend(float4 heights, float4 blendMask)
{
// Add offsets for all the layers.
heights = heights + float4(_HeightOffset0, _HeightOffset1, _HeightOffset2, _HeightOffset3);
// We need to mask out inactive layers so that their height does not impact the result.
float4 maskedHeights = heights * blendMask.argb;
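A hedged sketch of how such masked heights are commonly turned into final layer weights (illustrative only; the exact HDRP ApplyHeightBlend math may differ): the tallest masked layer wins and the others fade in over _HeightTransition.
float maxHeight  = max(max(maskedHeights.x, maskedHeights.y), max(maskedHeights.z, maskedHeights.w));
float transition = max(_HeightTransition, 1e-5);                  // avoid division by zero
float4 weights   = saturate((maskedHeights - maxHeight) / transition + 1.0);
weights *= blendMask.argb;                                        // keep inactive layers at zero
return weights / max(dot(weights, float4(1, 1, 1, 1)), 1e-5);     // renormalize so the weights sum to 1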

2
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/LayeredLit/LayeredLitDataDisplacement.hlsl


// Since the result is used as a 'depthOffsetVS', it needs to be positive, so we flip the sign. { height = -height + 1 }.
float verticalDisplacement = maxHeight - height * maxHeight;
return verticalDisplacement / max(NdotV, 0.001);
return verticalDisplacement / ClampNdotV(NdotV);
#else
return 0.0;
#endif
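For reference, ClampNdotV (from the core shader library) simply clamps N·V to a small positive epsilon so the division above cannot blow up at grazing angles. A hedged sketch of its shape (the exact epsilon lives in Common.hlsl):
float ClampNdotV(float NdotV)
{
    return max(NdotV, 0.0001); // keep N.V strictly positive; epsilon value assumed
}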

98
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/LayeredLit/LayeredLitTessellation.shader


_LayerInfluenceMaskMap("LayerInfluenceMaskMap", 2D) = "white" {}
[ToggleUI] _UseHeightBasedBlend("UseHeightBasedBlend", Float) = 0.0
_HeightOffset0("Height Offset0", Float) = 0
_HeightOffset1("Height Offset1", Float) = 0
_HeightOffset2("Height Offset2", Float) = 0
_HeightOffset3("Height Offset3", Float) = 0
_HeightTransition("Height Transition", Range(0, 1.0)) = 0.0
[ToggleUI] _UseDensityMode("Use Density mode", Float) = 0.0

[ToggleUI] _EnableBlendModePreserveSpecularLighting("Enable Blend Mode Preserve Specular Lighting", Float) = 1.0
[ToggleUI] _DoubleSidedEnable("Double sided enable", Float) = 0.0
[Enum(Flip, 0, Mirror, 1)] _DoubleSidedNormalMode("Double sided normal mode", Float) = 1
[Enum(Flip, 0, Mirror, 1, None, 2)] _DoubleSidedNormalMode("Double sided normal mode", Float) = 1
[Enum(Subsurface Scattering and Transmission, 0, Standard, 1)] _MaterialID("MaterialId", Int) = 1 // MaterialId.Standard
[Enum(Both, 0, SSS only, 1, Transmission only, 2)] _SSSAndTransmissionType("SSSandTransmissionType", Int) = 0 // SSSandTransmissionType.Both
[Enum(Subsurface Scattering, 0, Standard, 1, Translucent, 5)] _MaterialID("MaterialId", Int) = 1 // MaterialId.Standard
[ToggleUI] _TransmissionEnable("_TransmissionEnable", Float) = 1.0
[Enum(None, 0, Tessellation displacement, 3)] _DisplacementMode("DisplacementMode", Int) = 3
[ToggleUI] _DisplacementLockObjectScale("displacement lock object scale", Float) = 1.0

#pragma hull Hull
#pragma domain Domain
#pragma multi_compile _ DEBUG_DISPLAY
#pragma multi_compile _ LIGHTMAP_ON
#pragma multi_compile _ DIRLIGHTMAP_COMBINED
#pragma multi_compile _ DYNAMICLIGHTMAP_ON

#include "../../ShaderVariables.hlsl"
#ifdef DEBUG_DISPLAY
#include "../../Debug/DebugDisplay.hlsl"
#endif
#include "../../Material/Material.hlsl"
#include "../Lit/ShaderPass/LitSharePass.hlsl"
#include "LayeredLitData.hlsl"

#pragma hull Hull
#pragma domain Domain
#pragma multi_compile _ DEBUG_DISPLAY
#pragma multi_compile _ LIGHTMAP_ON
#pragma multi_compile _ DIRLIGHTMAP_COMBINED
#pragma multi_compile _ DYNAMICLIGHTMAP_ON

#define SHADERPASS_GBUFFER_BYPASS_ALPHA_TEST
#include "../../ShaderVariables.hlsl"
#include "../../Material/Material.hlsl"
#include "../Lit/ShaderPass/LitSharePass.hlsl"
#include "LayeredLitData.hlsl"
#include "../../ShaderPass/ShaderPassGBuffer.hlsl"
ENDHLSL
}
Pass
{
Name "GBufferDebugDisplay" // Name is not used
Tags{ "LightMode" = "GBufferDebugDisplay" } // This will be only for opaque object based on the RenderQueue index
Cull [_CullMode]
Stencil
{
WriteMask [_StencilWriteMask]
Ref [_StencilRef]
Comp Always
Pass Replace
}
HLSLPROGRAM
#pragma hull Hull
#pragma domain Domain
#pragma multi_compile _ LIGHTMAP_ON
#pragma multi_compile _ DIRLIGHTMAP_COMBINED
#pragma multi_compile _ DYNAMICLIGHTMAP_ON
#pragma multi_compile _ SHADOWS_SHADOWMASK
#define DEBUG_DISPLAY
#define SHADERPASS SHADERPASS_GBUFFER
#include "../../ShaderVariables.hlsl"
#ifdef DEBUG_DISPLAY
#endif
#include "../../Material/Material.hlsl"
#include "../Lit/ShaderPass/LitSharePass.hlsl"
#include "LayeredLitData.hlsl"

#pragma hull Hull
#pragma domain Domain
#pragma multi_compile _ LIGHTMAP_ON
#pragma multi_compile _ DIRLIGHTMAP_COMBINED
#pragma multi_compile _ DYNAMICLIGHTMAP_ON
#pragma multi_compile _ SHADOWS_SHADOWMASK
// #include "../../Lighting/Forward.hlsl"
#pragma multi_compile LIGHTLOOP_SINGLE_PASS LIGHTLOOP_TILE_PASS
#pragma multi_compile USE_FPTL_LIGHTLIST USE_CLUSTERED_LIGHTLIST
#define SHADERPASS SHADERPASS_FORWARD
#include "../../ShaderVariables.hlsl"
#include "../../Lighting/Lighting.hlsl"
#include "../Lit/ShaderPass/LitSharePass.hlsl"
#include "LayeredLitData.hlsl"
#include "../../ShaderPass/ShaderPassForward.hlsl"
ENDHLSL
}
Pass
{
Name "ForwardDebugDisplay" // Name is not used
Tags{ "LightMode" = "ForwardDebugDisplay" } // This will be only for transparent object based on the RenderQueue index
Stencil
{
WriteMask [_StencilWriteMask]
Ref [_StencilRef]
Comp Always
Pass Replace
}
Blend[_SrcBlend][_DstBlend]
ZWrite[_ZWrite]
Cull[_CullMode]
HLSLPROGRAM
#pragma hull Hull
#pragma domain Domain
#pragma multi_compile _ DEBUG_DISPLAY
#pragma multi_compile _ LIGHTMAP_ON
#pragma multi_compile _ DIRLIGHTMAP_COMBINED
#pragma multi_compile _ DYNAMICLIGHTMAP_ON

#pragma multi_compile USE_FPTL_LIGHTLIST USE_CLUSTERED_LIGHTLIST
#define DEBUG_DISPLAY
#ifdef DEBUG_DISPLAY
#endif
#include "../../Lighting/Lighting.hlsl"
#include "../Lit/ShaderPass/LitSharePass.hlsl"
#include "LayeredLitData.hlsl"

13
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/Lit/Lit.cs


using System;
using UnityEngine.Rendering;
namespace UnityEngine.Experimental.Rendering.HDPipeline

[SurfaceDataAttributes("Specular Occlusion")]
public float specularOcclusion;
[SurfaceDataAttributes("Normal", true)]
[SurfaceDataAttributes(new string[]{"Normal", "Normal View Space"}, true)]
public Vector3 normalWS;
[SurfaceDataAttributes("Smoothness")]
public float perceptualSmoothness;

public float specularOcclusion;
[SurfaceDataAttributes("", true)]
[SurfaceDataAttributes(new string[] { "Normal WS", "Normal View Space" }, true)]
public Vector3 normalWS;
public float perceptualRoughness;

public override int GetMaterialGBufferCount() { return (int)GBufferMaterial.Count; }
RenderTextureFormat[] m_RTFormat4 = { RenderTextureFormat.ARGB32, RenderTextureFormat.ARGB32, RenderTextureFormat.ARGB32, RenderTextureFormat.RGB111110Float };
RenderTextureReadWrite[] m_RTReadWrite4 = { RenderTextureReadWrite.sRGB, RenderTextureReadWrite.Linear, RenderTextureReadWrite.Linear, RenderTextureReadWrite.Linear };
bool[] m_RTsRGBFlag4 = { true, false, false, false };
public override void GetMaterialGBufferDescription(out RenderTextureFormat[] RTFormat, out RenderTextureReadWrite[] RTReadWrite)
public override void GetMaterialGBufferDescription(out RenderTextureFormat[] RTFormat, out bool[] sRGBFlag)
RTReadWrite = m_RTReadWrite4;
sRGBFlag = m_RTsRGBFlag4;
}
//-----------------------------------------------------------------------------

m_InitPreFGD = CoreUtils.CreateEngineMaterial("Hidden/HDRenderPipeline/PreIntegratedFGD");
m_PreIntegratedFGD = new RenderTexture(128, 128, 0, RenderTextureFormat.ARGB2101010, RenderTextureReadWrite.Linear);
m_PreIntegratedFGD.hideFlags = HideFlags.HideAndDontSave;
m_PreIntegratedFGD.name = CoreUtils.GetRenderTargetAutoName(128, 128, RenderTextureFormat.ARGB2101010, "PreIntegratedFGD");
m_PreIntegratedFGD.Create();
m_LtcData = new Texture2DArray(k_LtcLUTResolution, k_LtcLUTResolution, 3, TextureFormat.RGBAHalf, false /*mipmap*/, true /* linear */)

84
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/Lit/Lit.cs.hlsl


#define DEBUGVIEW_LIT_SURFACEDATA_MATERIAL_FEATURES (1000)
#define DEBUGVIEW_LIT_SURFACEDATA_BASE_COLOR (1001)
#define DEBUGVIEW_LIT_SURFACEDATA_SPECULAR_OCCLUSION (1002)
#define DEBUGVIEW_LIT_SURFACEDATA_NORMAL_WS (1003)
#define DEBUGVIEW_LIT_SURFACEDATA_PERCEPTUAL_SMOOTHNESS (1004)
#define DEBUGVIEW_LIT_SURFACEDATA_AMBIENT_OCCLUSION (1005)
#define DEBUGVIEW_LIT_SURFACEDATA_METALLIC (1006)
#define DEBUGVIEW_LIT_SURFACEDATA_COAT_MASK (1007)
#define DEBUGVIEW_LIT_SURFACEDATA_SPECULAR_COLOR (1008)
#define DEBUGVIEW_LIT_SURFACEDATA_DIFFUSION_PROFILE (1009)
#define DEBUGVIEW_LIT_SURFACEDATA_SUBSURFACE_MASK (1010)
#define DEBUGVIEW_LIT_SURFACEDATA_THICKNESS (1011)
#define DEBUGVIEW_LIT_SURFACEDATA_TANGENT_WS (1012)
#define DEBUGVIEW_LIT_SURFACEDATA_ANISOTROPY (1013)
#define DEBUGVIEW_LIT_SURFACEDATA_THICKNESS_IRID (1014)
#define DEBUGVIEW_LIT_SURFACEDATA_IOR (1015)
#define DEBUGVIEW_LIT_SURFACEDATA_TRANSMITTANCE_COLOR (1016)
#define DEBUGVIEW_LIT_SURFACEDATA_AT_DISTANCE (1017)
#define DEBUGVIEW_LIT_SURFACEDATA_TRANSMITTANCE_MASK (1018)
#define DEBUGVIEW_LIT_SURFACEDATA_NORMAL (1003)
#define DEBUGVIEW_LIT_SURFACEDATA_NORMAL_VIEW_SPACE (1004)
#define DEBUGVIEW_LIT_SURFACEDATA_SMOOTHNESS (1005)
#define DEBUGVIEW_LIT_SURFACEDATA_AMBIENT_OCCLUSION (1006)
#define DEBUGVIEW_LIT_SURFACEDATA_METALLIC (1007)
#define DEBUGVIEW_LIT_SURFACEDATA_COAT_MASK (1008)
#define DEBUGVIEW_LIT_SURFACEDATA_SPECULAR_COLOR (1009)
#define DEBUGVIEW_LIT_SURFACEDATA_DIFFUSION_PROFILE (1010)
#define DEBUGVIEW_LIT_SURFACEDATA_SUBSURFACE_MASK (1011)
#define DEBUGVIEW_LIT_SURFACEDATA_THICKNESS (1012)
#define DEBUGVIEW_LIT_SURFACEDATA_TANGENT (1013)
#define DEBUGVIEW_LIT_SURFACEDATA_ANISOTROPY (1014)
#define DEBUGVIEW_LIT_SURFACEDATA_THICKNESS_IRID (1015)
#define DEBUGVIEW_LIT_SURFACEDATA_INDEX_OF_REFRACTION (1016)
#define DEBUGVIEW_LIT_SURFACEDATA_TRANSMITTANCE_COLOR (1017)
#define DEBUGVIEW_LIT_SURFACEDATA_TRANSMITTANCE_ABSORPTION_DISTANCE (1018)
#define DEBUGVIEW_LIT_SURFACEDATA_TRANSMITTANCE_MASK (1019)
//
// UnityEngine.Experimental.Rendering.HDPipeline.Lit+BSDFData: static fields

#define DEBUGVIEW_LIT_BSDFDATA_FRESNEL0 (1032)
#define DEBUGVIEW_LIT_BSDFDATA_SPECULAR_OCCLUSION (1033)
#define DEBUGVIEW_LIT_BSDFDATA_NORMAL_WS (1034)
#define DEBUGVIEW_LIT_BSDFDATA_PERCEPTUAL_ROUGHNESS (1035)
#define DEBUGVIEW_LIT_BSDFDATA_COAT_MASK (1036)
#define DEBUGVIEW_LIT_BSDFDATA_DIFFUSION_PROFILE (1037)
#define DEBUGVIEW_LIT_BSDFDATA_SUBSURFACE_MASK (1038)
#define DEBUGVIEW_LIT_BSDFDATA_THICKNESS (1039)
#define DEBUGVIEW_LIT_BSDFDATA_USE_THICK_OBJECT_MODE (1040)
#define DEBUGVIEW_LIT_BSDFDATA_TRANSMITTANCE (1041)
#define DEBUGVIEW_LIT_BSDFDATA_TANGENT_WS (1042)
#define DEBUGVIEW_LIT_BSDFDATA_BITANGENT_WS (1043)
#define DEBUGVIEW_LIT_BSDFDATA_ROUGHNESS_T (1044)
#define DEBUGVIEW_LIT_BSDFDATA_ROUGHNESS_B (1045)
#define DEBUGVIEW_LIT_BSDFDATA_ANISOTROPY (1046)
#define DEBUGVIEW_LIT_BSDFDATA_THICKNESS_IRID (1047)
#define DEBUGVIEW_LIT_BSDFDATA_COAT_ROUGHNESS (1048)
#define DEBUGVIEW_LIT_BSDFDATA_IOR (1049)
#define DEBUGVIEW_LIT_BSDFDATA_ABSORPTION_COEFFICIENT (1050)
#define DEBUGVIEW_LIT_BSDFDATA_TRANSMITTANCE_MASK (1051)
#define DEBUGVIEW_LIT_BSDFDATA_NORMAL_VIEW_SPACE (1035)
#define DEBUGVIEW_LIT_BSDFDATA_PERCEPTUAL_ROUGHNESS (1036)
#define DEBUGVIEW_LIT_BSDFDATA_COAT_MASK (1037)
#define DEBUGVIEW_LIT_BSDFDATA_DIFFUSION_PROFILE (1038)
#define DEBUGVIEW_LIT_BSDFDATA_SUBSURFACE_MASK (1039)
#define DEBUGVIEW_LIT_BSDFDATA_THICKNESS (1040)
#define DEBUGVIEW_LIT_BSDFDATA_USE_THICK_OBJECT_MODE (1041)
#define DEBUGVIEW_LIT_BSDFDATA_TRANSMITTANCE (1042)
#define DEBUGVIEW_LIT_BSDFDATA_TANGENT_WS (1043)
#define DEBUGVIEW_LIT_BSDFDATA_BITANGENT_WS (1044)
#define DEBUGVIEW_LIT_BSDFDATA_ROUGHNESS_T (1045)
#define DEBUGVIEW_LIT_BSDFDATA_ROUGHNESS_B (1046)
#define DEBUGVIEW_LIT_BSDFDATA_ANISOTROPY (1047)
#define DEBUGVIEW_LIT_BSDFDATA_THICKNESS_IRID (1048)
#define DEBUGVIEW_LIT_BSDFDATA_COAT_ROUGHNESS (1049)
#define DEBUGVIEW_LIT_BSDFDATA_IOR (1050)
#define DEBUGVIEW_LIT_BSDFDATA_ABSORPTION_COEFFICIENT (1051)
#define DEBUGVIEW_LIT_BSDFDATA_TRANSMITTANCE_MASK (1052)
//
// UnityEngine.Experimental.Rendering.HDPipeline.Lit+GBufferMaterial: static fields

case DEBUGVIEW_LIT_SURFACEDATA_SPECULAR_OCCLUSION:
result = surfacedata.specularOcclusion.xxx;
break;
case DEBUGVIEW_LIT_SURFACEDATA_NORMAL_WS:
case DEBUGVIEW_LIT_SURFACEDATA_NORMAL:
case DEBUGVIEW_LIT_SURFACEDATA_PERCEPTUAL_SMOOTHNESS:
case DEBUGVIEW_LIT_SURFACEDATA_NORMAL_VIEW_SPACE:
result = surfacedata.normalWS * 0.5 + 0.5;
break;
case DEBUGVIEW_LIT_SURFACEDATA_SMOOTHNESS:
result = surfacedata.perceptualSmoothness.xxx;
break;
case DEBUGVIEW_LIT_SURFACEDATA_AMBIENT_OCCLUSION:

case DEBUGVIEW_LIT_SURFACEDATA_THICKNESS:
result = surfacedata.thickness.xxx;
break;
case DEBUGVIEW_LIT_SURFACEDATA_TANGENT_WS:
case DEBUGVIEW_LIT_SURFACEDATA_TANGENT:
result = surfacedata.tangentWS * 0.5 + 0.5;
break;
case DEBUGVIEW_LIT_SURFACEDATA_ANISOTROPY:

result = surfacedata.thicknessIrid.xxx;
break;
case DEBUGVIEW_LIT_SURFACEDATA_IOR:
case DEBUGVIEW_LIT_SURFACEDATA_INDEX_OF_REFRACTION:
case DEBUGVIEW_LIT_SURFACEDATA_AT_DISTANCE:
case DEBUGVIEW_LIT_SURFACEDATA_TRANSMITTANCE_ABSORPTION_DISTANCE:
result = surfacedata.atDistance.xxx;
break;
case DEBUGVIEW_LIT_SURFACEDATA_TRANSMITTANCE_MASK:

result = bsdfdata.specularOcclusion.xxx;
break;
case DEBUGVIEW_LIT_BSDFDATA_NORMAL_WS:
result = bsdfdata.normalWS * 0.5 + 0.5;
break;
case DEBUGVIEW_LIT_BSDFDATA_NORMAL_VIEW_SPACE:
result = bsdfdata.normalWS * 0.5 + 0.5;
break;
case DEBUGVIEW_LIT_BSDFDATA_PERCEPTUAL_ROUGHNESS:

92
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/Lit/Lit.hlsl


TEXTURE2D(_PyramidDepthTexture);
CBUFFER_START(UnityGaussianPyramidParameters)
float4 _GaussianPyramidColorMipSize;
float4 _GaussianPyramidColorMipSize; // (x,y) = PyramidToScreenScale, z = lodCount
float4 _PyramidDepthMipSize;
CBUFFER_END

// Override debug value output to make it more readable
switch (paramId)
{
case DEBUGVIEW_LIT_SURFACEDATA_NORMAL_VIEW_SPACE:
// Convert to view space
result = TransformWorldToViewDir(surfaceData.normalWS) * 0.5 + 0.5;
break;
case DEBUGVIEW_LIT_SURFACEDATA_MATERIAL_FEATURES:
result = (surfaceData.materialFeatures.xxx) / 255.0; // Allow reading with the color picker debug mode
break;

// Override debug value output to make it more readable
switch (paramId)
{
case DEBUGVIEW_LIT_BSDFDATA_NORMAL_VIEW_SPACE:
// Convert to view space
result = TransformWorldToViewDir(bsdfData.normalWS) * 0.5 + 0.5;
break;
case DEBUGVIEW_LIT_BSDFDATA_MATERIAL_FEATURES:
result = (bsdfData.materialFeatures.xxx) / 255.0; // Allow reading with the color picker debug mode
break;
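In both debug switches above, world-space directions are converted to view space and remapped from [-1, 1] to [0, 1] so they can be displayed as colors. A minimal sketch of that remap (helper name is hypothetical):
float3 PackDirectionForDebugColor(float3 dir)
{
    return dir * 0.5 + 0.5; // [-1, 1] -> [0, 1]
}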

// Precomputed lighting data to send to the various lighting functions
struct PreLightData
{
// General
float clampNdotV; // clamped NdotV
float NdotV; // Could be negative due to normal mapping, use ClampNdotV()
// GGX
float partLambdaV;

float3 iblR; // Dominant specular direction, used for IBL in EvaluateBSDF_Env()
float iblPerceptualRoughness;
float3 specularFGD; // Store preconvolved BRDF for both specular and diffuse
float diffuseFGD;
float3x3 orthoBasisViewNormal; // Right-handed view-dependent orthogonal basis around the normal (6x VGPRs)
float3x3 ltcTransformDiffuse; // Inverse transformation for Lambertian or Disney Diffuse (4x VGPRs)
float3x3 ltcTransformSpecular; // Inverse transformation for GGX (4x VGPRs)
float ltcMagnitudeDiffuse;
float3 ltcMagnitudeFresnel;

float coatIblF; // Fresnel term for view vector
float3x3 ltcTransformCoat; // Inverse transformation for GGX (4x VGPRs)
float3 transparentRefractV; // refracted view vector after exiting the shape
float3 transparentPositionWS; // start of the refracted ray after exiting the shape
float3 transparentTransmittance; // transmittance due to absorption
float transparentSSMipLevel; // mip level of the screen space gaussian pyramid for rough refraction
};
PreLightData GetPreLightData(float3 V, PositionInputs posInput, BSDFData bsdfData)

float3 N = bsdfData.normalWS;
float NdotV = saturate(dot(N, V));
preLightData.clampNdotV = NdotV; // Caution: edge cases where N points away from the camera are handled during the GBuffer/forward pass, so do nothing here
preLightData.NdotV = dot(N, V);
float NdotV = ClampNdotV(preLightData.NdotV);
if (HasFeatureFlag(bsdfData.materialFeatures, MATERIALFEATUREFLAGS_LIT_CLEAR_COAT))
{

preLightData.ltcTransformSpecular._m00_m02_m11_m20 = SAMPLE_TEXTURE2D_ARRAY_LOD(_LtcData, s_linear_clamp_sampler, uv, LTC_GGX_MATRIX_INDEX, 0);
// Construct a right-handed view-dependent orthogonal basis around the normal
preLightData.orthoBasisViewNormal[0] = normalize(V - N * NdotV);
preLightData.orthoBasisViewNormal[0] = normalize(V - N * preLightData.NdotV); // Do not clamp NdotV here
preLightData.orthoBasisViewNormal[2] = N;
preLightData.orthoBasisViewNormal[1] = cross(preLightData.orthoBasisViewNormal[2], preLightData.orthoBasisViewNormal[0]);

{
float3 N = bsdfData.normalWS;
float NdotV = preLightData.clampNdotV;
float LdotV = dot(L, V);
float invLenLV = rsqrt(max(2.0 * LdotV + 2.0, FLT_EPS)); // invLenLV = rcp(length(L + V)) - caution about the case where V and L are opposite, it can happen, use max to avoid this
float NdotH = saturate((NdotL + NdotV) * invLenLV);
float LdotH = saturate(invLenLV * LdotV + invLenLV);
float LdotV = dot(L, V);
float invLenLV = rsqrt(max(2.0 * LdotV + 2.0, FLT_EPS)); // invLenLV = rcp(length(L + V)), clamp to avoid rsqrt(0) = NaN
float NdotH = saturate((NdotL + preLightData.NdotV) * invLenLV); // Do not clamp NdotV here
float LdotH = saturate(invLenLV * LdotV + invLenLV);
float NdotV = ClampNdotV(preLightData.NdotV);
float3 F = F_Schlick(bsdfData.fresnel0, LdotH);
float DV;
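The invLenLV trick above relies on |L + V|^2 = 2 + 2(L·V) for unit vectors, so the half vector H never has to be normalized explicitly. A self-contained sketch of the same identities (helper name is hypothetical):
void GetHalfVectorTerms(float NdotL, float NdotV, float LdotV, out float NdotH, out float LdotH)
{
    float invLenLV = rsqrt(max(2.0 * LdotV + 2.0, 1e-7)); // rcp(length(L + V)); clamp avoids rsqrt(0)
    NdotH = saturate((NdotL + NdotV) * invLenLV);         // N.H = (N.L + N.V) / |L + V|
    LdotH = saturate(LdotV * invLenLV + invLenLV);        // L.H = (L.V + 1)   / |L + V|
}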

float3 H = (L + V) * invLenLV;
// For anisotropy we must not saturate these values
float TdotH = dot(bsdfData.tangentWS, H);
float TdotL = dot(bsdfData.tangentWS, L);

[branch] if (HasFeatureFlag(bsdfData.materialFeatures, MATERIALFEATUREFLAGS_LIT_TRANSMISSION))
{
// We use diffuse lighting for accumulation since it is going to be blurred during the SSS pass.
lighting.diffuse += EvaluateTransmission(bsdfData, NdotL, preLightData.clampNdotV, attenuation * lightData.diffuseScale);
lighting.diffuse += EvaluateTransmission(bsdfData, NdotL, ClampNdotV(preLightData.NdotV), attenuation * lightData.diffuseScale);
}
// Save ALU by applying light and cookie colors only once.

[branch] if (HasFeatureFlag(bsdfData.materialFeatures, MATERIALFEATUREFLAGS_LIT_TRANSMISSION))
{
// We use diffuse lighting for accumulation since it is going to be blurred during the SSS pass.
lighting.diffuse += EvaluateTransmission(bsdfData, NdotL, preLightData.clampNdotV, attenuation * lightData.diffuseScale);
lighting.diffuse += EvaluateTransmission(bsdfData, NdotL, ClampNdotV(preLightData.NdotV), attenuation * lightData.diffuseScale);
}
// Save ALU by applying light and cookie colors only once.

return lighting;
}
DirectLighting EvaluateBSDF_Area(LightLoopContext lightLoopContext,
float3 V, PositionInputs posInput,
PreLightData preLightData, LightData lightData,
BSDFData bsdfData, BakeLightingData bakeLightingData)
{
if (lightData.lightType == GPULIGHTTYPE_LINE)
{
return EvaluateBSDF_Line(lightLoopContext, V, posInput, preLightData, lightData, bsdfData, bakeLightingData);
}
else
{
return EvaluateBSDF_Rect(lightLoopContext, V, posInput, preLightData, lightData, bsdfData, bakeLightingData);
}
}
//-----------------------------------------------------------------------------
// EvaluateBSDF_SSLighting for screen space lighting
// ----------------------------------------------------------------------------

}
// Map the roughness to the correct mip map level of the color pyramid
lighting.specularTransmitted = SAMPLE_TEXTURE2D_LOD(_GaussianPyramidColorTexture, s_trilinear_clamp_sampler, refractedBackPointNDC, preLightData.transparentSSMipLevel).rgb;
lighting.specularTransmitted = SAMPLE_TEXTURE2D_LOD(_GaussianPyramidColorTexture, s_trilinear_clamp_sampler, refractedBackPointNDC * _GaussianPyramidColorMipSize.xy, preLightData.transparentSSMipLevel).rgb;
// Beer-Lambert law for absorption
lighting.specularTransmitted *= preLightData.transparentTransmittance;
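The Beer-Lambert factor applied above is precomputed into preLightData.transparentTransmittance; for reference, the law itself as a hedged sketch (function and parameter names are assumptions, not HDRP's exact helpers):
float3 BeerLambertTransmittance(float3 absorptionCoefficient, float travelDistance)
{
    return exp(-absorptionCoefficient * travelDistance); // fraction of light surviving absorption
}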

#endif
return lighting;
}
DirectLighting EvaluateBSDF_Area(LightLoopContext lightLoopContext,
float3 V, PositionInputs posInput,
PreLightData preLightData, LightData lightData,
BSDFData bsdfData, BakeLightingData bakeLightingData)
{
if (lightData.lightType == GPULIGHTTYPE_LINE)
{
return EvaluateBSDF_Line(lightLoopContext, V, posInput, preLightData, lightData, bsdfData, bakeLightingData);
}
else
{
return EvaluateBSDF_Rect(lightLoopContext, V, posInput, preLightData, lightData, bsdfData, bakeLightingData);
}
}
//-----------------------------------------------------------------------------

#endif
float roughness = PerceptualRoughnessToRoughness(bsdfData.perceptualRoughness);
float specularOcclusion = GetSpecularOcclusionFromAmbientOcclusion(preLightData.clampNdotV, indirectAmbientOcclusion, roughness);
float specularOcclusion = GetSpecularOcclusionFromAmbientOcclusion(ClampNdotV(preLightData.NdotV), indirectAmbientOcclusion, roughness);
// Try to mimic multibounce with specular color. Not the point of the original formula but ok result.
// Take the min of screenspace specular occlusion and visibility cone specular occlusion
#if GTAO_MULTIBOUNCE_APPROX

91
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/Lit/Lit.shader


[ToggleUI] _EnableBlendModePreserveSpecularLighting("Enable Blend Mode Preserve Specular Lighting", Float) = 1.0
[ToggleUI] _DoubleSidedEnable("Double sided enable", Float) = 0.0
[Enum(Flip, 0, Mirror, 1)] _DoubleSidedNormalMode("Double sided normal mode", Float) = 1
[Enum(Flip, 0, Mirror, 1, None, 2)] _DoubleSidedNormalMode("Double sided normal mode", Float) = 1
[HideInInspector] _DoubleSidedConstants("_DoubleSidedConstants", Vector) = (1, 1, -1, 0)
[Enum(UV0, 0, UV1, 1, UV2, 2, UV3, 3, Planar, 4, Triplanar, 5)] _UVBase("UV Set for base", Float) = 0

// The following enum should be material feature flags (i.e. a bitfield); however, due to GBuffer encoding constraints, many combinations exclude each other,
// so we use this enum as a "material ID" which can be interpreted as a preset of the material feature bitfield.
// The only material feature flag that can be added in all cases is clear coat
[Enum(Subsurface Scattering and Transmission, 0, Standard, 1, Anisotropy, 2, Iridescence, 3, Specular Color, 4)] _MaterialID("MaterialId", Int) = 1 // MaterialId.Standard
[Enum(Both, 0, SSS only, 1, Transmission only, 2)] _SSSAndTransmissionType("SSSandTransmissionType", Int) = 0 // SSSandTransmissionType.Both
[Enum(Subsurface Scattering, 0, Standard, 1, Anisotropy, 2, Iridescence, 3, Specular Color, 4, Translucent, 5)] _MaterialID("MaterialId", Int) = 1 // MaterialId.Standard
[ToggleUI] _TransmissionEnable("_TransmissionEnable", Float) = 1.0
[Enum(None, 0, Vertex displacement, 1, Pixel displacement, 2)] _DisplacementMode("DisplacementMode", Int) = 0
[ToggleUI] _DisplacementLockObjectScale("displacement lock object scale", Float) = 1.0

HLSLPROGRAM
#pragma multi_compile _ DEBUG_DISPLAY
#pragma multi_compile _ LIGHTMAP_ON
#pragma multi_compile _ DIRLIGHTMAP_COMBINED
#pragma multi_compile _ DYNAMICLIGHTMAP_ON

#include "../../ShaderVariables.hlsl"
#ifdef DEBUG_DISPLAY
#include "../../Debug/DebugDisplay.hlsl"
#endif
#include "../../Material/Material.hlsl"
#include "ShaderPass/LitSharePass.hlsl"
#include "LitData.hlsl"

HLSLPROGRAM
#pragma multi_compile _ DEBUG_DISPLAY
#pragma multi_compile _ LIGHTMAP_ON
#pragma multi_compile _ DIRLIGHTMAP_COMBINED
#pragma multi_compile _ DYNAMICLIGHTMAP_ON

#define SHADERPASS_GBUFFER_BYPASS_ALPHA_TEST
#include "../../ShaderVariables.hlsl"
#include "../../Material/Material.hlsl"
#include "ShaderPass/LitSharePass.hlsl"
#include "LitData.hlsl"
#include "../../ShaderPass/ShaderPassGBuffer.hlsl"
ENDHLSL
}
Pass
{
Name "GBufferDebugDisplay" // Name is not used
Tags{ "LightMode" = "GBufferDebugDisplay" } // This will be only for opaque object based on the RenderQueue index
Cull [_CullMode]
Stencil
{
WriteMask [_StencilWriteMask]
Ref [_StencilRef]
Comp Always
Pass Replace
}
HLSLPROGRAM
#pragma multi_compile _ LIGHTMAP_ON
#pragma multi_compile _ DIRLIGHTMAP_COMBINED
#pragma multi_compile _ DYNAMICLIGHTMAP_ON
#pragma multi_compile _ SHADOWS_SHADOWMASK
#define DEBUG_DISPLAY
#define SHADERPASS SHADERPASS_GBUFFER
#include "../../ShaderVariables.hlsl"
#ifdef DEBUG_DISPLAY
#endif
#include "../../Material/Material.hlsl"
#include "ShaderPass/LitSharePass.hlsl"
#include "LitData.hlsl"

HLSLPROGRAM
#pragma multi_compile _ DEBUG_DISPLAY
#pragma multi_compile _ LIGHTMAP_ON
#pragma multi_compile _ DIRLIGHTMAP_COMBINED
#pragma multi_compile _ DYNAMICLIGHTMAP_ON

#define SHADERPASS SHADERPASS_FORWARD
#include "../../ShaderVariables.hlsl"
#ifdef DEBUG_DISPLAY
#include "../../Debug/DebugDisplay.hlsl"
#endif
#include "../../Lighting/Lighting.hlsl"
#include "ShaderPass/LitSharePass.hlsl"
#include "LitData.hlsl"

HLSLPROGRAM
#pragma multi_compile _ DEBUG_DISPLAY
#pragma multi_compile _ LIGHTMAP_ON
#pragma multi_compile _ DIRLIGHTMAP_COMBINED
#pragma multi_compile _ DYNAMICLIGHTMAP_ON

#define SHADERPASS SHADERPASS_FORWARD
#include "../../ShaderVariables.hlsl"
#include "../../Lighting/Lighting.hlsl"
#include "ShaderPass/LitSharePass.hlsl"
#include "LitData.hlsl"
#include "../../ShaderPass/ShaderPassForward.hlsl"
ENDHLSL
}
Pass
{
Name "ForwardDebugDisplay" // Name is not used
Tags{ "LightMode" = "ForwardDebugDisplay" } // This will be only for transparent object based on the RenderQueue index
Stencil
{
WriteMask [_StencilWriteMask]
Ref [_StencilRef]
Comp Always
Pass Replace
}
Blend[_SrcBlend][_DstBlend]
ZWrite[_ZWrite]
Cull[_CullModeForward]
HLSLPROGRAM
#pragma multi_compile _ LIGHTMAP_ON
#pragma multi_compile _ DIRLIGHTMAP_COMBINED
#pragma multi_compile _ DYNAMICLIGHTMAP_ON
#pragma multi_compile _ SHADOWS_SHADOWMASK
// #include "../../Lighting/Forward.hlsl"
#pragma multi_compile LIGHTLOOP_SINGLE_PASS LIGHTLOOP_TILE_PASS
#pragma multi_compile USE_FPTL_LIGHTLIST USE_CLUSTERED_LIGHTLIST
#define DEBUG_DISPLAY
#define SHADERPASS SHADERPASS_FORWARD
#include "../../ShaderVariables.hlsl"
#ifdef DEBUG_DISPLAY
#endif
#include "../../Lighting/Lighting.hlsl"
#include "ShaderPass/LitSharePass.hlsl"
#include "LitData.hlsl"

6
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/Lit/LitData.hlsl


float alpha = GetSurfaceData(input, layerTexCoord, surfaceData, normalTS, bentNormalTS);
GetNormalWS(input, V, normalTS, surfaceData.normalWS);
// Ensure that the normal is front-facing.
float NdotV;
surfaceData.normalWS = GetViewReflectedNormal(surfaceData.normalWS, V, NdotV);
// Use bent normal to sample GI if available
#ifdef _BENTNORMALMAP
GetNormalWS(input, V, bentNormalTS, bentNormalWS);

// If we have bent normal and ambient occlusion, process a specular occlusion
surfaceData.specularOcclusion = GetSpecularOcclusionFromBentAO(V, bentNormalWS, surfaceData);
#elif defined(_MASKMAP)
surfaceData.specularOcclusion = GetSpecularOcclusionFromAmbientOcclusion(NdotV, surfaceData.ambientOcclusion, PerceptualSmoothnessToRoughness(surfaceData.perceptualSmoothness));
surfaceData.specularOcclusion = GetSpecularOcclusionFromAmbientOcclusion(ClampNdotV(dot(surfaceData.normalWS, V)), surfaceData.ambientOcclusion, PerceptualSmoothnessToRoughness(surfaceData.perceptualSmoothness));
#else
surfaceData.specularOcclusion = 1.0;
#endif
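For context, the AO-based fallback above typically follows the Frostbite-style approximation by Lagarde and de Rousiers; a hedged sketch of that shape (HDRP's GetSpecularOcclusionFromAmbientOcclusion may differ in details):
float SpecularOcclusionFromAOSketch(float NdotV, float ambientOcclusion, float roughness)
{
    // Rougher surfaces take more occlusion from AO; smooth surfaces keep their specular response.
    return saturate(pow(NdotV + ambientOcclusion, exp2(-16.0 * roughness - 1.0)) - 1.0 + ambientOcclusion);
}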

2
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/Lit/LitDataDisplacement.hlsl


// Since POM "pushes" geometry inwards (rather than extruding it), { height = height - 1 }.
// Since the result is used as a 'depthOffsetVS', it needs to be positive, so we flip the sign.
float verticalDisplacement = maxHeight - height * maxHeight;
return verticalDisplacement / max(NdotV, 0.001);
return verticalDisplacement / ClampNdotV(NdotV);
#else
return 0.0;
#endif

4
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/Lit/LitReference.hlsl


uint sampleCount = 4096)
{
float3x3 localToWorld = float3x3(bsdfData.tangentWS, bsdfData.bitangentWS, bsdfData.normalWS);
float NdotV = preLightData.clampNdotV;
float NdotV = ClampNdotV(dot(bsdfData.normalWS, V));
float3 acc = float3(0.0, 0.0, 0.0);
// Add some jittering on Hammersley2d

localToWorld = GetLocalFrame(bsdfData.normalWS);
}
float NdotV = preLightData.clampNdotV;
float NdotV = ClampNdotV(dot(bsdfData.normalWS, V));
float3 acc = float3(0.0, 0.0, 0.0);
// Add some jittering on Hammersley2d

97
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/Lit/LitTessellation.shader


[ToggleUI] _EnableBlendModePreserveSpecularLighting("Enable Blend Mode Preserve Specular Lighting", Float) = 1.0
[ToggleUI] _DoubleSidedEnable("Double sided enable", Float) = 0.0
[Enum(Flip, 0, Mirror, 1)] _DoubleSidedNormalMode("Double sided normal mode", Float) = 1
[Enum(Flip, 0, Mirror, 1, None, 2)] _DoubleSidedNormalMode("Double sided normal mode", Float) = 1
[HideInInspector] _DoubleSidedConstants("_DoubleSidedConstants", Vector) = (1, 1, -1, 0)
[Enum(UV0, 0, UV1, 1, UV2, 2, UV3, 3, Planar, 4, Triplanar, 5)] _UVBase("UV Set for base", Float) = 0

// The following enum should be material feature flags (i.e. a bitfield); however, due to GBuffer encoding constraints, many combinations exclude each other,
// so we use this enum as a "material ID" which can be interpreted as a preset of the material feature bitfield.
// The only material feature flag that can be added in all cases is clear coat
[Enum(Subsurface Scattering and Transmission, 0, Standard, 1, Anisotropy, 2, Iridescence, 3, Specular Color, 4)] _MaterialID("MaterialId", Int) = 1 // MaterialId.Standard
[Enum(Both, 0, SSS only, 1, Transmission only, 2)] _SSSAndTransmissionType("SSSandTransmissionType", Int) = 0 // SSSandTransmissionType.Both
[Enum(Subsurface Scattering, 0, Standard, 1, Anisotropy, 2, Iridescence, 3, Specular Color, 4, Translucent, 5)] _MaterialID("MaterialId", Int) = 1 // MaterialId.Standard
[ToggleUI] _TransmissionEnable("_TransmissionEnable", Float) = 1.0
[Enum(None, 0, Tessellation displacement, 3)] _DisplacementMode("DisplacementMode", Int) = 3
[ToggleUI] _DisplacementLockObjectScale("displacement lock object scale", Float) = 1.0

#pragma hull Hull
#pragma domain Domain
#pragma multi_compile _ DEBUG_DISPLAY
#pragma multi_compile _ LIGHTMAP_ON
#pragma multi_compile _ DIRLIGHTMAP_COMBINED
#pragma multi_compile _ DYNAMICLIGHTMAP_ON

#include "../../ShaderVariables.hlsl"
#ifdef DEBUG_DISPLAY
#include "../../Debug/DebugDisplay.hlsl"
#endif
#include "../../Material/Material.hlsl"
#include "ShaderPass/LitSharePass.hlsl"
#include "LitData.hlsl"

#pragma hull Hull
#pragma domain Domain
#pragma multi_compile _ DEBUG_DISPLAY
#pragma multi_compile _ LIGHTMAP_ON
#pragma multi_compile _ DIRLIGHTMAP_COMBINED
#pragma multi_compile _ DYNAMICLIGHTMAP_ON

#define SHADERPASS_GBUFFER_BYPASS_ALPHA_TEST
#include "../../ShaderVariables.hlsl"
#include "../../Material/Material.hlsl"
#include "ShaderPass/LitSharePass.hlsl"
#include "LitData.hlsl"
#include "../../ShaderPass/ShaderPassGBuffer.hlsl"
ENDHLSL
}
Pass
{
Name "GBufferDebugDisplay" // Name is not used
Tags{ "LightMode" = "GBufferDebugDisplay" } // This will be only for opaque object based on the RenderQueue index
Cull [_CullMode]
Stencil
{
WriteMask [_StencilWriteMask]
Ref [_StencilRef]
Comp Always
Pass Replace
}
HLSLPROGRAM
#pragma hull Hull
#pragma domain Domain
#pragma multi_compile _ LIGHTMAP_ON
#pragma multi_compile _ DIRLIGHTMAP_COMBINED
#pragma multi_compile _ DYNAMICLIGHTMAP_ON
#pragma multi_compile _ SHADOWS_SHADOWMASK
#define DEBUG_DISPLAY
#define SHADERPASS SHADERPASS_GBUFFER
#include "../../ShaderVariables.hlsl"
#ifdef DEBUG_DISPLAY
#endif
#include "../../Material/Material.hlsl"
#include "ShaderPass/LitSharePass.hlsl"
#include "LitData.hlsl"

#pragma hull Hull
#pragma domain Domain
#pragma multi_compile _ DEBUG_DISPLAY
#pragma multi_compile _ LIGHTMAP_ON
#pragma multi_compile _ DIRLIGHTMAP_COMBINED
#pragma multi_compile _ DYNAMICLIGHTMAP_ON

#define SHADERPASS SHADERPASS_FORWARD
#include "../../ShaderVariables.hlsl"
#ifdef DEBUG_DISPLAY
#include "../../Debug/DebugDisplay.hlsl"
#endif
#include "../../Lighting/Lighting.hlsl"
#include "ShaderPass/LitSharePass.hlsl"
#include "LitData.hlsl"

#pragma hull Hull
#pragma domain Domain
#pragma multi_compile _ LIGHTMAP_ON
#pragma multi_compile _ DIRLIGHTMAP_COMBINED
#pragma multi_compile _ DYNAMICLIGHTMAP_ON
#pragma multi_compile _ SHADOWS_SHADOWMASK
// #include "../../Lighting/Forward.hlsl"
#pragma multi_compile LIGHTLOOP_SINGLE_PASS LIGHTLOOP_TILE_PASS
#pragma multi_compile USE_FPTL_LIGHTLIST USE_CLUSTERED_LIGHTLIST
#define SHADERPASS SHADERPASS_FORWARD
#include "../../ShaderVariables.hlsl"
#include "../../Lighting/Lighting.hlsl"
#include "ShaderPass/LitSharePass.hlsl"
#include "LitData.hlsl"
#include "../../ShaderPass/ShaderPassForward.hlsl"
ENDHLSL
}
Pass
{
Name "ForwardDebugDisplay" // Name is not used
Tags{ "LightMode" = "ForwardDebugDisplay" } // This will be only for transparent object based on the RenderQueue index
Stencil
{
WriteMask [_StencilWriteMask]
Ref [_StencilRef]
Comp Always
Pass Replace
}
Blend[_SrcBlend][_DstBlend]
ZWrite[_ZWrite]
Cull[_CullModeForward]
HLSLPROGRAM
#pragma hull Hull
#pragma domain Domain
#pragma multi_compile _ DEBUG_DISPLAY
#pragma multi_compile _ LIGHTMAP_ON
#pragma multi_compile _ DIRLIGHTMAP_COMBINED
#pragma multi_compile _ DYNAMICLIGHTMAP_ON

#pragma multi_compile USE_FPTL_LIGHTLIST USE_CLUSTERED_LIGHTLIST
#define DEBUG_DISPLAY
#ifdef DEBUG_DISPLAY
#endif
#include "../../Lighting/Lighting.hlsl"
#include "ShaderPass/LitSharePass.hlsl"
#include "LitData.hlsl"

7
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/MaterialUtilities.hlsl


positionCS.xy = positionCS.xy / positionCS.w;
previousPositionCS.xy = previousPositionCS.xy / previousPositionCS.w;
return (positionCS.xy - previousPositionCS.xy);
float2 velocity = (positionCS.xy - previousPositionCS.xy);
#if UNITY_UV_STARTS_AT_TOP
velocity.y = -velocity.y;
#endif
return velocity;
#else
return float2(0.0, 0.0);
#endif
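Pieced back together, the hunk above computes an NDC-space velocity from the current and previous clip-space positions, flipping Y on platforms where the UV origin is at the top. A hedged, self-contained sketch (function name assumed):
float2 ComputeNdcVelocity(float4 positionCS, float4 previousPositionCS)
{
    float2 current  = positionCS.xy / positionCS.w;               // perspective divide to NDC
    float2 previous = previousPositionCS.xy / previousPositionCS.w;
    float2 velocity = current - previous;
#if UNITY_UV_STARTS_AT_TOP
    velocity.y = -velocity.y;                                     // match top-left UV origin platforms
#endif
    return velocity;
}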

4
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/RenderPipelineMaterial.cs


{
// GBuffer management
public virtual int GetMaterialGBufferCount() { return 0; }
public virtual void GetMaterialGBufferDescription(out RenderTextureFormat[] RTFormat, out RenderTextureReadWrite[] RTReadWrite)
public virtual void GetMaterialGBufferDescription(out RenderTextureFormat[] RTFormat, out bool[] sRGBFlag)
RTReadWrite = null;
sRGBFlag = null;
}
// Regular interface

105
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/SubsurfaceScattering/SubsurfaceScatteringManager.cs


// Currently we only support a single SSS buffer. If the shader code changes, the shader manager may need to be updated.
public const int k_MaxSSSBuffer = 1;
readonly int m_SSSBuffer0;
readonly RenderTargetIdentifier m_SSSBuffer0RT;
RenderTargetIdentifier[] m_ColorMRTs;
RenderTargetIdentifier[] m_RTIDs = new RenderTargetIdentifier[k_MaxSSSBuffer];
RTHandle[] m_ColorMRTs = new RTHandle[k_MaxSSSBuffer];
bool[] m_ExternalBuffer = new bool[k_MaxSSSBuffer];
// Disney SSS Model
ComputeShader m_SubsurfaceScatteringCS;

RenderTexture m_HTile;
RenderTargetIdentifier m_HTileRT;
RTHandle m_HTile;
// End Disney SSS Model
// Jimenez SSS Model

// Jimenez needs an extra buffer, and Disney needs one on some platforms
readonly int m_CameraFilteringBuffer;
readonly RenderTargetIdentifier m_CameraFilteringBufferRT;
RTHandle m_CameraFilteringBuffer;
// This is used to be able to read the stencil value in a compute shader
Material m_CopyStencilForSplitLighting;

m_SSSBuffer0 = HDShaderIDs._SSSBufferTexture[0];
m_SSSBuffer0RT = new RenderTargetIdentifier(m_SSSBuffer0);
// Use with Jimenez
m_CameraFilteringBuffer = HDShaderIDs._CameraFilteringBuffer;
m_CameraFilteringBufferRT = new RenderTargetIdentifier(m_CameraFilteringBuffer);
// In case of deferred, we must be in sync with the SubsurfaceScattering.hlsl and Lit.hlsl files and set up the correct buffers
// for SSS
public void InitSSSBuffersFromGBuffer(GBufferManager gbufferManager)
public void InitSSSBuffers(GBufferManager gbufferManager, RenderPipelineSettings settings)
m_RTIDs[0] = gbufferManager.GetGBuffers()[0]; // Note: This buffer must be sRGB (which is the case with Lit.shader)
}
// TODO: For MSAA, at least initially, we can only support Jimenez, because we can't create MSAA + UAV render targets.
if (settings.supportForwardOnly)
{
// In case of full forward we must allocate the render target for forward SSS (or reuse one already existing)
// TODO: Provide a way to reuse a render target
m_ColorMRTs[0] = RTHandle.Alloc(Vector2.one, filterMode: FilterMode.Point, colorFormat: RenderTextureFormat.ARGB32, sRGB: true, name: "SSSBuffer");
m_ExternalBuffer[0] = false;
}
else
{
// In case of deferred, we must be in sync with the SubsurfaceScattering.hlsl and Lit.hlsl files and set up the correct buffers
m_ColorMRTs[0] = gbufferManager.GetBuffer(0); // Note: This buffer must be sRGB (which is the case with Lit.shader)
m_ExternalBuffer[0] = true;
}
// In case of full forward we must allocate the render target for forward SSS (or reuse one already existing)
// TODO: Provide a way to reuse a render target
public void InitSSSBuffers(RenderTextureDescriptor desc, CommandBuffer cmd)
{
m_RTIDs[0] = m_SSSBuffer0RT;
if (ShaderConfig.k_UseDisneySSS == 0 || NeedTemporarySubsurfaceBuffer())
{
// Caution: must be same format as m_CameraSssDiffuseLightingBuffer
m_CameraFilteringBuffer = RTHandle.Alloc(Vector2.one, filterMode: FilterMode.Point, colorFormat: RenderTextureFormat.RGB111110Float, sRGB: false, enableRandomWrite: true, enableMSAA: true, name: "SSSCameraFiltering"); // Enable UAV
}
desc.depthBufferBits = 0;
desc.colorFormat = RenderTextureFormat.ARGB32;
desc.sRGB = true; // Note: This buffer must be sRGB to match deferred case (which is the case with Lit.shader)
cmd.ReleaseTemporaryRT(m_SSSBuffer0);
cmd.GetTemporaryRT(m_SSSBuffer0, desc, FilterMode.Point);
// We use 8x8 tiles in order to match the native GCN HTile as closely as possible.
m_HTile = RTHandle.Alloc(size => new Vector2Int((size.x + 7) / 8, (size.y + 7) / 8), filterMode: FilterMode.Point, colorFormat: RenderTextureFormat.R8, sRGB: false, enableRandomWrite: true, name: "SSSHtile"); // Enable UAV
public RenderTargetIdentifier GetSSSBuffers(int index)
public RTHandle GetSSSBuffer(int index)
return m_RTIDs[index];
return m_ColorMRTs[index];
}
public void Build(HDRenderPipelineAsset hdAsset)

CoreUtils.Destroy(m_SssVerticalFilterPass);
CoreUtils.Destroy(m_SssHorizontalFilterAndCombinePass);
CoreUtils.Destroy(m_CopyStencilForSplitLighting);
}
public void Resize(HDCamera hdCamera)
{
CoreUtils.ResizeHTile(ref m_HTile, ref m_HTileRT, hdCamera.renderTextureDesc);
for (int i = 0; i < k_MaxSSSBuffer; ++i)
{
if (!m_ExternalBuffer[i])
{
RTHandle.Release(m_ColorMRTs[i]);
}
}
RTHandle.Release(m_CameraFilteringBuffer);
RTHandle.Release(m_HTile);
}
public void PushGlobalParams(CommandBuffer cmd, DiffusionProfileSettings sssParameters, FrameSettings frameSettings)

// Combines specular lighting and diffuse lighting with subsurface scattering.
public void SubsurfaceScatteringPass(HDCamera hdCamera, CommandBuffer cmd, DiffusionProfileSettings sssParameters, FrameSettings frameSettings,
RenderTargetIdentifier colorBufferRT, RenderTargetIdentifier diffuseBufferRT, RenderTargetIdentifier depthStencilBufferRT, RenderTargetIdentifier depthTextureRT)
RTHandle colorBufferRT, RTHandle diffuseBufferRT, RTHandle depthStencilBufferRT, RTHandle depthTextureRT)
{
if (sssParameters == null || !frameSettings.enableSubsurfaceScattering)
return;

// For Jimenez we always need an extra buffer; for Disney it depends on the platform
if (ShaderConfig.k_UseDisneySSS == 0 || NeedTemporarySubsurfaceBuffer())
{
// Caution: must be same format as m_CameraSssDiffuseLightingBuffer
cmd.ReleaseTemporaryRT(m_CameraFilteringBuffer);
if (frameSettings.enableMSAA)
CoreUtils.CreateCmdTemporaryRT(cmd, m_CameraFilteringBuffer, hdCamera.renderTextureDesc, 0, FilterMode.Point, RenderTextureFormat.RGB111110Float, RenderTextureReadWrite.Linear); // no UAV
else
CoreUtils.CreateCmdTemporaryRT(cmd, m_CameraFilteringBuffer, hdCamera.renderTextureDesc, 0, FilterMode.Point, RenderTextureFormat.RGB111110Float, RenderTextureReadWrite.Linear, 1, true); // Enable UAV
CoreUtils.SetRenderTarget(cmd, m_CameraFilteringBufferRT, ClearFlag.Color, CoreUtils.clearColorAllBlack);
HDUtils.SetRenderTarget(cmd, hdCamera, m_CameraFilteringBuffer, ClearFlag.Color, CoreUtils.clearColorAllBlack);
}
}

{
// Currently, Unity does not offer a way to access the GCN HTile even on PS4 and Xbox One.
// Therefore, it's computed in a pixel shader, and optimized to only contain the SSS bit.
CoreUtils.SetRenderTarget(cmd, m_HTileRT, ClearFlag.Color, CoreUtils.clearColorAllBlack);
HDUtils.SetRenderTarget(cmd, hdCamera, m_HTile, ClearFlag.Color, CoreUtils.clearColorAllBlack);
CoreUtils.SetRenderTarget(cmd, depthStencilBufferRT); // No need for color buffer here
HDUtils.SetRenderTarget(cmd, hdCamera, depthStencilBufferRT); // No need for color buffer here
CoreUtils.DrawFullScreen(cmd, m_CopyStencilForSplitLighting, null, 2);
cmd.ClearRandomWriteTargets();
}

cmd.SetComputeVectorArrayParam(m_SubsurfaceScatteringCS, HDShaderIDs._ShapeParams, sssParameters.shapeParams);
cmd.SetComputeTextureParam(m_SubsurfaceScatteringCS, m_SubsurfaceScatteringKernel, HDShaderIDs._DepthTexture, depthTextureRT);
cmd.SetComputeTextureParam(m_SubsurfaceScatteringCS, m_SubsurfaceScatteringKernel, HDShaderIDs._SSSHTile, m_HTileRT);
cmd.SetComputeTextureParam(m_SubsurfaceScatteringCS, m_SubsurfaceScatteringKernel, HDShaderIDs._SSSHTile, m_HTile);
cmd.SetComputeTextureParam(m_SubsurfaceScatteringCS, m_SubsurfaceScatteringKernel, HDShaderIDs._SSSBufferTexture[i], GetSSSBuffers(i));
cmd.SetComputeTextureParam(m_SubsurfaceScatteringCS, m_SubsurfaceScatteringKernel, HDShaderIDs._SSSBufferTexture[i], GetSSSBuffer(i));
cmd.SetComputeTextureParam(m_SubsurfaceScatteringCS, m_SubsurfaceScatteringKernel, HDShaderIDs._CameraFilteringBuffer, m_CameraFilteringBufferRT);
cmd.SetComputeTextureParam(m_SubsurfaceScatteringCS, m_SubsurfaceScatteringKernel, HDShaderIDs._CameraFilteringBuffer, m_CameraFilteringBuffer);
cmd.SetGlobalTexture(HDShaderIDs._IrradianceSource, m_CameraFilteringBufferRT); // Cannot set a RT on a material
cmd.SetGlobalTexture(HDShaderIDs._IrradianceSource, m_CameraFilteringBuffer); // Cannot set a RT on a material
// Additively blend diffuse and specular lighting into 'm_CameraColorBufferRT'.
CoreUtils.DrawFullScreen(cmd, m_CombineLightingPass, colorBufferRT, depthStencilBufferRT);

{
for (int i = 0; i < sssBufferCount; ++i)
{
cmd.SetGlobalTexture(HDShaderIDs._SSSBufferTexture[i], GetSSSBuffers(i));
cmd.SetGlobalTexture(HDShaderIDs._SSSBufferTexture[i], GetSSSBuffer(i));
}
cmd.SetGlobalTexture(HDShaderIDs._IrradianceSource, diffuseBufferRT); // Cannot set a RT on a material

CoreUtils.DrawFullScreen(cmd, m_SssVerticalFilterPass, m_CameraFilteringBufferRT, depthStencilBufferRT);
CoreUtils.DrawFullScreen(cmd, m_SssVerticalFilterPass, m_CameraFilteringBuffer, depthStencilBufferRT);
cmd.SetGlobalTexture(HDShaderIDs._IrradianceSource, m_CameraFilteringBufferRT); // Cannot set a RT on a material
cmd.SetGlobalTexture(HDShaderIDs._IrradianceSource, m_CameraFilteringBuffer); // Cannot set a RT on a material
m_SssHorizontalFilterAndCombinePass.SetVectorArray(HDShaderIDs._FilterKernelsBasic, sssParameters.filterKernelsBasic);
m_SssHorizontalFilterAndCombinePass.SetVectorArray(HDShaderIDs._HalfRcpWeightedVariances, sssParameters.halfRcpWeightedVariances);
// Perform the horizontal SSS filtering pass, and combine diffuse and specular lighting into 'm_CameraColorBufferRT'.

25
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/Unlit/Unlit.shader


HLSLPROGRAM
#define SHADERPASS SHADERPASS_DEPTH_ONLY
#include "../../ShaderVariables.hlsl"
#include "../../Material/Material.hlsl"
#include "ShaderPass/UnlitDepthPass.hlsl"
#include "UnlitData.hlsl"

HLSLPROGRAM
#define SHADERPASS SHADERPASS_FORWARD_UNLIT
#include "../../Material/Material.hlsl"
#include "ShaderPass/UnlitSharePass.hlsl"
#include "UnlitData.hlsl"
#include "../../ShaderPass/ShaderPassForwardUnlit.hlsl"
#pragma multi_compile _ DEBUG_DISPLAY
ENDHLSL
}
#ifdef DEBUG_DISPLAY
#include "../../Debug/DebugDisplay.hlsl"
#endif
Pass
{
Name "ForwardDebugDisplay"
Tags { "LightMode" = "ForwardOnlyDebugDisplay" }
Blend [_SrcBlend] [_DstBlend]
ZWrite [_ZWrite]
Cull [_CullMode]
HLSLPROGRAM
#define DEBUG_DISPLAY
#include "../../Debug/DebugDisplay.hlsl"
#include "../../Material/Material.hlsl"
#include "ShaderPass/UnlitSharePass.hlsl"
#include "UnlitData.hlsl"

39
ScriptableRenderPipeline/HDRenderPipeline/HDRP/RenderPipeline/FrameSettings.cs


frameSettings.enablePostprocess = this.enablePostprocess;
frameSettings.enableStereo = this.enableStereo;
frameSettings.enableForwardRenderingOnly = this.enableForwardRenderingOnly;
frameSettings.enableOpaqueObjects = this.enableOpaqueObjects;
frameSettings.enableTransparentObjects = this.enableTransparentObjects;

// We have to fall back to forward-only rendering when the scene view is using wireframe rendering mode,
// as rendering everything in wireframe + deferred does not play well together
aggregate.enableForwardRenderingOnly = srcFrameSettings.enableForwardRenderingOnly || GL.wireframe;
aggregate.enableForwardRenderingOnly = srcFrameSettings.enableForwardRenderingOnly || GL.wireframe || renderPipelineSettings.supportForwardOnly;
aggregate.enableMotionVectors = camera.cameraType != CameraType.Reflection && srcFrameSettings.enableMotionVectors;
aggregate.enableObjectMotionVectors = camera.cameraType != CameraType.Reflection && srcFrameSettings.enableObjectMotionVectors;
aggregate.enableMotionVectors = camera.cameraType != CameraType.Reflection && srcFrameSettings.enableMotionVectors && renderPipelineSettings.supportMotionVectors;
aggregate.enableObjectMotionVectors = camera.cameraType != CameraType.Reflection && srcFrameSettings.enableObjectMotionVectors && renderPipelineSettings.supportMotionVectors;
aggregate.enableDBuffer = srcFrameSettings.enableDBuffer && renderPipelineSettings.supportDBuffer;
aggregate.enableAtmosphericScattering = srcFrameSettings.enableAtmosphericScattering;
aggregate.enableRoughRefraction = srcFrameSettings.enableRoughRefraction;

// Planar and real-time cubemap reflections don't need post-processing and render in FP16
aggregate.enablePostprocess = camera.cameraType != CameraType.Reflection && srcFrameSettings.enablePostprocess;
aggregate.enableStereo = camera.cameraType != CameraType.Reflection && srcFrameSettings.enableStereo && XRSettings.isDeviceActive && (camera.stereoTargetEye == StereoTargetEyeMask.Both);
// Force forward if we request stereo. TODO: We should not enforce that; users should be able to choose deferred
aggregate.enableForwardRenderingOnly = aggregate.enableForwardRenderingOnly || aggregate.enableStereo;
aggregate.enableStereo = camera.cameraType != CameraType.Reflection && srcFrameSettings.enableStereo && XRSettings.isDeviceActive && (camera.stereoTargetEye == StereoTargetEyeMask.Both) && renderPipelineSettings.supportStereo;
aggregate.enableAsyncCompute = srcFrameSettings.enableAsyncCompute && SystemInfo.supportsAsyncCompute;

aggregate.enableMSAA = srcFrameSettings.enableMSAA && ((int)renderPipelineSettings.msaaSampleCount > 1);
aggregate.msaaSampleCount = aggregate.enableMSAA ? renderPipelineSettings.msaaSampleCount : MSAASamples.None;
aggregate.enableMSAA = srcFrameSettings.enableMSAA && renderPipelineSettings.supportMSAA;
aggregate.enableShadowMask = srcFrameSettings.enableShadowMask && renderPipelineSettings.supportShadowMask;
aggregate.ConfigureStereoDependentSettings();
if (camera.cameraType == CameraType.Preview)
{

enableSSR = false;
enableSubsurfaceScattering = false;
enableTransparentObjects = false; // waiting on depth pyramid generation
}
}
public void ConfigureStereoDependentSettings()
{
if (enableStereo)
{
// Force forward if we request stereo. TODO: We should not enforce that; users should be able to choose deferred
enableForwardRenderingOnly = true;
// TODO: The work will be implemented piecemeal to support all passes
enableMotionVectors = false;
enableDBuffer = false;
enableDistortion = false;
enablePostprocess = false;
enableRoughRefraction = false;
enableSSAO = false;
enableSSR = false;
enableSubsurfaceScattering = false;
enableTransparentObjects = false;
}
}

24
ScriptableRenderPipeline/HDRenderPipeline/HDRP/RenderPipeline/RenderPipelineSettings.cs


// RenderPipelineSettings defines settings that can't be changed at runtime. It is equivalent to Unity's GraphicsSettings (tiers + shader variant removal).
// This allows resources for a given feature to be allocated or not.
// FrameSettings controls what is enabled or not within a frame (enableShadow, enableStereo, enableDistortion...).
// HDRenderPipelineAsset reference the current RenderPipelineSettings use, there is one per supported platform(Currently this feature is not implemented and only one GlobalFrameSettings is available).
// A Camera with HDAdditionalData have one FrameSettings that configure how it will render.For example a camera use for reflection will disable distortion and postprocess.
// Additionally on a Camera there is another FrameSettings call ActiveFrameSettings that is created on the fly based on FrameSettings and allow modification for debugging purpose at runtime without being serialized on disk.
// The ActiveFrameSettings is register in the debug windows at the creation of the camera.
// A Camera with HDAdditionalData have a RenderPath that define if it use a "Default" FrameSettings, a preset of FrameSettings or a custom one.
// HDRenderPipelineAsset contain a "Default" FrameSettings that can be reference by any camera with RenderPath.Defaut or when the camera don't have HDAdditionalData like the camera of the Editor.
// It also contain a DefaultActiveFrameSettings
// HDRenderPipelineAsset reference the current RenderPipelineSettings used, there is one per supported platform(Currently this feature is not implemented and only one GlobalFrameSettings is available).
// A Camera with HDAdditionalData has one FrameSettings that configures how it will render. For example a camera used for reflection will disable distortion and post-process.
// Additionally, on a Camera there is another FrameSettings called ActiveFrameSettings that is created on the fly based on FrameSettings and allows modifications for debugging purpose at runtime without being serialized on disk.
// The ActiveFrameSettings is registered in the debug windows at the creation of the camera.
// A Camera with HDAdditionalData has a RenderPath that defines if it uses a "Default" FrameSettings, a preset of FrameSettings or a custom one.
// HDRenderPipelineAsset contains a "Default" FrameSettings that can be referenced by any camera with RenderPath.Defaut or when the camera doesn't have HDAdditionalData like the camera of the Editor.
// It also contains a DefaultActiveFrameSettings
// RenderPipelineSettings represent settings that are immutable at runtime.
// RenderPipelineSettings represents settings that are immutable at runtime.
// There is a dedicated RenderPipelineSettings for each platform
[Serializable]
public class RenderPipelineSettings

public bool supportSSR = true;
public bool supportSSAO = true;
public bool supportSubsurfaceScattering = true;
public bool supportForwardOnly = false;
public bool supportDBuffer = false;
public bool supportMSAA = false;
public MSAASamples msaaSampleCount = MSAASamples.None;
public bool supportMotionVectors = true;
public bool supportStereo = false;
public GlobalLightLoopSettings lightLoopSettings = new GlobalLightLoopSettings();
public ShadowInitParameters shadowInitParams = new ShadowInitParameters();
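To make the comment block above concrete, here is a minimal sketch of the resolution order it describes: a camera either uses its own custom FrameSettings or falls back to the asset's "Default" FrameSettings. The enum and helper names are illustrative assumptions, not the actual HDRP API; FrameSettings refers to the class shown earlier in this commit.

using UnityEngine.Experimental.Rendering.HDPipeline;

public enum RenderPathSketch { Default, Custom } // illustrative stand-in for the RenderPath mentioned above

public static class FrameSettingsResolutionSketch
{
    public static FrameSettings Resolve(
        RenderPathSketch path,
        FrameSettings assetDefault,   // HDRenderPipelineAsset's "Default" FrameSettings
        FrameSettings cameraCustom)   // per-camera FrameSettings from HDAdditionalData (may be null)
    {
        // Cameras without HDAdditionalData, or using the "Default" path, fall back to the asset defaults.
        return (path == RenderPathSketch.Custom && cameraCustom != null) ? cameraCustom : assetDefault;
    }
}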

8
ScriptableRenderPipeline/HDRenderPipeline/HDRP/RenderPipelineResources/Blit.shader


TEXTURE2D(_BlitTexture);
SamplerState sampler_PointClamp;
SamplerState sampler_LinearClamp;
uniform float4 _BlitScaleBias;
uniform float _BlitMipLevel;
struct Attributes
{

{
Varyings output;
output.positionCS = GetFullScreenTriangleVertexPosition(input.vertexID);
output.texcoord = GetFullScreenTriangleTexCoord(input.vertexID) * _BlitScaleBias.xy + _BlitScaleBias.zw;
return SAMPLE_TEXTURE2D_LOD(_BlitTexture, sampler_PointClamp, input.texcoord, _BlitMipLevel);
return SAMPLE_TEXTURE2D_LOD(_BlitTexture, sampler_LinearClamp, input.texcoord, _BlitMipLevel);
}
ENDHLSL
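For context, _BlitScaleBias and _BlitMipLevel are driven from the C# side. Below is a minimal, illustrative sketch of such a driver; HDRP wraps this kind of call in helpers such as HDUtils.BlitCameraTexture (used later in this commit), and the material setup and pass index here are assumptions.

using UnityEngine;
using UnityEngine.Rendering;

static class BlitExampleSketch
{
    static readonly int _BlitTexture   = Shader.PropertyToID("_BlitTexture");
    static readonly int _BlitScaleBias = Shader.PropertyToID("_BlitScaleBias");
    static readonly int _BlitMipLevel  = Shader.PropertyToID("_BlitMipLevel");

    // Draw a fullscreen triangle with the blit shader above, sampling a sub-rect of 'source' at 'mipLevel'.
    public static void Blit(CommandBuffer cmd, Material blitMaterial, Texture source,
                            RenderTargetIdentifier destination, Vector2 scale, Vector2 bias, float mipLevel)
    {
        var props = new MaterialPropertyBlock();
        props.SetTexture(_BlitTexture, source);
        // xy = scale applied to the fullscreen-triangle UVs, zw = bias: together they select the source sub-rect.
        props.SetVector(_BlitScaleBias, new Vector4(scale.x, scale.y, bias.x, bias.y));
        props.SetFloat(_BlitMipLevel, mipLevel);

        cmd.SetRenderTarget(destination);
        // 3 vertices, no vertex buffer: the vertex shader builds positions/UVs from SV_VertexID.
        cmd.DrawProcedural(Matrix4x4.identity, blitMaterial, 0 /* pass index: assumption */, MeshTopology.Triangles, 3, 1, props);
    }
}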

251
ScriptableRenderPipeline/HDRenderPipeline/HDRP/RenderPipelineResources/BufferPyramid.cs


using System.Collections.Generic;
using UnityEngine.Rendering;
namespace UnityEngine.Experimental.Rendering.HDPipeline
{

GPUCopy m_GPUCopy;
ComputeShader m_ColorPyramidCS;
RenderTextureDescriptor m_ColorRenderTextureDescriptor;
int[] m_ColorPyramidMips = new int[0];
RTHandle m_ColorPyramidBuffer;
List<RTHandle> m_ColorPyramidMips = new List<RTHandle>();
RenderTextureDescriptor m_DepthRenderTextureDescriptor;
int[] m_DepthPyramidMips = new int[0];
RTHandle m_DepthPyramidBuffer;
List<RTHandle> m_DepthPyramidMips = new List<RTHandle>();
public RenderTextureDescriptor colorRenderTextureDescriptor { get { return m_ColorRenderTextureDescriptor; } }
public int colorUsedMipMapCount { get { return Mathf.Min(colorBufferMipMapCount, m_ColorPyramidMips.Length); } }
public int colorBufferMipMapCount
public RTHandle colorPyramid { get { return m_ColorPyramidBuffer; } }
public RTHandle depthPyramid { get { return m_DepthPyramidBuffer; } }
public BufferPyramid(
ComputeShader colorPyramidCS,
ComputeShader depthPyramidCS, GPUCopy gpuCopy)
{
m_ColorPyramidCS = colorPyramidCS;
m_ColorPyramidKernel = m_ColorPyramidCS.FindKernel("KMain");
m_DepthPyramidCS = depthPyramidCS;
m_GPUCopy = gpuCopy;
m_DepthPyramidKernel_8 = m_DepthPyramidCS.FindKernel("KMain_8");
m_DepthPyramidKernel_1 = m_DepthPyramidCS.FindKernel("KMain_1");
}
float GetXRscale()
{
// for stereo double-wide, each half of the texture will represent a single eye's pyramid
float scale = 1.0f;
//if (m_Asset.renderPipelineSettings.supportsStereo && (desc.dimension != TextureDimension.Tex2DArray))
// scale = 2.0f; // double-wide
return scale;
}
public void CreateBuffers()
get
m_ColorPyramidBuffer = RTHandle.Alloc(size => CalculatePyramidSize(size), filterMode: FilterMode.Trilinear, colorFormat: RenderTextureFormat.ARGBHalf, sRGB: false, useMipMap: true, autoGenerateMips: false, name: "ColorPymarid");
m_DepthPyramidBuffer = RTHandle.Alloc(size => CalculatePyramidSize(size), filterMode: FilterMode.Trilinear, colorFormat: RenderTextureFormat.RFloat, sRGB: false, useMipMap: true, autoGenerateMips: false, enableRandomWrite: true, name: "DepthPyramid"); // Need randomReadWrite because we downsample the first mip with a compute shader.
}
public void DestroyBuffers()
{
RTHandle.Release(m_ColorPyramidBuffer);
RTHandle.Release(m_DepthPyramidBuffer);
foreach (var rth in m_ColorPyramidMips)
{
RTHandle.Release(rth);
}
foreach (var rth in m_DepthPyramidMips)
var minSize = Mathf.Min(colorRenderTextureDescriptor.width, colorRenderTextureDescriptor.height);
return Mathf.FloorToInt(Mathf.Log(minSize, 2f));
RTHandle.Release(rth);
public RenderTextureDescriptor depthRenderTextureDescriptor { get { return m_DepthRenderTextureDescriptor; } }
public int depthUsedMipMapCount { get { return Mathf.Min(depthBufferMipMapCount, m_DepthPyramidMips.Length); } }
public int depthBufferMipMapCount
public int GetPyramidLodCount(HDCamera camera)
get
var minSize = Mathf.Min(camera.actualWidth, camera.actualHeight);
return Mathf.FloorToInt(Mathf.Log(minSize, 2f));
}
Vector2Int CalculatePyramidMipSize(Vector2Int baseMipSize, int mipIndex)
{
return new Vector2Int(baseMipSize.x >> mipIndex, baseMipSize.y >> mipIndex);
}
Vector2Int CalculatePyramidSize(Vector2Int size)
{
// Instead of using the screen size, we round up to the next power of 2 because currently some platforms don't support NPOT Render Texture with mip maps (PS4 for example)
// Then we render in a Screen Sized viewport.
// Note that even if PS4 supported NPOT mips, the buffers would be padded to the next power of 2 anyway (TODO: check with other platforms...)
int pyramidSize = (int)Mathf.NextPowerOfTwo(Mathf.Max(size.x, size.y));
return new Vector2Int((int)(pyramidSize * GetXRscale()), pyramidSize);
}
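// Worked example (illustrative): for a 1920x1080 camera, NextPowerOfTwo(max(1920, 1080)) = 2048,
// so the pyramid render texture is allocated at 2048x2048 while rendering uses a 1920x1080 viewport
// inside it; GetPyramidToScreenScale() below then returns roughly (1920/2048, 1080/2048) = (0.9375, 0.527).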
void UpdatePyramidMips(HDCamera camera, RenderTextureFormat format, List<RTHandle> mipList, int lodCount)
{
int currentLodCount = mipList.Count;
if (lodCount > currentLodCount)
var minSize = Mathf.Min(depthRenderTextureDescriptor.width, depthRenderTextureDescriptor.height);
return Mathf.FloorToInt(Mathf.Log(minSize, 2f));
for (int i = currentLodCount; i < lodCount; ++i)
{
int mipIndexCopy = i + 1; // Don't remove this copy! It's important for the value to be correctly captured by the lambda.
RTHandle newMip = RTHandle.Alloc(size => CalculatePyramidMipSize(CalculatePyramidSize(size), mipIndexCopy), colorFormat: format, sRGB: false, enableRandomWrite: true, useMipMap: false, filterMode: FilterMode.Bilinear, name: string.Format("PyramidMip{0}", i));
mipList.Add(newMip);
}
public Vector2 GetPyramidToScreenScale(HDCamera camera)
{
return new Vector2((float)camera.actualWidth / m_DepthPyramidBuffer.rt.width, (float)camera.actualHeight / m_DepthPyramidBuffer.rt.height);
}
public void RenderDepthPyramid(

RenderTargetIdentifier depthTexture,
RenderTargetIdentifier targetTexture)
RTHandle depthTexture)
var depthPyramidDesc = m_DepthRenderTextureDescriptor;
int lodCount = GetPyramidLodCount(hdCamera);
UpdatePyramidMips(hdCamera, m_DepthPyramidBuffer.rt.format, m_DepthPyramidMips, lodCount);
var lodCount = depthBufferMipMapCount;
if (lodCount > m_DepthPyramidMips.Length)
{
Debug.LogWarningFormat("Cannot compute all mipmaps of the depth pyramid, max texture size supported: {0}", (2 << m_DepthPyramidMips.Length).ToString());
lodCount = m_DepthPyramidMips.Length;
}
cmd.SetGlobalVector(HDShaderIDs._DepthPyramidMipSize, new Vector4(hdCamera.actualWidth, hdCamera.actualHeight, lodCount, 0.0f));
cmd.ReleaseTemporaryRT(m_DepthPyramidMips[0]);
m_GPUCopy.SampleCopyChannel_xyzw2x(cmd, depthTexture, m_DepthPyramidBuffer, new Vector2(hdCamera.actualWidth, hdCamera.actualHeight));
depthPyramidDesc.sRGB = false;
depthPyramidDesc.enableRandomWrite = true;
depthPyramidDesc.useMipMap = false;
Vector2 scale = GetPyramidToScreenScale(hdCamera);
cmd.GetTemporaryRT(m_DepthPyramidMips[0], depthPyramidDesc, FilterMode.Bilinear);
m_GPUCopy.SampleCopyChannel_xyzw2x(cmd, depthTexture, m_DepthPyramidMips[0], new Vector2(depthPyramidDesc.width, depthPyramidDesc.height));
cmd.CopyTexture(m_DepthPyramidMips[0], 0, 0, targetTexture, 0, 0);
RTHandle src = m_DepthPyramidBuffer;
var srcMipWidth = depthPyramidDesc.width;
var srcMipHeight = depthPyramidDesc.height;
depthPyramidDesc.width = srcMipWidth >> 1;
depthPyramidDesc.height = srcMipHeight >> 1;
RTHandle dest = m_DepthPyramidMips[i];
var srcMipWidth = hdCamera.actualWidth >> i;
var srcMipHeight = hdCamera.actualHeight >> i;
var dstMipWidth = srcMipWidth >> 1;
var dstMipHeight = srcMipHeight >> 1;
if (depthPyramidDesc.width < 4 * k_DepthBlockSize
|| depthPyramidDesc.height < 4 * k_DepthBlockSize)
if (dstMipWidth < 4 * k_DepthBlockSize
|| dstMipHeight < 4 * k_DepthBlockSize)
cmd.ReleaseTemporaryRT(m_DepthPyramidMips[i + 1]);
cmd.GetTemporaryRT(m_DepthPyramidMips[i + 1], depthPyramidDesc, FilterMode.Bilinear);
cmd.SetComputeTextureParam(m_DepthPyramidCS, kernel, _Source, m_DepthPyramidMips[i]);
cmd.SetComputeTextureParam(m_DepthPyramidCS, kernel, _Result, m_DepthPyramidMips[i + 1]);
cmd.SetComputeVectorParam(m_DepthPyramidCS, _SrcSize, new Vector4(srcMipWidth, srcMipHeight, 1f / srcMipWidth, 1f / srcMipHeight));
cmd.SetComputeTextureParam(m_DepthPyramidCS, kernel, _Source, src);
cmd.SetComputeTextureParam(m_DepthPyramidCS, kernel, _Result, dest);
cmd.SetComputeVectorParam(m_DepthPyramidCS, _SrcSize, new Vector4(srcMipWidth, srcMipHeight, (1.0f / srcMipWidth) * scale.x, (1.0f / srcMipHeight) * scale.y));
Mathf.CeilToInt(depthPyramidDesc.width / kernelBlockSize),
Mathf.CeilToInt(depthPyramidDesc.height / kernelBlockSize),
Mathf.CeilToInt(dstMipWidth / kernelBlockSize),
Mathf.CeilToInt(dstMipHeight / kernelBlockSize),
cmd.CopyTexture(m_DepthPyramidMips[i + 1], 0, 0, targetTexture, 0, i + 1);
// If we could bind texture mips as UAVs we could avoid this copy... (which, moreover, copies more than the needed viewport when not rendering fullscreen)
cmd.CopyTexture(m_DepthPyramidMips[i], 0, 0, 0, 0, dstMipWidth, dstMipHeight, m_DepthPyramidBuffer, 0, i + 1, 0, 0);
src = dest;
for (int i = 0; i < lodCount + 1; i++)
cmd.ReleaseTemporaryRT(m_DepthPyramidMips[i]);
cmd.SetGlobalTexture(HDShaderIDs._PyramidDepthTexture, m_DepthPyramidBuffer);
}
public void RenderColorPyramid(

RenderTargetIdentifier colorTexture,
RenderTargetIdentifier targetTexture)
RTHandle colorTexture)
var colorPyramidDesc = colorRenderTextureDescriptor;
int lodCount = GetPyramidLodCount(hdCamera);
UpdatePyramidMips(hdCamera, m_ColorPyramidBuffer.rt.format, m_ColorPyramidMips, lodCount);
var lodCount = colorBufferMipMapCount;
if (lodCount > m_ColorPyramidMips.Length)
{
Debug.LogWarningFormat("Cannot compute all mipmaps of the color pyramid, max texture size supported: {0}", (2 << m_ColorPyramidMips.Length).ToString());
lodCount = m_ColorPyramidMips.Length;
}
Vector2 scale = GetPyramidToScreenScale(hdCamera);
cmd.SetGlobalVector(HDShaderIDs._GaussianPyramidColorMipSize, new Vector4(scale.x, scale.y, lodCount, 0.0f));
cmd.CopyTexture(colorTexture, 0, 0, targetTexture, 0, 0);
var last = colorTexture;
// Here we blit a "camera space" texture into a square texture but we want to keep the original viewport.
// Other BlitCameraTexture version will setup the viewport based on the destination RT scale (square here) so we need override it here.
HDUtils.BlitCameraTexture(cmd, hdCamera, colorTexture, m_ColorPyramidBuffer, new Rect(0.0f, 0.0f, hdCamera.actualWidth, hdCamera.actualHeight));
colorPyramidDesc.sRGB = false;
colorPyramidDesc.enableRandomWrite = true;
colorPyramidDesc.useMipMap = false;
RTHandle src = m_ColorPyramidBuffer;
colorPyramidDesc.width = colorPyramidDesc.width >> 1;
colorPyramidDesc.height = colorPyramidDesc.height >> 1;
RTHandle dest = m_ColorPyramidMips[i];
var srcMipWidth = hdCamera.actualWidth >> i;
var srcMipHeight = hdCamera.actualHeight >> i;
var dstMipWidth = srcMipWidth >> 1;
var dstMipHeight = srcMipHeight >> 1;
cmd.ReleaseTemporaryRT(m_ColorPyramidMips[i + 1]);
cmd.GetTemporaryRT(m_ColorPyramidMips[i + 1], colorPyramidDesc, FilterMode.Bilinear);
cmd.SetComputeTextureParam(m_ColorPyramidCS, m_ColorPyramidKernel, _Source, last);
cmd.SetComputeTextureParam(m_ColorPyramidCS, m_ColorPyramidKernel, _Result, m_ColorPyramidMips[i + 1]);
cmd.SetComputeVectorParam(m_ColorPyramidCS, _Size, new Vector4(colorPyramidDesc.width, colorPyramidDesc.height, 1f / colorPyramidDesc.width, 1f / colorPyramidDesc.height));
cmd.SetComputeTextureParam(m_ColorPyramidCS, m_ColorPyramidKernel, _Source, src);
cmd.SetComputeTextureParam(m_ColorPyramidCS, m_ColorPyramidKernel, _Result, dest);
// _Size is used as a scale inside the whole render target, so we need to pass the full RT size here (not the camera-dependent scaled size).
cmd.SetComputeVectorParam(m_ColorPyramidCS, _Size, new Vector4(dest.rt.width, dest.rt.height, 1f / dest.rt.width, 1f / dest.rt.height));
Mathf.CeilToInt(colorPyramidDesc.width / 8f),
Mathf.CeilToInt(colorPyramidDesc.height / 8f),
Mathf.CeilToInt(dstMipWidth / 8f),
Mathf.CeilToInt(dstMipHeight / 8f),
cmd.CopyTexture(m_ColorPyramidMips[i + 1], 0, 0, targetTexture, 0, i + 1);
// If we could bind texture mips as UAVs we could avoid this copy... (which, moreover, copies more than the needed viewport when not rendering fullscreen)
cmd.CopyTexture(m_ColorPyramidMips[i], 0, 0, 0, 0, dstMipWidth, dstMipHeight, m_ColorPyramidBuffer, 0, i + 1, 0, 0);
last = m_ColorPyramidMips[i + 1];
src = dest;
for (int i = 0; i < lodCount; i++)
cmd.ReleaseTemporaryRT(m_ColorPyramidMips[i + 1]);
}
public void Initialize(HDCamera hdCamera, bool enableStereo)
{
var colorDesc = CalculateRenderTextureDescriptor(hdCamera, enableStereo);
colorDesc.colorFormat = RenderTextureFormat.ARGBHalf;
m_ColorRenderTextureDescriptor = colorDesc;
var depthDesc = CalculateRenderTextureDescriptor(hdCamera, enableStereo);
depthDesc.colorFormat = RenderTextureFormat.RFloat;
m_DepthRenderTextureDescriptor = depthDesc;
}
public static RenderTextureDescriptor CalculateRenderTextureDescriptor(HDCamera hdCamera, bool enableStereo)
{
var desc = hdCamera.renderTextureDesc;
desc.depthBufferBits = 0;
desc.useMipMap = true;
desc.autoGenerateMips = false;
desc.msaaSamples = 1; // These are approximation textures, they don't need MSAA
// for stereo double-wide, each half of the texture will represent a single eye's pyramid
//var widthModifier = 1;
//if (stereoEnabled && (desc.dimension != TextureDimension.Tex2DArray))
// widthModifier = 2; // double-wide
//desc.width = pyramidSize * widthModifier;
desc.width = (int)hdCamera.screenSize.x;
desc.height = (int)hdCamera.screenSize.y;
return desc;
cmd.SetGlobalTexture(HDShaderIDs._GaussianPyramidColorTexture, m_ColorPyramidBuffer);
}
}
}
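To close out this file, here is a minimal usage sketch of BufferPyramid based only on the members visible above. The surrounding render-loop hooks are invented for illustration, and the full Render*Pyramid parameter lists are not shown in this diff, so the signatures below are assumptions.

using UnityEngine;
using UnityEngine.Rendering;
using UnityEngine.Experimental.Rendering;
using UnityEngine.Experimental.Rendering.HDPipeline;

// Illustrative wiring only: HDRenderPipeline actually owns and drives this object.
public class BufferPyramidUsageSketch
{
    BufferPyramid m_Pyramid;

    public void Init(ComputeShader colorPyramidCS, ComputeShader depthPyramidCS, GPUCopy gpuCopy)
    {
        m_Pyramid = new BufferPyramid(colorPyramidCS, depthPyramidCS, gpuCopy);
        m_Pyramid.CreateBuffers(); // allocate the auto-scaled color/depth pyramid RTHandles once
    }

    public void RenderPyramids(HDCamera hdCamera, CommandBuffer cmd, RTHandle cameraColor, RTHandle cameraDepth)
    {
        // Parameter lists assumed from the bodies above (hdCamera and cmd are clearly used there).
        m_Pyramid.RenderDepthPyramid(hdCamera, cmd, cameraDepth);   // binds _PyramidDepthTexture
        m_Pyramid.RenderColorPyramid(hdCamera, cmd, cameraColor);   // binds _GaussianPyramidColorTexture
    }

    public void Cleanup()
    {
        m_Pyramid.DestroyBuffers();
    }
}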

2
ScriptableRenderPipeline/HDRenderPipeline/HDRP/RenderPipelineResources/HDRenderPipelineResources.asset


combineLighting: {fileID: 4800000, guid: 2e37131331fbdca449b1a2bc47a639ca, type: 3}
cameraMotionVectors: {fileID: 4800000, guid: 035941b63024d1943af48811c1db20d9, type: 3}
copyStencilBuffer: {fileID: 4800000, guid: 3d1574f1cdfa0ce4995f9bc79ed7f8ec, type: 3}
copyDepthBuffer: {fileID: 4800000, guid: 42dfcc8fe803ece4096c58630689982f, type: 3}
blitFlipMip: {fileID: 4800000, guid: ef092fc4aaa1bb546a9ab4e457c4b07a, type: 3}
blitCubemap: {fileID: 4800000, guid: d05913e251bed7a4992c921c62e1b647, type: 3}
buildProbabilityTables: {fileID: 7200000, guid: b9f26cf340afe9145a699753531b2a4c,
type: 3}

1
ScriptableRenderPipeline/HDRenderPipeline/HDRP/RenderPipelineResources/RenderPipelineResources.cs


// General
public Shader cameraMotionVectors;
public Shader copyStencilBuffer;
public Shader copyDepthBuffer;
public Shader blit;
// Sky

14
ScriptableRenderPipeline/HDRenderPipeline/HDRP/ShaderVariables.hlsl


// w = 1 + 1.0/height
float4 _ScreenParams;
// x = camera.pixelWidth / RTHandle.maxWidth
// y = camera.pixelHeight / RTHandle.maxHeight
float4 _ScreenToTargetScale;
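// Typical usage: multiplying viewport-normalized UVs by _ScreenToTargetScale.xy remaps them into the
// sub-rectangle of the auto-scaled RTHandle that the camera actually rendered to
// (see GetNormalizedFullScreenTriangleTexCoord in ShaderVariablesFunctions.hlsl).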
// Values used to linearize the Z buffer (http://www.humus.name/temp/Linearize%20depth.txt)
// x = 1-far/near
// y = far/near

float4 _ScreenSize; // {w, h, 1/w, 1/h}
float4 _FrustumPlanes[6]; // {(a, b, c) = N, d = -dot(N, P)} [L, R, T, B, N, F]
CBUFFER_END
// Custom generated by HDRP, not from Unity Engine (passed in via HDCamera)
#if defined(USING_STEREO_MATRICES)
CBUFFER_START(UnityPerPassStereo)
float4x4 _InvProjMatrixStereo[2];
float4x4 _InvViewProjMatrixStereo[2];
CBUFFER_END
#endif // USING_STEREO_MATRICES
float4x4 OptimizeProjectionMatrix(float4x4 M)
{

12
ScriptableRenderPipeline/HDRenderPipeline/HDRP/ShaderVariablesFunctions.hlsl


return normalize(mul((float3x3)GetObjectToWorldMatrix(), dirOS));
}
float3 TransformWorldToViewDir(float3 dirWS)
{
return mul((float3x3)GetWorldToViewMatrix(), dirWS).xyz;
}
float3 TransformWorldToObjectDir(float3 dirWS)
{
// Normalize to support uniform scaling

projMatrix = UNITY_MATRIX_P;
projMatrix._13_23_33_43 = -projMatrix._13_23_33_43;
}
// This method should be used when rendering any full screen quad onto an auto-scaling render target (see RTHandle/HDCamera).
// It accounts for the fact that the textures it samples do not necessarily use the full space of the render texture but only a partial viewport.
float2 GetNormalizedFullScreenTriangleTexCoord(uint vertexID)
{
return GetFullScreenTriangleTexCoord(vertexID) * _ScreenToTargetScale.xy;
}
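// Typical usage (illustrative): a fullscreen pass vertex shader can write
//     output.texcoord = GetNormalizedFullScreenTriangleTexCoord(input.vertexID);
// so that sampling an auto-scaled RTHandle stays within the camera's viewport instead of
// reading the unused padding of the over-allocated render target.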
#endif // UNITY_SHADER_VARIABLES_FUNCTIONS_INCLUDED

19
ScriptableRenderPipeline/HDRenderPipeline/HDRP/ShaderVariablesMatrixDefsHDCamera.hlsl


#ifndef UNITY_SHADER_VARIABLES_MATRIX_DEFS_HDCAMERA_INCLUDED
#define UNITY_SHADER_VARIABLES_MATRIX_DEFS_HDCAMERA_INCLUDED
#if defined(USING_STEREO_MATRICES)
#define UNITY_MATRIX_M unity_ObjectToWorld
#define UNITY_MATRIX_I_M unity_WorldToObject
#define UNITY_MATRIX_V unity_StereoMatrixV[unity_StereoEyeIndex]
#define UNITY_MATRIX_I_V unity_StereoMatrixInvV[unity_StereoEyeIndex]
#define UNITY_MATRIX_P OptimizeProjectionMatrix(unity_StereoMatrixP[unity_StereoEyeIndex])
#define UNITY_MATRIX_I_P _InvProjMatrixStereo[unity_StereoEyeIndex]
#define UNITY_MATRIX_VP unity_StereoMatrixVP[unity_StereoEyeIndex]
#define UNITY_MATRIX_I_VP _InvViewProjMatrixStereo[unity_StereoEyeIndex]
#else
#define UNITY_MATRIX_M unity_ObjectToWorld
#define UNITY_MATRIX_I_M unity_WorldToObject
#define UNITY_MATRIX_V _ViewMatrix

#define UNITY_MATRIX_VP _ViewProjMatrix
#define UNITY_MATRIX_I_VP _InvViewProjMatrix
#define UNITY_MATRIX_MV mul(UNITY_MATRIX_V, UNITY_MATRIX_M)
#define UNITY_MATRIX_T_MV transpose(UNITY_MATRIX_MV)
#define UNITY_MATRIX_IT_MV transpose(mul(UNITY_MATRIX_I_M, UNITY_MATRIX_I_V))
#define UNITY_MATRIX_MVP mul(UNITY_MATRIX_VP, UNITY_MATRIX_M)
#endif // USING_STEREO_MATRICES
#endif // UNITY_SHADER_VARIABLES_MATRIX_DEFS_HDCAMERA_INCLUDED

Some files were not shown because too many files changed in this diff.
