Evgenii Golubev, 6 years ago
Current commit
9b95d6ea
345 files changed in total, with 2192 insertions and 976 deletions
  1. .gitmodules (8 changes)
  2. CHANGELOG.md (10 changes)
  3. CHANGELOG.md.meta (2 changes)
  4. ImageTemplates/FailStamp.png.meta (9 changes)
  5. ImageTemplates/LightweightPipeline/Scenes/001_SimpleCube.unity.png.meta (9 changes)
  6. ImageTemplates/LightweightPipeline/Scenes/002_Camera_Clip.unity.png.meta (9 changes)
  7. ImageTemplates/LightweightPipeline/Scenes/003_Camera_Ortho.unity.png.meta (9 changes)
  8. ImageTemplates/LightweightPipeline/Scenes/004_Camera_TargetTexture.unity.png.meta (9 changes)
  9. ImageTemplates/LightweightPipeline/Scenes/005_LitBakedEmission.unity.png.meta (9 changes)
  10. ImageTemplates/LightweightPipeline/Scenes/006_LitShaderLightProbes.unity.png.meta (9 changes)
  11. ImageTemplates/LightweightPipeline/Scenes/007_LitShaderMaps.unity.png.meta (9 changes)
  12. ImageTemplates/LightweightPipeline/Scenes/008_LitShaderReflection.unity.png.meta (9 changes)
  13. ImageTemplates/LightweightPipeline/Scenes/009_LightweightShading.unity.png.meta (9 changes)
  14. ImageTemplates/LightweightPipeline/Scenes/010_MultiplePointLights.unity.png.meta (9 changes)
  15. ImageTemplates/LightweightPipeline/Scenes/011_UnlitSprites.unity.png.meta (9 changes)
  16. ImageTemplates/LightweightPipeline/Scenes/012_PBS_EnvironmentBRDF_Spheres.unity.png.meta (9 changes)
  17. ImageTemplates/LightweightPipeline/Scenes/016_Lighting_Scene_Directional.unity.png.meta (9 changes)
  18. ImageTemplates/LightweightPipeline/Scenes/017_Lighting_Scene_DirectionalBaked.unity.png.meta (9 changes)
  19. ImageTemplates/LightweightPipeline/Scenes/018_Lighting_Scene_DirectionalBakedIndirect.unity.png.meta (9 changes)
  20. ImageTemplates/LightweightPipeline/Scenes/019_Lighting_Scene_PointLights.unity.png.meta (9 changes)
  21. ImageTemplates/LightweightPipeline/Scenes/020_Lighting_BasicDirectional.unity.png.meta (9 changes)
  22. ImageTemplates/LightweightPipeline/Scenes/021_Lighting_BasicPoint.unity.png.meta (9 changes)
  23. ImageTemplates/LightweightPipeline/Scenes/022_Lighting_BasicSpot.unity.png.meta (9 changes)
  24. ImageTemplates/LightweightPipeline/Scenes/023_Lighting_Mixed.unity.png.meta (9 changes)
  25. ImageTemplates/LightweightPipeline/Scenes/024_Shader_PBRvalidation_Specular.unity.png.meta (9 changes)
  26. ImageTemplates/LightweightPipeline/Scenes/025_Shader_PBRvalidation_Metallic.unity.png.meta (9 changes)
  27. ImageTemplates/LightweightPipeline/Scenes/026_Shader_PBRscene.unity.png.meta (9 changes)
  28. ImageTemplates/LightweightPipeline/Scenes/027_PostProcessing.unity.png.meta (9 changes)
  29. ImageTemplates/LightweightPipeline/Scenes/028_PostProcessing_Custom.unity.png.meta (9 changes)
  30. ImageTemplates/LightweightPipeline/Scenes/029_Particles.unity.png.meta (9 changes)
  31. ImageTemplates/LightweightPipeline/Scenes/031_Shader_GlossyEnvironmentSky.unity.png.meta (9 changes)
  32. ImageTemplates/LightweightPipeline/Scenes/032_Shader_GlossyEnvironmentColor.unity.png.meta (9 changes)
  33. ImageTemplates/LightweightPipeline/Scenes/033_Shader_HighlightsEnvironmentGradientSH.unity.png.meta (9 changes)
  34. ImageTemplates/LightweightPipeline/Scenes/034_Shader_HighlightsEnvironmentGradientBaked.unity.png.meta (9 changes)
  35. ImageTemplates/LightweightPipeline/Scenes/035_Shader_TerrainShaders.unity.png.meta (9 changes)
  36. ImageTemplates/LightweightPipeline/Scenes/036_Lighting_Scene_DirectionalBakedDirectional.unity.png.meta (9 changes)
  37. ImageTemplates/LightweightPipeline/Scenes/037_Particles.unity.png.meta (9 changes)
  38. ImageTemplates/LightweightPipeline/Scenes/038_Lighting_DirectionalCookie.unity.png.meta (9 changes)
  39. ImageTemplates/LightweightPipeline/Scenes/039_Lighting_SpotCookie.unity.png.meta (9 changes)
  40. ImageTemplates/LightweightPipeline/Scenes/040_UpgradeScene.unity.png.meta (9 changes)
  41. ImageTemplates/LightweightPipeline/Scenes/041_Lighting_BasicArea.unity.png.meta (9 changes)
  42. ImageTemplates/LightweightPipeline/Scenes/042_Lighting_Scene_VertexLighting.unity.png.meta (9 changes)
  43. ImageTemplates/LightweightPipeline/Scenes/043_Lighting_Mixed_ShadowMask.unity.png.meta (9 changes)
  44. ImageTemplates/LightweightPipeline/Scenes/044_ReflectionProbe.unity.png.meta (9 changes)
  45. ScriptableRenderPipeline/Core/CoreRP/Editor/MaterialUpgrader.cs (51 changes)
  46. ScriptableRenderPipeline/Core/CoreRP/ShaderLibrary/BSDF.hlsl (125 changes)
  47. ScriptableRenderPipeline/Core/CoreRP/ShaderLibrary/Common.hlsl (8 changes)
  48. ScriptableRenderPipeline/Core/CoreRP/ShaderLibrary/EntityLighting.hlsl (4 changes)
  49. ScriptableRenderPipeline/Core/CoreRP/ShaderLibrary/ImageBasedLighting.hlsl (9 changes)
  50. ScriptableRenderPipeline/Core/CoreRP/ShaderLibrary/Wind.hlsl (8 changes)
  51. ScriptableRenderPipeline/Core/CoreRP/Textures/TextureCache.cs (2 changes)
  52. ScriptableRenderPipeline/Core/CoreRP/Textures/DepthBits.cs.meta (2 changes)
  53. ScriptableRenderPipeline/Core/CoreRP/Utilities/CoreUtils.cs (30 changes)
  54. ScriptableRenderPipeline/HDRenderPipeline/CHANGELOG.md (48 changes)
  55. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Camera/HDCamera.cs (51 changes)
  56. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Debug/DebugDisplay.cs (58 changes)
  57. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Debug/DebugDisplay.hlsl (1 change)
  58. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Debug/DebugFullScreen.shader (1 change)
  59. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Debug/LightingDebug.cs (15 changes)
  60. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Debug/LightingDebug.cs.hlsl (12 changes)
  61. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Debug/MaterialDebug.cs.hlsl (3 changes)
  62. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Decal/DecalProjectorComponent.cs (3 changes)
  63. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Decal/DecalSystem.cs (26 changes)
  64. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/Lighting/HDLightEditor.cs (31 changes)
  65. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/Material/Decal/DecalProjectorComponentEditor.cs (66 changes)
  66. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/Material/StackLit/StackLitUI.cs (265 changes)
  67. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/RenderLoopSettings/HDRenderPipelineUI.cs (2 changes)
  68. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/RenderLoopSettings/SerializedHDRenderPipelineAsset.cs (2 changes)
  69. ScriptableRenderPipeline/HDRenderPipeline/HDRP/HDCustomSamplerId.cs (5 changes)
  70. ScriptableRenderPipeline/HDRenderPipeline/HDRP/HDRenderPipeline.cs (210 changes)
  71. ScriptableRenderPipeline/HDRenderPipeline/HDRP/HDRenderPipelineAsset.cs (2 changes)
  72. ScriptableRenderPipeline/HDRenderPipeline/HDRP/HDStringConstants.cs (1 change)
  73. ScriptableRenderPipeline/HDRenderPipeline/HDRP/HDUtils.cs (38 changes)
  74. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Light/HDAdditionalLightData.cs (48 changes)
  75. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightDefinition.cs (1 change)
  76. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightDefinition.cs.hlsl (5 changes)
  77. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightEvaluation.hlsl (106 changes)
  78. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightLoop/LightLoop.cs (13 changes)
  79. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightLoop/LightLoopDef.hlsl (19 changes)
  80. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightUtils.cs (16 changes)
  81. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Reflection/VolumeProjection.hlsl (10 changes)
  82. ScriptableRenderPipeline/HDRenderPipeline/HDRP/MRTBufferManager.cs (8 changes)
  83. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/Decal/DBufferManager.cs (8 changes)
  84. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/Decal/Decal.cs.hlsl (18 changes)
  85. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/GBufferManager.cs (4 changes)
  86. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/Lit/Lit.cs (104 changes)
  87. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/Lit/Lit.hlsl (269 changes)
  88. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/StackLit/StackLit.cs (82 changes)
  89. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/StackLit/StackLit.cs.hlsl (92 changes)
  90. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/StackLit/StackLit.hlsl (335 changes)
  91. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/StackLit/StackLit.shader (42 changes)
  92. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/StackLit/StackLitData.hlsl (242 changes)
  93. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/StackLit/StackLitProperties.hlsl (37 changes)
  94. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/SubsurfaceScattering/SubsurfaceScatteringManager.cs (22 changes)
  95. ScriptableRenderPipeline/HDRenderPipeline/HDRP/RenderPipeline/FrameSettings.cs (5 changes)
  96. ScriptableRenderPipeline/HDRenderPipeline/HDRP/RenderPipelineResources/BufferPyramid.cs (120 changes)
  97. ScriptableRenderPipeline/HDRenderPipeline/HDRP/RenderPipelineResources/BufferPyramidProcessor.cs (27 changes)
  98. ScriptableRenderPipeline/HDRenderPipeline/HDRP/RenderPipelineResources/CopyStencilBuffer.shader (3 changes)
  99. ScriptableRenderPipeline/HDRenderPipeline/HDRP/ShaderVariables.hlsl (22 changes)
  100. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Sky/SkyManager.cs (42 changes)

8
.gitmodules


[submodule "ShaderGraph"]
path = ShaderGraph
url = https://github.com/Unity-Technologies/ShaderGraph
[submodule "Tests/UTF_Core"]
path = Tests/UTF_Core
url=https://github.com/Unity-Technologies/UTF_Core.git
[submodule "Tests/UTF_Tests_HDRP"]
path = Tests/UTF_Tests_HDRP
url=https://github.com/Unity-Technologies/UTF_Tests_HDRP.git

10
CHANGELOG.md


and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html).
## [Unreleased]
### Added
- Planar Reflection Probe support roughness (gaussian convolution of captured probe)
- Screen Space Refraction projection model (Proxy raycasting, HiZ raymarching)
- Screen Space Refraction settings as volume component
### Changed
- Depth and color pyramid are properly computed and sampled when the camera renders inside a viewport of a RTHandle.
- Forced Planar Probe update modes to (Realtime, Every Update, Mirror Camera)
- Removed Planar Probe mirror plane position and normal fields in inspector, always display mirror plane and normal gizmos
- Screen Space Refraction proxy model uses the proxy of the first environment light (Reflection probe/Planar probe) or the sky
## [0.1.6] - 2018-xx-yy

2
CHANGELOG.md.meta


fileFormatVersion: 2
guid: d29df6d4fc5e8db47acccb638ce31e60
guid: ce3703c6cb43f154bac29c5ad0a09ba6
TextScriptImporter:
externalObjects: {}
userData:

9
ImageTemplates/FailStamp.png.meta


TextureImporter:
fileIDToRecycleName: {}
externalObjects: {}
serializedVersion: 5
serializedVersion: 6
mipmaps:
mipMapMode: 0
enableMipMap: 1

heightScale: 0.25
normalMapFilter: 0
isReadable: 0
streamingMipmaps: 0
streamingMipmapsPriority: 0
grayScaleToAlpha: 0
generateCubemap: 6
cubemapConvolution: 0

serializedVersion: 2
filterMode: -1
aniso: -1
mipBias: -1
mipBias: -100
wrapU: -1
wrapV: -1
wrapW: -1

spriteMeshType: 1
alignment: 0
spritePivot: {x: 0.5, y: 0.5}
spritePixelsToUnits: 100
spritePixelsToUnits: 100
spriteGenerateFallbackPhysicsShape: 1
alphaUsage: 1
alphaIsTransparency: 0
spriteTessellationDetail: -1

9
ImageTemplates/LightweightPipeline/Scenes/001_SimpleCube.unity.png.meta


TextureImporter:
fileIDToRecycleName: {}
externalObjects: {}
serializedVersion: 5
serializedVersion: 6
mipmaps:
mipMapMode: 0
enableMipMap: 1

heightScale: 0.25
normalMapFilter: 0
isReadable: 0
streamingMipmaps: 0
streamingMipmapsPriority: 0
grayScaleToAlpha: 0
generateCubemap: 6
cubemapConvolution: 0

serializedVersion: 2
filterMode: -1
aniso: -1
mipBias: -1
mipBias: -100
wrapU: -1
wrapV: -1
wrapW: -1

spriteMeshType: 1
alignment: 0
spritePivot: {x: 0.5, y: 0.5}
spritePixelsToUnits: 100
spritePixelsToUnits: 100
spriteGenerateFallbackPhysicsShape: 1
alphaUsage: 1
alphaIsTransparency: 0
spriteTessellationDetail: -1

9
ImageTemplates/LightweightPipeline/Scenes/002_Camera_Clip.unity.png.meta


TextureImporter:
fileIDToRecycleName: {}
externalObjects: {}
serializedVersion: 5
serializedVersion: 6
mipmaps:
mipMapMode: 0
enableMipMap: 1

heightScale: 0.25
normalMapFilter: 0
isReadable: 0
streamingMipmaps: 0
streamingMipmapsPriority: 0
grayScaleToAlpha: 0
generateCubemap: 6
cubemapConvolution: 0

serializedVersion: 2
filterMode: -1
aniso: -1
mipBias: -1
mipBias: -100
wrapU: -1
wrapV: -1
wrapW: -1

spriteMeshType: 1
alignment: 0
spritePivot: {x: 0.5, y: 0.5}
spritePixelsToUnits: 100
spritePixelsToUnits: 100
spriteGenerateFallbackPhysicsShape: 1
alphaUsage: 1
alphaIsTransparency: 0
spriteTessellationDetail: -1

9
ImageTemplates/LightweightPipeline/Scenes/003_Camera_Ortho.unity.png.meta


TextureImporter:
fileIDToRecycleName: {}
externalObjects: {}
serializedVersion: 5
serializedVersion: 6
mipmaps:
mipMapMode: 0
enableMipMap: 1

heightScale: 0.25
normalMapFilter: 0
isReadable: 0
streamingMipmaps: 0
streamingMipmapsPriority: 0
grayScaleToAlpha: 0
generateCubemap: 6
cubemapConvolution: 0

serializedVersion: 2
filterMode: -1
aniso: -1
mipBias: -1
mipBias: -100
wrapU: -1
wrapV: -1
wrapW: -1

spriteMeshType: 1
alignment: 0
spritePivot: {x: 0.5, y: 0.5}
spritePixelsToUnits: 100
spritePixelsToUnits: 100
spriteGenerateFallbackPhysicsShape: 1
alphaUsage: 1
alphaIsTransparency: 0
spriteTessellationDetail: -1

9
ImageTemplates/LightweightPipeline/Scenes/004_Camera_TargetTexture.unity.png.meta


TextureImporter:
fileIDToRecycleName: {}
externalObjects: {}
serializedVersion: 5
serializedVersion: 6
mipmaps:
mipMapMode: 0
enableMipMap: 1

heightScale: 0.25
normalMapFilter: 0
isReadable: 0
streamingMipmaps: 0
streamingMipmapsPriority: 0
grayScaleToAlpha: 0
generateCubemap: 6
cubemapConvolution: 0

serializedVersion: 2
filterMode: -1
aniso: -1
mipBias: -1
mipBias: -100
wrapU: -1
wrapV: -1
wrapW: -1

spriteMeshType: 1
alignment: 0
spritePivot: {x: 0.5, y: 0.5}
spritePixelsToUnits: 100
spritePixelsToUnits: 100
spriteGenerateFallbackPhysicsShape: 1
alphaUsage: 1
alphaIsTransparency: 0
spriteTessellationDetail: -1

9
ImageTemplates/LightweightPipeline/Scenes/005_LitBakedEmission.unity.png.meta


TextureImporter:
fileIDToRecycleName: {}
externalObjects: {}
serializedVersion: 5
serializedVersion: 6
mipmaps:
mipMapMode: 0
enableMipMap: 1

heightScale: 0.25
normalMapFilter: 0
isReadable: 0
streamingMipmaps: 0
streamingMipmapsPriority: 0
grayScaleToAlpha: 0
generateCubemap: 6
cubemapConvolution: 0

serializedVersion: 2
filterMode: -1
aniso: -1
mipBias: -1
mipBias: -100
wrapU: -1
wrapV: -1
wrapW: -1

spriteMeshType: 1
alignment: 0
spritePivot: {x: 0.5, y: 0.5}
spritePixelsToUnits: 100
spritePixelsToUnits: 100
spriteGenerateFallbackPhysicsShape: 1
alphaUsage: 1
alphaIsTransparency: 0
spriteTessellationDetail: -1

9
ImageTemplates/LightweightPipeline/Scenes/006_LitShaderLightProbes.unity.png.meta


TextureImporter:
fileIDToRecycleName: {}
externalObjects: {}
serializedVersion: 5
serializedVersion: 6
mipmaps:
mipMapMode: 0
enableMipMap: 1

heightScale: 0.25
normalMapFilter: 0
isReadable: 0
streamingMipmaps: 0
streamingMipmapsPriority: 0
grayScaleToAlpha: 0
generateCubemap: 6
cubemapConvolution: 0

serializedVersion: 2
filterMode: -1
aniso: -1
mipBias: -1
mipBias: -100
wrapU: -1
wrapV: -1
wrapW: -1

spriteMeshType: 1
alignment: 0
spritePivot: {x: 0.5, y: 0.5}
spritePixelsToUnits: 100
spritePixelsToUnits: 100
spriteGenerateFallbackPhysicsShape: 1
alphaUsage: 1
alphaIsTransparency: 0
spriteTessellationDetail: -1

9
ImageTemplates/LightweightPipeline/Scenes/007_LitShaderMaps.unity.png.meta


TextureImporter:
fileIDToRecycleName: {}
externalObjects: {}
serializedVersion: 5
serializedVersion: 6
mipmaps:
mipMapMode: 0
enableMipMap: 1

heightScale: 0.25
normalMapFilter: 0
isReadable: 0
streamingMipmaps: 0
streamingMipmapsPriority: 0
grayScaleToAlpha: 0
generateCubemap: 6
cubemapConvolution: 0

serializedVersion: 2
filterMode: -1
aniso: -1
mipBias: -1
mipBias: -100
wrapU: -1
wrapV: -1
wrapW: -1

spriteMeshType: 1
alignment: 0
spritePivot: {x: 0.5, y: 0.5}
spritePixelsToUnits: 100
spritePixelsToUnits: 100
spriteGenerateFallbackPhysicsShape: 1
alphaUsage: 1
alphaIsTransparency: 0
spriteTessellationDetail: -1

9
ImageTemplates/LightweightPipeline/Scenes/008_LitShaderReflection.unity.png.meta


TextureImporter:
fileIDToRecycleName: {}
externalObjects: {}
serializedVersion: 5
serializedVersion: 6
mipmaps:
mipMapMode: 0
enableMipMap: 1

heightScale: 0.25
normalMapFilter: 0
isReadable: 0
streamingMipmaps: 0
streamingMipmapsPriority: 0
grayScaleToAlpha: 0
generateCubemap: 6
cubemapConvolution: 0

serializedVersion: 2
filterMode: -1
aniso: -1
mipBias: -1
mipBias: -100
wrapU: -1
wrapV: -1
wrapW: -1

spriteMeshType: 1
alignment: 0
spritePivot: {x: 0.5, y: 0.5}
spritePixelsToUnits: 100
spritePixelsToUnits: 100
spriteGenerateFallbackPhysicsShape: 1
alphaUsage: 1
alphaIsTransparency: 0
spriteTessellationDetail: -1

9
ImageTemplates/LightweightPipeline/Scenes/009_LightweightShading.unity.png.meta


TextureImporter:
fileIDToRecycleName: {}
externalObjects: {}
serializedVersion: 5
serializedVersion: 6
mipmaps:
mipMapMode: 0
enableMipMap: 1

heightScale: 0.25
normalMapFilter: 0
isReadable: 0
streamingMipmaps: 0
streamingMipmapsPriority: 0
grayScaleToAlpha: 0
generateCubemap: 6
cubemapConvolution: 0

serializedVersion: 2
filterMode: -1
aniso: -1
mipBias: -1
mipBias: -100
wrapU: -1
wrapV: -1
wrapW: -1

spriteMeshType: 1
alignment: 0
spritePivot: {x: 0.5, y: 0.5}
spritePixelsToUnits: 100
spritePixelsToUnits: 100
spriteGenerateFallbackPhysicsShape: 1
alphaUsage: 1
alphaIsTransparency: 0
spriteTessellationDetail: -1

9
ImageTemplates/LightweightPipeline/Scenes/010_MultiplePointLights.unity.png.meta


TextureImporter:
fileIDToRecycleName: {}
externalObjects: {}
serializedVersion: 5
serializedVersion: 6
mipmaps:
mipMapMode: 0
enableMipMap: 1

heightScale: 0.25
normalMapFilter: 0
isReadable: 0
streamingMipmaps: 0
streamingMipmapsPriority: 0
grayScaleToAlpha: 0
generateCubemap: 6
cubemapConvolution: 0

serializedVersion: 2
filterMode: -1
aniso: -1
mipBias: -1
mipBias: -100
wrapU: -1
wrapV: -1
wrapW: -1

spriteMeshType: 1
alignment: 0
spritePivot: {x: 0.5, y: 0.5}
spritePixelsToUnits: 100
spritePixelsToUnits: 100
spriteGenerateFallbackPhysicsShape: 1
alphaUsage: 1
alphaIsTransparency: 0
spriteTessellationDetail: -1

9
ImageTemplates/LightweightPipeline/Scenes/011_UnlitSprites.unity.png.meta


TextureImporter:
fileIDToRecycleName: {}
externalObjects: {}
serializedVersion: 5
serializedVersion: 6
mipmaps:
mipMapMode: 0
enableMipMap: 1

heightScale: 0.25
normalMapFilter: 0
isReadable: 0
streamingMipmaps: 0
streamingMipmapsPriority: 0
grayScaleToAlpha: 0
generateCubemap: 6
cubemapConvolution: 0

serializedVersion: 2
filterMode: -1
aniso: -1
mipBias: -1
mipBias: -100
wrapU: -1
wrapV: -1
wrapW: -1

spriteMeshType: 1
alignment: 0
spritePivot: {x: 0.5, y: 0.5}
spritePixelsToUnits: 100
spritePixelsToUnits: 100
spriteGenerateFallbackPhysicsShape: 1
alphaUsage: 1
alphaIsTransparency: 0
spriteTessellationDetail: -1

9
ImageTemplates/LightweightPipeline/Scenes/012_PBS_EnvironmentBRDF_Spheres.unity.png.meta


TextureImporter:
fileIDToRecycleName: {}
externalObjects: {}
serializedVersion: 5
serializedVersion: 6
mipmaps:
mipMapMode: 0
enableMipMap: 1

heightScale: 0.25
normalMapFilter: 0
isReadable: 0
streamingMipmaps: 0
streamingMipmapsPriority: 0
grayScaleToAlpha: 0
generateCubemap: 6
cubemapConvolution: 0

serializedVersion: 2
filterMode: -1
aniso: -1
mipBias: -1
mipBias: -100
wrapU: -1
wrapV: -1
wrapW: -1

spriteMeshType: 1
alignment: 0
spritePivot: {x: 0.5, y: 0.5}
spritePixelsToUnits: 100
spritePixelsToUnits: 100
spriteGenerateFallbackPhysicsShape: 1
alphaUsage: 1
alphaIsTransparency: 0
spriteTessellationDetail: -1

9
ImageTemplates/LightweightPipeline/Scenes/016_Lighting_Scene_Directional.unity.png.meta


TextureImporter:
fileIDToRecycleName: {}
externalObjects: {}
serializedVersion: 5
serializedVersion: 6
mipmaps:
mipMapMode: 0
enableMipMap: 1

heightScale: 0.25
normalMapFilter: 0
isReadable: 0
streamingMipmaps: 0
streamingMipmapsPriority: 0
grayScaleToAlpha: 0
generateCubemap: 6
cubemapConvolution: 0

serializedVersion: 2
filterMode: -1
aniso: -1
mipBias: -1
mipBias: -100
wrapU: -1
wrapV: -1
wrapW: -1

spriteMeshType: 1
alignment: 0
spritePivot: {x: 0.5, y: 0.5}
spritePixelsToUnits: 100
spritePixelsToUnits: 100
spriteGenerateFallbackPhysicsShape: 1
alphaUsage: 1
alphaIsTransparency: 0
spriteTessellationDetail: -1

9
ImageTemplates/LightweightPipeline/Scenes/017_Lighting_Scene_DirectionalBaked.unity.png.meta


TextureImporter:
fileIDToRecycleName: {}
externalObjects: {}
serializedVersion: 5
serializedVersion: 6
mipmaps:
mipMapMode: 0
enableMipMap: 1

heightScale: 0.25
normalMapFilter: 0
isReadable: 0
streamingMipmaps: 0
streamingMipmapsPriority: 0
grayScaleToAlpha: 0
generateCubemap: 6
cubemapConvolution: 0

serializedVersion: 2
filterMode: -1
aniso: -1
mipBias: -1
mipBias: -100
wrapU: -1
wrapV: -1
wrapW: -1

spriteMeshType: 1
alignment: 0
spritePivot: {x: 0.5, y: 0.5}
spritePixelsToUnits: 100
spritePixelsToUnits: 100
spriteGenerateFallbackPhysicsShape: 1
alphaUsage: 1
alphaIsTransparency: 0
spriteTessellationDetail: -1

9
ImageTemplates/LightweightPipeline/Scenes/018_Lighting_Scene_DirectionalBakedIndirect.unity.png.meta


TextureImporter:
fileIDToRecycleName: {}
externalObjects: {}
serializedVersion: 5
serializedVersion: 6
mipmaps:
mipMapMode: 0
enableMipMap: 1

heightScale: 0.25
normalMapFilter: 0
isReadable: 0
streamingMipmaps: 0
streamingMipmapsPriority: 0
grayScaleToAlpha: 0
generateCubemap: 6
cubemapConvolution: 0

serializedVersion: 2
filterMode: -1
aniso: -1
mipBias: -1
mipBias: -100
wrapU: -1
wrapV: -1
wrapW: -1

spriteMeshType: 1
alignment: 0
spritePivot: {x: 0.5, y: 0.5}
spritePixelsToUnits: 100
spritePixelsToUnits: 100
spriteGenerateFallbackPhysicsShape: 1
alphaUsage: 1
alphaIsTransparency: 0
spriteTessellationDetail: -1

9
ImageTemplates/LightweightPipeline/Scenes/019_Lighting_Scene_PointLights.unity.png.meta


TextureImporter:
fileIDToRecycleName: {}
externalObjects: {}
serializedVersion: 5
serializedVersion: 6
mipmaps:
mipMapMode: 0
enableMipMap: 1

heightScale: 0.25
normalMapFilter: 0
isReadable: 0
streamingMipmaps: 0
streamingMipmapsPriority: 0
grayScaleToAlpha: 0
generateCubemap: 6
cubemapConvolution: 0

serializedVersion: 2
filterMode: -1
aniso: -1
mipBias: -1
mipBias: -100
wrapU: -1
wrapV: -1
wrapW: -1

spriteMeshType: 1
alignment: 0
spritePivot: {x: 0.5, y: 0.5}
spritePixelsToUnits: 100
spritePixelsToUnits: 100
spriteGenerateFallbackPhysicsShape: 1
alphaUsage: 1
alphaIsTransparency: 0
spriteTessellationDetail: -1

9
ImageTemplates/LightweightPipeline/Scenes/020_Lighting_BasicDirectional.unity.png.meta


TextureImporter:
fileIDToRecycleName: {}
externalObjects: {}
serializedVersion: 5
serializedVersion: 6
mipmaps:
mipMapMode: 0
enableMipMap: 1

heightScale: 0.25
normalMapFilter: 0
isReadable: 0
streamingMipmaps: 0
streamingMipmapsPriority: 0
grayScaleToAlpha: 0
generateCubemap: 6
cubemapConvolution: 0

serializedVersion: 2
filterMode: -1
aniso: -1
mipBias: -1
mipBias: -100
wrapU: -1
wrapV: -1
wrapW: -1

spriteMeshType: 1
alignment: 0
spritePivot: {x: 0.5, y: 0.5}
spritePixelsToUnits: 100
spritePixelsToUnits: 100
spriteGenerateFallbackPhysicsShape: 1
alphaUsage: 1
alphaIsTransparency: 0
spriteTessellationDetail: -1

9
ImageTemplates/LightweightPipeline/Scenes/021_Lighting_BasicPoint.unity.png.meta


TextureImporter:
fileIDToRecycleName: {}
externalObjects: {}
serializedVersion: 5
serializedVersion: 6
mipmaps:
mipMapMode: 0
enableMipMap: 1

heightScale: 0.25
normalMapFilter: 0
isReadable: 0
streamingMipmaps: 0
streamingMipmapsPriority: 0
grayScaleToAlpha: 0
generateCubemap: 6
cubemapConvolution: 0

serializedVersion: 2
filterMode: -1
aniso: -1
mipBias: -1
mipBias: -100
wrapU: -1
wrapV: -1
wrapW: -1

spriteMeshType: 1
alignment: 0
spritePivot: {x: 0.5, y: 0.5}
spritePixelsToUnits: 100
spritePixelsToUnits: 100
spriteGenerateFallbackPhysicsShape: 1
alphaUsage: 1
alphaIsTransparency: 0
spriteTessellationDetail: -1

9
ImageTemplates/LightweightPipeline/Scenes/022_Lighting_BasicSpot.unity.png.meta


TextureImporter:
fileIDToRecycleName: {}
externalObjects: {}
serializedVersion: 5
serializedVersion: 6
mipmaps:
mipMapMode: 0
enableMipMap: 1

heightScale: 0.25
normalMapFilter: 0
isReadable: 0
streamingMipmaps: 0
streamingMipmapsPriority: 0
grayScaleToAlpha: 0
generateCubemap: 6
cubemapConvolution: 0

serializedVersion: 2
filterMode: -1
aniso: -1
mipBias: -1
mipBias: -100
wrapU: -1
wrapV: -1
wrapW: -1

spriteMeshType: 1
alignment: 0
spritePivot: {x: 0.5, y: 0.5}
spritePixelsToUnits: 100
spritePixelsToUnits: 100
spriteGenerateFallbackPhysicsShape: 1
alphaUsage: 1
alphaIsTransparency: 0
spriteTessellationDetail: -1

9
ImageTemplates/LightweightPipeline/Scenes/023_Lighting_Mixed.unity.png.meta


TextureImporter:
fileIDToRecycleName: {}
externalObjects: {}
serializedVersion: 5
serializedVersion: 6
mipmaps:
mipMapMode: 0
enableMipMap: 1

heightScale: 0.25
normalMapFilter: 0
isReadable: 0
streamingMipmaps: 0
streamingMipmapsPriority: 0
grayScaleToAlpha: 0
generateCubemap: 6
cubemapConvolution: 0

serializedVersion: 2
filterMode: -1
aniso: -1
mipBias: -1
mipBias: -100
wrapU: -1
wrapV: -1
wrapW: -1

spriteMeshType: 1
alignment: 0
spritePivot: {x: 0.5, y: 0.5}
spritePixelsToUnits: 100
spritePixelsToUnits: 100
spriteGenerateFallbackPhysicsShape: 1
alphaUsage: 1
alphaIsTransparency: 0
spriteTessellationDetail: -1

9
ImageTemplates/LightweightPipeline/Scenes/024_Shader_PBRvalidation_Specular.unity.png.meta


TextureImporter:
fileIDToRecycleName: {}
externalObjects: {}
serializedVersion: 5
serializedVersion: 6
mipmaps:
mipMapMode: 0
enableMipMap: 1

heightScale: 0.25
normalMapFilter: 0
isReadable: 0
streamingMipmaps: 0
streamingMipmapsPriority: 0
grayScaleToAlpha: 0
generateCubemap: 6
cubemapConvolution: 0

serializedVersion: 2
filterMode: -1
aniso: -1
mipBias: -1
mipBias: -100
wrapU: -1
wrapV: -1
wrapW: -1

spriteMeshType: 1
alignment: 0
spritePivot: {x: 0.5, y: 0.5}
spritePixelsToUnits: 100
spritePixelsToUnits: 100
spriteGenerateFallbackPhysicsShape: 1
alphaUsage: 1
alphaIsTransparency: 0
spriteTessellationDetail: -1

9
ImageTemplates/LightweightPipeline/Scenes/025_Shader_PBRvalidation_Metallic.unity.png.meta


TextureImporter:
fileIDToRecycleName: {}
externalObjects: {}
serializedVersion: 5
serializedVersion: 6
mipmaps:
mipMapMode: 0
enableMipMap: 1

heightScale: 0.25
normalMapFilter: 0
isReadable: 0
streamingMipmaps: 0
streamingMipmapsPriority: 0
grayScaleToAlpha: 0
generateCubemap: 6
cubemapConvolution: 0

serializedVersion: 2
filterMode: -1
aniso: -1
mipBias: -1
mipBias: -100
wrapU: -1
wrapV: -1
wrapW: -1

spriteMeshType: 1
alignment: 0
spritePivot: {x: 0.5, y: 0.5}
spritePixelsToUnits: 100
spritePixelsToUnits: 100
spriteGenerateFallbackPhysicsShape: 1
alphaUsage: 1
alphaIsTransparency: 0
spriteTessellationDetail: -1

9
ImageTemplates/LightweightPipeline/Scenes/026_Shader_PBRscene.unity.png.meta


TextureImporter:
fileIDToRecycleName: {}
externalObjects: {}
serializedVersion: 5
serializedVersion: 6
mipmaps:
mipMapMode: 0
enableMipMap: 1

heightScale: 0.25
normalMapFilter: 0
isReadable: 0
streamingMipmaps: 0
streamingMipmapsPriority: 0
grayScaleToAlpha: 0
generateCubemap: 6
cubemapConvolution: 0

serializedVersion: 2
filterMode: -1
aniso: -1
mipBias: -1
mipBias: -100
wrapU: -1
wrapV: -1
wrapW: -1

spriteMeshType: 1
alignment: 0
spritePivot: {x: 0.5, y: 0.5}
spritePixelsToUnits: 100
spritePixelsToUnits: 100
spriteGenerateFallbackPhysicsShape: 1
alphaUsage: 1
alphaIsTransparency: 0
spriteTessellationDetail: -1

9
ImageTemplates/LightweightPipeline/Scenes/027_PostProcessing.unity.png.meta


TextureImporter:
fileIDToRecycleName: {}
externalObjects: {}
serializedVersion: 5
serializedVersion: 6
mipmaps:
mipMapMode: 0
enableMipMap: 1

heightScale: 0.25
normalMapFilter: 0
isReadable: 0
streamingMipmaps: 0
streamingMipmapsPriority: 0
grayScaleToAlpha: 0
generateCubemap: 6
cubemapConvolution: 0

serializedVersion: 2
filterMode: -1
aniso: -1
mipBias: -1
mipBias: -100
wrapU: -1
wrapV: -1
wrapW: -1

spriteMeshType: 1
alignment: 0
spritePivot: {x: 0.5, y: 0.5}
spritePixelsToUnits: 100
spritePixelsToUnits: 100
spriteGenerateFallbackPhysicsShape: 1
alphaUsage: 1
alphaIsTransparency: 0
spriteTessellationDetail: -1

9
ImageTemplates/LightweightPipeline/Scenes/028_PostProcessing_Custom.unity.png.meta


TextureImporter:
fileIDToRecycleName: {}
externalObjects: {}
serializedVersion: 5
serializedVersion: 6
mipmaps:
mipMapMode: 0
enableMipMap: 1

heightScale: 0.25
normalMapFilter: 0
isReadable: 0
streamingMipmaps: 0
streamingMipmapsPriority: 0
grayScaleToAlpha: 0
generateCubemap: 6
cubemapConvolution: 0

serializedVersion: 2
filterMode: -1
aniso: -1
mipBias: -1
mipBias: -100
wrapU: -1
wrapV: -1
wrapW: -1

spriteMeshType: 1
alignment: 0
spritePivot: {x: 0.5, y: 0.5}
spritePixelsToUnits: 100
spritePixelsToUnits: 100
spriteGenerateFallbackPhysicsShape: 1
alphaUsage: 1
alphaIsTransparency: 0
spriteTessellationDetail: -1

9
ImageTemplates/LightweightPipeline/Scenes/029_Particles.unity.png.meta


TextureImporter:
fileIDToRecycleName: {}
externalObjects: {}
serializedVersion: 5
serializedVersion: 6
mipmaps:
mipMapMode: 0
enableMipMap: 1

heightScale: 0.25
normalMapFilter: 0
isReadable: 0
streamingMipmaps: 0
streamingMipmapsPriority: 0
grayScaleToAlpha: 0
generateCubemap: 6
cubemapConvolution: 0

serializedVersion: 2
filterMode: -1
aniso: -1
mipBias: -1
mipBias: -100
wrapU: -1
wrapV: -1
wrapW: -1

spriteMeshType: 1
alignment: 0
spritePivot: {x: 0.5, y: 0.5}
spritePixelsToUnits: 100
spritePixelsToUnits: 100
spriteGenerateFallbackPhysicsShape: 1
alphaUsage: 1
alphaIsTransparency: 0
spriteTessellationDetail: -1

9
ImageTemplates/LightweightPipeline/Scenes/031_Shader_GlossyEnvironmentSky.unity.png.meta


TextureImporter:
fileIDToRecycleName: {}
externalObjects: {}
serializedVersion: 5
serializedVersion: 6
mipmaps:
mipMapMode: 0
enableMipMap: 1

heightScale: 0.25
normalMapFilter: 0
isReadable: 0
streamingMipmaps: 0
streamingMipmapsPriority: 0
grayScaleToAlpha: 0
generateCubemap: 6
cubemapConvolution: 0

serializedVersion: 2
filterMode: -1
aniso: -1
mipBias: -1
mipBias: -100
wrapU: -1
wrapV: -1
wrapW: -1

spriteMeshType: 1
alignment: 0
spritePivot: {x: 0.5, y: 0.5}
spritePixelsToUnits: 100
spritePixelsToUnits: 100
spriteGenerateFallbackPhysicsShape: 1
alphaUsage: 1
alphaIsTransparency: 0
spriteTessellationDetail: -1

9
ImageTemplates/LightweightPipeline/Scenes/032_Shader_GlossyEnvironmentColor.unity.png.meta


TextureImporter:
fileIDToRecycleName: {}
externalObjects: {}
serializedVersion: 5
serializedVersion: 6
mipmaps:
mipMapMode: 0
enableMipMap: 1

heightScale: 0.25
normalMapFilter: 0
isReadable: 0
streamingMipmaps: 0
streamingMipmapsPriority: 0
grayScaleToAlpha: 0
generateCubemap: 6
cubemapConvolution: 0

serializedVersion: 2
filterMode: -1
aniso: -1
mipBias: -1
mipBias: -100
wrapU: -1
wrapV: -1
wrapW: -1

spriteMeshType: 1
alignment: 0
spritePivot: {x: 0.5, y: 0.5}
spritePixelsToUnits: 100
spritePixelsToUnits: 100
spriteGenerateFallbackPhysicsShape: 1
alphaUsage: 1
alphaIsTransparency: 0
spriteTessellationDetail: -1

9
ImageTemplates/LightweightPipeline/Scenes/033_Shader_HighlightsEnvironmentGradientSH.unity.png.meta


TextureImporter:
fileIDToRecycleName: {}
externalObjects: {}
serializedVersion: 5
serializedVersion: 6
mipmaps:
mipMapMode: 0
enableMipMap: 1

heightScale: 0.25
normalMapFilter: 0
isReadable: 0
streamingMipmaps: 0
streamingMipmapsPriority: 0
grayScaleToAlpha: 0
generateCubemap: 6
cubemapConvolution: 0

serializedVersion: 2
filterMode: -1
aniso: -1
mipBias: -1
mipBias: -100
wrapU: -1
wrapV: -1
wrapW: -1

spriteMeshType: 1
alignment: 0
spritePivot: {x: 0.5, y: 0.5}
spritePixelsToUnits: 100
spritePixelsToUnits: 100
spriteGenerateFallbackPhysicsShape: 1
alphaUsage: 1
alphaIsTransparency: 0
spriteTessellationDetail: -1

9
ImageTemplates/LightweightPipeline/Scenes/034_Shader_HighlightsEnvironmentGradientBaked.unity.png.meta


TextureImporter:
fileIDToRecycleName: {}
externalObjects: {}
serializedVersion: 5
serializedVersion: 6
mipmaps:
mipMapMode: 0
enableMipMap: 1

heightScale: 0.25
normalMapFilter: 0
isReadable: 0
streamingMipmaps: 0
streamingMipmapsPriority: 0
grayScaleToAlpha: 0
generateCubemap: 6
cubemapConvolution: 0

serializedVersion: 2
filterMode: -1
aniso: -1
mipBias: -1
mipBias: -100
wrapU: -1
wrapV: -1
wrapW: -1

spriteMeshType: 1
alignment: 0
spritePivot: {x: 0.5, y: 0.5}
spritePixelsToUnits: 100
spritePixelsToUnits: 100
spriteGenerateFallbackPhysicsShape: 1
alphaUsage: 1
alphaIsTransparency: 0
spriteTessellationDetail: -1

9
ImageTemplates/LightweightPipeline/Scenes/035_Shader_TerrainShaders.unity.png.meta


TextureImporter:
fileIDToRecycleName: {}
externalObjects: {}
serializedVersion: 5
serializedVersion: 6
mipmaps:
mipMapMode: 0
enableMipMap: 1

heightScale: 0.25
normalMapFilter: 0
isReadable: 0
streamingMipmaps: 0
streamingMipmapsPriority: 0
grayScaleToAlpha: 0
generateCubemap: 6
cubemapConvolution: 0

serializedVersion: 2
filterMode: -1
aniso: -1
mipBias: -1
mipBias: -100
wrapU: -1
wrapV: -1
wrapW: -1

spriteMeshType: 1
alignment: 0
spritePivot: {x: 0.5, y: 0.5}
spritePixelsToUnits: 100
spritePixelsToUnits: 100
spriteGenerateFallbackPhysicsShape: 1
alphaUsage: 1
alphaIsTransparency: 0
spriteTessellationDetail: -1

9
ImageTemplates/LightweightPipeline/Scenes/036_Lighting_Scene_DirectionalBakedDirectional.unity.png.meta


TextureImporter:
fileIDToRecycleName: {}
externalObjects: {}
serializedVersion: 5
serializedVersion: 6
mipmaps:
mipMapMode: 0
enableMipMap: 1

heightScale: 0.25
normalMapFilter: 0
isReadable: 0
streamingMipmaps: 0
streamingMipmapsPriority: 0
grayScaleToAlpha: 0
generateCubemap: 6
cubemapConvolution: 0

serializedVersion: 2
filterMode: -1
aniso: -1
mipBias: -1
mipBias: -100
wrapU: -1
wrapV: -1
wrapW: -1

spriteMeshType: 1
alignment: 0
spritePivot: {x: 0.5, y: 0.5}
spritePixelsToUnits: 100
spritePixelsToUnits: 100
spriteGenerateFallbackPhysicsShape: 1
alphaUsage: 1
alphaIsTransparency: 0
spriteTessellationDetail: -1

9
ImageTemplates/LightweightPipeline/Scenes/037_Particles.unity.png.meta


TextureImporter:
fileIDToRecycleName: {}
externalObjects: {}
serializedVersion: 5
serializedVersion: 6
mipmaps:
mipMapMode: 0
enableMipMap: 1

heightScale: 0.25
normalMapFilter: 0
isReadable: 0
streamingMipmaps: 0
streamingMipmapsPriority: 0
grayScaleToAlpha: 0
generateCubemap: 6
cubemapConvolution: 0

serializedVersion: 2
filterMode: -1
aniso: -1
mipBias: -1
mipBias: -100
wrapU: -1
wrapV: -1
wrapW: -1

spriteMeshType: 1
alignment: 0
spritePivot: {x: 0.5, y: 0.5}
spritePixelsToUnits: 100
spritePixelsToUnits: 100
spriteGenerateFallbackPhysicsShape: 1
alphaUsage: 1
alphaIsTransparency: 0
spriteTessellationDetail: -1

9
ImageTemplates/LightweightPipeline/Scenes/038_Lighting_DirectionalCookie.unity.png.meta


TextureImporter:
fileIDToRecycleName: {}
externalObjects: {}
serializedVersion: 5
serializedVersion: 6
mipmaps:
mipMapMode: 0
enableMipMap: 1

heightScale: 0.25
normalMapFilter: 0
isReadable: 0
streamingMipmaps: 0
streamingMipmapsPriority: 0
grayScaleToAlpha: 0
generateCubemap: 6
cubemapConvolution: 0

serializedVersion: 2
filterMode: -1
aniso: -1
mipBias: -1
mipBias: -100
wrapU: -1
wrapV: -1
wrapW: -1

spriteMeshType: 1
alignment: 0
spritePivot: {x: 0.5, y: 0.5}
spritePixelsToUnits: 100
spritePixelsToUnits: 100
spriteGenerateFallbackPhysicsShape: 1
alphaUsage: 1
alphaIsTransparency: 0
spriteTessellationDetail: -1

9
ImageTemplates/LightweightPipeline/Scenes/039_Lighting_SpotCookie.unity.png.meta


TextureImporter:
fileIDToRecycleName: {}
externalObjects: {}
serializedVersion: 5
serializedVersion: 6
mipmaps:
mipMapMode: 0
enableMipMap: 1

heightScale: 0.25
normalMapFilter: 0
isReadable: 0
streamingMipmaps: 0
streamingMipmapsPriority: 0
grayScaleToAlpha: 0
generateCubemap: 6
cubemapConvolution: 0

serializedVersion: 2
filterMode: -1
aniso: -1
mipBias: -1
mipBias: -100
wrapU: -1
wrapV: -1
wrapW: -1

spriteMeshType: 1
alignment: 0
spritePivot: {x: 0.5, y: 0.5}
spritePixelsToUnits: 100
spritePixelsToUnits: 100
spriteGenerateFallbackPhysicsShape: 1
alphaUsage: 1
alphaIsTransparency: 0
spriteTessellationDetail: -1

9
ImageTemplates/LightweightPipeline/Scenes/040_UpgradeScene.unity.png.meta


TextureImporter:
fileIDToRecycleName: {}
externalObjects: {}
serializedVersion: 5
serializedVersion: 6
mipmaps:
mipMapMode: 0
enableMipMap: 1

heightScale: 0.25
normalMapFilter: 0
isReadable: 0
streamingMipmaps: 0
streamingMipmapsPriority: 0
grayScaleToAlpha: 0
generateCubemap: 6
cubemapConvolution: 0

serializedVersion: 2
filterMode: -1
aniso: -1
mipBias: -1
mipBias: -100
wrapU: -1
wrapV: -1
wrapW: -1

spriteMeshType: 1
alignment: 0
spritePivot: {x: 0.5, y: 0.5}
spritePixelsToUnits: 100
spritePixelsToUnits: 100
spriteGenerateFallbackPhysicsShape: 1
alphaUsage: 1
alphaIsTransparency: 0
spriteTessellationDetail: -1

9
ImageTemplates/LightweightPipeline/Scenes/041_Lighting_BasicArea.unity.png.meta


TextureImporter:
fileIDToRecycleName: {}
externalObjects: {}
serializedVersion: 5
serializedVersion: 6
mipmaps:
mipMapMode: 0
enableMipMap: 1

heightScale: 0.25
normalMapFilter: 0
isReadable: 0
streamingMipmaps: 0
streamingMipmapsPriority: 0
grayScaleToAlpha: 0
generateCubemap: 6
cubemapConvolution: 0

serializedVersion: 2
filterMode: -1
aniso: -1
mipBias: -1
mipBias: -100
wrapU: -1
wrapV: -1
wrapW: -1

spriteMeshType: 1
alignment: 0
spritePivot: {x: 0.5, y: 0.5}
spritePixelsToUnits: 100
spritePixelsToUnits: 100
spriteGenerateFallbackPhysicsShape: 1
alphaUsage: 1
alphaIsTransparency: 0
spriteTessellationDetail: -1

9
ImageTemplates/LightweightPipeline/Scenes/042_Lighting_Scene_VertexLighting.unity.png.meta


TextureImporter:
fileIDToRecycleName: {}
externalObjects: {}
serializedVersion: 5
serializedVersion: 6
mipmaps:
mipMapMode: 0
enableMipMap: 1

heightScale: 0.25
normalMapFilter: 0
isReadable: 0
streamingMipmaps: 0
streamingMipmapsPriority: 0
grayScaleToAlpha: 0
generateCubemap: 6
cubemapConvolution: 0

serializedVersion: 2
filterMode: -1
aniso: -1
mipBias: -1
mipBias: -100
wrapU: -1
wrapV: -1
wrapW: -1

spriteMeshType: 1
alignment: 0
spritePivot: {x: 0.5, y: 0.5}
spritePixelsToUnits: 100
spritePixelsToUnits: 100
spriteGenerateFallbackPhysicsShape: 1
alphaUsage: 1
alphaIsTransparency: 0
spriteTessellationDetail: -1

9
ImageTemplates/LightweightPipeline/Scenes/043_Lighting_Mixed_ShadowMask.unity.png.meta


TextureImporter:
fileIDToRecycleName: {}
externalObjects: {}
serializedVersion: 5
serializedVersion: 6
mipmaps:
mipMapMode: 0
enableMipMap: 1

heightScale: 0.25
normalMapFilter: 0
isReadable: 0
streamingMipmaps: 0
streamingMipmapsPriority: 0
grayScaleToAlpha: 0
generateCubemap: 6
cubemapConvolution: 0

serializedVersion: 2
filterMode: -1
aniso: -1
mipBias: -1
mipBias: -100
wrapU: -1
wrapV: -1
wrapW: -1

spriteMeshType: 1
alignment: 0
spritePivot: {x: 0.5, y: 0.5}
spritePixelsToUnits: 100
spritePixelsToUnits: 100
spriteGenerateFallbackPhysicsShape: 1
alphaUsage: 1
alphaIsTransparency: 0
spriteTessellationDetail: -1

9
ImageTemplates/LightweightPipeline/Scenes/044_ReflectionProbe.unity.png.meta


TextureImporter:
fileIDToRecycleName: {}
externalObjects: {}
serializedVersion: 5
serializedVersion: 6
mipmaps:
mipMapMode: 0
enableMipMap: 1

heightScale: 0.25
normalMapFilter: 0
isReadable: 0
streamingMipmaps: 0
streamingMipmapsPriority: 0
grayScaleToAlpha: 0
generateCubemap: 6
cubemapConvolution: 0

serializedVersion: 2
filterMode: -1
aniso: -1
mipBias: -1
mipBias: -100
wrapU: -1
wrapV: -1
wrapW: -1

spriteMeshType: 1
alignment: 0
spritePivot: {x: 0.5, y: 0.5}
spritePixelsToUnits: 100
spritePixelsToUnits: 100
spriteGenerateFallbackPhysicsShape: 1
alphaUsage: 1
alphaIsTransparency: 0
spriteTessellationDetail: -1

51
ScriptableRenderPipeline/Core/CoreRP/Editor/MaterialUpgrader.cs


namespace UnityEditor.Experimental.Rendering
{
public static class DialogText
{
public static readonly string title = "Material Upgrader";
public static readonly string proceed = "Proceed";
public static readonly string ok = "Ok";
public static readonly string cancel = "Cancel";
public static readonly string noSelectionMessage = "You must select at least one material.";
public static readonly string projectBackMessage = "Make sure to have a project backup before proceeding.";
}
public class MaterialUpgrader
{
public delegate void MaterialFinalizer(Material mat);

private static readonly string projectBackMessage = "Make sure to have a project backup before proceeding.";
MaterialFinalizer m_Finalizer;
Dictionary<string, string> m_TextureRename = new Dictionary<string, string>();

public static void UpgradeProjectFolder(List<MaterialUpgrader> upgraders, string progressBarName, UpgradeFlags flags = UpgradeFlags.None)
{
if (!EditorUtility.DisplayDialog("Material Upgrader", "The upgrade will overwrite materials in your project. " + projectBackMessage, "Proceed", "Cancel"))
if (!EditorUtility.DisplayDialog(DialogText.title, "The upgrade will overwrite materials in your project. " + DialogText.projectBackMessage, DialogText.proceed, DialogText.cancel))
return;
int totalMaterialCount = 0;

public static void Upgrade(Material material, List<MaterialUpgrader> upgraders, UpgradeFlags flags)
{
if (material == null)
return;
var upgrader = GetUpgrader(upgraders, material);
if (upgrader != null)

{
var selection = Selection.objects;
if (selection == null || selection.Length == 0)
if (EditorUtility.DisplayDialog("Material Upgrader", "You must select at least one material.", "Ok"))
return;
if (selection == null)
{
EditorUtility.DisplayDialog(DialogText.title, DialogText.noSelectionMessage, DialogText.ok);
return;
}
List<Material> selectedMaterials = new List<Material>(selection.Length);
for (int i = 0; i < selection.Length; ++i)
{
Material mat = selection[i] as Material;
if (mat != null)
selectedMaterials.Add(mat);
}
int selectedMaterialsCount = selectedMaterials.Count;
if (selectedMaterialsCount == 0)
{
EditorUtility.DisplayDialog(DialogText.title, DialogText.noSelectionMessage, DialogText.ok);
return;
}
if (!EditorUtility.DisplayDialog("Material Upgrader", string.Format("The upgrade will overwrite {0} selected material{1}. ", selection.Length, (selection.Length > 1) ? "s" : "") +
projectBackMessage, "Proceed", "Cancel"))
if (!EditorUtility.DisplayDialog(DialogText.title, string.Format("The upgrade will overwrite {0} selected material{1}. ", selectedMaterialsCount, selectedMaterialsCount > 1 ? "s" : "") +
DialogText.projectBackMessage, DialogText.proceed, DialogText.cancel))
Debug.Log(selection.Length);
for (int i = 0; i < selection.Length; i++)
for (int i = 0; i < selectedMaterialsCount; i++)
if (UnityEditor.EditorUtility.DisplayCancelableProgressBar(progressBarName, string.Format("({0} of {1}) {2}", i, selection.Length, lastMaterialName), (float)i / (float)selection.Length))
if (UnityEditor.EditorUtility.DisplayCancelableProgressBar(progressBarName, string.Format("({0} of {1}) {2}", i, selectedMaterialsCount, lastMaterialName), (float)i / (float)selectedMaterialsCount))
var material = selection[i] as Material;
var material = selectedMaterials[i];
Upgrade(material, upgraders, flags);
if (material != null)
lastMaterialName = material.name;
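
The MaterialUpgrader.cs hunk above centralizes the dialog strings in the new DialogText class and makes UpgradeSelection iterate a filtered list of selected materials instead of indexing Selection.objects directly. Below is a minimal, hypothetical sketch of how these entry points are typically driven; MyLitUpgrader and the shader names are illustrative assumptions, and only the UpgradeProjectFolder(), Upgrade() and UpgradeFlags signatures visible in the hunk are taken as given.

using System.Collections.Generic;
using UnityEngine;
using UnityEditor.Experimental.Rendering;

// Hypothetical upgrader: remaps one shader name to another. RenameShader() is part of
// the upgrader workflow in this codebase, but it is not shown in the hunk above.
class MyLitUpgrader : MaterialUpgrader
{
    public MyLitUpgrader() { RenameShader("Standard", "HDRenderPipeline/Lit"); }
}

static class MaterialUpgradeExample
{
    public static void UpgradeWholeProject()
    {
        var upgraders = new List<MaterialUpgrader> { new MyLitUpgrader() };
        // Shows the DialogText-based confirmation dialog, then walks every material in the project.
        MaterialUpgrader.UpgradeProjectFolder(upgraders, "Upgrading materials");
    }

    public static void UpgradeSingle(Material mat)
    {
        // Null materials are ignored by Upgrade(), as in the hunk above.
        MaterialUpgrader.Upgrade(mat, new List<MaterialUpgrader> { new MyLitUpgrader() }, UpgradeFlags.None);
    }
}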

125
ScriptableRenderPipeline/Core/CoreRP/ShaderLibrary/BSDF.hlsl


real F_Schlick(real f0, real f90, real u)
{
real x = 1.0 - u;
real x = 1.0 - u;
real x2 = x * x;
real x5 = x * x2 * x2;
return (f90 - f0) * x5 + f0; // sub mul mul mul sub mad

real3 F_Schlick(real3 f0, real f90, real u)
{
real x = 1.0 - u;
real x = 1.0 - u;
real x2 = x * x;
real x5 = x * x2 * x2;
return f0 * (1.0 - x5) + (f90 * x5); // sub mul mul mul sub mul mad*3

// Does not handle TIR.
real F_Transm_Schlick(real f0, real f90, real u)
{
real x = 1.0 - u;
real x = 1.0 - u;
real x2 = x * x;
real x5 = x * x2 * x2;
return (1.0 - f90 * x5) - f0 * (1.0 - x5); // sub mul mul mul mad sub mad

// Does not handle TIR.
real3 F_Transm_Schlick(real3 f0, real f90, real u)
{
real x = 1.0 - u;
real x = 1.0 - u;
real x2 = x * x;
real x5 = x * x2 * x2;
return (1.0 - f90 * x5) - f0 * (1.0 - x5); // sub mul mul mul mad sub mad*3

// Ref: https://seblagarde.wordpress.com/2013/04/29/memo-on-fresnel-equations/
// Fresnel dieletric / dielectric
real F_Fresnel(real ior, real u)
real F_FresnelDieletric(real ior, real u)
// Fresnel dieletric / conductor
// Note: etak2 = etak * etak (optimization for Artist Friendly Metallic Fresnel below)
// eta = eta_t / eta_i and etak = k_t / n_i
real3 F_FresnelConductor(real3 eta, real3 etak2, real cosTheta)
{
real cosTheta2 = cosTheta * cosTheta;
real sinTheta2 = 1.0 - cosTheta2;
real3 eta2 = eta * eta;
real3 t0 = eta2 - etak2 - sinTheta2;
real3 a2plusb2 = sqrt(t0 * t0 + 4.0 * eta2 * etak2);
real3 t1 = a2plusb2 + cosTheta2;
real3 a = sqrt(0.5 * (a2plusb2 + t0));
real3 t2 = 2.0 * a * cosTheta;
real3 Rs = (t1 - t2) / (t1 + t2);
real3 t3 = cosTheta2 * a2plusb2 + sinTheta2 * sinTheta2;
real3 t4 = t2 * sinTheta2;
real3 Rp = Rs * (t3 - t4) / (t3 + t4);
return 0.5 * (Rp + Rs);
}
// Conversion FO/IOR
// ior is a value between 1.0 and 3.0. 1.0 is air interface
real IorToFresnel0(real transmittedIor, real incidentIor = 1.0)
{
return Sq((transmittedIor - incidentIor) / (transmittedIor + incidentIor));
}
// Assume air interface for top
// Note: Don't handle the case fresnel0 == 1
real Fresnel0ToIor(real fresnel0)
{
real sqrtF0 = sqrt(fresnel0);
return (1.0 + sqrtF0) / (1.0 - sqrtF0);
}
// This function is a coarse approximation of computing fresnel0 for a different top than air (here clear coat of IOR 1.5) when we only have fresnel0 with air interface
// This function is equivalent to IorToFresnel0(Fresnel0ToIor(fresnel0), 1.5)
// mean
// real sqrtF0 = sqrt(fresnel0);
// return Sq(1.0 - 5.0 * sqrtF0) / Sq(5.0 - sqrtF0);
// Optimization: Fit of the function (3 mad) for range [0.04 (should return 0), 1 (should return 1)]
TEMPLATE_1_REAL(ConvertF0ForAirInterfaceToF0ForClearCoat15, fresnel0, return saturate(-0.0256868 + fresnel0 * (0.326846 + (0.978946 - 0.283835 * fresnel0) * fresnel0)))
// Artist Friendly Metallic Fresnel Ref: http://jcgt.org/published/0003/04/03/paper.pdf
real3 GetIorN(real3 f0, real3 edgeTint)
{
real3 sqrtF0 = sqrt(f0);
return lerp((1.0 - f0) / (1.0 + f0), (1.0 + sqrtF0) / (1.0 - sqrt(f0)), edgeTint);
}
real3 getIorK2(real3 f0, real3 n)
{
real3 nf0 = Sq(n + 1.0) * f0 - Sq(f0 - 1.0);
return nf0 / (1.0 - f0);
}
//-----------------------------------------------------------------------------
// Specular BRDF
//-----------------------------------------------------------------------------

real a2 = Sq(roughness);
real s = (NdotH * a2 - NdotH) * NdotH + 1.0;
real s = (NdotH * a2 - NdotH) * NdotH + 1.0;
return a2 / (s * s);
}

real DV_SmithJointGGX(real NdotH, real NdotL, real NdotV, real roughness, real partLambdaV)
{
real a2 = Sq(roughness);
real s = (NdotH * a2 - NdotH) * NdotH + 1.0;
real s = (NdotH * a2 - NdotH) * NdotH + 1.0;
real lambdaV = NdotL * partLambdaV;
real lambdaL = NdotV * sqrt((-NdotL * a2 + NdotL) * NdotL + a2);

// Inline D_GGXAniso() * V_SmithJointGGXAniso() together for better code generation.
real DV_SmithJointGGXAniso(real TdotH, real BdotH, real NdotH, real NdotV,
real TdotL, real BdotL, real NdotL,
real roughnessT, real roughnessB, real partLambdaV)
real TdotL, real BdotL, real NdotL,
real roughnessT, real roughnessB, real partLambdaV)
{
real a2 = roughnessT * roughnessB;
real3 v = real3(roughnessB * TdotH, roughnessT * BdotH, a2 * NdotH);

}
real DV_SmithJointGGXAniso(real TdotH, real BdotH, real NdotH,
real TdotV, real BdotV, real NdotV,
real TdotL, real BdotL, real NdotL,
real roughnessT, real roughnessB)
real TdotV, real BdotV, real NdotV,
real TdotL, real BdotL, real NdotL,
real roughnessT, real roughnessB)
roughnessT, roughnessB, partLambdaV);
roughnessT, roughnessB, partLambdaV);
}
//-----------------------------------------------------------------------------

real fd90 = 0.5 + (perceptualRoughness + perceptualRoughness * LdotV);
// Two schlick fresnel term
real lightScatter = F_Schlick(1.0, fd90, NdotL);
real viewScatter = F_Schlick(1.0, fd90, NdotV);
real viewScatter = F_Schlick(1.0, fd90, NdotV);
// Normalize the BRDF for polar view angles of up to (Pi/4).
// We use the worst case of (roughness = albedo = 1), and, for each view angle,

// Ref: Diffuse Lighting for GGX + Smith Microsurfaces, p. 113.
real3 DiffuseGGXNoPI(real3 albedo, real NdotV, real NdotL, real NdotH, real LdotV, real roughness)
{
real facing = 0.5 + 0.5 * LdotV; // (LdotH)^2
real rough = facing * (0.9 - 0.4 * facing) * (0.5 / NdotH + 1);
real facing = 0.5 + 0.5 * LdotV; // (LdotH)^2
real rough = facing * (0.9 - 0.4 * facing) * (0.5 / NdotH + 1);
real smooth = transmitL * transmitV * 1.05; // Normalize F_t over the hemisphere
real single = lerp(smooth, rough, roughness); // Rescaled by PI
real multiple = roughness * (0.1159 * PI); // Rescaled by PI
real smooth = transmitL * transmitV * 1.05; // Normalize F_t over the hemisphere
real single = lerp(smooth, rough, roughness); // Rescaled by PI
real multiple = roughness * (0.1159 * PI); // Rescaled by PI
return single + albedo * multiple;
}

// Note that we could save 2 cycles by inlining the multiplication by INV_PI.
return INV_PI * DiffuseGGXNoPI(albedo, NdotV, NdotL, NdotH, LdotV, roughness);
}
//-----------------------------------------------------------------------------
// Conversion FO/IOR
//-----------------------------------------------------------------------------
// ior is a value between 1.0 and 3.0. 1.0 is air interface
real IorToFresnel0(real transmittedIor, real incidentIor = 1.0)
{
return Sq((transmittedIor - incidentIor) / (transmittedIor + incidentIor));
}
// Assume air interface for top
// Note: Don't handle the case fresnel0 == 1
real Fresnel0ToIor(real fresnel0)
{
real sqrtF0 = sqrt(fresnel0);
return (1.0 + sqrtF0) / (1.0 - sqrtF0);
}
// This function is a coarse approximation of computing fresnel0 for a different top than air (here clear coat of IOR 1.5) when we only have fresnel0 with air interface
// This function is equivalent to IorToFresnel0(Fresnel0ToIor(fresnel0), 1.5)
// mean
// real sqrtF0 = sqrt(fresnel0);
// return Sq(1.0 - 5.0 * sqrtF0) / Sq(5.0 - sqrtF0);
// Optimization: Fit of the function (3 mad) for range [0.04 (should return 0), 1 (should return 1)]
TEMPLATE_1_REAL(ConvertF0ForAirInterfaceToF0ForClearCoat15, fresnel0, return saturate(-0.0256868 + fresnel0 * (0.326846 + (0.978946 - 0.283835 * fresnel0) * fresnel0)))
//-----------------------------------------------------------------------------
// Iridescence
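
For reference, the F0/IOR helpers grouped in this hunk follow the standard normal-incidence Fresnel relations. A short worked check (not part of the commit), with the incident medium assumed to be air (n_i = 1):

\[
F_0 = \left(\frac{n_t - n_i}{n_t + n_i}\right)^{2}
\qquad\Longleftrightarrow\qquad
n_t = \frac{1 + \sqrt{F_0}}{1 - \sqrt{F_0}}
\]
% Example: IorToFresnel0(1.5) = (0.5 / 2.5)^2 = 0.04, and Fresnel0ToIor(0.04) = 1.2 / 0.8 = 1.5.
% The Schlick terms at the top of the hunk then blend from F_0 toward f90 at grazing incidence:
\[
F_{\text{Schlick}}(u) = F_0 + (f_{90} - F_0)\,(1 - u)^{5}, \qquad u = \cos\theta .
\]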

8
ScriptableRenderPipeline/Core/CoreRP/ShaderLibrary/Common.hlsl


// Metal doesn't support high enough OpenGL version
#if defined(MIP_COUNT_SUPPORTED)
uint width, height, depth, mipCount;
width = height = depth = mipCount = 0;
tex.GetDimensions(width, height, depth, mipCount);
return mipCount;
uint mipLevel, width, height, mipCount;
mipLevel = width = height = mipCount = 0;
tex.GetDimensions(mipLevel, width, height, mipCount);
return mipCount;
#else
return 0;
#endif

4
ScriptableRenderPipeline/Core/CoreRP/ShaderLibrary/EntityLighting.hlsl


real3 UnpackLightmapRGBM(real4 rgbmInput, real4 decodeInstructions)
{
return rgbmInput.rgb * pow(rgbmInput.a, decodeInstructions.y) * decodeInstructions.x;
return rgbmInput.rgb * PositivePow(rgbmInput.a, decodeInstructions.y) * decodeInstructions.x;
}
real3 UnpackLightmapDoubleLDR(real4 encodedColor, real4 decodeInstructions)

real alpha = max(decodeInstructions.w * (encodedIrradiance.a - 1.0) + 1.0, 0.0);
// If Linear mode is not supported we can skip exponent part
return (decodeInstructions.x * pow(alpha, decodeInstructions.y)) * encodedIrradiance.rgb;
return (decodeInstructions.x * PositivePow(alpha, decodeInstructions.y)) * encodedIrradiance.rgb;
}
real3 SampleSingleLightmap(TEXTURE2D_ARGS(lightmapTex, lightmapSampler), float2 uv, float4 transform, bool encodedLightmap, real4 decodeInstructions)
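
This change swaps pow() for PositivePow() in the two lightmap decode paths shown above. Assuming the usual Shader Library definition of that helper (a base clamped away from zero before exponentiation), the decode stays finite even when the exponentiated term is exactly zero:

\[
\mathrm{PositivePow}(x, y) \approx \bigl(\max(\lvert x\rvert, \varepsilon)\bigr)^{y},
\qquad \text{so the decoded color never becomes NaN/Inf for } x = 0 .
\]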

9
ScriptableRenderPipeline/Core/CoreRP/ShaderLibrary/ImageBasedLighting.hlsl


return lerp(N, R, lerpFactor);
}
// To simulate the streching of highlight at grazing angle for IBL we shrink the roughness
// which allow to fake an anisotropic specular lobe.
// Ref: http://www.frostbite.com/2015/08/stochastic-screen-space-reflections/ - slide 84
real AnisotropicStrechAtGrazingAngle(real perceptualRoughness, real NdotV)
{
real p = perceptualRoughness;
return p * lerp(saturate(NdotV * 2.0) * p, p, p);
}
// ----------------------------------------------------------------------------
// Importance sampling BSDF functions
// ----------------------------------------------------------------------------

8
ScriptableRenderPipeline/Core/CoreRP/ShaderLibrary/Wind.hlsl


// TODO: no global variable or resource declarations in the Shader Library. Functions and macros only!
TEXTURE2D(WIND_SETTINGS_TexNoise);
TEXTURE2D(_WIND_SETTINGS_TexNoise);
TEXTURE2D(WIND_SETTINGS_TexGust);
TEXTURE2D(_WIND_SETTINGS_TexGust);
SAMPLER(sampler_WIND_SETTINGS_TexGust);
float4 WIND_SETTINGS_WorldDirectionAndSpeed;

float3 texNoise(float3 worldPos, float LOD)
{
return SAMPLE_TEXTURE2D_LOD(WIND_SETTINGS_TexNoise, sampler_WIND_SETTINGS_TexNoise, worldPos.xz, LOD).xyz -0.5;
return SAMPLE_TEXTURE2D_LOD(_WIND_SETTINGS_TexNoise, sampler_WIND_SETTINGS_TexNoise, worldPos.xz, LOD).xyz -0.5;
return SAMPLE_TEXTURE2D_LOD(WIND_SETTINGS_TexGust, sampler_WIND_SETTINGS_TexGust, worldPos.xz, LOD).x;
return SAMPLE_TEXTURE2D_LOD(_WIND_SETTINGS_TexGust, sampler_WIND_SETTINGS_TexGust, worldPos.xz, LOD).x;
}

2
ScriptableRenderPipeline/Core/CoreRP/Textures/TextureCache.cs


{
case BuildTarget.iOS:
case BuildTarget.Android:
#if !UNITY_2018_2_OR_NEWER
#endif
case BuildTarget.WSAPlayer:
// Note: We return true on purpose even if Windows Store Apps are running on Desktop.
return true;

2
ScriptableRenderPipeline/Core/CoreRP/Textures/DepthBits.cs.meta


fileFormatVersion: 2
guid: 32875dc85f620f54e817e767811b5c2e
guid: d063f57ca4b7cd346a14a1be20de65b4
MonoImporter:
externalObjects: {}
serializedVersion: 2

30
ScriptableRenderPipeline/Core/CoreRP/Utilities/CoreUtils.cs


{
string msg = "AR/VR devices are not supported, no rendering will occur";
DisplayUnsupportedMessage(msg);
}
}
// Returns 'true' if "Animated Materials" are enabled for the view associated with the given camera.
public static bool AreAnimatedMaterialsEnabled(Camera camera)

}
}
}
// TODO: how to handle reflection views? We don't know the parent window they are being rendered into,
// so we don't know whether we can animate them...
//

#endif
return animateMaterials;
}
public static bool IsSceneViewFogEnabled(Camera camera)
{
bool fogEnable = true;
#if UNITY_EDITOR
fogEnable = Application.isPlaying;
if (camera.cameraType == CameraType.SceneView)
{
fogEnable = false;
// Determine whether the "Animated Materials" checkbox is checked for the current view.
foreach (UnityEditor.SceneView sv in Resources.FindObjectsOfTypeAll(typeof(UnityEditor.SceneView)))
{
if (sv.camera == camera && sv.sceneViewState.showFog)
{
fogEnable = true;
break;
}
}
}
#endif
return fogEnable;
}
}
}
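
The IsSceneViewFogEnabled() helper added here mirrors the existing AreAnimatedMaterialsEnabled() check: in the editor it reports fog as enabled only while playing, or for a Scene view camera whose view-state fog toggle is checked. A small illustrative call site follows; the keyword name and the surrounding class are assumptions, and CoreUtils.SetKeyword() exists in this codebase but is not part of this hunk.

using UnityEngine;
using UnityEngine.Rendering;
using UnityEngine.Experimental.Rendering;

static class FogSetupExample
{
    // Illustrative only: drive a hypothetical fog keyword from the new per-camera check.
    public static void ConfigureFog(CommandBuffer cmd, Camera camera)
    {
        bool fogEnabled = CoreUtils.IsSceneViewFogEnabled(camera);
        CoreUtils.SetKeyword(cmd, "APPLY_FOG", fogEnabled); // keyword name is an assumption
    }
}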

48
ScriptableRenderPipeline/HDRenderPipeline/CHANGELOG.md


All notable changes to this package will be documented in this file.
The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/)
and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html).
## [2018.2 undecided]
## [0.1.6] - 2018-xx-yy
## [2018.1 undecided]
### Changelog starting
### Bug fixes
- Fix ConvertPhysicalLightIntensityToLightIntensity() function used when creating light from script to match HDLightEditor behavior
Started Changelog
## [2018.1.0f2]
### Improvements
- Screen Space Refraction projection model (Proxy raycasting, HiZ raymarching)
- Screen Space Refraction settings as volume component
- Added buffered frame history per camera
- Port Global Density Volumes to the Interpolation Volume System.
- Optimize ImportanceSampleLambert() to not require the tangent frame.
- Generalize SampleVBuffer() to handle different sampling and reconstruction methods.
- Improve the quality of volumetric lighting reprojection.
- Optimize Morton Order code in the Subsurface Scattering pass.
- Planar Reflection Probe support roughness (gaussian convolution of captured probe)
- Use an atlas instead of a texture array for cluster transparent decals
- Add a debug view to visualize the decal atlas
- Only store decal textures to atlas if decal is visible, debounce out of memory decal atlas warning.
- Add manipulator gizmo on decal to improve authoring workflow
- Add a minimal StackLit material (work in progress, this version can be used as template to add new material)
### Changed, Removals and deprecations
- EnableShadowMask in FrameSettings (But shadowMaskSupport still disable by default)
- Forced Planar Probe update modes to (Realtime, Every Update, Mirror Camera)
- Removed Planar Probe mirror plane position and normal fields in inspector, always display mirror plane and normal gizmos
- Screen Space Refraction proxy model uses the proxy of the first environment light (Reflection probe/Planar probe) or the sky
- Moved RTHandle static methods to RTHandles
- Renamed RTHandle to RTHandleSystem.RTHandle
### Bug fixes
- Fix fog flags in the scene view so they are now taken into account
- Fix sky in preview windows that was disappearing after loading a new level
- Fix numerical issues in IntersectRayAABB().
- Fix alpha blending of volumetric lighting with transparent objects.
- Fix the near plane of the V-Buffer causing out-of-bounds look-ups in the clustered data structure.
- Depth and color pyramid are properly computed and sampled when the camera renders inside a viewport of a RTHandle.
## [2018.1.0b13]
...
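The RTHandle changes listed under "Changed, Removals and deprecations" (static methods moved to RTHandles, the handle type renamed to RTHandleSystem.RTHandle) are what most of the hunks below mechanically apply. A minimal migration sketch, with the buffer name and format chosen purely for illustration:

// Illustrative allocation only; "Example" and ARGBHalf are placeholders.
static RTHandleSystem.RTHandle AllocExampleBuffer(int width, int height, bool enableMSAA, MSAASamples msaaSamples)
{
    // Statics that used to live on RTHandle now live on RTHandles...
    RTHandles.SetReferenceSize(width, height, enableMSAA, msaaSamples);
    // ...and the handle type itself is now nested under RTHandleSystem.
    return RTHandles.Alloc(Vector2.one, filterMode: FilterMode.Point,
        colorFormat: RenderTextureFormat.ARGBHalf, sRGB: false, name: "Example");
}
// Release goes through RTHandles.Release(handle) instead of RTHandle.Release(handle).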

51
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Camera/HDCamera.cs


HDAdditionalCameraData m_AdditionalCameraData;
BufferedRTHandleSystem m_HistoryRTSystem = new BufferedRTHandleSystem();
public HDCamera(Camera cam)
{
camera = cam;

// Unfortunately, sometimes (like in the HDCameraEditor) HDUtils.hdrpSettings can be null because of scripts that change the current pipeline...
m_msaaSamples = HDUtils.hdrpSettings != null ? HDUtils.hdrpSettings.msaaSampleCount : MSAASamples.None;
RTHandle.SetReferenceSize(m_ActualWidth, m_ActualHeight, frameSettings.enableMSAA, m_msaaSamples);
RTHandles.SetReferenceSize(m_ActualWidth, m_ActualHeight, frameSettings.enableMSAA, m_msaaSamples);
m_HistoryRTSystem.SetReferenceSize(m_ActualWidth, m_ActualHeight, frameSettings.enableMSAA, m_msaaSamples);
m_HistoryRTSystem.Swap();
int maxWidth = RTHandle.maxWidth;
int maxHeight = RTHandle.maxHeight;
int maxWidth = RTHandles.maxWidth;
int maxHeight = RTHandles.maxHeight;
m_CameraScaleBias.x = (float)m_ActualWidth / maxWidth;
m_CameraScaleBias.y = (float)m_ActualHeight / maxHeight;

return hdcam;
}
public static void ClearAll()
{
foreach (var cam in s_Cameras)
cam.Value.ReleaseHistoryBuffer();
s_Cameras.Clear();
s_Cleanup.Clear();
}
// Look for any camera that hasn't been used in the last frame and remove it from the pool.
public static void CleanUnused()
{

{
if (kvp.Value.m_LastFrameActive != frameCheck)
if (kvp.Value.m_LastFrameActive < frameCheck)
{
var hdCam = s_Cameras[cam];
if (hdCam.m_HistoryRTSystem != null)
{
hdCam.m_HistoryRTSystem.Dispose();
hdCam.m_HistoryRTSystem = null;
}
}
s_Cleanup.Clear();
}

cmd.SetGlobalMatrixArray(HDShaderIDs._InvViewMatrixStereo, invViewStereo);
cmd.SetGlobalMatrixArray(HDShaderIDs._InvProjMatrixStereo, invProjStereo);
cmd.SetGlobalMatrixArray(HDShaderIDs._InvViewProjMatrixStereo, invViewProjStereo);
}
public RTHandleSystem.RTHandle GetPreviousFrameRT(int id)
{
return m_HistoryRTSystem.GetFrameRT(id, 1);
}
public RTHandleSystem.RTHandle GetCurrentFrameRT(int id)
{
return m_HistoryRTSystem.GetFrameRT(id, 0);
}
// Allocate buffer frames and return the current frame
public RTHandleSystem.RTHandle AllocHistoryFrameRT(int id, Func<string, int, RTHandleSystem, RTHandleSystem.RTHandle> allocator)
{
m_HistoryRTSystem.AllocBuffer(id, (rts, i) => allocator(camera.name, i, rts), 2);
return m_HistoryRTSystem.GetFrameRT(id, 0);
}
void ReleaseHistoryBuffer()
{
m_HistoryRTSystem.ReleaseAll();
}
}
}
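A hedged sketch of how callers use the buffered history added to HDCamera above: allocate the double-buffered RT once via AllocHistoryFrameRT with an allocator callback, then read slot 0 (current) or slot 1 (previous). The buffer id and allocator body are placeholders, and the sketch assumes RTHandleSystem exposes the same Alloc overload as the RTHandles statics; the pipeline itself uses HDCameraFrameHistoryType ids and the BufferPyramid allocators.

// kExampleHistoryId and the ARGBHalf allocator below are illustrative only.
const int kExampleHistoryId = 0;

RTHandleSystem.RTHandle GetOrAllocExampleHistory(HDCamera hdCamera)
{
    // Same pattern as the color/depth pyramid code later in this commit:
    // reuse the current-frame RT if present, otherwise allocate the history buffers.
    return hdCamera.GetCurrentFrameRT(kExampleHistoryId)
        ?? hdCamera.AllocHistoryFrameRT(kExampleHistoryId,
            (name, frameIndex, rtSystem) => rtSystem.Alloc(Vector2.one, filterMode: FilterMode.Point,
                colorFormat: RenderTextureFormat.ARGBHalf, sRGB: false,
                name: string.Format("{0}_ExampleHistory_{1}", name, frameIndex)));
}

// Last frame's version is then read with hdCamera.GetPreviousFrameRT(kExampleHistoryId).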

58
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Debug/DebugDisplay.cs


public static string k_PanelRendering = "Rendering";
public static string k_PanelScreenSpaceTracing = "Screen Space Tracing";
public static string k_PanelDecals = "Decals";
//static readonly string[] k_HiZIntersectionKind = { "None", "Depth", "Cell" };

DebugUI.Widget[] m_DebugRenderingItems;
DebugUI.Widget[] m_DebugScreenSpaceTracingItems;
DebugUI.Widget[] m_DebugDecalsItems;
public float debugOverlayRatio = 0.33f;
public FullScreenDebugMode fullScreenDebugMode = FullScreenDebugMode.None;

public LightingDebugSettings lightingDebugSettings = new LightingDebugSettings();
public MipMapDebugSettings mipMapDebugSettings = new MipMapDebugSettings();
public ColorPickerDebugSettings colorPickerDebugSettings = new ColorPickerDebugSettings();
public DecalsDebugSettings decalsDebugSettings = new DecalsDebugSettings();
public static GUIContent[] lightingFullScreenDebugStrings = null;
public static int[] lightingFullScreenDebugValues = null;

Lit.RefractionSSRayModel m_LastSSRayModel = Lit.RefractionSSRayModel.None;
ScreenSpaceTracingDebug m_ScreenSpaceTracingDebugData;
public ScreenSpaceTracingDebug screenSpaceTracingDebugData
internal set
m_ScreenSpaceTracingDebugData = value;
if (m_LastSSRayModel != m_ScreenSpaceTracingDebugData.tracingModel)
{
m_LastSSRayModel = m_ScreenSpaceTracingDebugData.tracingModel;

return materialDebugSettings.IsDebugDisplayEnabled() || lightingDebugSettings.IsDebugDisplayEnabled() || mipMapDebugSettings.IsDebugDisplayEnabled() || IsDebugFullScreenEnabled();
}
public bool IsDebugDisplayRemovePostprocess()
{
// We want to keep post-processing when only the override modes are enabled and none of the other debug modes are
return materialDebugSettings.IsDebugDisplayEnabled() || lightingDebugSettings.IsDebugDisplayRemovePostprocess() || mipMapDebugSettings.IsDebugDisplayEnabled() || IsDebugFullScreenEnabled();
}
public bool IsDebugMaterialDisplayEnabled()
{
return materialDebugSettings.IsDebugDisplayEnabled();

bool IsScreenSpaceTracingRefractionDebugEnabled()
{
return fullScreenDebugMode == FullScreenDebugMode.ScreenSpaceTracing
&& lightingDebugSettings.debugLightingMode == DebugLightingMode.ScreenSpaceTracingRefraction;
}

public void UpdateMaterials()
{
//if (mipMapDebugSettings.debugMipMapMode != 0)
// Texture.SetStreamingTextureMaterialDebugProperties();
#if UNITY_2018_2_OR_NEWER
if (mipMapDebugSettings.debugMipMapMode != 0)
Texture.SetStreamingTextureMaterialDebugProperties();
#endif
}
public bool DebugNeedsExposure()

var refractionContainer = new DebugUI.Container
{
displayName = "Refraction",
children =
{
new DebugUI.BoolField { displayName = "Debug Enabled", getter = IsScreenSpaceTracingRefractionDebugEnabled, setter = SetScreenSpaceTracingRefractionDebugEnabled, onValueChanged = RefreshScreenSpaceTracingDebug },
}

RegisterScreenSpaceTracingDebug();
}
void RefreshDecalsDebug<T>(DebugUI.Field<T> field, T value)
{
UnregisterDebugItems(k_PanelDecals, m_DebugDecalsItems);
RegisterDecalsDebug();
}
public void RegisterLightingDebug()
{
var list = new List<DebugUI.Widget>();

list.Add(new DebugUI.FloatField { displayName = "Shadow Range Max Value", getter = () => lightingDebugSettings.shadowMaxValue, setter = value => lightingDebugSettings.shadowMaxValue = value });
list.Add(new DebugUI.EnumField { displayName = "Lighting Debug Mode", getter = () => (int)lightingDebugSettings.debugLightingMode, setter = value => SetDebugLightingMode((DebugLightingMode)value), autoEnum = typeof(DebugLightingMode), onValueChanged = RefreshLightingDebug });
if (lightingDebugSettings.debugLightingMode == DebugLightingMode.EnvironmentProxyVolume)
{
list.Add(new DebugUI.Container
{
children =
{
new DebugUI.FloatField { displayName = "Debug Environment Proxy Depth Scale", getter = () => lightingDebugSettings.environmentProxyDepthScale, setter = value => lightingDebugSettings.environmentProxyDepthScale = value, min = () => 0.1f, max = () => 50f }
}
});
}
list.Add(new DebugUI.EnumField { displayName = "Fullscreen Debug Mode", getter = () => (int)fullScreenDebugMode, setter = value => fullScreenDebugMode = (FullScreenDebugMode)value, enumNames = lightingFullScreenDebugStrings, enumValues = lightingFullScreenDebugValues, onValueChanged = RefreshLightingDebug });
switch (fullScreenDebugMode)
{

panel.children.Add(m_DebugRenderingItems);
}
public void RegisterDecalsDebug()
{
m_DebugDecalsItems = new DebugUI.Widget[]
{
new DebugUI.BoolField { displayName = "Display atlas", getter = () => decalsDebugSettings.m_DisplayAtlas, setter = value => decalsDebugSettings.m_DisplayAtlas = value},
new DebugUI.UIntField { displayName = "Mip Level", getter = () => decalsDebugSettings.m_MipLevel, setter = value => decalsDebugSettings.m_MipLevel = value, min = () => 0u, max = () => (uint)(RenderPipelineManager.currentPipeline as HDRenderPipeline).GetDecalAtlasMipCount() }
};
var panel = DebugManager.instance.GetPanel(k_PanelDecals, true);
panel.children.Add(m_DebugDecalsItems);
}
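A hedged sketch of the DebugUI registration pattern used above; the panel name, field label, and backing field are illustrative only:

static bool s_ExampleToggle;

static void RegisterExampleDebugPanel()
{
    var widgets = new DebugUI.Widget[]
    {
        new DebugUI.BoolField
        {
            displayName = "Example Toggle",
            getter = () => s_ExampleToggle,
            setter = value => s_ExampleToggle = value
        }
    };
    // GetPanel(name, true) creates the panel if it does not exist yet.
    var panel = DebugManager.instance.GetPanel("Example Panel", true);
    panel.children.Add(widgets);
}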
RegisterDecalsDebug();
RegisterDisplayStatsDebug();
RegisterMaterialDebug();
RegisterLightingDebug();

public void UnregisterDebug()
{
UnregisterDebugItems(k_PanelDecals, m_DebugDecalsItems);
UnregisterDebugItems(k_PanelDisplayStats, m_DebugDisplayStatsItems);
UnregisterDebugItems(k_PanelMaterials, m_DebugMaterialItems);
UnregisterDebugItems(k_PanelLighting, m_DebugLightingItems);

1
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Debug/DebugDisplay.hlsl


float4 _DebugLightingSpecularColor; // x == bool override, yzw = specular color
float4 _MousePixelCoord; // xy unorm, zw norm
float4 _MouseClickPixelCoord; // xy unorm, zw norm
float _DebugEnvironmentProxyDepthScale;
float _DebugExposure;
CBUFFER_END

1
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Debug/DebugFullScreen.shader


CBUFFER_END
TEXTURE2D(_DebugFullScreenTexture);
TEXTURE2D(_DepthPyramidTexture);
StructuredBuffer<ScreenSpaceTracingDebug> _DebugScreenSpaceTracingData;
struct Attributes

15
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Debug/LightingDebug.cs


SpecularLighting,
LuxMeter,
VisualizeCascade,
IndirectDiffuseOcclusionFromSsao,
IndirectDiffuseGtaoFromSsao,
IndirectSpecularOcclusionFromSsao,
IndirectSpecularGtaoFromSsao,
EnvironmentProxyVolume,
EnvironmentSampleCoordinates,
IndirectDiffuseOcclusion,
IndirectSpecularOcclusion,
ScreenSpaceTracingRefraction,
ScreenSpaceTracingReflection
}

{
public bool IsDebugDisplayEnabled()
{
return debugLightingMode != DebugLightingMode.None || overrideSmoothness || overrideAlbedo || overrideNormal;
return debugLightingMode != DebugLightingMode.None || overrideSmoothness || overrideAlbedo || overrideNormal || overrideSpecularColor;
}
public bool IsDebugDisplayRemovePostprocess()
{
return debugLightingMode != DebugLightingMode.None;
}
public DebugLightingMode debugLightingMode = DebugLightingMode.None;

12
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Debug/LightingDebug.cs.hlsl


#define DEBUGLIGHTINGMODE_SPECULAR_LIGHTING (2)
#define DEBUGLIGHTINGMODE_LUX_METER (3)
#define DEBUGLIGHTINGMODE_VISUALIZE_CASCADE (4)
#define DEBUGLIGHTINGMODE_INDIRECT_DIFFUSE_OCCLUSION_FROM_SSAO (5)
#define DEBUGLIGHTINGMODE_INDIRECT_DIFFUSE_GTAO_FROM_SSAO (6)
#define DEBUGLIGHTINGMODE_INDIRECT_SPECULAR_OCCLUSION_FROM_SSAO (7)
#define DEBUGLIGHTINGMODE_INDIRECT_SPECULAR_GTAO_FROM_SSAO (8)
#define DEBUGLIGHTINGMODE_ENVIRONMENT_PROXY_VOLUME (9)
#define DEBUGLIGHTINGMODE_ENVIRONMENT_SAMPLE_COORDINATES (10)
#define DEBUGLIGHTINGMODE_SCREEN_SPACE_TRACING_REFRACTION (11)
#define DEBUGLIGHTINGMODE_SCREEN_SPACE_TRACING_REFLECTION (12)
#define DEBUGLIGHTINGMODE_INDIRECT_DIFFUSE_OCCLUSION (5)
#define DEBUGLIGHTINGMODE_INDIRECT_SPECULAR_OCCLUSION (6)
#define DEBUGLIGHTINGMODE_SCREEN_SPACE_TRACING_REFRACTION (7)
#define DEBUGLIGHTINGMODE_SCREEN_SPACE_TRACING_REFLECTION (8)
//
// UnityEngine.Experimental.Rendering.HDPipeline.DebugScreenSpaceTracing: static fields

3
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Debug/MaterialDebug.cs.hlsl


#define DEBUGVIEWVARYING_VERTEX_NORMAL_WS (7)
#define DEBUGVIEWVARYING_VERTEX_COLOR (8)
#define DEBUGVIEWVARYING_VERTEX_COLOR_ALPHA (9)
#define DEBUGVIEWVARYING_LAST (10)
//
// UnityEngine.Experimental.Rendering.HDPipeline.Attributes.DebugViewGbuffer: static fields

#define DEBUGVIEWGBUFFER_BAKE_SHADOW_MASK1 (13)
#define DEBUGVIEWGBUFFER_BAKE_SHADOW_MASK2 (14)
#define DEBUGVIEWGBUFFER_BAKE_SHADOW_MASK3 (15)
#define DEBUGVIEWGBUFFER_LAST (16)
//
// UnityEngine.Experimental.Rendering.HDPipeline.Attributes.DebugViewProperties: static fields

#define DEBUGVIEWPROPERTIES_DEPTH_OFFSET (20)
#define DEBUGVIEWPROPERTIES_LIGHTMAP (21)
#define DEBUGVIEWPROPERTIES_INSTANCING (22)
#define DEBUGVIEWPROPERTIES_LAST (23)
#endif

3
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Decal/DecalProjectorComponent.cs


private void DrawGizmo(bool selected)
{
var col = new Color(0.0f, 0.7f, 1f, 1.0f);
col.a = selected ? 0.3f : 0.1f;
Gizmos.color = col;
Gizmos.DrawCube(Vector3.zero, Vector3.one);
col.a = selected ? 0.5f : 0.2f;
Gizmos.color = col;
Gizmos.DrawWireCube(Vector3.zero, Vector3.one);

26
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Decal/DecalSystem.cs


private Texture2DAtlas m_Atlas = null;
public bool m_AllocationSuccess = true;
public bool m_PrevAllocationSuccess = true;
public Texture2DAtlas Atlas
{

{
if (m_NumResults == 0)
return;
// only add if anything in this decal set is visible.
AddToTextureList(ref instance.m_TextureList);
int instanceCount = 0;
int batchCount = 0;
Matrix4x4[] decalToWorldBatch = null;

textureList.Add(m_Mask);
}
}
public void RenderIntoDBuffer(CommandBuffer cmd)
{

// updates textures, texture atlas indices and blend value
public void UpdateCachedMaterialData()
{
//instance.m_AllocationSuccess = true;
pair.Value.InitializeMaterialValues();
pair.Value.AddToTextureList(ref m_TextureList);
pair.Value.InitializeMaterialValues();
}
}

AddTexture(cmd, textureScaleBias);
}
if(!m_AllocationSuccess) // still failed to allocate, decal atlas size needs to increase
if(!m_AllocationSuccess && m_PrevAllocationSuccess) // still failed to allocate, decal atlas size needs to increase, debounce so that we don't spam the console with warnings
m_PrevAllocationSuccess = m_AllocationSuccess;
}
public void CreateDrawData()

// set to null so that they get recreated
m_DecalMesh = null;
m_Atlas = null;
}
public void RenderDebugOverlay(HDCamera hdCamera, CommandBuffer cmd, DebugDisplaySettings debugDisplaySettings, ref float x, ref float y, float overlaySize, float width)
{
if(debugDisplaySettings.decalsDebugSettings.m_DisplayAtlas)
{
using (new ProfilingSample(cmd, "Display Decal Atlas", CustomSamplerId.DisplayDebugDecalsAtlas.GetSampler()))
{
HDUtils.BlitQuad(cmd, Atlas.AtlasTexture, new Vector4(1,1,0,0), new Vector4(width / hdCamera.actualWidth, overlaySize / hdCamera.actualHeight, x / hdCamera.actualWidth, y / hdCamera.actualHeight), (int)debugDisplaySettings.decalsDebugSettings.m_MipLevel, true);
HDUtils.NextOverlayCoord(ref x, ref y, overlaySize, overlaySize, hdCamera.actualWidth);
}
}
}
}
}
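The m_AllocationSuccess / m_PrevAllocationSuccess pair above is an edge-triggered debounce: the atlas warning fires only on the frame where allocation flips from succeeding to failing, instead of every frame. A minimal sketch of the same idea, with the class name and warning text standing in for whatever DecalSystem actually logs:

using UnityEngine;

// Illustrative only; mirrors the m_AllocationSuccess/m_PrevAllocationSuccess pattern above.
class AtlasAllocationWatchdog
{
    bool m_AllocationSuccess = true;
    bool m_PrevAllocationSuccess = true;

    public void Report(bool allocationSucceeded)
    {
        m_AllocationSuccess = allocationSucceeded;
        // Warn once per failure streak instead of spamming the console every frame.
        if (!m_AllocationSuccess && m_PrevAllocationSuccess)
            Debug.LogWarning("Decal atlas is out of space; increase the atlas size."); // placeholder text
        m_PrevAllocationSuccess = m_AllocationSuccess;
    }
}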

31
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/Lighting/HDLightEditor.cs


public SerializedProperty directionalIntensity;
public SerializedProperty punctualIntensity;
public SerializedProperty areaIntensity;
public SerializedProperty enableSpotReflector;
public SerializedProperty spotInnerPercent;
public SerializedProperty lightDimmer;
public SerializedProperty fadeDistance;

public SerializedProperty spotLightShape;
public SerializedProperty enableSpotReflector;
public SerializedProperty shapeWidth;
public SerializedProperty shapeHeight;
public SerializedProperty aspectRatio;

directionalIntensity = o.Find(x => x.directionalIntensity),
punctualIntensity = o.Find(x => x.punctualIntensity),
areaIntensity = o.Find(x => x.areaIntensity),
enableSpotReflector = o.Find(x => x.enableSpotReflector),
spotInnerPercent = o.Find(x => x.m_InnerSpotPercent),
lightDimmer = o.Find(x => x.lightDimmer),
fadeDistance = o.Find(x => x.fadeDistance),

spotLightShape = o.Find(x => x.spotLightShape),
enableSpotReflector = o.Find(x => x.enableSpotReflector),
spotLightShape = o.Find(x => x.spotLightShape),
shapeWidth = o.Find(x => x.shapeWidth),
shapeHeight = o.Find(x => x.shapeHeight),
aspectRatio = o.Find(x => x.aspectRatio),

if (spotLightShape == SpotLightShape.Cone)
{
settings.DrawSpotAngle();
EditorGUILayout.PropertyField(m_AdditionalLightData.enableSpotReflector, s_Styles.enableSpotReflector);
EditorGUILayout.Slider(m_AdditionalLightData.spotInnerPercent, 0f, 100f, s_Styles.spotInnerPercent);
}
// TODO : replace with angle and ratio

EditorGUILayout.PropertyField(m_AdditionalLightData.enableSpotReflector, s_Styles.enableSpotReflector);
EditorGUILayout.Slider(m_AdditionalLightData.aspectRatio, 0.05f, 20.0f, s_Styles.aspectRatioPyramid);
}
else if (spotLightShape == SpotLightShape.Box)

}
else if (spotLightShape == SpotLightShape.Pyramid)
{
var aspectRatio = m_AdditionalLightData.aspectRatio.floatValue;
// Since the smallest angle equals the FOV and we don't care about the angle order, simply make sure the aspect ratio is > 1
if ( aspectRatio < 1f ) aspectRatio = 1f/aspectRatio;
var angleA = settings.spotAngle.floatValue * Mathf.Deg2Rad;
var halfAngle = angleA * 0.5f; // half of the smallest angle
var length = Mathf.Tan(halfAngle); // half length of the smallest side of the rectangle
length *= aspectRatio; // half length of the biggest side of the rectangle
halfAngle = Mathf.Atan(length); // half of the biggest angle
var angleB = halfAngle * 2f;
float angleA, angleB;
LightUtils.CalculateAnglesForPyramid( m_AdditionalLightData.aspectRatio.floatValue, settings.spotAngle.floatValue,
out angleA, out angleB);
settings.intensity.floatValue = LightUtils.ConvertFrustrumLightIntensity(m_AdditionalLightData.punctualIntensity.floatValue, angleA, angleB );
}
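The inline math deleted above is presumably what moved into LightUtils.CalculateAnglesForPyramid: given the smallest frustum angle (the spot angle) and the aspect ratio, derive both frustum angles of the pyramid. A hedged reconstruction as a standalone helper; the name and exact signature are hypothetical and may not match LightUtils:

// Hypothetical helper reproducing the deleted inline math; spot angle is in degrees, outputs are in radians.
static void CalculatePyramidAngles(float aspectRatio, float spotAngleDeg, out float angleA, out float angleB)
{
    // The smallest angle equals the FOV and the angle order does not matter, so force aspectRatio > 1.
    if (aspectRatio < 1f)
        aspectRatio = 1f / aspectRatio;

    angleA = spotAngleDeg * Mathf.Deg2Rad;       // smallest frustum angle
    float halfLength = Mathf.Tan(angleA * 0.5f); // half length of the smallest side of the rectangle
    halfLength *= aspectRatio;                   // half length of the biggest side of the rectangle
    angleB = Mathf.Atan(halfLength) * 2f;        // biggest frustum angle
}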

case LightShape.Point:
case LightShape.Spot:
EditorGUILayout.PropertyField(m_AdditionalLightData.punctualIntensity, s_Styles.punctualIntensity);
// Only display the reflector option if it makes sense
if (m_LightShape == LightShape.Spot)
{
var spotLightShape = (SpotLightShape)m_AdditionalLightData.spotLightShape.enumValueIndex;
if (spotLightShape == SpotLightShape.Cone || spotLightShape == SpotLightShape.Pyramid)
EditorGUILayout.PropertyField(m_AdditionalLightData.enableSpotReflector, s_Styles.enableSpotReflector);
}
break;
case LightShape.Rectangle:

66
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/Material/Decal/DecalProjectorComponentEditor.cs


using UnityEngine.Experimental.Rendering;
using UnityEngine.Experimental.Rendering.HDPipeline;
using UnityEditor;
using UnityEditor.IMGUI.Controls;
namespace UnityEditor.Experimental.Rendering.HDPipeline
{

private SerializedProperty m_DrawDistanceProperty;
private SerializedProperty m_FadeScaleProperty;
public class DecalBoundsHandle : BoxBoundsHandle
{
protected override Bounds OnHandleChanged(HandleDirection handle, Bounds boundsOnClick, Bounds newBounds)
{
// special case for Y axis because decal mesh is centered at 0, -0.5, 0
if (handle == HandleDirection.NegativeY)
{
m_Translation = Vector3.zero;
m_Scale = newBounds.size;
}
else if (handle == HandleDirection.PositiveY)
{
m_Translation = (newBounds.center + newBounds.extents - (m_Center + 0.5f * m_Size));
m_Scale = (m_Size + m_Translation);
}
else
{
m_Translation = newBounds.center - m_Center;
m_Scale = newBounds.size;
}
return newBounds;
}
public void SetSizeAndCenter(Vector3 inSize, Vector3 inCenter)
{
// boundsOnClick is only refreshed when the handle is clicked again, but we need the actual center and scale, so we set them before the handle is drawn every frame
m_Center = inCenter;
m_Size = inSize;
center = inCenter;
size = inSize;
}
private Vector3 m_Center;
private Vector3 m_Size;
public Vector3 m_Translation;
public Vector3 m_Scale;
}
private DecalBoundsHandle m_Handle = new DecalBoundsHandle();
private void OnEnable()
{

// Update material editor with the new material
m_MaterialEditor = (MaterialEditor)CreateEditor(m_DecalProjectorComponent.Mat);
}
void OnSceneGUI()
{
EditorGUI.BeginChangeCheck();
var mat = Handles.matrix;
var col = Handles.color;
Handles.color = Color.white;
// decal mesh is centered at (0, -0.5, 0)
// zero out the local scale in the matrix so that handle code gives us back the actual scale
Handles.matrix = Matrix4x4.TRS(m_DecalProjectorComponent.transform.position, m_DecalProjectorComponent.transform.rotation, Vector3.one) * Matrix4x4.Translate(new Vector3(0.0f, -0.5f* m_DecalProjectorComponent.transform.localScale.y, 0.0f));
// pass in the scale
m_Handle.SetSizeAndCenter(m_DecalProjectorComponent.transform.localScale, Vector3.zero);
m_Handle.DrawHandle();
if (EditorGUI.EndChangeCheck())
{
// adjust decal transform if handle changed
m_DecalProjectorComponent.transform.Translate(m_Handle.m_Translation);
m_DecalProjectorComponent.transform.localScale = m_Handle.m_Scale;
Repaint();
}
Handles.matrix = mat;
Handles.color = col;
}
public override void OnInspectorGUI()
{

265
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/Material/StackLit/StackLitUI.cs


using System;
class StackLitGUI : BaseUnlitGUI
class StackLitGUI : BaseMaterialGUI
protected static class Styles
#region Strings
protected const string k_DoubleSidedNormalMode = "_DoubleSidedNormalMode";
protected const string k_UVBase = "_UVBase";
// Base
protected const string k_BaseColor = "_BaseColor";
protected const string k_BaseColorMap = "_BaseColorMap";
protected const string k_Metallic = "_Metallic";
protected const string k_MetallicMap = "_MetallicMap";
protected const string k_MetallicRemapMin = "_MetallicRemap";
protected const string k_Smoothness1 = "_SmoothnessA";
protected const string k_Smoothness1Map = "_SmoothnessAMap";
protected const string k_Smoothness1RemapMin = "_SmoothnessARemap";
protected const string k_NormalMap = "_NormalMap";
protected const string k_NormalScale = "_NormalScale";
// Emissive
protected const string k_EmissiveColor = "_EmissiveColor";
protected const string k_EmissiveColorMap = "_EmissiveColorMap";
protected const string k_EmissiveIntensity = "_EmissiveIntensity";
protected const string k_AlbedoAffectEmissive = "_AlbedoAffectEmissive";
// SSS
protected const string k_DiffusionProfileName = "_DiffusionProfile";
protected const string k_SubsurfaceMaskName = "_SubsurfaceMask";
protected const string k_SubsurfaceMaskMap = "_SubsurfaceMaskMap";
protected const string k_ThicknessName = "_Thickness";
protected const string k_ThicknessMapName = "_ThicknessMap";
protected const string k_ThicknessRemapName = "_ThicknessRemap";
// Second Lobe.
protected const string k_Smoothness2 = "_SmoothnessB";
protected const string k_Smoothness2Map = "_SmoothnessBMap";
protected const string k_SmoothnessRemap2Min = "_SmoothnessBRemap";
protected const string k_LobeMix = "_LobeMix";
//// transparency params
//protected MaterialProperty transmissionEnable = null;
//protected const string kTransmissionEnable = "_TransmissionEnable";
//protected MaterialProperty ior = null;
//protected const string kIor = "_Ior";
//protected MaterialProperty transmittanceColor = null;
//protected const string kTransmittanceColor = "_TransmittanceColor";
//protected MaterialProperty transmittanceColorMap = null;
//protected const string kTransmittanceColorMap = "_TransmittanceColorMap";
//protected MaterialProperty atDistance = null;
//protected const string kATDistance = "_ATDistance";
//protected MaterialProperty thicknessMultiplier = null;
//protected const string kThicknessMultiplier = "_ThicknessMultiplier";
//protected MaterialProperty refractionModel = null;
//protected const string kRefractionModel = "_RefractionModel";
//protected MaterialProperty refractionSSRayModel = null;
//protected const string kRefractionSSRayModel = "_RefractionSSRayModel";
#endregion
// Add the properties into an array.
private readonly GroupProperty _baseMaterialProperties = null;
private readonly GroupProperty _materialProperties = null;
public StackLitGUI()
public static string InputsText = "Inputs";
_baseMaterialProperties = new GroupProperty(this, new BaseProperty[]
{
// JFFTODO: Find the proper condition, and proper way to display this.
new Property(this, k_DoubleSidedNormalMode, "Normal mode", "This will modify the normal based on the selected mode. Mirror: mirror the normal with the vertex normal plane; Flip: flip the normal.", false),
});
public static GUIContent baseColorText = new GUIContent("Base Color + Opacity", "Albedo (RGB) and Opacity (A)");
_materialProperties = new GroupProperty(this, new BaseProperty[]
{
new GroupProperty(this, "Standard", new BaseProperty[]
{
new TextureProperty(this, k_BaseColorMap, k_BaseColor, "Base Color + Opacity", "Albedo (RGB) and Opacity (A)", false),
new TextureProperty(this, k_MetallicMap, k_Metallic, "Metallic", "Metallic", false),
new TextureProperty(this, k_Smoothness1Map, k_Smoothness1, "Smoothness", "Smoothness", false),
// TODO: Special case for normal maps.
new TextureProperty(this, k_NormalMap, k_NormalScale, "Normal TODO", "Normal Map", false, true),
public static GUIContent emissiveText = new GUIContent("Emissive Color", "Emissive");
public static GUIContent emissiveIntensityText = new GUIContent("Emissive Intensity", "Emissive");
public static GUIContent albedoAffectEmissiveText = new GUIContent("Albedo Affect Emissive", "Specifies whether or not the emissive color is multiplied by the albedo.");
}
//new TextureProperty(this, k_BaseColorMap, k_BaseColor, "Dielectric IoR", "Index of Refraction for Dielectric", false),
}),
new GroupProperty(this, "Emissive", new BaseProperty[]
{
new TextureProperty(this, k_EmissiveColorMap, k_EmissiveColor, "Emissive Color", "Emissive", false),
new Property(this, k_EmissiveIntensity, "Emissive Intensity", "Emissive", false),
new Property(this, k_AlbedoAffectEmissive, "Albedo Affect Emissive", "Specifies whether or not the emissive color is multiplied by the albedo.", false),
}),
protected MaterialProperty baseColor = null;
protected const string kBaseColor = "_BaseColor";
protected MaterialProperty baseColorMap = null;
protected const string kBaseColorMap = "_BaseColorMap";
//new GroupProperty(this, "Coat", new BaseProperty[]
//{
// new TextureProperty(this, k_BaseColorMap, k_BaseColor, "SmoothnessCoat", "smoothnessCoat", false),
// new TextureProperty(this, k_BaseColorMap, k_BaseColor, "Index Of Refraction", "iorCoat", false),
// new TextureProperty(this, k_BaseColorMap, k_BaseColor, "Normal", "normal Coat", false),
//}),
protected MaterialProperty emissiveColor = null;
protected const string kEmissiveColor = "_EmissiveColor";
protected MaterialProperty emissiveColorMap = null;
protected const string kEmissiveColorMap = "_EmissiveColorMap";
protected MaterialProperty emissiveIntensity = null;
protected const string kEmissiveIntensity = "_EmissiveIntensity";
protected MaterialProperty albedoAffectEmissive = null;
protected const string kAlbedoAffectEmissive = "_AlbedoAffectEmissive";
new GroupProperty(this, "Sub-Surface Scattering", new BaseProperty[]
{
new DiffusionProfileProperty(this, k_DiffusionProfileName, "Diffusion Profile", "A profile determines the shape of the SSS/transmission filter.", false),
new TextureProperty(this, k_SubsurfaceMaskName, "Subsurface mask map (R)", "Determines the strength of the subsurface scattering effect.", false),
}/*, _ => _materialId == MaterialId.SubSurfaceScattering*/),
override protected void FindMaterialProperties(MaterialProperty[] props)
{
baseColor = FindProperty(kBaseColor, props);
baseColorMap = FindProperty(kBaseColorMap, props);
new GroupProperty(this, "Second Specular Lobe", new BaseProperty[]
{
new TextureProperty(this, k_Smoothness2Map, k_Smoothness2, "Smoothness2", "Smoothness2", false),
new Property(this, k_LobeMix, "Lobe Mix", "Lobe Mix", false),
}),
emissiveColor = FindProperty(kEmissiveColor, props);
emissiveColorMap = FindProperty(kEmissiveColorMap, props);
emissiveIntensity = FindProperty(kEmissiveIntensity, props);
albedoAffectEmissive = FindProperty(kAlbedoAffectEmissive, props);
}
//new GroupProperty(this, "Anisotropy", new BaseProperty[]
//{
// new TextureProperty(this, k_BaseColorMap, k_BaseColor, "Anisotropy Strength", "anisotropy strength", false),
// new TextureProperty(this, k_BaseColorMap, k_BaseColor, "Rotation", "rotation", false),
// new TextureProperty(this, k_BaseColorMap, k_BaseColor, "Tangent", "tangent", false),
//}),
protected override void MaterialPropertiesGUI(Material material)
{
EditorGUILayout.LabelField(Styles.InputsText, EditorStyles.boldLabel);
new GroupProperty(this, "Transmission", new BaseProperty[]
{
new DiffusionProfileProperty(this, k_DiffusionProfileName, "Diffusion Profile", "A profile determines the shape of the SSS/transmission filter.", false),
new TextureProperty(this, k_ThicknessName, "Thickness", "If subsurface scattering is enabled, low values allow some light to be transmitted through the object.", false),
}),
m_MaterialEditor.TexturePropertySingleLine(Styles.baseColorText, baseColorMap, baseColor);
m_MaterialEditor.TextureScaleOffsetProperty(baseColorMap);
//new GroupProperty(this, "Iridescence", new BaseProperty[]
//{
// new TextureProperty(this, k_BaseColorMap, k_BaseColor, "Index of Refraction", "Index of Refraction for Iridescence", false),
// new TextureProperty(this, k_BaseColorMap, k_BaseColor, "Thickness", "Thickness", false),
//}),
m_MaterialEditor.TexturePropertySingleLine(Styles.emissiveText, emissiveColorMap, emissiveColor);
m_MaterialEditor.TextureScaleOffsetProperty(emissiveColorMap);
m_MaterialEditor.ShaderProperty(emissiveIntensity, Styles.emissiveIntensityText);
m_MaterialEditor.ShaderProperty(albedoAffectEmissive, Styles.albedoAffectEmissiveText);
//new GroupProperty(this, "Glint", new BaseProperty[]
//{
// new TextureProperty(this, k_BaseColorMap, k_BaseColor, "Density", "Density:", false),
// new TextureProperty(this, k_BaseColorMap, k_BaseColor, "Tint", "Tint", false),
//}),
});
}
var surfaceTypeValue = (SurfaceType)surfaceType.floatValue;
if (surfaceTypeValue == SurfaceType.Transparent)
{
EditorGUILayout.Space();
EditorGUILayout.LabelField(StylesBaseUnlit.TransparencyInputsText, EditorStyles.boldLabel);
++EditorGUI.indentLevel;
protected override bool ShouldEmissionBeEnabled(Material material)
{
return material.GetFloat(k_EmissiveIntensity) > 0.0f;
}
DoDistortionInputsGUI();
protected override void FindBaseMaterialProperties(MaterialProperty[] props)
{
base.FindBaseMaterialProperties(props);
_baseMaterialProperties.OnFindProperty(props);
}
--EditorGUI.indentLevel;
protected override void FindMaterialProperties(MaterialProperty[] props)
{
//base.FindMaterialProperties(props);
_materialProperties.OnFindProperty(props);
}
protected override void MaterialPropertiesAdvanceGUI(Material material)
protected override void BaseMaterialPropertiesGUI()
base.BaseMaterialPropertiesGUI();
_baseMaterialProperties.OnGUI();
protected override void VertexAnimationPropertiesGUI()
protected override void MaterialPropertiesGUI(Material material)
_materialProperties.OnGUI();
}
protected override void MaterialPropertiesAdvanceGUI(Material material)
{
protected override bool ShouldEmissionBeEnabled(Material mat)
protected override void VertexAnimationPropertiesGUI()
return mat.GetFloat(kEmissiveIntensity) > 0.0f;
}
protected override void SetupMaterialKeywordsAndPassInternal(Material material)

// All Setup Keyword functions must be static. This allows scripts to automatically update the shaders if the code changes.
static public void SetupMaterialKeywordsAndPass(Material material)
{
//TODO see BaseLitUI.cs:SetupBaseLitKeywords (stencil etc)
CoreUtils.SetKeyword(material, "_EMISSIVE_COLOR_MAP", material.GetTexture(kEmissiveColorMap));
bool doubleSidedEnable = material.GetFloat(kDoubleSidedEnable) > 0.0f;
if (doubleSidedEnable)
{
BaseLitGUI.DoubleSidedNormalMode doubleSidedNormalMode = (BaseLitGUI.DoubleSidedNormalMode)material.GetFloat(k_DoubleSidedNormalMode);
switch (doubleSidedNormalMode)
{
case BaseLitGUI.DoubleSidedNormalMode.Mirror: // Mirror mode (in tangent space)
material.SetVector("_DoubleSidedConstants", new Vector4(1.0f, 1.0f, -1.0f, 0.0f));
break;
case BaseLitGUI.DoubleSidedNormalMode.Flip: // Flip mode (in tangent space)
material.SetVector("_DoubleSidedConstants", new Vector4(-1.0f, -1.0f, -1.0f, 0.0f));
break;
case BaseLitGUI.DoubleSidedNormalMode.None: // None mode (in tangent space)
material.SetVector("_DoubleSidedConstants", new Vector4(1.0f, 1.0f, 1.0f, 0.0f));
break;
}
}
//NOTE: For SSS in forward and split lighting, obviously we don't have a gbuffer pass,
// so no stencil tagging there, but velocity? To check...
//TODO: stencil state, displacement, wind, depthoffset, tessellation
SetupMainTexForAlphaTestGI("_BaseColorMap", "_BaseColor", material);
//TODO: disable DBUFFER
CoreUtils.SetKeyword(material, "_NORMALMAP_TANGENT_SPACE", true);
CoreUtils.SetKeyword(material, "_NORMALMAP", material.GetTexture(k_NormalMap));
CoreUtils.SetKeyword(material, "_SMOOTHNESSMASKMAPA", material.GetTexture(k_Smoothness1Map));
CoreUtils.SetKeyword(material, "_SMOOTHNESSMASKMAPB", material.GetTexture(k_Smoothness2Map));
CoreUtils.SetKeyword(material, "_METALLICMAP", material.GetTexture(k_MetallicMap));
CoreUtils.SetKeyword(material, "_EMISSIVE_COLOR_MAP", material.GetTexture(k_EmissiveColorMap));
//bool needUV2 = (LitGUI.UVBaseMapping)material.GetFloat(k_UVBase) == LitGUI.UVBaseMapping.UV2;
//bool needUV3 = (LitGUI.UVBaseMapping)material.GetFloat(k_UVBase) == LitGUI.UVBaseMapping.UV3;
//if (needUV3)
//{
// material.DisableKeyword("_REQUIRE_UV2");
// material.EnableKeyword("_REQUIRE_UV3");
//}
//else if (needUV2)
//{
// material.EnableKeyword("_REQUIRE_UV2");
// material.DisableKeyword("_REQUIRE_UV3");
//}
//else
//{
// material.DisableKeyword("_REQUIRE_UV2");
// material.DisableKeyword("_REQUIRE_UV3");
//}
CoreUtils.SetKeyword(material, "_EMISSIVE_COLOR_MAP", material.GetTexture(k_EmissiveColorMap));
}
}
} // namespace UnityEditor

2
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/RenderLoopSettings/HDRenderPipelineUI.cs


{
EditorGUILayout.PropertyField(d.renderPipelineResources, _.GetContent("Render Pipeline Resources|Set of resources that need to be loaded when creating a standalone build"));
EditorGUILayout.PropertyField(d.diffusionProfileSettings, _.GetContent("Diffusion Profile Settings"));
EditorGUILayout.PropertyField(d.allowShaderVariantStripping, _.GetContent("Enable Shader Variant Stripping (experimental)"));
}
}
}

2
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/RenderLoopSettings/SerializedHDRenderPipelineAsset.cs


public SerializedProperty renderPipelineResources;
public SerializedProperty diffusionProfileSettings;
public SerializedProperty allowShaderVariantStripping;
public SerializedRenderPipelineSettings renderPipelineSettings;
public SerializedFrameSettings defaultFrameSettings;

renderPipelineResources = serializedObject.FindProperty("m_RenderPipelineResources");
diffusionProfileSettings = serializedObject.Find((HDRenderPipelineAsset s) => s.diffusionProfileSettings);
allowShaderVariantStripping = serializedObject.Find((HDRenderPipelineAsset s) => s.allowShaderVariantStripping);
renderPipelineSettings = new SerializedRenderPipelineSettings(serializedObject.Find((HDRenderPipelineAsset a) => a.renderPipelineSettings));
defaultFrameSettings = new SerializedFrameSettings(serializedObject.FindProperty("m_FrameSettings"));

5
ScriptableRenderPipeline/HDRenderPipeline/HDRP/HDCustomSamplerId.cs


GBuffer,
DBufferRender,
DBufferPrepareDrawData,
DisplayDebugDecalsAtlas,
DisplayDebugViewMaterial,
DebugViewMaterialGBuffer,
BlitDebugViewMaterialDebug,

TransparentDepthPostpass,
ObjectsVelocity,
CameraVelocity,
GaussianPyramidColor,
PyramidDepth,
ColorPyramid,
DepthPyramid,
PostProcessing,
RenderDebug,
ClearBuffers,

210
ScriptableRenderPipeline/HDRenderPipeline/HDRP/HDRenderPipeline.cs


};
readonly HDRenderPipelineAsset m_Asset;
public HDRenderPipelineAsset asset { get { return m_Asset; } }
DiffusionProfileSettings m_InternalSSSAsset;
public DiffusionProfileSettings diffusionProfileSettings

RenderTargetIdentifier[] m_MRTCache2 = new RenderTargetIdentifier[2];
// 'm_CameraColorBuffer' does not contain diffuse lighting of SSS materials until the SSS pass. It is stored within 'm_CameraSssDiffuseLightingBuffer'.
RTHandle m_CameraColorBuffer;
RTHandle m_CameraSssDiffuseLightingBuffer;
RTHandleSystem.RTHandle m_CameraColorBuffer;
RTHandleSystem.RTHandle m_CameraSssDiffuseLightingBuffer;
RTHandle m_CameraDepthStencilBuffer;
RTHandle m_CameraDepthBufferCopy;
RTHandle m_CameraStencilBufferCopy;
RTHandleSystem.RTHandle m_CameraDepthStencilBuffer;
RTHandleSystem.RTHandle m_CameraDepthBufferCopy;
RTHandleSystem.RTHandle m_CameraStencilBufferCopy;
RTHandle m_VelocityBuffer;
RTHandle m_DeferredShadowBuffer;
RTHandle m_AmbientOcclusionBuffer;
RTHandle m_DistortionBuffer;
RTHandleSystem.RTHandle m_VelocityBuffer;
RTHandleSystem.RTHandle m_DeferredShadowBuffer;
RTHandleSystem.RTHandle m_AmbientOcclusionBuffer;
RTHandleSystem.RTHandle m_DistortionBuffer;
// The pass "SRPDefaultUnlit" is a fall back to legacy unlit rendering and is required to support unity 2d + unity UI that render in the scene.
ShaderPassName[] m_ForwardAndForwardOnlyPassNames = { new ShaderPassName(), new ShaderPassName(), HDShaderPassNames.s_SRPDefaultUnlitName };

public int GetCurrentShadowCount() { return m_LightLoop.GetCurrentShadowCount(); }
public int GetShadowAtlasCount() { return m_LightLoop.GetShadowAtlasCount(); }
public int GetDecalAtlasMipCount()
{
int highestDim = Math.Max(renderPipelineSettings.decalSettings.atlasWidth, renderPipelineSettings.decalSettings.atlasHeight);
return (int)Math.Log(highestDim, 2);
}
public int GetShadowSliceCount(uint atlasIndex) { return m_LightLoop.GetShadowSliceCount(atlasIndex); }
readonly SkyManager m_SkyManager = new SkyManager();

public DebugDisplaySettings debugDisplaySettings { get { return m_DebugDisplaySettings; } }
static DebugDisplaySettings s_NeutralDebugDisplaySettings = new DebugDisplaySettings();
DebugDisplaySettings m_CurrentDebugDisplaySettings;
RTHandle m_DebugColorPickerBuffer;
RTHandle m_DebugFullScreenTempBuffer;
RTHandleSystem.RTHandle m_DebugColorPickerBuffer;
RTHandleSystem.RTHandle m_DebugFullScreenTempBuffer;
bool m_FullScreenDebugPushed;
bool m_ValidAPI; // False by default means we render normally; true means we don't render anything

// Initial state of the RTHandle system.
// Tells the system whether we will require MSAA so that we can avoid wasteful render texture allocations.
// TODO: Might want to initialize to at least the window resolution to avoid unnecessary re-allocation in the player
RTHandle.Initialize(1, 1, m_Asset.renderPipelineSettings.supportMSAA, m_Asset.renderPipelineSettings.msaaSampleCount);
RTHandles.Initialize(1, 1, m_Asset.renderPipelineSettings.supportMSAA, m_Asset.renderPipelineSettings.msaaSampleCount);
if(!m_Asset.renderPipelineSettings.supportForwardOnly)
m_GbufferManager.CreateBuffers();

m_BufferPyramid.CreateBuffers();
m_CameraColorBuffer = RTHandle.Alloc(Vector2.one, filterMode: FilterMode.Point, colorFormat: RenderTextureFormat.ARGBHalf, sRGB : false, enableRandomWrite: true, enableMSAA: true, name : "CameraColor");
m_CameraSssDiffuseLightingBuffer = RTHandle.Alloc(Vector2.one, filterMode: FilterMode.Point, colorFormat: RenderTextureFormat.RGB111110Float, sRGB: false, enableRandomWrite: true, enableMSAA: true, name: "CameraSSSDiffuseLighting");
m_CameraColorBuffer = RTHandles.Alloc(Vector2.one, filterMode: FilterMode.Point, colorFormat: RenderTextureFormat.ARGBHalf, sRGB : false, enableRandomWrite: true, enableMSAA: true, name : "CameraColor");
m_CameraSssDiffuseLightingBuffer = RTHandles.Alloc(Vector2.one, filterMode: FilterMode.Point, colorFormat: RenderTextureFormat.RGB111110Float, sRGB: false, enableRandomWrite: true, enableMSAA: true, name: "CameraSSSDiffuseLighting");
m_CameraDepthStencilBuffer = RTHandle.Alloc(Vector2.one, depthBufferBits: DepthBits.Depth24, colorFormat: RenderTextureFormat.Depth, filterMode: FilterMode.Point, bindTextureMS: true, enableMSAA: true, name: "CameraDepthStencil");
m_CameraDepthStencilBuffer = RTHandles.Alloc(Vector2.one, depthBufferBits: DepthBits.Depth24, colorFormat: RenderTextureFormat.Depth, filterMode: FilterMode.Point, bindTextureMS: true, enableMSAA: true, name: "CameraDepthStencil");
m_CameraDepthBufferCopy = RTHandle.Alloc(Vector2.one, depthBufferBits: DepthBits.Depth24, colorFormat: RenderTextureFormat.Depth, filterMode: FilterMode.Point, bindTextureMS: true, enableMSAA: true, name: "CameraDepthStencilCopy");
m_CameraDepthBufferCopy = RTHandles.Alloc(Vector2.one, depthBufferBits: DepthBits.Depth24, colorFormat: RenderTextureFormat.Depth, filterMode: FilterMode.Point, bindTextureMS: true, enableMSAA: true, name: "CameraDepthStencilCopy");
m_CameraStencilBufferCopy = RTHandle.Alloc(Vector2.one, depthBufferBits: DepthBits.None, colorFormat: RenderTextureFormat.R8, sRGB: false, filterMode: FilterMode.Point, enableMSAA: true, name: "CameraStencilCopy"); // DXGI_FORMAT_R8_UINT is not supported by Unity
m_CameraStencilBufferCopy = RTHandles.Alloc(Vector2.one, depthBufferBits: DepthBits.None, colorFormat: RenderTextureFormat.R8, sRGB: false, filterMode: FilterMode.Point, enableMSAA: true, name: "CameraStencilCopy"); // DXGI_FORMAT_R8_UINT is not supported by Unity
m_AmbientOcclusionBuffer = RTHandle.Alloc(Vector2.one, filterMode: FilterMode.Bilinear, colorFormat: RenderTextureFormat.R8, sRGB: false, enableRandomWrite: true, name: "AmbientOcclusion");
m_AmbientOcclusionBuffer = RTHandles.Alloc(Vector2.one, filterMode: FilterMode.Bilinear, colorFormat: RenderTextureFormat.R8, sRGB: false, enableRandomWrite: true, name: "AmbientOcclusion");
m_VelocityBuffer = RTHandle.Alloc(Vector2.one, filterMode: FilterMode.Point, colorFormat: Builtin.GetVelocityBufferFormat(), sRGB: Builtin.GetVelocityBufferSRGBFlag(), enableMSAA: true, name: "Velocity");
m_VelocityBuffer = RTHandles.Alloc(Vector2.one, filterMode: FilterMode.Point, colorFormat: Builtin.GetVelocityBufferFormat(), sRGB: Builtin.GetVelocityBufferSRGBFlag(), enableMSAA: true, name: "Velocity");
m_DistortionBuffer = RTHandle.Alloc(Vector2.one, filterMode: FilterMode.Point, colorFormat: Builtin.GetDistortionBufferFormat(), sRGB: Builtin.GetDistortionBufferSRGBFlag(), name: "Distortion");
m_DistortionBuffer = RTHandles.Alloc(Vector2.one, filterMode: FilterMode.Point, colorFormat: Builtin.GetDistortionBufferFormat(), sRGB: Builtin.GetDistortionBufferSRGBFlag(), name: "Distortion");
m_DeferredShadowBuffer = RTHandle.Alloc(Vector2.one, filterMode: FilterMode.Point, colorFormat: RenderTextureFormat.ARGB32, sRGB: false, enableRandomWrite: true, name: "DeferredShadow");
m_DeferredShadowBuffer = RTHandles.Alloc(Vector2.one, filterMode: FilterMode.Point, colorFormat: RenderTextureFormat.ARGB32, sRGB: false, enableRandomWrite: true, name: "DeferredShadow");
m_DebugColorPickerBuffer = RTHandle.Alloc(Vector2.one, filterMode: FilterMode.Point, colorFormat: RenderTextureFormat.ARGBHalf, sRGB: false, name: "DebugColorPicker");
m_DebugFullScreenTempBuffer = RTHandle.Alloc(Vector2.one, filterMode: FilterMode.Point, colorFormat: RenderTextureFormat.ARGBHalf, sRGB: false, name: "DebugFullScreen");
m_DebugColorPickerBuffer = RTHandles.Alloc(Vector2.one, filterMode: FilterMode.Point, colorFormat: RenderTextureFormat.ARGBHalf, sRGB: false, name: "DebugColorPicker");
m_DebugFullScreenTempBuffer = RTHandles.Alloc(Vector2.one, filterMode: FilterMode.Point, colorFormat: RenderTextureFormat.ARGBHalf, sRGB: false, name: "DebugFullScreen");
}
}

m_DbufferManager.DestroyBuffers();
m_BufferPyramid.DestroyBuffers();
RTHandle.Release(m_CameraColorBuffer);
RTHandle.Release(m_CameraSssDiffuseLightingBuffer);
RTHandle.Release(m_CameraDepthStencilBuffer);
RTHandle.Release(m_CameraDepthBufferCopy);
RTHandle.Release(m_CameraStencilBufferCopy);
RTHandles.Release(m_CameraColorBuffer);
RTHandles.Release(m_CameraSssDiffuseLightingBuffer);
RTHandle.Release(m_AmbientOcclusionBuffer);
RTHandle.Release(m_VelocityBuffer);
RTHandle.Release(m_DistortionBuffer);
RTHandle.Release(m_DeferredShadowBuffer);
RTHandles.Release(m_CameraDepthStencilBuffer);
RTHandles.Release(m_CameraDepthBufferCopy);
RTHandles.Release(m_CameraStencilBufferCopy);
RTHandle.Release(m_DebugColorPickerBuffer);
RTHandle.Release(m_DebugFullScreenTempBuffer);
RTHandles.Release(m_AmbientOcclusionBuffer);
RTHandles.Release(m_VelocityBuffer);
RTHandles.Release(m_DistortionBuffer);
RTHandles.Release(m_DeferredShadowBuffer);
RTHandles.Release(m_DebugColorPickerBuffer);
RTHandles.Release(m_DebugFullScreenTempBuffer);
HDCamera.CleanUnused();
}

m_VolumetricLightingSystem.PushGlobalParams(hdCamera, cmd);
var ssrefraction = VolumeManager.instance.stack.GetComponent<ScreenSpaceRefraction>()
var ssRefraction = VolumeManager.instance.stack.GetComponent<ScreenSpaceRefraction>()
ssrefraction.PushShaderParameters(cmd);
ssRefraction.PushShaderParameters(cmd);
var previousDepthPyramidRT = hdCamera.GetPreviousFrameRT((int)HDCameraFrameHistoryType.DepthPyramid);
if (previousDepthPyramidRT != null)
{
cmd.SetGlobalTexture(HDShaderIDs._DepthPyramidTexture, previousDepthPyramidRT);
cmd.SetGlobalVector(HDShaderIDs._DepthPyramidSize, new Vector4(
previousDepthPyramidRT.referenceSize.x,
previousDepthPyramidRT.referenceSize.y,
1f / previousDepthPyramidRT.referenceSize.x,
1f / previousDepthPyramidRT.referenceSize.y
));
cmd.SetGlobalVector(HDShaderIDs._DepthPyramidScale, new Vector4(
previousDepthPyramidRT.referenceSize.x / (float)previousDepthPyramidRT.rt.width,
previousDepthPyramidRT.referenceSize.y / (float)previousDepthPyramidRT.rt.height,
Mathf.Log(Mathf.Min(previousDepthPyramidRT.rt.width, previousDepthPyramidRT.rt.height), 2),
0.0f
));
}
var previousColorPyramidRT = hdCamera.GetPreviousFrameRT((int)HDCameraFrameHistoryType.ColorPyramid);
if (previousColorPyramidRT != null)
{
cmd.SetGlobalTexture(HDShaderIDs._ColorPyramidTexture, previousColorPyramidRT);
cmd.SetGlobalVector(HDShaderIDs._ColorPyramidSize, new Vector4(
previousColorPyramidRT.referenceSize.x,
previousColorPyramidRT.referenceSize.y,
1f / previousColorPyramidRT.referenceSize.x,
1f / previousColorPyramidRT.referenceSize.y
));
cmd.SetGlobalVector(HDShaderIDs._ColorPyramidScale, new Vector4(
previousColorPyramidRT.referenceSize.x / (float)previousColorPyramidRT.rt.width,
previousColorPyramidRT.referenceSize.y / (float)previousColorPyramidRT.rt.height,
Mathf.Log(Mathf.Min(previousColorPyramidRT.rt.width, previousColorPyramidRT.rt.height), 2),
0.0f
));
}
}
}
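Worked example for the pyramid size/scale vectors above (illustrative numbers): for a 1280×720 camera whose previous-frame pyramid was written into a 2048×2048 RTHandle, _ColorPyramidSize = (1280, 720, 1/1280, 1/720) and _ColorPyramidScale = (1280/2048, 720/2048, log2(min(2048, 2048)), 0) ≈ (0.625, 0.352, 11, 0). The xy components rescale UVs from the camera viewport to the sub-rectangle of the larger texture that was actually rendered, and z stores log2 of the smaller texture dimension, presumably the highest mip level the sampling code is allowed to read.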

return m_LightLoop.GetFeatureVariantsEnabled();
}
RTHandle GetDepthTexture()
RTHandleSystem.RTHandle GetDepthTexture()
{
return NeedDepthBufferCopy() ? m_CameraDepthBufferCopy : m_CameraDepthStencilBuffer;
}

var postProcessLayer = camera.GetComponent<PostProcessLayer>();
// Disable post process if we enable debug mode or if the post process layer is disabled
if (m_CurrentDebugDisplaySettings.IsDebugDisplayEnabled() || !CoreUtils.IsPostProcessingActive(postProcessLayer))
if (m_CurrentDebugDisplaySettings.IsDebugDisplayRemovePostprocess() || !CoreUtils.IsPostProcessingActive(postProcessLayer))
{
m_FrameSettings.enablePostprocess = false;
}

DecalSystem.instance.EndCull();
m_DbufferManager.vsibleDecalCount = DecalSystem.m_DecalsVisibleThisFrame;
DecalSystem.instance.UpdateCachedMaterialData(); // textures, alpha or fade distances could've changed
DecalSystem.instance.UpdateTextureAtlas(cmd); // as this is only used for transparent pass, would've been nice not to have to do this if no transparent renderers are visible
DecalSystem.instance.UpdateTextureAtlas(cmd); // as this is only used for transparent pass, would've been nice not to have to do this if no transparent renderers are visible, needs to happen after CreateDrawData
}
}
renderContext.SetupCameraProperties(camera, m_FrameSettings.enableStereo);

// TODO: Try to arrange code so we can trigger this call earlier and use async compute here to run sky convolution during other passes (once we move convolution shader to compute).
UpdateSkyEnvironment(hdCamera, cmd);
RenderPyramidDepth(hdCamera, cmd, renderContext, FullScreenDebugMode.DepthPyramid);
RenderDepthPyramid(hdCamera, cmd, renderContext, FullScreenDebugMode.DepthPyramid);
StopStereoRendering(renderContext, hdCamera.camera);

m_LightLoop.RenderShadows(renderContext, cmd, m_CullResults);
// Overwrite camera properties set during the shadow pass with the original camera properties.
renderContext.SetupCameraProperties(camera, m_FrameSettings.enableStereo);
hdCamera.SetupGlobalParams(cmd, m_Time, m_LastTime);
if (m_FrameSettings.enableStereo) hdCamera.SetupGlobalStereoParams(cmd);
}

RenderForward(m_CullResults, hdCamera, renderContext, cmd, ForwardPass.PreRefraction);
RenderForwardError(m_CullResults, hdCamera, renderContext, cmd, ForwardPass.PreRefraction);
RenderGaussianPyramidColor(hdCamera, cmd, renderContext, true);
RenderColorPyramid(hdCamera, cmd, renderContext, true);
// Render all type of transparent forward (unlit, lit, complex (hair...)) to keep the sorting between transparent objects.
RenderForward(m_CullResults, hdCamera, renderContext, cmd, ForwardPass.Transparent);

RenderTransparentDepthPostpass(m_CullResults, hdCamera, renderContext, cmd, ForwardPass.Transparent);
RenderGaussianPyramidColor(hdCamera, cmd, renderContext, false);
RenderColorPyramid(hdCamera, cmd, renderContext, false);
AccumulateDistortion(m_CullResults, hdCamera, renderContext, cmd);
RenderDistortion(cmd, m_Asset.renderPipelineResources, hdCamera);

m_DebugScreenSpaceTracingData.GetData(m_DebugScreenSpaceTracingDataArray);
var data = m_DebugScreenSpaceTracingDataArray[0];
m_CurrentDebugDisplaySettings.screenSpaceTracingDebugData = data;
// Assign -1 to the tracing model to notify that we took the data.
// When debugging in forward, we only want the data from the first time the pixel is drawn
data.tracingModel = (Lit.RefractionSSRayModel)(-1);
m_DebugScreenSpaceTracingDataArray[0] = data;
m_DebugScreenSpaceTracingData.SetData(m_DebugScreenSpaceTracingDataArray);
}
} // For each camera
}

using (new ProfilingSample(cmd, "ApplyDistortion", CustomSamplerId.ApplyDistortion.GetSampler()))
{
Vector2 pyramidScale = m_BufferPyramid.GetPyramidToScreenScale(hdCamera);
var colorPyramidRT = hdCamera.GetPreviousFrameRT((int)HDCameraFrameHistoryType.ColorPyramid);
var pyramidScale = m_BufferPyramid.GetPyramidToScreenScale(hdCamera, colorPyramidRT);
// Need to account for the fact that the Gaussian pyramid is actually rendered inside the camera viewport in a square texture, so we multiply by the PyramidToScreen scale
var size = new Vector4(hdCamera.screenSize.x, hdCamera.screenSize.y, pyramidScale.x / hdCamera.screenSize.x, pyramidScale.y / hdCamera.screenSize.y);

cmd.SetComputeTextureParam(m_applyDistortionCS, m_applyDistortionKernel, HDShaderIDs._ColorPyramidTexture, m_BufferPyramid.colorPyramid);
cmd.SetComputeTextureParam(m_applyDistortionCS, m_applyDistortionKernel, HDShaderIDs._ColorPyramidTexture, colorPyramidRT);
cmd.SetComputeTextureParam(m_applyDistortionCS, m_applyDistortionKernel, HDShaderIDs._CameraColorTexture, m_CameraColorBuffer);
cmd.SetComputeVectorParam(m_applyDistortionCS, HDShaderIDs._Size, size);

var camera = hdCamera.camera;
m_LightLoop.RenderForward(camera, cmd, pass == ForwardPass.Opaque);
var debugScreenSpaceTracing = m_CurrentDebugDisplaySettings.fullScreenDebugMode == FullScreenDebugMode.ScreenSpaceTracing;
if (pass == ForwardPass.Opaque)
{

var passNames = m_FrameSettings.enableForwardRenderingOnly
? m_ForwardAndForwardOnlyPassNames
: m_ForwardOnlyPassNames;
var debugSSTThisPass = debugScreenSpaceTracing && (m_CurrentDebugDisplaySettings.lightingDebugSettings.debugLightingMode == DebugLightingMode.ScreenSpaceTracingReflection);
if (debugSSTThisPass)
{
cmd.SetGlobalBuffer(HDShaderIDs._DebugScreenSpaceTracingData, m_DebugScreenSpaceTracingData);
cmd.SetRandomWriteTarget(7, m_DebugScreenSpaceTracingData);
}
if (debugSSTThisPass)
cmd.ClearRandomWriteTargets();
// Assign debug data
if (m_CurrentDebugDisplaySettings.fullScreenDebugMode == FullScreenDebugMode.ScreenSpaceTracing
&& pass == ForwardPass.Transparent)
{
cmd.SetGlobalBuffer(HDShaderIDs._DebugScreenSpaceTracingData, m_DebugScreenSpaceTracingData);
cmd.SetRandomWriteTarget(1, m_DebugScreenSpaceTracingData);
}
RenderTransparentRenderList(cullResults, camera, renderContext, cmd, m_AllTransparentPassNames, m_currentRendererConfigurationBakedLighting, pass == ForwardPass.PreRefraction ? HDRenderQueue.k_RenderQueue_PreRefraction : HDRenderQueue.k_RenderQueue_Transparent);
if (m_CurrentDebugDisplaySettings.fullScreenDebugMode == FullScreenDebugMode.ScreenSpaceTracing
&& pass == ForwardPass.Transparent)
var debugSSTThisPass = debugScreenSpaceTracing && (m_CurrentDebugDisplaySettings.lightingDebugSettings.debugLightingMode == DebugLightingMode.ScreenSpaceTracingRefraction);
if (debugSSTThisPass)
cmd.ClearRandomWriteTargets();
cmd.SetGlobalBuffer(HDShaderIDs._DebugScreenSpaceTracingData, m_DebugScreenSpaceTracingData);
cmd.SetRandomWriteTarget(7, m_DebugScreenSpaceTracingData);
RenderTransparentRenderList(cullResults, camera, renderContext, cmd, m_AllTransparentPassNames, m_currentRendererConfigurationBakedLighting, pass == ForwardPass.PreRefraction ? HDRenderQueue.k_RenderQueue_PreRefraction : HDRenderQueue.k_RenderQueue_Transparent);
if (debugSSTThisPass)
cmd.ClearRandomWriteTargets();
}
}
}

}
}
void RenderGaussianPyramidColor(HDCamera hdCamera, CommandBuffer cmd, ScriptableRenderContext renderContext, bool isPreRefraction)
void RenderColorPyramid(HDCamera hdCamera, CommandBuffer cmd, ScriptableRenderContext renderContext, bool isPreRefraction)
{
if (isPreRefraction)
{

return;
}
using (new ProfilingSample(cmd, "Gaussian Pyramid Color", CustomSamplerId.GaussianPyramidColor.GetSampler()))
m_BufferPyramid.RenderColorPyramid(hdCamera, cmd, renderContext, m_CameraColorBuffer);
var cameraRT = hdCamera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.ColorPyramid)
?? hdCamera.AllocHistoryFrameRT((int)HDCameraFrameHistoryType.ColorPyramid, m_BufferPyramid.AllocColorRT);
Vector2 pyramidScale = m_BufferPyramid.GetPyramidToScreenScale(hdCamera);
PushFullScreenDebugTextureMip(cmd, m_BufferPyramid.colorPyramid, m_BufferPyramid.GetPyramidLodCount(hdCamera), new Vector4(pyramidScale.x, pyramidScale.y, 0.0f, 0.0f), hdCamera, isPreRefraction ? FullScreenDebugMode.PreRefractionColorPyramid : FullScreenDebugMode.FinalColorPyramid);
using (new ProfilingSample(cmd, "Color Pyramid", CustomSamplerId.ColorPyramid.GetSampler()))
m_BufferPyramid.RenderColorPyramid(hdCamera, cmd, renderContext, m_CameraColorBuffer, cameraRT);
Vector2 pyramidScale = m_BufferPyramid.GetPyramidToScreenScale(hdCamera, cameraRT);
PushFullScreenDebugTextureMip(cmd, cameraRT, m_BufferPyramid.GetPyramidLodCount(new Vector2Int(hdCamera.actualWidth, hdCamera.actualHeight)), new Vector4(pyramidScale.x, pyramidScale.y, 0.0f, 0.0f), hdCamera, isPreRefraction ? FullScreenDebugMode.PreRefractionColorPyramid : FullScreenDebugMode.FinalColorPyramid);
void RenderPyramidDepth(HDCamera hdCamera, CommandBuffer cmd, ScriptableRenderContext renderContext, FullScreenDebugMode debugMode)
void RenderDepthPyramid(HDCamera hdCamera, CommandBuffer cmd, ScriptableRenderContext renderContext, FullScreenDebugMode debugMode)
using (new ProfilingSample(cmd, "Pyramid Depth", CustomSamplerId.PyramidDepth.GetSampler()))
m_BufferPyramid.RenderDepthPyramid(hdCamera, cmd, renderContext, GetDepthTexture());
var cameraRT = hdCamera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.DepthPyramid)
?? hdCamera.AllocHistoryFrameRT((int)HDCameraFrameHistoryType.DepthPyramid, m_BufferPyramid.AllocDepthRT);
Vector2 pyramidScale = m_BufferPyramid.GetPyramidToScreenScale(hdCamera);
PushFullScreenDebugTextureMip(cmd, m_BufferPyramid.depthPyramid, m_BufferPyramid.GetPyramidLodCount(hdCamera), new Vector4(pyramidScale.x, pyramidScale.y, 0.0f, 0.0f), hdCamera, debugMode);
using (new ProfilingSample(cmd, "Depth Pyramid", CustomSamplerId.DepthPyramid.GetSampler()))
m_BufferPyramid.RenderDepthPyramid(hdCamera, cmd, renderContext, GetDepthTexture(), cameraRT);
Vector2 pyramidScale = m_BufferPyramid.GetPyramidToScreenScale(hdCamera, cameraRT);
PushFullScreenDebugTextureMip(cmd, cameraRT, m_BufferPyramid.GetPyramidLodCount(new Vector2Int(hdCamera.actualWidth, hdCamera.actualHeight)), new Vector4(pyramidScale.x, pyramidScale.y, 0.0f, 0.0f), hdCamera, debugMode);
}
void RenderPostProcess(HDCamera hdcamera, CommandBuffer cmd, PostProcessLayer layer)

}
}
public void PushColorPickerDebugTexture(CommandBuffer cmd, RTHandle textureID, HDCamera hdCamera)
public void PushColorPickerDebugTexture(CommandBuffer cmd, RTHandleSystem.RTHandle textureID, HDCamera hdCamera)
{
if (m_CurrentDebugDisplaySettings.colorPickerDebugSettings.colorPickerMode != ColorPickerDebugMode.None)
{

}
}
public void PushFullScreenDebugTexture(CommandBuffer cmd, RTHandle textureID, HDCamera hdCamera, FullScreenDebugMode debugMode)
public void PushFullScreenDebugTexture(CommandBuffer cmd, RTHandleSystem.RTHandle textureID, HDCamera hdCamera, FullScreenDebugMode debugMode)
{
if (debugMode == m_CurrentDebugDisplaySettings.fullScreenDebugMode)
{

}
void PushFullScreenDebugTextureMip(CommandBuffer cmd, RTHandle texture, int lodCount, Vector4 scaleBias, HDCamera hdCamera, FullScreenDebugMode debugMode)
void PushFullScreenDebugTextureMip(CommandBuffer cmd, RTHandleSystem.RTHandle texture, int lodCount, Vector4 scaleBias, HDCamera hdCamera, FullScreenDebugMode debugMode)
{
if (debugMode == m_CurrentDebugDisplaySettings.fullScreenDebugMode)
{

// (i.e. we have performed a flip, so we need to flip the input texture)
m_DebugFullScreen.SetFloat(HDShaderIDs._RequireToFlipInputTexture, hdCamera.camera.cameraType != CameraType.SceneView ? 1.0f : 0.0f);
m_DebugFullScreen.SetBuffer(HDShaderIDs._DebugScreenSpaceTracingData, m_DebugScreenSpaceTracingData);
m_DebugFullScreen.SetTexture(HDShaderIDs._DepthPyramidTexture, m_BufferPyramid.depthPyramid);
m_DebugFullScreen.SetTexture(HDShaderIDs._DepthPyramidTexture, hdCamera.GetPreviousFrameRT((int)HDCameraFrameHistoryType.DepthPyramid));
HDUtils.DrawFullScreen(cmd, hdCamera, m_DebugFullScreen, (RenderTargetIdentifier)BuiltinRenderTextureType.CameraTarget);
PushColorPickerDebugTexture(cmd, (RenderTargetIdentifier)BuiltinRenderTextureType.CameraTarget, hdCamera);

}
m_LightLoop.RenderDebugOverlay(hdCamera, cmd, m_CurrentDebugDisplaySettings, ref x, ref y, overlaySize, hdCamera.actualWidth);
DecalSystem.instance.RenderDebugOverlay(hdCamera, cmd, m_CurrentDebugDisplaySettings, ref x, ref y, overlaySize, hdCamera.actualWidth);
if (m_CurrentDebugDisplaySettings.colorPickerDebugSettings.colorPickerMode != ColorPickerDebugMode.None)
{

2
ScriptableRenderPipeline/HDRenderPipeline/HDRP/HDRenderPipelineAsset.cs


return renderPipelineSettings;
}
public bool allowShaderVariantStripping = true;
[SerializeField]
public DiffusionProfileSettings diffusionProfileSettings;

1
ScriptableRenderPipeline/HDRenderPipeline/HDRP/HDStringConstants.cs


public static readonly int _ShowGrid = Shader.PropertyToID("_ShowGrid");
public static readonly int _ShowDepthPyramidDebug = Shader.PropertyToID("_ShowDepthPyramidDebug");
public static readonly int _DebugEnvironmentProxyDepthScale = Shader.PropertyToID("_DebugEnvironmentProxyDepthScale");
public static readonly int _DebugViewMaterial = Shader.PropertyToID("_DebugViewMaterial");
public static readonly int _DebugLightingMode = Shader.PropertyToID("_DebugLightingMode");
public static readonly int _DebugLightingSubMode = Shader.PropertyToID("_DebugLightingSubMode");

38
ScriptableRenderPipeline/HDRenderPipeline/HDRP/HDUtils.cs


return Matrix4x4.Transpose(worldToViewMatrix.transpose * viewSpaceRasterTransform);
}
private static void SetViewportAndClear(CommandBuffer cmd, HDCamera camera, RTHandle buffer, ClearFlag clearFlag, Color clearColor)
private static void SetViewportAndClear(CommandBuffer cmd, HDCamera camera, RTHandleSystem.RTHandle buffer, ClearFlag clearFlag, Color clearColor)
{
// Clearing a partial viewport currently does not go through the hardware clear.
// Instead it goes through a quad rendered with a specific shader.

// This set of RenderTarget management methods is supposed to be used when rendering into a camera dependent render texture.
// This will automatically set the viewport based on the camera size and the RTHandle scaling info.
public static void SetRenderTarget(CommandBuffer cmd, HDCamera camera, RTHandle buffer, ClearFlag clearFlag, Color clearColor, int miplevel = 0, CubemapFace cubemapFace = CubemapFace.Unknown, int depthSlice = 0)
public static void SetRenderTarget(CommandBuffer cmd, HDCamera camera, RTHandleSystem.RTHandle buffer, ClearFlag clearFlag, Color clearColor, int miplevel = 0, CubemapFace cubemapFace = CubemapFace.Unknown, int depthSlice = 0)
public static void SetRenderTarget(CommandBuffer cmd, HDCamera camera, RTHandle buffer, ClearFlag clearFlag = ClearFlag.None, int miplevel = 0, CubemapFace cubemapFace = CubemapFace.Unknown, int depthSlice = 0)
public static void SetRenderTarget(CommandBuffer cmd, HDCamera camera, RTHandleSystem.RTHandle buffer, ClearFlag clearFlag = ClearFlag.None, int miplevel = 0, CubemapFace cubemapFace = CubemapFace.Unknown, int depthSlice = 0)
public static void SetRenderTarget(CommandBuffer cmd, HDCamera camera, RTHandle colorBuffer, RTHandle depthBuffer, int miplevel = 0, CubemapFace cubemapFace = CubemapFace.Unknown, int depthSlice = 0)
public static void SetRenderTarget(CommandBuffer cmd, HDCamera camera, RTHandleSystem.RTHandle colorBuffer, RTHandleSystem.RTHandle depthBuffer, int miplevel = 0, CubemapFace cubemapFace = CubemapFace.Unknown, int depthSlice = 0)
public static void SetRenderTarget(CommandBuffer cmd, HDCamera camera, RTHandle colorBuffer, RTHandle depthBuffer, ClearFlag clearFlag, int miplevel = 0, CubemapFace cubemapFace = CubemapFace.Unknown, int depthSlice = 0)
public static void SetRenderTarget(CommandBuffer cmd, HDCamera camera, RTHandleSystem.RTHandle colorBuffer, RTHandleSystem.RTHandle depthBuffer, ClearFlag clearFlag, int miplevel = 0, CubemapFace cubemapFace = CubemapFace.Unknown, int depthSlice = 0)
public static void SetRenderTarget(CommandBuffer cmd, HDCamera camera, RTHandle colorBuffer, RTHandle depthBuffer, ClearFlag clearFlag, Color clearColor, int miplevel = 0, CubemapFace cubemapFace = CubemapFace.Unknown, int depthSlice = 0)
public static void SetRenderTarget(CommandBuffer cmd, HDCamera camera, RTHandleSystem.RTHandle colorBuffer, RTHandleSystem.RTHandle depthBuffer, ClearFlag clearFlag, Color clearColor, int miplevel = 0, CubemapFace cubemapFace = CubemapFace.Unknown, int depthSlice = 0)
public static void SetRenderTarget(CommandBuffer cmd, HDCamera camera, RenderTargetIdentifier[] colorBuffers, RTHandle depthBuffer)
public static void SetRenderTarget(CommandBuffer cmd, HDCamera camera, RenderTargetIdentifier[] colorBuffers, RTHandleSystem.RTHandle depthBuffer)
public static void SetRenderTarget(CommandBuffer cmd, HDCamera camera, RenderTargetIdentifier[] colorBuffers, RTHandle depthBuffer, ClearFlag clearFlag = ClearFlag.None)
public static void SetRenderTarget(CommandBuffer cmd, HDCamera camera, RenderTargetIdentifier[] colorBuffers, RTHandleSystem.RTHandle depthBuffer, ClearFlag clearFlag = ClearFlag.None)
public static void SetRenderTarget(CommandBuffer cmd, HDCamera camera, RenderTargetIdentifier[] colorBuffers, RTHandle depthBuffer, ClearFlag clearFlag, Color clearColor)
public static void SetRenderTarget(CommandBuffer cmd, HDCamera camera, RenderTargetIdentifier[] colorBuffers, RTHandleSystem.RTHandle depthBuffer, ClearFlag clearFlag, Color clearColor)
{
cmd.SetRenderTarget(colorBuffers, depthBuffer);
SetViewport(cmd, camera, depthBuffer);

// When we render using a camera whose viewport is smaller than the RTHandles reference size (and thus smaller than the RT actual size), we need to set it explicitly (otherwise, native code will set the viewport at the size of the RT)
// For auto-scaled RTs (like for example a half-resolution RT), we need to scale this viewport accordingly.
// For non-scaled RTs we just do nothing; the native code will set the viewport to the size of the RT anyway.
public static void SetViewport(CommandBuffer cmd, HDCamera camera, RTHandle target)
public static void SetViewport(CommandBuffer cmd, HDCamera camera, RTHandleSystem.RTHandle target)
{
if (target.useScaling)
{
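A minimal sketch of the viewport rule described above, assuming the RTHandle exposes its relative scale factor; this is illustrative only, not the actual SetViewport body:
using UnityEngine;
using UnityEngine.Rendering;
static class ViewportSketch
{
// For an auto-scaled RT (useScaling == true), the viewport matching the camera is the
// camera's actual size multiplied by the handle's scale factor (e.g. 0.5 for half-res).
public static void SetScaledViewport(CommandBuffer cmd, int actualWidth, int actualHeight, Vector2 rtScaleFactor)
{
int w = Mathf.RoundToInt(actualWidth * rtScaleFactor.x);
int h = Mathf.RoundToInt(actualHeight * rtScaleFactor.y);
cmd.SetViewport(new Rect(0, 0, w, h));
}
}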

cmd.DrawProcedural(Matrix4x4.identity, GetBlitMaterial(), bilinear ? 2 : 3, MeshTopology.Quads, 4, 1, s_PropertyBlock);
}
public static void BlitTexture(CommandBuffer cmd, RTHandle source, RTHandle destination, Vector4 scaleBias, float mipLevel, bool bilinear)
public static void BlitTexture(CommandBuffer cmd, RTHandleSystem.RTHandle source, RTHandleSystem.RTHandle destination, Vector4 scaleBias, float mipLevel, bool bilinear)
{
s_PropertyBlock.SetTexture(HDShaderIDs._BlitTexture, source);
s_PropertyBlock.SetVector(HDShaderIDs._BlitScaleBias, scaleBias);

// It means that we can end up rendering inside a partial viewport for one of these "camera space" renderings.
// In this case, we need to make sure that when we blit from one such camera texture to another, we only blit the portion corresponding to the camera viewport.
// Here, both source and destination are camera-scaled.
public static void BlitCameraTexture(CommandBuffer cmd, HDCamera camera, RTHandle source, RTHandle destination, float mipLevel = 0.0f, bool bilinear = false)
public static void BlitCameraTexture(CommandBuffer cmd, HDCamera camera, RTHandleSystem.RTHandle source, RTHandleSystem.RTHandle destination, float mipLevel = 0.0f, bool bilinear = false)
{
// Will set the correct camera viewport as well.
SetRenderTarget(cmd, camera, destination);

// In this case, both source and destination are camera-scaled, but we want to override the scale/bias parameter.
public static void BlitCameraTexture(CommandBuffer cmd, HDCamera camera, RTHandle source, RTHandle destination, Vector4 scaleBias, float mipLevel = 0.0f, bool bilinear = false)
public static void BlitCameraTexture(CommandBuffer cmd, HDCamera camera, RTHandleSystem.RTHandle source, RTHandleSystem.RTHandle destination, Vector4 scaleBias, float mipLevel = 0.0f, bool bilinear = false)
{
// Will set the correct camera viewport as well.
SetRenderTarget(cmd, camera, destination);

public static void BlitCameraTexture(CommandBuffer cmd, HDCamera camera, RTHandle source, RTHandle destination, Rect destViewport, float mipLevel = 0.0f, bool bilinear = false)
public static void BlitCameraTexture(CommandBuffer cmd, HDCamera camera, RTHandleSystem.RTHandle source, RTHandleSystem.RTHandle destination, Rect destViewport, float mipLevel = 0.0f, bool bilinear = false)
{
SetRenderTarget(cmd, camera, destination);
cmd.SetViewport(destViewport);

// This particular case is for blitting a camera-scaled texture into a non-scaling texture. So we set up the full viewport (implicit in cmd.Blit) but have to scale the input UVs.
public static void BlitCameraTexture(CommandBuffer cmd, HDCamera camera, RTHandle source, RenderTargetIdentifier destination)
public static void BlitCameraTexture(CommandBuffer cmd, HDCamera camera, RTHandleSystem.RTHandle source, RenderTargetIdentifier destination)
public static void BlitCameraTexture(CommandBuffer cmd, HDCamera camera, RenderTargetIdentifier source, RTHandle destination)
public static void BlitCameraTexture(CommandBuffer cmd, HDCamera camera, RenderTargetIdentifier source, RTHandleSystem.RTHandle destination)
{
// Will set the correct camera viewport as well.
SetRenderTarget(cmd, camera, destination);
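A small illustrative helper (not an HDRP API) showing how the input UV scale mentioned above could be computed when blitting a camera-scaled texture into a non-scaling one; the destination viewport is full size, so only the source UVs need rescaling:
using UnityEngine;
static class BlitScaleSketch
{
// xy = UV scale (camera viewport size / source RT size), zw = UV bias (none here).
public static Vector4 ComputeSourceScaleBias(int cameraWidth, int cameraHeight, int rtWidth, int rtHeight)
{
return new Vector4(cameraWidth / (float)rtWidth, cameraHeight / (float)rtHeight, 0f, 0f);
}
}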

// These methods should be used to render full-screen triangles sampling auto-scaling RTs.
// This will set the proper viewport and UV scale.
public static void DrawFullScreen( CommandBuffer commandBuffer, HDCamera camera, Material material,
RTHandle colorBuffer,
RTHandleSystem.RTHandle colorBuffer,
MaterialPropertyBlock properties = null, int shaderPassId = 0)
{
HDUtils.SetRenderTarget(commandBuffer, camera, colorBuffer);

public static void DrawFullScreen( CommandBuffer commandBuffer, HDCamera camera, Material material,
RTHandle colorBuffer, RTHandle depthStencilBuffer,
RTHandleSystem.RTHandle colorBuffer, RTHandleSystem.RTHandle depthStencilBuffer,
MaterialPropertyBlock properties = null, int shaderPassId = 0)
{
HDUtils.SetRenderTarget(commandBuffer, camera, colorBuffer, depthStencilBuffer);

public static void DrawFullScreen( CommandBuffer commandBuffer, HDCamera camera, Material material,
RenderTargetIdentifier[] colorBuffers, RTHandle depthStencilBuffer,
RenderTargetIdentifier[] colorBuffers, RTHandleSystem.RTHandle depthStencilBuffer,
MaterialPropertyBlock properties = null, int shaderPassId = 0)
{
HDUtils.SetRenderTarget(commandBuffer, camera, colorBuffers, depthStencilBuffer);

48
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Light/HDAdditionalLightData.cs


public float punctualIntensity = 600.0f; // Light defaults to 600 lumen, i.e. ~48 candela
public float areaIntensity = 200.0f; // Light defaults to 200 lumen to better match point lights
// Only for Spotlight, should be hidden for other light types
public bool enableSpotReflector = false;
[Range(0.0f, 100.0f)]
public float m_InnerSpotPercent = 0.0f; // To display this field in the UI this need to be public

// Only for Spotlight, should be hidden for other light types
public SpotLightShape spotLightShape = SpotLightShape.Cone;
// Only for Spotlight, should be hidden for other light types
public bool enableSpotReflector = false;
// Only for Rectangle/Line/box projector lights
public float shapeWidth = 0.5f;

switch (light.type)
{
case LightType.Directional:
light.intensity = directionalIntensity;
light.intensity = Mathf.Max(0, directionalIntensity);
light.intensity = LightUtils.ConvertPointLightIntensity(punctualIntensity);
light.intensity = LightUtils.ConvertPointLightIntensity(Mathf.Max(0, punctualIntensity));
// Spot should use a conversion that takes the angle into account, so the intensity varies with the angle.
// This is not easy for lighting artists to manipulate, so we simply treat any spot light as an occluded point light and reuse the same code.
light.intensity = LightUtils.ConvertPointLightIntensity(punctualIntensity);
// TODO: What to do with box shape ?
// var spotLightShape = (SpotLightShape)m_AdditionalspotLightShape.enumValueIndex;
if (enableSpotReflector)
{
if (spotLightShape == SpotLightShape.Cone)
{
light.intensity = LightUtils.ConvertSpotLightIntensity(Mathf.Max(0, punctualIntensity), light.spotAngle * Mathf.Deg2Rad, true);
}
else if (spotLightShape == SpotLightShape.Pyramid)
{
float angleA, angleB;
LightUtils.CalculateAnglesForPyramid(aspectRatio, light.spotAngle,
out angleA, out angleB);
light.intensity = LightUtils.ConvertFrustrumLightIntensity(Mathf.Max(0, punctualIntensity), angleA, angleB);
}
else // Box shape, fallback to punctual light.
{
light.intensity = LightUtils.ConvertPointLightIntensity(Mathf.Max(0, punctualIntensity));
}
}
else
{
// Spot should use a conversion that takes the angle into account, so the intensity varies with the angle.
// This is not easy for lighting artists to manipulate, so we simply treat any spot light as an occluded point light and reuse the same code.
light.intensity = LightUtils.ConvertPointLightIntensity(Mathf.Max(0, punctualIntensity));
// TODO: What to do with box shape ?
// var spotLightShape = (SpotLightShape)m_AdditionalspotLightShape.enumValueIndex;
}
break;
}

light.intensity = LightUtils.ConvertRectLightIntensity(areaIntensity, shapeWidth, shapeHeight);
light.intensity = LightUtils.ConvertRectLightIntensity(Mathf.Max(0, areaIntensity), shapeWidth, shapeHeight);
light.intensity = LightUtils.CalculateLineLightIntensity(areaIntensity, shapeWidth);
light.intensity = LightUtils.CalculateLineLightIntensity(Mathf.Max(0, areaIntensity), shapeWidth);
}
}
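A worked sketch of the point-light conversion used throughout this block; LightUtils.ConvertPointLightIntensity is assumed to divide the luminous flux by the full solid angle, which is consistent with the "~48 candela" note above:
using UnityEngine;
static class IntensityConversionSketch
{
// Candela = lumen / solid angle; a point light emits over the whole sphere (4 * Pi steradians).
public static float PointLumenToCandela(float lumen)
{
return lumen / (4.0f * Mathf.PI);
}
}
// Example: the 600 lumen punctual default gives 600 / (4 * Pi) ~= 47.7 candela.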

1
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightDefinition.cs


public float weight;
public float multiplier;
public Vector3 sampleDirectionDiscardWS;
// Sampling properties
public int envIndex;
};

5
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightDefinition.cs.hlsl


float3 boxSideFadeNegative;
float weight;
float multiplier;
float3 sampleDirectionDiscardWS;
int envIndex;
};

float GetMultiplier(EnvLightData value)
{
return value.multiplier;
}
float3 GetSampleDirectionDiscardWS(EnvLightData value)
{
return value.sampleDirectionDiscardWS;
}
int GetEnvIndex(EnvLightData value)
{

106
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightEvaluation.hlsl


attenuation *= shadow;
}
// Environment map share function
#include "Reflection/VolumeProjection.hlsl"
void EvaluateLight_EnvIntersection(float3 positionWS, float3 normalWS, EnvLightData lightData, int influenceShapeType, inout float3 R, inout float weight)
{
// Guideline for reflection volumes: in HDRenderPipeline we separate the projection volume (the proxy of the scene) from the influence volume (which pixels on the screen are affected)
// However we add the constraint that the shapes of the projection and influence volumes are the same (i.e. if we have a sphere-shaped projection volume, we have a sphere-shaped influence volume).
// This allows more coherence for the dynamic branching in shader code.
// Users can also choose not to have any projection; in this case we use the property minProjectionDistance to minimize code changes. minProjectionDistance is set to a huge number
// that simulates the effect of no shape projection
float3x3 worldToIS = WorldToInfluenceSpace(lightData); // IS: Influence space
float3 positionIS = WorldToInfluencePosition(lightData, worldToIS, positionWS);
float3 dirIS = mul(R, worldToIS);
float3x3 worldToPS = WorldToProxySpace(lightData); // PS: Proxy space
float3 positionPS = WorldToProxyPosition(lightData, worldToPS, positionWS);
float3 dirPS = mul(R, worldToPS);
float projectionDistance = 0;
// Process the projection
// In Unity the cubemaps are captured with the localToWorld transform of the component.
// This means that location and orientation matter. So after intersecting the proxy volume we need to convert back to world space.
if (influenceShapeType == ENVSHAPETYPE_SPHERE)
{
projectionDistance = IntersectSphereProxy(lightData, dirPS, positionPS);
// We can reuse the distance calculated in LS directly in WS as there is no scaling. Also the offset is already included in lightData.capturePositionWS
float3 capturePositionWS = lightData.capturePositionWS;
R = (positionWS + projectionDistance * R) - capturePositionWS;
weight = InfluenceSphereWeight(lightData, normalWS, positionWS, positionIS, dirIS);
}
else if (influenceShapeType == ENVSHAPETYPE_BOX)
{
projectionDistance = IntersectBoxProxy(lightData, dirPS, positionPS);
// No need to normalize for fetching cubemap
// We can reuse the distance calculated in LS directly in WS as there is no scaling. Also the offset is already included in lightData.capturePositionWS
float3 capturePositionWS = lightData.capturePositionWS;
R = (positionWS + projectionDistance * R) - capturePositionWS;
weight = InfluenceBoxWeight(lightData, normalWS, positionWS, positionIS, dirIS);
}
// Smooth weighting
weight = Smoothstep01(weight);
weight *= lightData.weight;
}
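For reference, the final smoothing above assumes the usual Hermite smoothstep, Smoothstep01(x) = x * x * (3 - 2 * x) on [0, 1], so the influence weight eases in and out at the border of the influence volume instead of blending linearly, before being scaled by the per-probe lightData.weight.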
// Ambient occlusion
struct AmbientOcclusionFactor
{
float3 indirectAmbientOcclusion;
float3 directAmbientOcclusion;
float3 indirectSpecularOcclusion;
};
void GetScreenSpaceAmbientOcclusion(float2 positionSS, float NdotV, float perceptualRoughness, float specularOcclusionFromData, out AmbientOcclusionFactor aoFactor)
{
// Note: when we ImageLoad outside of the texture size, the value returned by Load is 0 (Note: on Metal it may clamp to the edge value of the texture, which is also fine)
// We use this property to have a neutral value for AO that doesn't consume a sampler and also works with compute shaders (i.e. use ImageLoad)
// We store inverse AO so neutral is black. Whether we sample inside or outside the texture, it returns 0 in the neutral case.
// Ambient occlusion used for indirect lighting (reflection probe, baked diffuse lighting)
#ifndef _SURFACE_TYPE_TRANSPARENT
float indirectAmbientOcclusion = 1.0 - LOAD_TEXTURE2D(_AmbientOcclusionTexture, positionSS).x;
// Ambient occlusion used for direct lighting (directional, punctual, area)
float directAmbientOcclusion = lerp(1.0, indirectAmbientOcclusion, _AmbientOcclusionParam.w);
#else
float indirectAmbientOcclusion = 1.0;
float directAmbientOcclusion = 1.0;
#endif
float roughness = PerceptualRoughnessToRoughness(perceptualRoughness);
float specularOcclusion = GetSpecularOcclusionFromAmbientOcclusion(ClampNdotV(NdotV), indirectAmbientOcclusion, roughness);
aoFactor.indirectAmbientOcclusion = lerp(_AmbientOcclusionParam.rgb, float3(1.0, 1.0, 1.0), indirectAmbientOcclusion);
aoFactor.directAmbientOcclusion = lerp(_AmbientOcclusionParam.rgb, float3(1.0, 1.0, 1.0), directAmbientOcclusion);
aoFactor.indirectSpecularOcclusion = lerp(_AmbientOcclusionParam.rgb, float3(1.0, 1.0, 1.0), min(specularOcclusionFromData, specularOcclusion));
}
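Worked check of the neutral-value trick above: an out-of-range load returns 0, the texture stores 1 - AO, so indirectAmbientOcclusion = 1 - 0 = 1 (no occlusion), and lerp(_AmbientOcclusionParam.rgb, 1, 1) = 1; the factors therefore fall back to identity with no branch and no sampler.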
void GetScreenSpaceAmbientOcclusionMultibounce(float2 positionSS, float NdotV, float perceptualRoughness, float specularOcclusionFromData, float3 diffuseColor, float3 fresnel0, out AmbientOcclusionFactor aoFactor)
{
// Use the GTAOMultiBounce approximation for ambient occlusion (allows getting a tint from the diffuseColor)
// Note: when we ImageLoad outside of the texture size, the value returned by Load is 0 (Note: on Metal it may clamp to the edge value of the texture, which is also fine)
// We use this property to have a neutral value for AO that doesn't consume a sampler and also works with compute shaders (i.e. use ImageLoad)
// We store inverse AO so neutral is black. Whether we sample inside or outside the texture, it returns 0 in the neutral case.
// Ambient occlusion used for indirect lighting (reflection probe, baked diffuse lighting)
#ifndef _SURFACE_TYPE_TRANSPARENT
float indirectAmbientOcclusion = 1.0 - LOAD_TEXTURE2D(_AmbientOcclusionTexture, positionSS).x;
// Ambient occlusion used for direct lighting (directional, punctual, area)
float directAmbientOcclusion = lerp(1.0, indirectAmbientOcclusion, _AmbientOcclusionParam.w);
#else
float indirectAmbientOcclusion = 1.0;
float directAmbientOcclusion = 1.0;
#endif
float roughness = PerceptualRoughnessToRoughness(perceptualRoughness);
float specularOcclusion = GetSpecularOcclusionFromAmbientOcclusion(ClampNdotV(NdotV), indirectAmbientOcclusion, roughness);
aoFactor.indirectAmbientOcclusion = GTAOMultiBounce(indirectAmbientOcclusion, diffuseColor);
aoFactor.directAmbientOcclusion = GTAOMultiBounce(min(specularOcclusionFromData, specularOcclusion), fresnel0);
aoFactor.indirectSpecularOcclusion = GTAOMultiBounce(directAmbientOcclusion, diffuseColor);
}

13
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightLoop/LightLoop.cs


int NumLightIndicesPerClusteredTile()
{
return 8 * (1 << k_Log2NumClusters); // total footprint for all layers of the tile (measured in light index entries)
return 32 * (1 << k_Log2NumClusters); // total footprint for all layers of the tile (measured in light index entries)
}
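As an order-of-magnitude check: if k_Log2NumClusters were 6 (64 cluster layers), this change grows the per-tile footprint from 8 * 64 = 512 to 32 * 64 = 2048 light-index entries.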
public void AllocResolutionDependentBuffers(int width, int height, bool stereoEnabled)

var capturePosition = Vector3.zero;
var influenceToWorld = probe.influenceToWorld;
var sampleDirectionDiscardWS = Vector3.zero;
// 31 bits index, 1 bit cache type
var envIndex = -1;
if (probe.planarReflectionProbe != null)

// We transform it to object space by translating the capturePosition
var vp = gpuProj * gpuView * Matrix4x4.Translate(capturePosition);
m_Env2DCaptureVP[fetchIndex] = vp;
sampleDirectionDiscardWS = captureRotation * Vector3.forward;
}
else if (probe.reflectionProbe != null)
{

envLightData.blendDistanceNegative = probe.blendDistanceNegative;
envLightData.boxSideFadePositive = probe.boxSideFadePositive;
envLightData.boxSideFadeNegative = probe.boxSideFadeNegative;
envLightData.sampleDirectionDiscardWS = sampleDirectionDiscardWS;
envLightData.influenceRight = influenceToWorld.GetColumn(0).normalized;
envLightData.influenceUp = influenceToWorld.GetColumn(1).normalized;

public void UpdateCullingParameters(ref ScriptableCullingParameters cullingParams)
{
m_ShadowMgr.UpdateCullingParameters( ref cullingParams );
// In HDRP we don't need per object light/probe info so we disable the native code that handles it.
cullingParams.cullingFlags |= CullFlag.DisablePerObjectCulling;
}
public bool IsBakedShadowMaskLight(Light light)

public bool outputSplitLighting;
}
public void RenderDeferredDirectionalShadow(HDCamera hdCamera, RTHandle deferredShadowRT, RenderTargetIdentifier depthTexture, CommandBuffer cmd)
public void RenderDeferredDirectionalShadow(HDCamera hdCamera, RTHandleSystem.RTHandle deferredShadowRT, RenderTargetIdentifier depthTexture, CommandBuffer cmd)
{
if (m_CurrentSunLight == null || m_CurrentSunLight.GetComponent<AdditionalShadowData>() == null || m_CurrentSunLightShadowIndex < 0)
{

public void RenderDebugOverlay(HDCamera hdCamera, CommandBuffer cmd, DebugDisplaySettings debugDisplaySettings, ref float x, ref float y, float overlaySize, float width)
{
LightingDebugSettings lightingDebug = debugDisplaySettings.lightingDebugSettings;
if (lightingDebug.debugLightingMode == DebugLightingMode.EnvironmentProxyVolume)
cmd.SetGlobalFloat(HDShaderIDs._DebugEnvironmentProxyDepthScale, lightingDebug.environmentProxyDepthScale);
using (new ProfilingSample(cmd, "Tiled/cluster Lighting Debug", CustomSamplerId.TPTiledLightingDebug.GetSampler()))
{

19
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightLoop/LightLoopDef.hlsl


#define SINGLE_PASS_CONTEXT_SAMPLE_REFLECTION_PROBES 0
#define SINGLE_PASS_CONTEXT_SAMPLE_SKY 1
#ifdef DEBUG_DISPLAY
float4 ApplyDebugProjectionVolume(float4 color, float3 radiusToProxy, float scale)
{
float l = length(radiusToProxy);
l = pow(l / (1 + l), scale);
return float4(l.xxx * 0.7 + color.rgb * 0.3, color.a);
}
#endif
// Note: index is whatever the lighting architecture wants; it can contain information like which texture to sample (in case we have a compressed BC6H texture and an uncompressed one for real-time reflection?)
// EnvIndex can also be used to fetch into another array of structs (for atlas information, etc.).
// Cubemap : texCoord = direction vector

color.rgb = SAMPLE_TEXTURE2D_ARRAY_LOD(_Env2DTextures, s_trilinear_clamp_sampler, ndc.xy, index, lod).rgb;
color.a = any(ndc.xyz < 0) || any(ndc.xyz > 1) ? 0.0 : 1.0;
#ifdef DEBUG_DISPLAY
if (_DebugLightingMode == DEBUGLIGHTINGMODE_ENVIRONMENT_SAMPLE_COORDINATES)
color = float4(ndc.xy, 0, color.a);
#endif
#ifdef DEBUG_DISPLAY
if (_DebugLightingMode == DEBUGLIGHTINGMODE_ENVIRONMENT_SAMPLE_COORDINATES)
color = float4(texCoord.xyz * 0.5 + 0.5, color.a);
#endif
}
}
else // SINGLE_PASS_SAMPLE_SKY

16
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightUtils.cs


//radiance = power / (length * (4 * Pi)).
return intensity / (4.0f * Mathf.PI * lineWidth);
}
public static void CalculateAnglesForPyramid(float aspectRatio, float spotAngle, out float angleA, out float angleB)
{
// Since the smallest angle is equal to the fov, and we don't care about the angle order, simply make sure the aspect ratio is > 1
if (aspectRatio < 1.0f)
aspectRatio = 1.0f / aspectRatio;
angleA = spotAngle * Mathf.Deg2Rad;
var halfAngle = angleA * 0.5f; // half of the smallest angle
var length = Mathf.Tan(halfAngle); // half length of the smallest side of the rectangle
length *= aspectRatio; // half length of the biggest side of the rectangle
halfAngle = Mathf.Atan(length); // half of the biggest angle
angleB = halfAngle * 2.0f;
}
}
}
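Usage sketch with illustrative numbers (the using directive is an assumption about the file's namespace): for a pyramid spot with a 60 degree spot angle and a 2:1 aspect ratio, angleA is the 60 degree fov and angleB = 2 * atan(2 * tan(30 deg)) ~= 98.2 degrees.
using UnityEngine.Experimental.Rendering.HDPipeline;
static class PyramidAngleExample
{
static void Example()
{
float angleA, angleB;
// aspectRatio = 2, spotAngle = 60 degrees (converted to radians inside the helper).
LightUtils.CalculateAnglesForPyramid(2.0f, 60.0f, out angleA, out angleB);
// angleA ~= 1.047 rad (60 deg), angleB ~= 1.714 rad (98.2 deg).
}
}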

10
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Reflection/VolumeProjection.hlsl


#define ENVMAP_FEATURE_PERFACEFADE
#define ENVMAP_FEATURE_INFLUENCENORMAL
#include "../LightDefinition.cs.hlsl"
float3x3 WorldToProxySpace(EnvLightData lightData)
{
return transpose(

return projectionDistance;
}
float InfluenceSphereWeight(EnvLightData lightData, BSDFData bsdfData, float3 positionWS, float3 positionLS, float3 dirLS)
float InfluenceSphereWeight(EnvLightData lightData, float3 normalWS, float3 positionWS, float3 positionLS, float3 dirLS)
{
float lengthPositionLS = length(positionLS);
float sphereInfluenceDistance = lightData.influenceExtents.x - lightData.blendDistancePositive.x;

#if defined(ENVMAP_FEATURE_INFLUENCENORMAL)
float insideInfluenceNormalVolume = lengthPositionLS <= (lightData.influenceExtents.x - lightData.blendNormalDistancePositive.x) ? 1.0 : 0.0;
float insideWeight = InfluenceFadeNormalWeight(bsdfData.normalWS, normalize(positionWS - lightData.capturePositionWS));
float insideWeight = InfluenceFadeNormalWeight(normalWS, normalize(positionWS - lightData.capturePositionWS));
alpha *= insideInfluenceNormalVolume ? 1.0 : insideWeight;
#endif

float InfluenceBoxWeight(EnvLightData lightData, BSDFData bsdfData, float3 positionWS, float3 positionIS, float3 dirIS)
float InfluenceBoxWeight(EnvLightData lightData, float3 normalWS, float3 positionWS, float3 positionIS, float3 dirIS)
{
float3 influenceExtents = lightData.influenceExtents;
// 2. Process the position influence

float3 belowPositiveInfluenceNormalVolume = positiveDistance / max(0.0001, lightData.blendNormalDistancePositive);
float3 aboveNegativeInfluenceNormalVolume = negativeDistance / max(0.0001, lightData.blendNormalDistanceNegative);
float insideInfluenceNormalVolume = all(belowPositiveInfluenceNormalVolume >= 1.0) && all(aboveNegativeInfluenceNormalVolume >= 1.0) ? 1.0 : 0;
float insideWeight = InfluenceFadeNormalWeight(bsdfData.normalWS, normalize(positionWS - lightData.capturePositionWS));
float insideWeight = InfluenceFadeNormalWeight(normalWS, normalize(positionWS - lightData.capturePositionWS));
alpha *= insideInfluenceNormalVolume ? 1.0 : insideWeight;
#endif

8
ScriptableRenderPipeline/HDRenderPipeline/HDRP/MRTBufferManager.cs


{
protected int m_BufferCount;
protected RenderTargetIdentifier[] m_RTIDs;
protected RTHandle[] m_RTs;
protected RTHandleSystem.RTHandle[] m_RTs;
protected int[] m_TextureShaderIDs;
public int bufferCount { get { return m_BufferCount; } }

m_BufferCount = maxBufferCount;
m_RTIDs = new RenderTargetIdentifier[maxBufferCount];
m_RTs = new RTHandle[maxBufferCount];
m_RTs = new RTHandleSystem.RTHandle[maxBufferCount];
m_TextureShaderIDs = new int[maxBufferCount];
}

return m_RTIDs;
}
public RTHandle GetBuffer(int index)
public RTHandleSystem.RTHandle GetBuffer(int index)
{
Debug.Assert(index < m_BufferCount);
return m_RTs[index];

{
for (int i = 0; i < m_BufferCount; ++i)
{
RTHandle.Release(m_RTs[i]);
RTHandles.Release(m_RTs[i]);
m_RTs[i] = null;
}
}

8
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/Decal/DBufferManager.cs


{
public int vsibleDecalCount { get; set; }
RTHandle m_HTile;
RTHandleSystem.RTHandle m_HTile;
public DBufferManager()
: base(Decal.GetMaterialDBufferCount())

for (int dbufferIndex = 0; dbufferIndex < m_BufferCount; ++dbufferIndex)
{
m_RTs[dbufferIndex] = RTHandle.Alloc(Vector2.one, colorFormat: rtFormat[dbufferIndex], sRGB: sRGBFlags[dbufferIndex], filterMode: FilterMode.Point, name: string.Format("DBuffer{0}", dbufferIndex));
m_RTs[dbufferIndex] = RTHandles.Alloc(Vector2.one, colorFormat: rtFormat[dbufferIndex], sRGB: sRGBFlags[dbufferIndex], filterMode: FilterMode.Point, name: string.Format("DBuffer{0}", dbufferIndex));
m_HTile = RTHandle.Alloc(size => new Vector2Int((size.x + 7) / 8, (size.y + 7) / 8), filterMode: FilterMode.Point, colorFormat: RenderTextureFormat.R8, sRGB: false, enableRandomWrite: true, name: "DBufferHTile"); // Enable UAV
m_HTile = RTHandles.Alloc(size => new Vector2Int((size.x + 7) / 8, (size.y + 7) / 8), filterMode: FilterMode.Point, colorFormat: RenderTextureFormat.R8, sRGB: false, enableRandomWrite: true, name: "DBufferHTile"); // Enable UAV
RTHandle.Release(m_HTile);
RTHandles.Release(m_HTile);
}
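Quick sizing check for the HTile buffer allocated above: it reserves one texel per 8x8-pixel tile, rounding up, so a 1920x1080 target gets a (1920 + 7) / 8 = 240 by (1080 + 7) / 8 = 135 texel HTile.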
public void ClearTargets(CommandBuffer cmd, HDCamera camera)

18
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/Decal/Decal.cs.hlsl


{
float4x4 worldToDecal;
float4x4 normalToWorld;
float4 diffuseScaleBias;
float4 normalScaleBias;
float4 maskScaleBias;
float4 diffuseScaleBias;
float4 normalScaleBias;
float4 maskScaleBias;
};
//

float4x4 GetNormalToWorld(DecalData value)
{
return value.normalToWorld;
}
float4 GetDiffuseScaleBias(DecalData value)
{
return value.diffuseScaleBias;
}
float4 GetNormalScaleBias(DecalData value)
{
return value.normalScaleBias;
}
float4 GetMaskScaleBias(DecalData value)
{
return value.maskScaleBias;
}
//

4
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/GBufferManager.cs


for (int gbufferIndex = 0; gbufferIndex < m_GBufferCount; ++gbufferIndex)
{
m_RTs[gbufferIndex] = RTHandle.Alloc(Vector2.one, colorFormat: rtFormat[gbufferIndex], sRGB: sRGBFlags[gbufferIndex], filterMode: FilterMode.Point, name: string.Format("GBuffer{0}", gbufferIndex));
m_RTs[gbufferIndex] = RTHandles.Alloc(Vector2.one, colorFormat: rtFormat[gbufferIndex], sRGB: sRGBFlags[gbufferIndex], filterMode: FilterMode.Point, name: string.Format("GBuffer{0}", gbufferIndex));
m_RTIDs[gbufferIndex] = m_RTs[gbufferIndex].nameID;
m_TextureShaderIDs[gbufferIndex] = HDShaderIDs._GBufferTexture[gbufferIndex];
m_RTIDsNoShadowMask[gbufferIndex] = HDShaderIDs._GBufferTexture[gbufferIndex];

{
m_RTs[m_GBufferCount] = RTHandle.Alloc(Vector2.one, colorFormat: Builtin.GetShadowMaskBufferFormat(), sRGB: Builtin.GetShadowMaskSRGBFlag(), filterMode: FilterMode.Point, name: "GBufferShadowMask");
m_RTs[m_GBufferCount] = RTHandles.Alloc(Vector2.one, colorFormat: Builtin.GetShadowMaskBufferFormat(), sRGB: Builtin.GetShadowMaskSRGBFlag(), filterMode: FilterMode.Point, name: "GBufferShadowMask");
m_RTIDs[m_GBufferCount] = new RenderTargetIdentifier(m_RTs[m_GBufferCount]);
m_TextureShaderIDs[m_GBufferCount] = HDShaderIDs._ShadowMaskTexture;
}

104
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/Lit/Lit.cs


bool m_isInit;
// For image based lighting
Material m_InitPreFGD;
RenderTexture m_PreIntegratedFGD;
// For area lighting - we pack all textures inside a texture array to reduce the number of resources required
Texture2DArray m_LtcData; // 0: m_LtcGGXMatrix - RGBA, 1: m_LtcDisneyDiffuseMatrix - RGBA, 2: m_LtcMultiGGXFresnelDisneyDiffuse - RGB, A unused
const int k_LtcLUTMatrixDim = 3; // size of the matrix (3x3)
const int k_LtcLUTResolution = 64;
// Load LUT with one scalar in alpha of a tex2D
void LoadLUT(Texture2DArray tex, int arrayElement, TextureFormat format, float[] LUTScalar)
{
const int count = k_LtcLUTResolution * k_LtcLUTResolution;
Color[] pixels = new Color[count];
for (int i = 0; i < count; i++)
{
pixels[i] = new Color(0, 0, 0, LUTScalar[i]);
}
tex.SetPixels(pixels, arrayElement);
}
// Load LUT with a 3x3 matrix in RGBA of a tex2D (some parts are zero)
void LoadLUT(Texture2DArray tex, int arrayElement, TextureFormat format, double[,] LUTTransformInv)
{
const int count = k_LtcLUTResolution * k_LtcLUTResolution;
Color[] pixels = new Color[count];
for (int i = 0; i < count; i++)
{
// Both GGX and Disney Diffuse BRDFs have zero values in columns 1, 3, 5, 7.
// Column 8 contains only ones.
pixels[i] = new Color((float)LUTTransformInv[i, 0],
(float)LUTTransformInv[i, 2],
(float)LUTTransformInv[i, 4],
(float)LUTTransformInv[i, 6]);
}
tex.SetPixels(pixels, arrayElement);
}
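In other words, each RGBA texel written above holds the four non-trivial entries of the inverse LTC transform; with the zero pattern noted in the comment (and entry 8 equal to one), the shader side can rebuild it as roughly
M^-1 = | r 0 g |
       | 0 b 0 |
       | a 0 1 |
treating the exact row/column layout as an assumption of this sketch rather than a statement about the fetch code.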
// Special-case function for 'm_LtcMultiGGXFresnelDisneyDiffuse'.
void LoadLUT(Texture2DArray tex, int arrayElement, TextureFormat format, float[] LtcGGXMagnitudeData,
float[] LtcGGXFresnelData,
float[] LtcDisneyDiffuseMagnitudeData)
{
const int count = k_LtcLUTResolution * k_LtcLUTResolution;
Color[] pixels = new Color[count];
for (int i = 0; i < count; i++)
{
// We store the result of the subtraction as a run-time optimization.
// See the footnote 2 of "LTC Fresnel Approximation" by Stephen Hill.
pixels[i] = new Color(LtcGGXMagnitudeData[i] - LtcGGXFresnelData[i],
LtcGGXFresnelData[i], LtcDisneyDiffuseMagnitudeData[i], 1);
}
tex.SetPixels(pixels, arrayElement);
}
m_InitPreFGD = CoreUtils.CreateEngineMaterial("Hidden/HDRenderPipeline/PreIntegratedFGD");
m_PreIntegratedFGD = new RenderTexture(128, 128, 0, RenderTextureFormat.ARGB2101010, RenderTextureReadWrite.Linear);
m_PreIntegratedFGD.hideFlags = HideFlags.HideAndDontSave;
m_PreIntegratedFGD.filterMode = FilterMode.Bilinear;
m_PreIntegratedFGD.wrapMode = TextureWrapMode.Clamp;
m_PreIntegratedFGD.hideFlags = HideFlags.DontSave;
m_PreIntegratedFGD.name = CoreUtils.GetRenderTargetAutoName(128, 128, RenderTextureFormat.ARGB2101010, "PreIntegratedFGD");
m_PreIntegratedFGD.Create();
m_LtcData = new Texture2DArray(k_LtcLUTResolution, k_LtcLUTResolution, 3, TextureFormat.RGBAHalf, false /*mipmap*/, true /* linear */)
{
hideFlags = HideFlags.HideAndDontSave,
wrapMode = TextureWrapMode.Clamp,
filterMode = FilterMode.Bilinear,
name = CoreUtils.GetTextureAutoName(k_LtcLUTResolution, k_LtcLUTResolution, TextureFormat.RGBAHalf, depth: 3, dim: TextureDimension.Tex2DArray, name: "LTC_LUT")
};
LoadLUT(m_LtcData, 0, TextureFormat.RGBAHalf, s_LtcGGXMatrixData);
LoadLUT(m_LtcData, 1, TextureFormat.RGBAHalf, s_LtcDisneyDiffuseMatrixData);
// TODO: switch to RGBA64 when it becomes available.
LoadLUT(m_LtcData, 2, TextureFormat.RGBAHalf, s_LtcGGXMagnitudeData, s_LtcGGXFresnelData, s_LtcDisneyDiffuseMagnitudeData);
m_LtcData.Apply();
PreIntegratedFGD.instance.Build();
LTCAreaLight.instance.Build();
m_isInit = false;
}

CoreUtils.Destroy(m_InitPreFGD);
CoreUtils.Destroy(m_PreIntegratedFGD);
CoreUtils.Destroy(m_LtcData);
PreIntegratedFGD.instance.Cleanup();
LTCAreaLight.instance.Cleanup();
// TODO: how to delete RenderTexture ? or do we need to do it ?
m_isInit = false;
}

return;
using (new ProfilingSample(cmd, "Init PreFGD"))
{
CoreUtils.DrawFullScreen(cmd, m_InitPreFGD, new RenderTargetIdentifier(m_PreIntegratedFGD));
}
PreIntegratedFGD.instance.RenderInit(cmd);
m_isInit = true;
}

Shader.SetGlobalTexture("_PreIntegratedFGD", m_PreIntegratedFGD);
Shader.SetGlobalTexture("_LtcData", m_LtcData);
PreIntegratedFGD.instance.Bind();
LTCAreaLight.instance.Bind();
}
}
}

269
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/Lit/Lit.hlsl


TEXTURE2D(_GBufferTexture2);
TEXTURE2D(_GBufferTexture3);
// Rough refraction texture
// Color pyramid (width, height, lodcount, Unused)
TEXTURE2D(_ColorPyramidTexture);
// Depth pyramid (width, height, lodcount, Unused)
TEXTURE2D(_DepthPyramidTexture);
CBUFFER_START(UnityLightingParameters)
// Buffer pyramid
float4 _ColorPyramidSize; // (x,y) = Actual Pixel Size, (z,w) = 1 / Actual Pixel Size
float4 _DepthPyramidSize; // (x,y) = Actual Pixel Size, (z,w) = 1 / Actual Pixel Size
float4 _ColorPyramidScale; // (x,y) = Screen Scale, z = lod count, w = unused
float4 _DepthPyramidScale; // (x,y) = Screen Scale, z = lod count, w = unused
// Screen space lighting
float _SSRefractionInvScreenWeightDistance; // Distance for screen space smoothstep with fallback
// Ambient occlusion
float4 _AmbientOcclusionParam; // xyz occlusion color, w directLightStrength
CBUFFER_END
// Ambient occlusion texture
TEXTURE2D(_AmbientOcclusionTexture);
// Area light textures
// TODO: This one should be set into a constant Buffer at pass frequency (with _Screensize)
TEXTURE2D(_PreIntegratedFGD);
TEXTURE2D_ARRAY(_LtcData); // We pack the 3 Ltc data inside a texture array
#define LTC_GGX_MATRIX_INDEX 0 // RGBA
#define LTC_DISNEY_DIFFUSE_MATRIX_INDEX 1 // RGBA
#define LTC_MULTI_GGX_FRESNEL_DISNEY_DIFFUSE_INDEX 2 // RGB, A unused
#define LTC_LUT_SIZE 64
#define LTC_LUT_SCALE ((LTC_LUT_SIZE - 1) * rcp(LTC_LUT_SIZE))
#define LTC_LUT_OFFSET (0.5 * rcp(LTC_LUT_SIZE))
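The two constants above are the standard half-texel remap: uv = x * (LTC_LUT_SIZE - 1) / LTC_LUT_SIZE + 0.5 / LTC_LUT_SIZE maps x in [0, 1] onto the texel centers [0.5/64, 63.5/64], so bilinear filtering never reads outside the 64x64 LUT.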
#include "../LTCAreaLight/LTCAreaLight.hlsl"
#include "../PreIntegratedFGD/PreIntegratedFGD.hlsl"
//-----------------------------------------------------------------------------
// Definition

//-----------------------------------------------------------------------------
#if HAS_REFRACTION
# include "CoreRP/ShaderLibrary/Refraction.hlsl"
# include "HDRP/Lighting/Reflection/VolumeProjection.hlsl"
# include "HDRP/Lighting/LightDefinition.cs.hlsl"
# define SSRTID Refraction
# include "HDRP/Lighting/Reflection/ScreenSpaceTracing.hlsl"
# undef SSRTID
#include "CoreRP/ShaderLibrary/Refraction.hlsl"
#include "HDRP/Lighting/LightDefinition.cs.hlsl"
#include "HDRP/Lighting/Reflection/VolumeProjection.hlsl"
#define SSRTID Refraction
#include "HDRP/Lighting/Reflection/ScreenSpaceTracing.hlsl"
#undef SSRTID
# if defined(_REFRACTION_PLANE)
# define REFRACTION_MODEL(V, posInputs, bsdfData) RefractionModelPlane(V, posInputs.positionWS, bsdfData.normalWS, bsdfData.ior, bsdfData.thickness)
# elif defined(_REFRACTION_SPHERE)
# define REFRACTION_MODEL(V, posInputs, bsdfData) RefractionModelSphere(V, posInputs.positionWS, bsdfData.normalWS, bsdfData.ior, bsdfData.thickness)
# endif
#if defined(_REFRACTION_PLANE)
#define REFRACTION_MODEL(V, posInputs, bsdfData) RefractionModelPlane(V, posInputs.positionWS, bsdfData.normalWS, bsdfData.ior, bsdfData.thickness)
#elif defined(_REFRACTION_SPHERE)
#define REFRACTION_MODEL(V, posInputs, bsdfData) RefractionModelSphere(V, posInputs.positionWS, bsdfData.normalWS, bsdfData.ior, bsdfData.thickness)
#endif
# if defined(_REFRACTION_SSRAY_PROXY)
# define REFRACTION_SSRAY_IN ScreenSpaceProxyRaycastInput
# define REFRACTION_SSRAY_QUERY(input, hit) ScreenSpaceProxyRaycastRefraction(input, hit)
# elif defined(_REFRACTION_SSRAY_HIZ)
# define REFRACTION_SSRAY_IN ScreenSpaceHiZRaymarchInput
# define REFRACTION_SSRAY_QUERY(input, hit) ScreenSpaceHiZRaymarchRefraction(input, hit)
# endif
#if defined(_REFRACTION_SSRAY_PROXY)
#define REFRACTION_SSRAY_IN ScreenSpaceProxyRaycastInput
#define REFRACTION_SSRAY_QUERY(input, hit) ScreenSpaceProxyRaycastRefraction(input, hit)
#elif defined(_REFRACTION_SSRAY_HIZ)
#define REFRACTION_SSRAY_IN ScreenSpaceHiZRaymarchInput
#define REFRACTION_SSRAY_QUERY(input, hit) ScreenSpaceHiZRaymarchRefraction(input, hit)
#endif
#endif
// This method allows us to know at compile time what material features should be removed from the code by Tile (independently of the value of the material feature flags per pixel).

bsdfData.absorptionCoefficient = TransmittanceColorAtDistanceToAbsorption(transmittanceColor, atDistance);
bsdfData.transmittanceMask = transmittanceMask;
bsdfData.thickness = max(thickness, 0.0001);
}
// For image based lighting, a part of the BSDF is pre-integrated.
// This is done both for specular and diffuse (in case of DisneyDiffuse)
void GetPreIntegratedFGD(float NdotV, float perceptualRoughness, float3 fresnel0, out float3 specularFGD, out float diffuseFGD, out float reflectivity)
{
// Pre-integrate GGX FGD
// Integral{BSDF * <N,L> dw} =
// Integral{(F0 + (1 - F0) * (1 - <V,H>)^5) * (BSDF / F) * <N,L> dw} =
// (1 - F0) * Integral{(1 - <V,H>)^5 * (BSDF / F) * <N,L> dw} + F0 * Integral{(BSDF / F) * <N,L> dw}=
// (1 - F0) * x + F0 * y = lerp(x, y, F0)
// Pre integrate DisneyDiffuse FGD:
// z = DisneyDiffuse
float3 preFGD = SAMPLE_TEXTURE2D_LOD(_PreIntegratedFGD, s_linear_clamp_sampler, float2(NdotV, perceptualRoughness), 0).xyz;
specularFGD = lerp(preFGD.xxx, preFGD.yyy, fresnel0);
#ifdef LIT_DIFFUSE_LAMBERT_BRDF
diffuseFGD = 1.0;
#else
// Remap from the [0, 1] to the [0.5, 1.5] range.
diffuseFGD = preFGD.z + 0.5;
#endif
reflectivity = preFGD.y;
}
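Tying the derivation back to the fetch above: with preFGD = (x, y, z), specularFGD = lerp(x, y, fresnel0) = (1 - F0) * x + F0 * y, diffuseFGD remaps the Disney-diffuse term from [0, 1] to [0.5, 1.5] via z + 0.5, and reflectivity is the y term, i.e. the response at fresnel0 = 1.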
// This function is used to help with debugging and must be implemented by any lit material

// IBL
// Handle IBL + multiscattering
float reflectivity;
GetPreIntegratedFGD(NdotV, preLightData.iblPerceptualRoughness, bsdfData.fresnel0, preLightData.specularFGD, preLightData.diffuseFGD, reflectivity);
float specularReflectivity;
GetPreIntegratedFGDGGXAndDisneyDiffuse(NdotV, preLightData.iblPerceptualRoughness, bsdfData.fresnel0, preLightData.specularFGD, preLightData.diffuseFGD, specularReflectivity);
#ifdef LIT_DIFFUSE_LAMBERT_BRDF
preLightData.diffuseFGD = 1.0;
#endif
iblR = reflect(-V, iblN);
// This is an ad-hoc tweak to better match the reference for anisotropic GGX.

// We deem the intensity difference of a couple of percent for high values of roughness
// to not be worth the cost of another precomputed table.
// Note: this formulation bakes the BSDF non-symmetric!
preLightData.energyCompensation = 1.0 / reflectivity - 1.0;
preLightData.energyCompensation = 1.0 / specularReflectivity - 1.0;
#else
preLightData.energyCompensation = 0.0;
#endif // LIT_USE_GGX_ENERGY_COMPENSATION
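Worked form of the compensation: with single-scattering reflectivity E (the reflectivity/specularReflectivity value above), specular lighting is later multiplied by 1 + fresnel0 * (1/E - 1); for E = 1 this is exactly 1 (no change), and as E drops the energy lost to single scattering is added back in proportion to fresnel0.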

#endif
#include "../../Lighting/LightEvaluation.hlsl"
#include "../../Lighting/Reflection/VolumeProjection.hlsl"
//-----------------------------------------------------------------------------
// Lighting structure for light accumulation

UpdateLightingHierarchyWeights(hierarchyWeight, weight); // Shouldn't be needed, but safer in case we decide to change hierarchy priority
float3 preLD = SAMPLE_TEXTURE2D_LOD(
_ColorPyramidTexture,
s_trilinear_clamp_sampler,
hit.positionNDC * _ColorPyramidScale.xy,
_ColorPyramidTexture,
s_trilinear_clamp_sampler,
hit.positionNDC * _ColorPyramidScale.xy,
preLightData.transparentSSMipLevel
).rgb;

#else
// TODO: factor this code into a common place, so other material authors don't need to rewrite everything,
// TODO: test the stretch from Tomasz
// float roughness = PerceptualRoughnessToRoughness(preLightData.IblPerceptualRoughness);
// float shrunkRoughness = AnisotropicStrechAtGrazingAngle(roughness, roughness, NdotV);
// Guideline for reflection volumes: in HDRenderPipeline we separate the projection volume (the proxy of the scene) from the influence volume (which pixels on the screen are affected)
// However we add the constraint that the shapes of the projection and influence volumes are the same (i.e. if we have a sphere-shaped projection volume, we have a sphere-shaped influence volume).
// This allows more coherence for the dynamic branching in shader code.
// Users can also choose not to have any projection; in this case we use the property minProjectionDistance to minimize code changes. minProjectionDistance is set to a huge number
// that simulates the effect of no shape projection
float3 coatR = preLightData.coatIblR;
if (GPUImageBasedLightingType == GPUIMAGEBASEDLIGHTINGTYPE_REFRACTION)
{

// In Unity the cubemaps are captured with the localToWorld transform of the component.
// This means that location and orientation matter. So after intersecting the proxy volume we need to convert back to world space.
float3x3 worldToIS = WorldToInfluenceSpace(lightData);
float3 positionIS = WorldToInfluencePosition(lightData, worldToIS, positionWS);
float3 dirIS = mul(R, worldToIS);
float3x3 worldToPS = WorldToProxySpace(lightData);
float3 positionPS = WorldToProxyPosition(lightData, worldToPS, positionWS);
float3 dirPS = mul(R, worldToPS);
float projectionDistance = 0;
// 1. First process the projection
if (influenceShapeType == ENVSHAPETYPE_SPHERE)
{
projectionDistance = IntersectSphereProxy(lightData, dirPS, positionPS);
// We can reuse the distance calculated in LS directly in WS as there is no scaling. Also the offset is already included in lightData.capturePositionWS
float3 capturePositionWS = lightData.capturePositionWS;
R = (positionWS + projectionDistance * R) - capturePositionWS;
EvaluateLight_EnvIntersection(positionWS, bsdfData.normalWS, lightData, influenceShapeType, R, weight);
// Test again for clear coat
if (GPUImageBasedLightingType == GPUIMAGEBASEDLIGHTINGTYPE_REFLECTION && HasFeatureFlag(bsdfData.materialFeatures, MATERIALFEATUREFLAGS_LIT_CLEAR_COAT))
{
dirPS = mul(coatR, worldToPS);
projectionDistance = IntersectSphereProxy(lightData, dirPS, positionPS);
coatR = (positionWS + projectionDistance * coatR) - capturePositionWS;
}
weight = InfluenceSphereWeight(lightData, bsdfData, positionWS, positionIS, dirIS);
}
else if (influenceShapeType == ENVSHAPETYPE_BOX)
// Don't do clear coating for refraction
float3 coatR = preLightData.coatIblR;
if (GPUImageBasedLightingType == GPUIMAGEBASEDLIGHTINGTYPE_REFLECTION && HasFeatureFlag(bsdfData.materialFeatures, MATERIALFEATUREFLAGS_LIT_CLEAR_COAT))
projectionDistance = IntersectBoxProxy(lightData, dirPS, positionPS);
// No need to normalize for fetching cubemap
// We can reuse the distance calculated in LS directly in WS as there is no scaling. Also the offset is already included in lightData.capturePositionWS
float3 capturePositionWS = lightData.capturePositionWS;
R = (positionWS + projectionDistance * R) - capturePositionWS;
// TODO: add distance based roughness
// Test again for clear coat
if (GPUImageBasedLightingType == GPUIMAGEBASEDLIGHTINGTYPE_REFLECTION && HasFeatureFlag(bsdfData.materialFeatures, MATERIALFEATUREFLAGS_LIT_CLEAR_COAT))
{
dirPS = mul(coatR, worldToPS);
projectionDistance = IntersectBoxProxy(lightData, dirPS, positionPS);
coatR = (positionWS + projectionDistance * coatR) - capturePositionWS;
}
weight = InfluenceBoxWeight(lightData, bsdfData, positionWS, positionIS, dirIS);
float unusedWeight = 0.0;
EvaluateLight_EnvIntersection(positionWS, bsdfData.normalWS, lightData, influenceShapeType, coatR, unusedWeight);
#ifdef DEBUG_DISPLAY
float3 radiusToProxy = R;
#endif
// When we are rough, we tend to see an outward shift of the reflection at the boundary of the projection volume.
// It also appears sharper. To avoid these artifacts and at the same time get a better match to the reference, we lerp to the original unmodified reflection.
// The formula is empirical.

float3 sampleDirectionDiscardWS = lightData.sampleDirectionDiscardWS;
if (dot(sampleDirectionDiscardWS, R) < 0)
return lighting;
weight *= preLD.a;
#ifdef DEBUG_DISPLAY
if (_DebugLightingMode == DEBUGLIGHTINGMODE_ENVIRONMENT_PROXY_VOLUME)
preLD = ApplyDebugProjectionVolume(preLD, radiusToProxy, _DebugEnvironmentProxyDepthScale);
#endif
// Smooth weighting
weight = Smoothstep01(weight);
weight *= preLD.a; // Used by planar reflection to discard pixel
if (GPUImageBasedLightingType == GPUIMAGEBASEDLIGHTINGTYPE_REFLECTION)
{

#endif // LIT_DISPLAY_REFERENCE_IBL
weight *= lightData.weight;
UpdateLightingHierarchyWeights(hierarchyWeight, weight);
envLighting *= weight * lightData.multiplier;

{
float3 bakeDiffuseLighting = bakeLightingData.bakeDiffuseLighting;
AmbientOcclusionFactor aoFactor;
#define GTAO_MULTIBOUNCE_APPROX 1
// Note: when we ImageLoad outside of the texture size, the value returned by Load is 0 (Note: on Metal it may clamp to the edge value of the texture, which is also fine)
// We use this property to have a neutral value for AO that doesn't consume a sampler and also works with compute shaders (i.e. use ImageLoad)
// We store inverse AO so neutral is black. Whether we sample inside or outside the texture, it returns 0 in the neutral case.
// Ambient occlusion used for indirect lighting (reflection probe, baked diffuse lighting)
#ifndef _SURFACE_TYPE_TRANSPARENT
float indirectAmbientOcclusion = 1.0 - LOAD_TEXTURE2D(_AmbientOcclusionTexture, posInput.positionSS).x;
// Ambient occlusion used for direct lighting (directional, punctual, area)
float directAmbientOcclusion = lerp(1.0, indirectAmbientOcclusion, _AmbientOcclusionParam.w);
#if 0
GetScreenSpaceAmbientOcclusion(posInput.positionSS, preLightData.NdotV, bsdfData.perceptualRoughness, bsdfData.specularOcclusion, aoFactor);
float indirectAmbientOcclusion = 1.0;
float directAmbientOcclusion = 1.0;
GetScreenSpaceAmbientOcclusionMultibounce(posInput.positionSS, preLightData.NdotV, bsdfData.perceptualRoughness, bsdfData.specularOcclusion, bsdfData.diffuseColor, bsdfData.fresnel0, aoFactor);
#if GTAO_MULTIBOUNCE_APPROX
bakeDiffuseLighting *= GTAOMultiBounce(indirectAmbientOcclusion, bsdfData.diffuseColor);
#else
bakeDiffuseLighting *= lerp(_AmbientOcclusionParam.rgb, float3(1.0, 1.0, 1.0), indirectAmbientOcclusion);
#endif
float roughness = PerceptualRoughnessToRoughness(bsdfData.perceptualRoughness);
float specularOcclusion = GetSpecularOcclusionFromAmbientOcclusion(ClampNdotV(preLightData.NdotV), indirectAmbientOcclusion, roughness);
// Try to mimic multibounce with the specular color. Not the intent of the original formula, but it gives an OK result.
// Take the min of screenspace specular occlusion and visibility cone specular occlusion
#if GTAO_MULTIBOUNCE_APPROX
lighting.indirect.specularReflected *= GTAOMultiBounce(min(bsdfData.specularOcclusion, specularOcclusion), bsdfData.fresnel0);
#else
lighting.indirect.specularReflected *= lerp(_AmbientOcclusionParam.rgb, float3(1.0, 1.0, 1.0), min(bsdfData.specularOcclusion, specularOcclusion));
#endif
bakeDiffuseLighting *= aoFactor.indirectAmbientOcclusion;
lighting.indirect.specularReflected *= aoFactor.indirectSpecularOcclusion;
lighting.direct.diffuse *= aoFactor.directAmbientOcclusion;
lighting.direct.diffuse *=
#if GTAO_MULTIBOUNCE_APPROX
GTAOMultiBounce(directAmbientOcclusion, bsdfData.diffuseColor);
#else
lerp(_AmbientOcclusionParam.rgb, float3(1.0, 1.0, 1.0), directAmbientOcclusion);
#endif
// Subsurface scattering mode
uint texturingMode = (bsdfData.materialFeatures >> MATERIAL_FEATURE_FLAGS_SSS_TEXTURING_MODE_OFFSET) & 3;
float3 modifiedDiffuseColor = ApplySubsurfaceScatteringTexturingMode(texturingMode, bsdfData.diffuseColor);

// If refraction is enabled we use the transmittanceMask to lerp between the current diffuse lighting and the refraction value
// Physically speaking, it should be transmittanceMask should be 1, but for artistic reasons, we let the value vary
// Physically speaking, transmittanceMask should be 1, but for artistic reasons, we let the value vary
#if HAS_REFRACTION
diffuseLighting = lerp(diffuseLighting, lighting.indirect.specularTransmitted, bsdfData.transmittanceMask);
#endif

specularLighting *= 1.0 + bsdfData.fresnel0 * preLightData.energyCompensation;
#ifdef DEBUG_DISPLAY
specularLighting = float3(0.0, 0.0, 0.0); // Disable specular lighting
switch (_DebugLightingMode)
{
case DEBUGLIGHTINGMODE_LUX_METER:

case DEBUGLIGHTINGMODE_INDIRECT_DIFFUSE_OCCLUSION_FROM_SSAO:
diffuseLighting = indirectAmbientOcclusion;
specularLighting = float3(0.0, 0.0, 0.0); // Disable specular lighting
case DEBUGLIGHTINGMODE_INDIRECT_DIFFUSE_OCCLUSION:
diffuseLighting = aoFactor.indirectAmbientOcclusion;
case DEBUGLIGHTINGMODE_INDIRECT_SPECULAR_OCCLUSION_FROM_SSAO:
diffuseLighting = specularOcclusion;
specularLighting = float3(0.0, 0.0, 0.0); // Disable specular lighting
case DEBUGLIGHTINGMODE_INDIRECT_SPECULAR_OCCLUSION:
diffuseLighting = aoFactor.indirectSpecularOcclusion;
#if GTAO_MULTIBOUNCE_APPROX
case DEBUGLIGHTINGMODE_INDIRECT_DIFFUSE_GTAO_FROM_SSAO:
diffuseLighting = GTAOMultiBounce(indirectAmbientOcclusion, bsdfData.diffuseColor);
specularLighting = float3(0.0, 0.0, 0.0); // Disable specular lighting
break;
case DEBUGLIGHTINGMODE_INDIRECT_SPECULAR_GTAO_FROM_SSAO:
diffuseLighting = GTAOMultiBounce(specularOcclusion, bsdfData.fresnel0);
specularLighting = float3(0.0, 0.0, 0.0); // Disable specular lighting
break;
#endif
{
specularLighting = float3(0.0, 0.0, 0.0); // Disable specular lighting
}
break;
}
}

82
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/StackLit/StackLit.cs


using UnityEngine;
using UnityEngine.Rendering;
//-----------------------------------------------------------------------------
// structure definition
//-----------------------------------------------------------------------------
[GenerateHLSL(PackingRules.Exact)]
public enum MaterialFeatureFlags
{
LitStandard = 1 << 0
};
//-----------------------------------------------------------------------------
// SurfaceData
//-----------------------------------------------------------------------------

public struct SurfaceData
{
[SurfaceDataAttributes("Material Features")]
public uint materialFeatures;
// Standard
[SurfaceDataAttributes("Base Color", false, true)]
public Vector3 baseColor;

[SurfaceDataAttributes("Smoothness A")]
public float perceptualSmoothnessA;
[SurfaceDataAttributes("Smoothness B")]
public float perceptualSmoothnessB;
[SurfaceDataAttributes("Lobe Mixing")]
public float lobeMix;
[SurfaceDataAttributes("Metallic")]
public float metallic;
};
//-----------------------------------------------------------------------------

[GenerateHLSL(PackingRules.Exact, false, true, 1400)]
public struct BSDFData
{
public uint materialFeatures;
public Vector3 fresnel0;
public float perceptualRoughnessA;
public float perceptualRoughnessB;
public float lobeMix;
public float roughnessAT;
public float roughnessAB;
public float roughnessBT;
public float roughnessBB;
public float anisotropy;
//public fixed float test[2];
//-----------------------------------------------------------------------------
// Init precomputed textures
//-----------------------------------------------------------------------------
bool m_isInit;
public StackLit() {}
public override void Build(HDRenderPipelineAsset hdAsset)
{
PreIntegratedFGD.instance.Build();
//LTCAreaLight.instance.Build();
m_isInit = false;
}
public override void Cleanup()
{
PreIntegratedFGD.instance.Cleanup();
//LTCAreaLight.instance.Cleanup();
m_isInit = false;
}
public override void RenderInit(CommandBuffer cmd)
{
if (m_isInit)
return;
PreIntegratedFGD.instance.RenderInit(cmd);
m_isInit = true;
}
public override void Bind()
{
PreIntegratedFGD.instance.Bind();
//LTCAreaLight.instance.Bind();
}
}
}

92
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/StackLit/StackLit.cs.hlsl


#ifndef STACKLIT_CS_HLSL
#define STACKLIT_CS_HLSL
//
// UnityEngine.Experimental.Rendering.HDPipeline.StackLit+MaterialFeatureFlags: static fields
//
#define MATERIALFEATUREFLAGS_LIT_STANDARD (1)
//
#define DEBUGVIEW_STACKLIT_SURFACEDATA_BASE_COLOR (1300)
#define DEBUGVIEW_STACKLIT_SURFACEDATA_NORMAL (1301)
#define DEBUGVIEW_STACKLIT_SURFACEDATA_NORMAL_VIEW_SPACE (1302)
#define DEBUGVIEW_STACKLIT_SURFACEDATA_MATERIAL_FEATURES (1300)
#define DEBUGVIEW_STACKLIT_SURFACEDATA_BASE_COLOR (1301)
#define DEBUGVIEW_STACKLIT_SURFACEDATA_NORMAL (1302)
#define DEBUGVIEW_STACKLIT_SURFACEDATA_NORMAL_VIEW_SPACE (1303)
#define DEBUGVIEW_STACKLIT_SURFACEDATA_SMOOTHNESS_A (1304)
#define DEBUGVIEW_STACKLIT_SURFACEDATA_SMOOTHNESS_B (1305)
#define DEBUGVIEW_STACKLIT_SURFACEDATA_LOBE_MIXING (1306)
#define DEBUGVIEW_STACKLIT_SURFACEDATA_METALLIC (1307)
#define DEBUGVIEW_STACKLIT_BSDFDATA_DIFFUSE_COLOR (1400)
#define DEBUGVIEW_STACKLIT_BSDFDATA_NORMAL_WS (1401)
#define DEBUGVIEW_STACKLIT_BSDFDATA_NORMAL_VIEW_SPACE (1402)
#define DEBUGVIEW_STACKLIT_BSDFDATA_MATERIAL_FEATURES (1400)
#define DEBUGVIEW_STACKLIT_BSDFDATA_DIFFUSE_COLOR (1401)
#define DEBUGVIEW_STACKLIT_BSDFDATA_FRESNEL0 (1402)
#define DEBUGVIEW_STACKLIT_BSDFDATA_NORMAL_WS (1403)
#define DEBUGVIEW_STACKLIT_BSDFDATA_NORMAL_VIEW_SPACE (1404)
#define DEBUGVIEW_STACKLIT_BSDFDATA_PERCEPTUAL_ROUGHNESS_A (1405)
#define DEBUGVIEW_STACKLIT_BSDFDATA_PERCEPTUAL_ROUGHNESS_B (1406)
#define DEBUGVIEW_STACKLIT_BSDFDATA_LOBE_MIXING (1407)
#define DEBUGVIEW_STACKLIT_BSDFDATA_ROUGHNESS_AT (1408)
#define DEBUGVIEW_STACKLIT_BSDFDATA_ROUGHNESS_AB (1409)
#define DEBUGVIEW_STACKLIT_BSDFDATA_ROUGHNESS_BT (1410)
#define DEBUGVIEW_STACKLIT_BSDFDATA_ROUGHNESS_BB (1411)
#define DEBUGVIEW_STACKLIT_BSDFDATA_ANISOTROPY (1412)
uint materialFeatures;
float perceptualSmoothnessA;
float perceptualSmoothnessB;
float lobeMix;
float metallic;
};
// Generated from UnityEngine.Experimental.Rendering.HDPipeline.StackLit+BSDFData

uint materialFeatures;
float3 fresnel0;
float perceptualRoughnessA;
float perceptualRoughnessB;
float lobeMix;
float roughnessAT;
float roughnessAB;
float roughnessBT;
float roughnessBB;
float anisotropy;
};
//

{
switch (paramId)
{
case DEBUGVIEW_STACKLIT_SURFACEDATA_MATERIAL_FEATURES:
result = GetIndexColor(surfacedata.materialFeatures);
break;
case DEBUGVIEW_STACKLIT_SURFACEDATA_BASE_COLOR:
result = surfacedata.baseColor;
needLinearToSRGB = true;
break;
case DEBUGVIEW_STACKLIT_SURFACEDATA_NORMAL_VIEW_SPACE:
result = surfacedata.normalWS * 0.5 + 0.5;
break;
case DEBUGVIEW_STACKLIT_SURFACEDATA_SMOOTHNESS_A:
result = surfacedata.perceptualSmoothnessA.xxx;
break;
case DEBUGVIEW_STACKLIT_SURFACEDATA_SMOOTHNESS_B:
result = surfacedata.perceptualSmoothnessB.xxx;
break;
case DEBUGVIEW_STACKLIT_SURFACEDATA_LOBE_MIXING:
result = surfacedata.lobeMix.xxx;
break;
case DEBUGVIEW_STACKLIT_SURFACEDATA_METALLIC:
result = surfacedata.metallic.xxx;
break;
}
}

{
switch (paramId)
{
case DEBUGVIEW_STACKLIT_BSDFDATA_MATERIAL_FEATURES:
result = GetIndexColor(bsdfdata.materialFeatures);
break;
case DEBUGVIEW_STACKLIT_BSDFDATA_FRESNEL0:
result = bsdfdata.fresnel0;
break;
case DEBUGVIEW_STACKLIT_BSDFDATA_PERCEPTUAL_ROUGHNESS_A:
result = bsdfdata.perceptualRoughnessA.xxx;
break;
case DEBUGVIEW_STACKLIT_BSDFDATA_PERCEPTUAL_ROUGHNESS_B:
result = bsdfdata.perceptualRoughnessB.xxx;
break;
case DEBUGVIEW_STACKLIT_BSDFDATA_LOBE_MIXING:
result = bsdfdata.lobeMix.xxx;
break;
case DEBUGVIEW_STACKLIT_BSDFDATA_ROUGHNESS_AT:
result = bsdfdata.roughnessAT.xxx;
break;
case DEBUGVIEW_STACKLIT_BSDFDATA_ROUGHNESS_AB:
result = bsdfdata.roughnessAB.xxx;
break;
case DEBUGVIEW_STACKLIT_BSDFDATA_ROUGHNESS_BT:
result = bsdfdata.roughnessBT.xxx;
break;
case DEBUGVIEW_STACKLIT_BSDFDATA_ROUGHNESS_BB:
result = bsdfdata.roughnessBB.xxx;
break;
case DEBUGVIEW_STACKLIT_BSDFDATA_ANISOTROPY:
result = bsdfdata.anisotropy.xxx;
break;
}
}

335
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/StackLit/StackLit.hlsl


//
// Also add options at the top of this file, see Lit.hlsl.
//-----------------------------------------------------------------------------
// Texture and constant buffer declaration
//-----------------------------------------------------------------------------
// Declare the BSDF specific FGD property and its fetching function
#include "../PreIntegratedFGD/PreIntegratedFGD.hlsl"
//-----------------------------------------------------------------------------
// Definition
//-----------------------------------------------------------------------------
#define HAS_REFRACTION (defined(_REFRACTION_PLANE) || defined(_REFRACTION_SPHERE)) && (defined(_REFRACTION_SSRAY_PROXY) || defined(_REFRACTION_SSRAY_HIZ))
#define DEFAULT_SPECULAR_VALUE 0.04
//-----------------------------------------------------------------------------
// Configuration
//-----------------------------------------------------------------------------
#define LIT_DIFFUSE_LAMBERT_BRDF // TODO Disney Diffuse
#define LIT_USE_GGX_ENERGY_COMPENSATION
//-----------------------------------------------------------------------------
// Helper functions/variable specific to this material
//-----------------------------------------------------------------------------
// This method allows us to know at compile time which material features should be removed from the code per tile (independently of the value of the material feature flags per pixel).
// This is only useful for classification during lighting, so it's not needed in EncodeIntoGBuffer and ConvertSurfaceDataToBSDFData (where we always know exactly what the material feature is)
bool HasFeatureFlag(uint featureFlags, uint flag)
{
return ((featureFlags & flag) != 0);
}
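As the comment notes, HasFeatureFlag is mainly useful because materialFeatures is statically known in the forward-only path, so the test folds to a constant and dead branches can be stripped. A small illustrative sketch (the anisotropy flag value below is hypothetical; only MATERIALFEATUREFLAGS_LIT_STANDARD appears in this diff):

using System;

static class FeatureFlagSketch
{
    const uint MATERIALFEATUREFLAGS_LIT_STANDARD = 1u << 0;
    const uint MATERIALFEATUREFLAGS_LIT_ANISOTROPY = 1u << 4; // hypothetical value

    static bool HasFeatureFlag(uint featureFlags, uint flag) => (featureFlags & flag) != 0;

    static void Main()
    {
        // When the flags are a literal, the condition is a constant and the
        // compiler can drop the unused branch entirely (the point of the comment above).
        const uint features = MATERIALFEATUREFLAGS_LIT_STANDARD;
        if (HasFeatureFlag(features, MATERIALFEATUREFLAGS_LIT_ANISOTROPY))
            Console.WriteLine("anisotropy path");   // statically unreachable here
        else
            Console.WriteLine("isotropic path");
    }
}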
float3 ComputeDiffuseColor(float3 baseColor, float metallic)
{
return baseColor * (1.0 - metallic);
}
float3 ComputeFresnel0(float3 baseColor, float metallic, float dielectricF0)
{
return lerp(dielectricF0.xxx, baseColor, metallic);
}
// This function is used to help with debugging and must be implemented by any lit material
// Implementers must take into account the current override components and

BSDFData bsdfData;
ZERO_INITIALIZE(BSDFData, bsdfData);
// NEWLITTODO: will be much more involved obviously, and use metallic, etc.
bsdfData.diffuseColor = surfaceData.baseColor;
// IMPORTANT: In our forward only case, all enable flags are statically known at compile time, so the compiler can do compile time optimization
bsdfData.materialFeatures = surfaceData.materialFeatures;
// Two lobe base material
bsdfData.perceptualRoughnessA = PerceptualSmoothnessToPerceptualRoughness(surfaceData.perceptualSmoothnessA);
bsdfData.perceptualRoughnessB = PerceptualSmoothnessToPerceptualRoughness(surfaceData.perceptualSmoothnessB);
bsdfData.lobeMix = surfaceData.lobeMix;
// There is no metallic with SSS and specular color mode
//todo: float metallic = HasFeatureFlag(surfaceData.materialFeatures, MATERIALFEATUREFLAGS_LIT_SPECULAR_COLOR | MATERIALFEATUREFLAGS_LIT_SUBSURFACE_SCATTERING | MATERIALFEATUREFLAGS_LIT_TRANSMISSION) ? 0.0 : surfaceData.metallic;
float metallic = surfaceData.metallic;
bsdfData.diffuseColor = ComputeDiffuseColor(surfaceData.baseColor, metallic);
bsdfData.fresnel0 = ComputeFresnel0(surfaceData.baseColor, surfaceData.metallic, DEFAULT_SPECULAR_VALUE);
// roughnessT and roughnessB are clamped, and are meant to be used with punctual and directional lights.
// perceptualRoughness is not clamped, and is meant to be used for IBL.
// TODO: add ui inputs, +tangent map, tangentws to use anisotropy; for now bsdfData.anisotropy = 0,
// so only bsdfData.roughnessT is used.
ConvertAnisotropyToClampRoughness(bsdfData.perceptualRoughnessA, bsdfData.anisotropy, bsdfData.roughnessAT, bsdfData.roughnessAB);
ConvertAnisotropyToClampRoughness(bsdfData.perceptualRoughnessB, bsdfData.anisotropy, bsdfData.roughnessBT, bsdfData.roughnessBB);
ApplyDebugToBSDFData(bsdfData);
return bsdfData;
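For reference, a sketch of what the two conversions above are assumed to do, following the Lit-style helpers: perceptual roughness is 1 - perceptual smoothness, and the anisotropy stretch produces roughnessT/B = roughness * (1 ± anisotropy); the extra clamping done by the "Clamp" variant for analytic lights is omitted here:

using System;

static class RoughnessConversionSketch
{
    // Assumed behaviour of PerceptualSmoothnessToPerceptualRoughness.
    static float PerceptualSmoothnessToPerceptualRoughness(float s) => 1.0f - s;

    // Assumed behaviour of ConvertAnisotropyToRoughness; the "Clamp" variant used in the
    // diff additionally clamps the results to a minimum value for analytic lights.
    static void ConvertAnisotropyToRoughness(float perceptualRoughness, float anisotropy,
                                             out float roughnessT, out float roughnessB)
    {
        float roughness = perceptualRoughness * perceptualRoughness; // perceptual -> linear
        roughnessT = roughness * (1.0f + anisotropy);
        roughnessB = roughness * (1.0f - anisotropy);
    }

    static void Main()
    {
        float pr = PerceptualSmoothnessToPerceptualRoughness(0.7f); // ~0.3
        ConvertAnisotropyToRoughness(pr, 0.0f, out float rT, out float rB);
        Console.WriteLine($"perceptualRoughness={pr}, roughnessT={rT}, roughnessB={rB}");
        // With anisotropy = 0 (the current StackLit case), roughnessT == roughnessB.
    }
}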

// Override debug value output to be more readable
switch (paramId)
{
case DEBUGVIEW_LIT_SURFACEDATA_NORMAL_VIEW_SPACE:
case DEBUGVIEW_STACKLIT_SURFACEDATA_NORMAL_VIEW_SPACE:
// Convert to view space
result = TransformWorldToViewDir(surfaceData.normalWS) * 0.5 + 0.5;
break;

// Override debug value output to be more readable
switch (paramId)
{
case DEBUGVIEW_LIT_BSDFDATA_NORMAL_VIEW_SPACE:
case DEBUGVIEW_STACKLIT_BSDFDATA_NORMAL_VIEW_SPACE:
// Convert to view space
result = TransformWorldToViewDir(bsdfData.normalWS) * 0.5 + 0.5;
break;

struct PreLightData
{
float NdotV; // Could be negative due to normal mapping, use ClampNdotV()
//NEWLITTODO
// IBL: we calculate and prefetch the pre-integrated split sum data for
// both lobes
float3 iblR[2]; // Dominant specular direction, used for IBL in EvaluateBSDF_Env()
float iblPerceptualRoughness[2];
float3 specularFGD[2]; // Store preconvolved BRDF for both specular and diffuse
float diffuseFGD[2];
// GGX
float partLambdaV[2]; // One for each lobe
float energyCompensation;
};
PreLightData GetPreLightData(float3 V, PositionInputs posInput, inout BSDFData bsdfData)

float3 N = bsdfData.normalWS;
preLightData.NdotV = dot(N, V);
preLightData.iblPerceptualRoughness[0] = bsdfData.perceptualRoughnessA;
preLightData.iblPerceptualRoughness[1] = bsdfData.perceptualRoughnessB;
//float NdotV = ClampNdotV(preLightData.NdotV);
float NdotV = ClampNdotV(preLightData.NdotV);
float3 iblN[2], iblR[2];
// We will need two hacked N for the stretch anisotropic hack later.
// (Could use a UNITY_UNROLL loop, but not much code to dupe)
preLightData.partLambdaV[0] = GetSmithJointGGXPartLambdaV(NdotV, bsdfData.roughnessAT);
preLightData.partLambdaV[1] = GetSmithJointGGXPartLambdaV(NdotV, bsdfData.roughnessBT);
iblN[0] = iblN[1] = N;
// IBL
// Handle IBL pre calculated data + GGX multiscattering energy loss compensation term
float specularReflectivity[2];
GetPreIntegratedFGDGGXAndDisneyDiffuse(NdotV, preLightData.iblPerceptualRoughness[0], bsdfData.fresnel0, preLightData.specularFGD[0], preLightData.diffuseFGD[0], specularReflectivity[0]);
GetPreIntegratedFGDGGXAndDisneyDiffuse(NdotV, preLightData.iblPerceptualRoughness[1], bsdfData.fresnel0, preLightData.specularFGD[1], preLightData.diffuseFGD[1], specularReflectivity[1]);
#ifdef LIT_DIFFUSE_LAMBERT_BRDF
preLightData.diffuseFGD[0] = preLightData.diffuseFGD[1] = 1.0;
#endif
iblR[0] = reflect(-V, iblN[0]);
iblR[1] = reflect(-V, iblN[1]);
// This is an ad-hoc tweak to better match the reference for anisotropic GGX.
// TODO: We need a better hack.
float fact = saturate(1.2 - abs(bsdfData.anisotropy));
preLightData.iblPerceptualRoughness[0] *= fact;
preLightData.iblPerceptualRoughness[1] *= fact;
// Correction of the reflected direction for better handling of rough materials
preLightData.iblR[0] = GetSpecularDominantDir(N, iblR[0], preLightData.iblPerceptualRoughness[0], NdotV);
preLightData.iblR[1] = GetSpecularDominantDir(N, iblR[1], preLightData.iblPerceptualRoughness[1], NdotV);
#ifdef LIT_USE_GGX_ENERGY_COMPENSATION
// Ref: Practical multiple scattering compensation for microfacet models.
// We only apply the formulation for metals.
// For dielectrics, the change of reflectance is negligible.
// We deem the intensity difference of a couple of percent for high values of roughness
// to not be worth the cost of another precomputed table.
// Note: this formulation bakes the BSDF non-symmetric!
// Note that this also assumes all specular comes from GGX BSDFs.
// (That's the FGD we use above to get integral[BSDF/F (N.w) dw] )
// In our case, since this compensation term is already an average
// applied to a sum (akin to a "split sum" approximation) we will
// just lerp our two "specularReflectivities"
preLightData.energyCompensation = 1.0 / lerp(specularReflectivity[0], specularReflectivity[1], bsdfData.lobeMix) - 1.0;
#else
preLightData.energyCompensation = 0.0;
#endif // LIT_USE_GGX_ENERGY_COMPENSATION
return preLightData;
}
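To make the energy-compensation block above concrete: specularReflectivity is the pre-integrated directional albedo of the single-scattering GGX lobe, the two lobes' values are lerped by lobeMix, and the resulting 1/E - 1 term is later applied in PostEvaluateBSDF as specular *= 1 + fresnel0 * energyCompensation, so metals are boosted far more than dielectrics. A small numeric sketch with illustrative values only:

using System;

static class EnergyCompensationSketch
{
    static void Main()
    {
        // Hypothetical pre-integrated single-scattering reflectivities for the two lobes,
        // as would come out of GetPreIntegratedFGDGGXAndDisneyDiffuse.
        float reflectivityA = 0.85f; // smoother lobe loses little energy
        float reflectivityB = 0.60f; // rougher lobe loses more
        float lobeMix = 0.5f;

        // Same formula as in GetPreLightData above: lerp the reflectivities, then 1/E - 1.
        float e = reflectivityA + (reflectivityB - reflectivityA) * lobeMix;
        float energyCompensation = 1.0f / e - 1.0f;

        // Applied as in PostEvaluateBSDF: specular *= 1 + fresnel0 * energyCompensation.
        float fresnel0Metal = 0.9f;        // metals get a large boost
        float fresnel0Dielectric = 0.04f;  // dielectrics barely change
        Console.WriteLine($"comp={energyCompensation:F3}, " +
                          $"metal x{1.0f + fresnel0Metal * energyCompensation:F3}, " +
                          $"dielectric x{1.0f + fresnel0Dielectric * energyCompensation:F3}");
    }
}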

// BSDF share between directional light, punctual light and area light (reference)
//-----------------------------------------------------------------------------
// NEWLITTODO
// This function applies the BSDF. It assumes that NdotL is positive.
void BSDF( float3 V, float3 L, float NdotL, float3 positionWS, PreLightData preLightData, BSDFData bsdfData,

float3 N = bsdfData.normalWS;
// Optimized math. Ref: PBR Diffuse Lighting for GGX + Smith Microsurfaces (slide 114).
float LdotV = dot(L, V);
float invLenLV = rsqrt(max(2.0 * LdotV + 2.0, FLT_EPS)); // invLenLV = rcp(length(L + V)), clamp to avoid rsqrt(0) = NaN
float NdotH = saturate((NdotL + preLightData.NdotV) * invLenLV); // Do not clamp NdotV here
float LdotH = saturate(invLenLV * LdotV + invLenLV);
float NdotV = ClampNdotV(preLightData.NdotV);
// TODO: Proper Fresnel
float3 F = F_Schlick(bsdfData.fresnel0, LdotH);
// TODO: with iridescence, will be per light sample.
float DV[2];
DV[0] = DV_SmithJointGGX(NdotH, NdotL, NdotV, bsdfData.roughnessAT, preLightData.partLambdaV[0]);
DV[1] = DV_SmithJointGGX(NdotH, NdotL, NdotV, bsdfData.roughnessBT, preLightData.partLambdaV[1]);
specularLighting = F * lerp(DV[0], DV[1], bsdfData.lobeMix);
// TODO: config option + diffuse GGX
specularLighting = float3(0.0, 0.0, 0.0);
// TODO: coat
}
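The "optimized math" in BSDF relies on |L + V|^2 = 2 + 2 (L.V) for unit vectors, so rcp(length(L + V)) is available without building H, and N.H = (N.L + N.V) / |L + V| while L.H = (L.V + 1) / |L + V|. A quick standalone check of those identities against the naive half-vector construction:

using System;

static class HalfVectorIdentitySketch
{
    static (double x, double y, double z) Normalize((double x, double y, double z) v)
    {
        double len = Math.Sqrt(v.x * v.x + v.y * v.y + v.z * v.z);
        return (v.x / len, v.y / len, v.z / len);
    }
    static double Dot((double x, double y, double z) a, (double x, double y, double z) b)
        => a.x * b.x + a.y * b.y + a.z * b.z;

    static void Main()
    {
        var N = (0.0, 0.0, 1.0);
        var L = Normalize((0.3, 0.2, 0.9));
        var V = Normalize((-0.4, 0.1, 0.9));

        // Reference: build H explicitly.
        var H = Normalize((L.x + V.x, L.y + V.y, L.z + V.z));
        double NdotH_ref = Dot(N, H), LdotH_ref = Dot(L, H);

        // Optimized: same quantities from |L + V|^2 = 2 + 2*LdotV, as in the shader code.
        double LdotV = Dot(L, V);
        double invLenLV = 1.0 / Math.Sqrt(2.0 * LdotV + 2.0);
        double NdotH = (Dot(N, L) + Dot(N, V)) * invLenLV;
        double LdotH = invLenLV * LdotV + invLenLV;

        Console.WriteLine($"NdotH {NdotH_ref:F6} vs {NdotH:F6}, LdotH {LdotH_ref:F6} vs {LdotH:F6}");
    }
}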
//-----------------------------------------------------------------------------

IndirectLighting lighting;
ZERO_INITIALIZE(IndirectLighting, lighting);
//NEWLITTODO
// TODO: refraction
#if !HAS_REFRACTION
if (GPUImageBasedLightingType == GPUIMAGEBASEDLIGHTINGTYPE_REFRACTION)
return lighting;
#endif
float3 envLighting;
float3 positionWS = posInput.positionWS;
float weight;
float tempWeight[2];
#ifdef LIT_DISPLAY_REFERENCE_IBL
envLighting = IntegrateSpecularGGXIBLRef(lightLoopContext, V, preLightData, lightData, bsdfData);
// TODO: Do refraction reference (is it even possible ?)
// TODO: handle clear coat
// TODO: Handle two lobes in reference
// #ifdef LIT_DIFFUSE_LAMBERT_BRDF
// envLighting += IntegrateLambertIBLRef(lightData, V, bsdfData);
// #else
// envLighting += IntegrateDisneyDiffuseIBLRef(lightLoopContext, V, preLightData, lightData, bsdfData);
// #endif
#else
float3 R[2];
R[0] = preLightData.iblR[0];
R[1] = preLightData.iblR[1];
// TODO: Refraction
// We will sample 2 times (one for each lobe) the environment.
// Steps are:
// -Calculate influence weights from intersection with the proxies.
// Since the weights are influence blending weights, we can correctly
// use our lobe weight and mix them.
// -Fudge the sampling direction to dampen boundary artefacts.
// -Do early discard for planar reflections.
// -Fetch 2 samples of preintegrated environment lighting
// (see preLD, first part of the split-sum approx.)
// -Use the 2 BSDF preintegration terms we pre-fetched in preLightData
// (second part of the split-sum approx.,
// and common to all Env. Lights. using the same BSDF and
// we only have GGX thus only one FGD map for now)
// -Multiply the two split sum terms together for each lobe
// and linearly combine the results.
// Note: using influenceShapeType and projectionShapeType instead of (lightData|proxyData).shapeType allows the compiler to optimize when the type is known (like for the sky)
tempWeight[0] = tempWeight[1] = 1.0;
EvaluateLight_EnvIntersection(positionWS, bsdfData.normalWS, lightData, influenceShapeType, R[0], tempWeight[0]);
EvaluateLight_EnvIntersection(positionWS, bsdfData.normalWS, lightData, influenceShapeType, R[1], tempWeight[1]);
weight = lerp(tempWeight[0], tempWeight[1], bsdfData.lobeMix);
// TODO: reflections + coating
// When we are rough, we tend to see outward shifting of the reflection when at the boundary of the projection volume
// It also appears sharper. To avoid these artifacts and at the same time better match the reference, we lerp back to the original unmodified reflection.
// Formula is empirical.
float roughness = PerceptualRoughnessToRoughness(preLightData.iblPerceptualRoughness[0]);
R[0] = lerp(R[0], preLightData.iblR[0], saturate(smoothstep(0, 1, roughness * roughness)));
roughness = PerceptualRoughnessToRoughness(preLightData.iblPerceptualRoughness[1]);
R[1] = lerp(R[1], preLightData.iblR[1], saturate(smoothstep(0, 1, roughness * roughness)));
float3 F[2];
F[0] = preLightData.specularFGD[0];
F[1] = preLightData.specularFGD[1];
float4 preLD[2];
float iblMipLevel = PerceptualRoughnessToMipmapLevel(preLightData.iblPerceptualRoughness[0]);
preLD[0] = SampleEnv(lightLoopContext, lightData.envIndex, R[0], iblMipLevel);
iblMipLevel = PerceptualRoughnessToMipmapLevel(preLightData.iblPerceptualRoughness[1]);
preLD[1] = SampleEnv(lightLoopContext, lightData.envIndex, R[1], iblMipLevel);
// Used by planar reflection to discard pixel:
weight *= lerp(preLD[0].a, preLD[1].a, bsdfData.lobeMix);
if (GPUImageBasedLightingType == GPUIMAGEBASEDLIGHTINGTYPE_REFLECTION)
{
envLighting = lerp(F[0] * preLD[0].rgb, F[1] * preLD[1].rgb, bsdfData.lobeMix);
// TODO: Clear Coat component if needed
}
else
{
// TODO: Refractions
// No clear coat support with refraction
// specular transmitted lighting is the remainder of the reflection (let's use this approximation)
// With refraction, we don't care about the clear coat value, only about the Fresnel, thus why we use 'envLighting ='
//envLighting = (1.0 - F) * preLD.rgb * preLightData.transparentTransmittance;
}
#endif // LIT_DISPLAY_REFERENCE_IBL
UpdateLightingHierarchyWeights(hierarchyWeight, weight);
envLighting *= weight * lightData.multiplier;
if (GPUImageBasedLightingType == GPUIMAGEBASEDLIGHTINGTYPE_REFLECTION)
lighting.specularReflected = envLighting;
//TODO refraction:
//else
// lighting.specularTransmitted = envLighting * preLightData.transparentTransmittance;
return lighting;
}

PreLightData preLightData, BSDFData bsdfData, BakeLightingData bakeLightingData, AggregateLighting lighting,
out float3 diffuseLighting, out float3 specularLighting)
{
// Apply the albedo to the direct diffuse lighting and that's about it.
// TODO: AO, SO, SSS, Refraction
specularLighting = lighting.direct.specular; // should be 0 for now.
specularLighting = lighting.direct.specular + lighting.indirect.specularReflected;
// Apply the fudge factor (boost) to compensate for multiple scattering not accounted for in BSDF.
// This assumes all spec comes from a GGX BSDF.
// Note: the multiply by fresnel0 applies the boost mostly to metals
specularLighting *= 1.0 + bsdfData.fresnel0 * preLightData.energyCompensation;
if (_DebugLightingMode == DEBUGLIGHTINGMODE_LUX_METER)
{
diffuseLighting = lighting.direct.diffuse + bakeLightingData.bakeDiffuseLighting;
}
else if (_DebugLightingMode == DEBUGLIGHTINGMODE_INDIRECT_DIFFUSE_OCCLUSION_FROM_SSAO)
if (_DebugLightingMode != 0)
// NEWLITTODO
//diffuseLighting = indirectAmbientOcclusion;
switch (_DebugLightingMode)
{
case DEBUGLIGHTINGMODE_LUX_METER:
diffuseLighting = lighting.direct.diffuse + bakeLightingData.bakeDiffuseLighting;
break;
case DEBUGLIGHTINGMODE_INDIRECT_DIFFUSE_OCCLUSION:
//diffuseLighting = aoFactor.indirectAmbientOcclusion;
break;
case DEBUGLIGHTINGMODE_INDIRECT_SPECULAR_OCCLUSION:
//diffuseLighting = aoFactor.indirectSpecularOcclusion;
break;
case DEBUGLIGHTINGMODE_SCREEN_SPACE_TRACING_REFRACTION:
//if (_DebugLightingSubMode != DEBUGSCREENSPACETRACING_COLOR)
// diffuseLighting = lighting.indirect.specularTransmitted;
break;
}
else if (_DebugLightingMode == DEBUGLIGHTINGMODE_INDIRECT_SPECULAR_OCCLUSION_FROM_SSAO)
{
// NEWLITTODO
//diffuseLighting = specularOcclusion;
specularLighting = float3(0.0, 0.0, 0.0); // Disable specular lighting
}
#if GTAO_MULTIBOUNCE_APPROX
else if (_DebugLightingMode == DEBUGLIGHTINGMODE_INDIRECT_DIFFUSE_GTAO_FROM_SSAO)
{
// NEWLITTODO
//diffuseLighting = GTAOMultiBounce(indirectAmbientOcclusion, bsdfData.diffuseColor);
specularLighting = float3(0.0, 0.0, 0.0); // Disable specular lighting
}
else if (_DebugLightingMode == DEBUGLIGHTINGMODE_INDIRECT_SPECULAR_GTAO_FROM_SSAO)
{
// NEWLITTODO
//diffuseLighting = GTAOMultiBounce(specularOcclusion, bsdfData.fresnel0);
specularLighting = float3(0.0, 0.0, 0.0); // Disable specular lighting
}
#endif
// NEWLITTODO
//diffuseLighting = bsdfData.diffuseColor;
diffuseLighting = bsdfData.diffuseColor;
#endif
}

42
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/StackLit/StackLit.shader


// Be careful, do not change the name here to _Color. It will conflict with the "fake" parameters (see end of properties) required for GI.
_BaseColor("BaseColor", Color) = (1,1,1,1)
_BaseColorMap("BaseColorMap", 2D) = "white" {}
_BaseColorMapUV("BaseColorMapUV", Float) = 0.0
_Metallic("Metallic", Range(0.0, 1.0)) = 0
_MetallicMap("MetallicMap", 2D) = "black" {}
_MetallicMapUV("MetallicMapUV", Float) = 0.0
_MetallicMapChannel("MetallicMapChannel", Vector) = (1, 0, 0, 0)
_MetallicRemap("Metallic Remap", Vector) = (0, 1, 0, 0)
[ToggleUI] _MetallicRemapInverted("Invert Metallic Remap", Float) = 0.0
_SmoothnessA("SmoothnessA", Range(0.0, 1.0)) = 1.0
_SmoothnessAMap("BaseColorMap", 2D) = "white" {}
_SmoothnessAMapUV("SmoothnessAMapUV", Float) = 0.0
_SmoothnessAMapChannel("SmoothnessA Map Channel", Vector) = (1, 0, 0, 0)
_SmoothnessARemap("SmoothnessA Remap", Vector) = (0, 1, 0, 0)
[ToggleUI] _SmoothnessARemapInverted("Invert SmoothnessA Remap", Float) = 0.0
_SmoothnessB("SmoothnessB", Range(0.0, 1.0)) = 1.0
_SmoothnessBMap("SmoothnessBMap", 2D) = "white" {}
_SmoothnessBMapUV("SmoothnessBMapUV", Float) = 0.0
_SmoothnessBMapChannel("SmoothnessB Map Channel", Vector) = (1, 0, 0, 0)
_SmoothnessBRemap("SmoothnessB Remap", Vector) = (0, 1, 0, 0)
[ToggleUI] _SmoothnessBRemapInverted("Invert SmoothnessB Remap", Float) = 0.0
_LobeMix("Lobe Mix", Range(0.0, 1.0)) = 0
_NormalMap("NormalMap", 2D) = "bump" {} // Tangent space normal map
_NormalMapUV("NormalMapUV", Float) = 0.0
_NormalScale("_NormalScale", Range(0.0, 2.0)) = 1
[Enum(UV0, 0, UV1, 1, UV2, 2, UV3, 3, Planar, 4, Triplanar, 5)] _UVBase("UV Set for base", Float) = 0
[HideInInspector] _UVMappingMask("_UVMappingMask", Color) = (1, 0, 0, 0)
_EmissiveColorMapUV("EmissiveColorMap", Range(0.0, 1.0)) = 0
_EmissiveIntensity("EmissiveIntensity", Float) = 0
[ToggleUI] _AlbedoAffectEmissive("Albedo Affect Emissive", Float) = 0.0

#pragma shader_feature _ALPHATEST_ON
#pragma shader_feature _DOUBLESIDED_ON
#pragma shader_feature _NORMALMAP_TANGENT_SPACE
#pragma shader_feature _ _REQUIRE_UV2 _REQUIRE_UV3
// ...TODO: for surface gradient framework eg see litdata.hlsl,
// but we need it right away for toggle with LayerTexCoord mapping so we might need them
// from the Frag input right away. See also ShaderPass/StackLitSharePass.hlsl.
#pragma shader_feature _NORMALMAP
#pragma shader_feature _MASKMAPA
#pragma shader_feature _MASKMAPB
#pragma shader_feature _EMISSIVE_COLOR_MAP
// Keyword for transparent

242
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/StackLit/StackLitData.hlsl


//-------------------------------------------------------------------------------------
// Fill SurfaceData/Builtin data function
//-------------------------------------------------------------------------------------
#include "CoreRP/ShaderLibrary/Sampling/SampleUVMapping.hlsl"
//-----------------------------------------------------------------------------
// Texture Mapping (think of LayerTexCoord as simply TexCoordMappings,
// ie no more layers here - cf Lit materials)
//-----------------------------------------------------------------------------
//
// For easier copying of code for now use a LayerTexCoord wrapping struct.
// We don't have details yet.
//
// NEWLITTODO: Eventually, we could quickly share GetBuiltinData of LitBuiltinData.hlsl
// in our GetSurfaceAndBuiltinData( ) here, since we will use the LayerTexCoord identifier,
// and an identical ComputeLayerTexCoord( ) prototype
//
struct LayerTexCoord
{
UVMapping base;
UVMapping details;
// Store information that will be shared by all UVMapping
float3 vertexNormalWS; // TODO: store also object normal map for object triplanar
};
// We want to use only one sampler for the normal map / bent normal map, whether we use OS or TS, and whether we have a normal map, a bent normal map, or both.
//
// Note (compared to Lit shader):
//
// We don't have a layered material with which we are sharing code here like the LayeredLit shader, but we can also save a couple of
// samplers later if we use bentnormals.
//
// _IDX suffix is meaningless here, could use the name SAMPLER_NORMALMAP_ID instead of SAMPLER_NORMALMAP_IDX and replace all
// indirect #ifdef _NORMALMAP_TANGENT_SPACE_IDX #ifdef and _NORMALMAP_IDX tests with the more direct
// shader_feature keywords _NORMALMAP_TANGENT_SPACE and _NORMALMAP.
//
// (Originally in the LayeredLit shader, shader_feature keywords like _NORMALMAP become _NORMALMAP0 but since files are shared,
// LitDataIndividualLayer will use a generic _NORMALMAP_IDX defined before its inclusion by the client LitData or LayeredLitData.
// That way, LitDataIndividualLayer supports multiple inclusions)
//
//
#ifdef _NORMALMAP_TANGENT_SPACE
#if defined(_NORMALMAP)
#define SAMPLER_NORMALMAP_ID sampler_NormalMap
// TODO:
//#elif defined(_BENTNORMALMAP)
//#define SAMPLER_NORMALMAP_ID sampler_BentNormalMap
#endif
#else
// TODO:
//#error STACKLIT_USES_ONLY_TANGENT_SPACE_FOR_NOW
//#if defined(_NORMALMAP)
//#define SAMPLER_NORMALMAP_ID sampler_NormalMapOS
//#elif defined(_BENTNORMALMAP)
//#define SAMPLER_NORMALMAP_ID sampler_BentNormalMapOS
//#endif
#endif
void ComputeLayerTexCoord( // Uv related parameters
float2 texCoord0, float2 texCoord1, float2 texCoord2, float2 texCoord3, float4 uvMappingMask,
// scale and bias for base
float2 texScale, float2 texBias,
// mapping type and output
int mappingType, inout LayerTexCoord layerTexCoord)
{
//TODO: Planar, Triplanar, detail map, surface_gradient.
// Handle uv0, uv1, uv2, uv3 based on the _UVMappingMask weights (exclusive 0..1)
float2 uvBase = uvMappingMask.x * texCoord0 +
uvMappingMask.y * texCoord1 +
uvMappingMask.z * texCoord2 +
uvMappingMask.w * texCoord3;
// Copy data in uvmapping fields: used by generic sampling code (see especially SampleUVMappingNormalInternal.hlsl)
layerTexCoord.base.mappingType = mappingType;
layerTexCoord.base.normalWS = layerTexCoord.vertexNormalWS;
// Apply tiling options
layerTexCoord.base.uv = uvBase * texScale + texBias;
}
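Since _UVMappingMask is a one-hot (exclusive) mask, the weighted sum in ComputeLayerTexCoord simply selects one of the four UV sets, and the scale/bias then applies the tiling from the texture's _ST values. A standalone sketch of that selection with made-up values:

using System;
using System.Numerics;

static class UvMappingMaskSketch
{
    static Vector2 SelectUv(Vector2 uv0, Vector2 uv1, Vector2 uv2, Vector2 uv3,
                            Vector4 mask, Vector2 scale, Vector2 bias)
    {
        // Weighted sum with a one-hot mask == selection of a single UV set.
        Vector2 uv = mask.X * uv0 + mask.Y * uv1 + mask.Z * uv2 + mask.W * uv3;
        return uv * scale + bias; // tiling/offset, as in layerTexCoord.base.uv
    }

    static void Main()
    {
        var mask = new Vector4(0, 1, 0, 0); // _UVMappingMask selecting UV1
        var uv = SelectUv(new Vector2(0.1f, 0.2f), new Vector2(0.5f, 0.5f),
                          new Vector2(0.8f, 0.1f), new Vector2(0.3f, 0.9f),
                          mask, new Vector2(2, 2), new Vector2(0.25f, 0));
        Console.WriteLine(uv); // <1.25, 1>
    }
}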
float3 GetNormalTS(FragInputs input, LayerTexCoord layerTexCoord, float3 detailNormalTS, float detailMask)
{
// TODO: different spaces (eg #ifdef _NORMALMAP_TANGENT_SPACE #elif object space, SURFACE_GRADIENT, etc.)
// and use detail map
float3 normalTS;
// Note we don't use the _NORMALMAP_IDX mechanism of the Lit shader, since we don't have "layers", we can
// directly use the shader_feature keyword:
#ifdef _NORMALMAP
normalTS = SAMPLE_UVMAPPING_NORMALMAP(_NormalMap, SAMPLER_NORMALMAP_ID, layerTexCoord.base, _NormalScale);
#else
normalTS = float3(0.0, 0.0, 1.0);
#endif
return normalTS;
}
// This may be called directly by the tessellation (domain) shader, thus everything regarding the surface gradient must be done
// in functions that take FragInputs input as a parameter
// layerTexCoord must have been initialized to 0 outside of this function
void GetLayerTexCoord(float2 texCoord0, float2 texCoord1, float2 texCoord2, float2 texCoord3,
float3 positionWS, float3 vertexNormalWS, inout LayerTexCoord layerTexCoord)
{
layerTexCoord.vertexNormalWS = vertexNormalWS;
// TODO:
//layerTexCoord.triplanarWeights = ComputeTriplanarWeights(vertexNormalWS);
int mappingType = UV_MAPPING_UVSET;
//TODO: _MAPPING_PLANAR, _MAPPING_TRIPLANAR
// Be sure that the compiler is aware that we don't use UV1 to UV3 for main layer so it can optimize code
ComputeLayerTexCoord( texCoord0, texCoord1, texCoord2, texCoord3, _UVMappingMask, /* TODO _UVDetailsMappingMask, */
_BaseColorMap_ST.xy, _BaseColorMap_ST.zw, /* TODO _DetailMap_ST.xy, _DetailMap_ST.zw, 1.0, _LinkDetailsWithBase, */
/* TODO positionWS, _TexWorldScale, */
mappingType, layerTexCoord);
}
// This is called only in this file
// layerTexCoord must have been initialized to 0 outside of this function
void GetLayerTexCoord(FragInputs input, inout LayerTexCoord layerTexCoord)
{
// TODO: SURFACE_GRADIENT
//#ifdef SURFACE_GRADIENT
//GenerateLayerTexCoordBasisTB(input, layerTexCoord);
//#endif
GetLayerTexCoord( input.texCoord0, input.texCoord1, input.texCoord2, input.texCoord3,
input.positionWS, input.worldToTangent[2].xyz, layerTexCoord);
}
//-----------------------------------------------------------------------------
// ...Texture Mapping
//-----------------------------------------------------------------------------
//
// cf with
// LitData.hlsl:GetSurfaceAndBuiltinData()
// LitDataIndividualLayer.hlsl:GetSurfaceData( )
// LitBuiltinData.hlsl:GetBuiltinData()
//
// Here we can combine them
//
// NEWLITTODO:
// For now, just use the interpolated vertex normal. This has been normalized in the initial fragment interpolators unpacking.
// Eventually, we want to share all the LitData LayerTexCoord (and surface gradient frame + uv, planar, triplanar, etc.) logic, also
// spread in LitDataIndividualLayer and LitDataMeshModification.
surfaceData.normalWS = input.worldToTangent[2].xyz;
float2 baseColorMapUv = TRANSFORM_TEX(input.texCoord0, _BaseColorMap);
surfaceData.baseColor = SAMPLE_TEXTURE2D(_BaseColorMap, sampler_BaseColorMap, baseColorMapUv).rgb * _BaseColor.rgb;
float alpha = SAMPLE_TEXTURE2D(_BaseColorMap, sampler_BaseColorMap, baseColorMapUv).a * _BaseColor.a;
LayerTexCoord layerTexCoord;
ZERO_INITIALIZE(LayerTexCoord, layerTexCoord);
GetLayerTexCoord(input, layerTexCoord);
// -------------------------------------------------------------
// Surface Data:
// -------------------------------------------------------------
// We perform the conversion of normalTS to world space outside of GetSurfaceData
// so it allows us to correctly deal with the detail normal map and optimize the code for the layered shaders
float3 normalTS;
// TODO: Those are only needed once we handle specular occlusion and optionally bent normal maps.
// Also, for the builtinData part, use bentnormal to sample diffuse GI
//float3 bentNormalTS;
//float3 bentNormalWS;
//float alpha = SAMPLE_TEXTURE2D(_BaseColorMap, sampler_BaseColorMap, baseColorMapUv).a * _BaseColor.a;
float alpha = SAMPLE_UVMAPPING_TEXTURE2D(_BaseColorMap, sampler_BaseColorMap, layerTexCoord.base).a * _BaseColor.a;
#ifdef _ALPHATEST_ON
//NEWLITTODO: Once we include those passes in the main StackLit.shader, add handling of CUTOFF_TRANSPARENT_DEPTH_PREPASS and _POSTPASS
// and the related properties (in the .shader) and uniforms (in the StackLitProperties file) _AlphaCutoffPrepass, _AlphaCutoffPostpass

// TODO detail map:
float3 detailNormalTS = float3(0.0, 0.0, 0.0);
float detailMask = 0.0;
//TODO remove the following and use fetching macros that use uvmapping :
//float2 baseColorMapUv = TRANSFORM_TEX(input.texCoord0, _BaseColorMap);
//surfaceData.baseColor = SAMPLE_TEXTURE2D(_BaseColorMap, sampler_BaseColorMap, baseColorMapUv).rgb * _BaseColor.rgb;
surfaceData.baseColor = SAMPLE_UVMAPPING_TEXTURE2D(_BaseColorMap, sampler_BaseColorMap, layerTexCoord.base).rgb * _BaseColor.rgb;
//surfaceData.normalWS = float3(0.0, 0.0, 0.0);
normalTS = GetNormalTS(input, layerTexCoord, detailNormalTS, detailMask);
//TODO: bentNormalTS
#if defined(_SMOOTHNESSMASKMAPA)
surfaceData.perceptualSmoothnessA = dot(SAMPLE_UVMAPPING_TEXTURE2D(_SmoothnessAMap, sampler_SmoothnessAMap, layerTexCoord.base), _SmoothnessAMapChannel);
surfaceData.perceptualSmoothnessA = lerp(_SmoothnessARemap.x, _SmoothnessARemap.y, surfaceData.perceptualSmoothnessA);
#else
surfaceData.perceptualSmoothnessA = _SmoothnessA;
#endif
#if defined(_SMOOTHNESSMASKMAPB)
surfaceData.perceptualSmoothnessB = dot(SAMPLE_UVMAPPING_TEXTURE2D(_SmoothnessBMap, sampler_SmoothnessBMap, layerTexCoord.base), _SmoothnessBMapChannel);
surfaceData.perceptualSmoothnessB = lerp(_SmoothnessBRemap.x, _SmoothnessBRemap.y, surfaceData.perceptualSmoothnessB);
#else
surfaceData.perceptualSmoothnessB = _SmoothnessB;
#endif
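Both smoothness fetches above share one pattern: pick a channel of the texture sample with a dot against a one-hot channel mask (e.g. (1,0,0,0) for R), then remap the 0..1 value into [remap.x, remap.y]. A standalone sketch of that, including one plausible handling of the *_RemapInverted toggles exposed in the .shader properties:

using System;
using System.Numerics;

static class ChannelMaskRemapSketch
{
    // value = dot(sample, channelMask), then lerp(remap.x, remap.y, value).
    static float SampleMaskedChannel(Vector4 texel, Vector4 channelMask,
                                     Vector2 remap, bool invertRemap)
    {
        float v = Vector4.Dot(texel, channelMask);
        if (invertRemap) v = 1.0f - v;               // one plausible reading of the toggle
        return remap.X + (remap.Y - remap.X) * v;    // lerp(remap.x, remap.y, v)
    }

    static void Main()
    {
        var texel = new Vector4(0.25f, 0.8f, 0.0f, 1.0f);
        var maskG = new Vector4(0, 1, 0, 0);          // e.g. _SmoothnessAMapChannel
        var remap = new Vector2(0.2f, 0.9f);          // e.g. _SmoothnessARemap
        Console.WriteLine(SampleMaskedChannel(texel, maskG, remap, invertRemap: false)); // 0.76
    }
}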
// TODOSTACKLIT: lobe weighting
surfaceData.lobeMix = _LobeMix;
// TODO: Ambient occlusion, detail mask.
#ifdef _METALLICMAP
surfaceData.metallic = dot(SAMPLE_UVMAPPING_TEXTURE2D(_MetallicMap, sampler_MetallicMap, layerTexCoord.base), _MetallicMapChannel);
#else
surfaceData.metallic = 1.0;
#endif
surfaceData.metallic *= _Metallic;
// These static material features allow compile time optimization
// TODO: As we add features, or-set the flags eg MATERIALFEATUREFLAGS_LIT_* with #ifdef
// on corresponding _MATERIAL_FEATURE_* shader_feature keywords (set by UI) so the compiler
// knows the value of surfaceData.materialFeatures.
surfaceData.materialFeatures = MATERIALFEATUREFLAGS_LIT_STANDARD;
// -------------------------------------------------------------
// Surface Data Part 2 (outside GetSurfaceData( ) in the Lit shader):
// -------------------------------------------------------------
GetNormalWS(input, V, normalTS, surfaceData.normalWS); // MaterialUtilities.hlsl
// TODO: decal etc.
surfaceData.color = GetTextureDataDebug(_DebugMipMapMode, baseColorMapUv, _BaseColorMap, _BaseColorMap_TexelSize, _BaseColorMap_MipInfo, surfaceData.color);
surfaceData.baseColor = GetTextureDataDebug(_DebugMipMapMode, layerTexCoord.base.uv, _BaseColorMap, _BaseColorMap_TexelSize, _BaseColorMap_MipInfo, surfaceData.baseColor);
surfaceData.metallic = 0;
// Builtin Data
// -------------------------------------------------------------
// Builtin Data:
// -------------------------------------------------------------
// NEWLITTODO: for all BuiltinData, we might need to just refactor and use a common function like the one
// contained in LitBuiltinData.hlsl

// Emissive Intensity is only used here, but is part of BuiltinData to enforce UI parameters as we want the users to fill one color and one intensity
builtinData.emissiveIntensity = _EmissiveIntensity;
builtinData.emissiveIntensity = _EmissiveIntensity; // We still store intensity here so we can reuse it with debug code
#ifdef _EMISSIVE_COLOR_MAP
builtinData.emissiveColor *= SAMPLE_TEXTURE2D(_EmissiveColorMap, sampler_EmissiveColorMap, TRANSFORM_TEX(input.texCoord0, _EmissiveColorMap)).rgb;

37
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/StackLit/StackLitProperties.hlsl


TEXTURE2D(_DistortionVectorMap);
SAMPLER(sampler_DistortionVectorMap);
TEXTURE2D(_BaseColorMap);
SAMPLER(sampler_BaseColorMap);
TEXTURE2D(_MetallicMap);
SAMPLER(sampler_MetallicMap);
TEXTURE2D(_SmoothnessAMap);
SAMPLER(sampler_SmoothnessAMap);
TEXTURE2D(_SmoothnessBMap);
SAMPLER(sampler_SmoothnessBMap);
TEXTURE2D(_NormalMap);
SAMPLER(sampler_NormalMap);
CBUFFER_START(UnityPerMaterial)

float4 _BaseColorMap_TexelSize;
float4 _BaseColorMap_MipInfo;
float _BaseColorMapUV;
float _Metallic;
float _MetallicMapUV;
float4 _MetallicMapChannel;
float _SmoothnessA;
float _SmoothnessAMapUV;
float4 _SmoothnessAMapChannel;
float4 _SmoothnessARemap;
float _SmoothnessB;
float _SmoothnessBMapUV;
float4 _SmoothnessBMapChannel;
float4 _SmoothnessBRemap;
float _LobeMix;
float _NormalScale;
float _NormalMapUV;
float4 _UVMappingMask;
float3 _EmissiveColor;
float4 _EmissiveColorMap_ST;

22
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/SubsurfaceScattering/SubsurfaceScatteringManager.cs


public int sssBufferCount { get { return k_MaxSSSBuffer; } }
RTHandle[] m_ColorMRTs = new RTHandle[k_MaxSSSBuffer];
RTHandleSystem.RTHandle[] m_ColorMRTs = new RTHandleSystem.RTHandle[k_MaxSSSBuffer];
bool[] m_ExternalBuffer = new bool[k_MaxSSSBuffer];
// Disney SSS Model

RTHandle m_HTile;
RTHandleSystem.RTHandle m_HTile;
// End Disney SSS Model
// Jimenez SSS Model

// Jimenez needs an extra buffer and Disney needs one on some platforms
RTHandle m_CameraFilteringBuffer;
RTHandleSystem.RTHandle m_CameraFilteringBuffer;
// This is used to be able to read the stencil value in a compute shader
Material m_CopyStencilForSplitLighting;

{
// In case of full forward we must allocate the render target for forward SSS (or reuse one already existing)
// TODO: Provide a way to reuse a render target
m_ColorMRTs[0] = RTHandle.Alloc(Vector2.one, filterMode: FilterMode.Point, colorFormat: RenderTextureFormat.ARGB32, sRGB: true, name: "SSSBuffer");
m_ColorMRTs[0] = RTHandles.Alloc(Vector2.one, filterMode: FilterMode.Point, colorFormat: RenderTextureFormat.ARGB32, sRGB: true, name: "SSSBuffer");
m_ExternalBuffer[0] = false;
}
else

if (ShaderConfig.k_UseDisneySSS == 0 || NeedTemporarySubsurfaceBuffer())
{
// Caution: must be same format as m_CameraSssDiffuseLightingBuffer
m_CameraFilteringBuffer = RTHandle.Alloc(Vector2.one, filterMode: FilterMode.Point, colorFormat: RenderTextureFormat.RGB111110Float, sRGB: false, enableRandomWrite: true, enableMSAA: true, name: "SSSCameraFiltering"); // Enable UAV
m_CameraFilteringBuffer = RTHandles.Alloc(Vector2.one, filterMode: FilterMode.Point, colorFormat: RenderTextureFormat.RGB111110Float, sRGB: false, enableRandomWrite: true, enableMSAA: true, name: "SSSCameraFiltering"); // Enable UAV
m_HTile = RTHandle.Alloc(size => new Vector2Int((size.x + 7) / 8, (size.y + 7) / 8), filterMode: FilterMode.Point, colorFormat: RenderTextureFormat.R8, sRGB: false, enableRandomWrite: true, name: "SSSHtile"); // Enable UAV
m_HTile = RTHandles.Alloc(size => new Vector2Int((size.x + 7) / 8, (size.y + 7) / 8), filterMode: FilterMode.Point, colorFormat: RenderTextureFormat.R8, sRGB: false, enableRandomWrite: true, name: "SSSHtile"); // Enable UAV
public RTHandle GetSSSBuffer(int index)
public RTHandleSystem.RTHandle GetSSSBuffer(int index)
{
Debug.Assert(index < sssBufferCount);
return m_ColorMRTs[index];

{
if (!m_ExternalBuffer[i])
{
RTHandle.Release(m_ColorMRTs[i]);
RTHandles.Release(m_ColorMRTs[i]);
RTHandle.Release(m_CameraFilteringBuffer);
RTHandle.Release(m_HTile);
RTHandles.Release(m_CameraFilteringBuffer);
RTHandles.Release(m_HTile);
}
public void PushGlobalParams(CommandBuffer cmd, DiffusionProfileSettings sssParameters, FrameSettings frameSettings)

// Combines specular lighting and diffuse lighting with subsurface scattering.
public void SubsurfaceScatteringPass(HDCamera hdCamera, CommandBuffer cmd, DiffusionProfileSettings sssParameters, FrameSettings frameSettings,
RTHandle colorBufferRT, RTHandle diffuseBufferRT, RTHandle depthStencilBufferRT, RTHandle depthTextureRT)
RTHandleSystem.RTHandle colorBufferRT, RTHandleSystem.RTHandle diffuseBufferRT, RTHandleSystem.RTHandle depthStencilBufferRT, RTHandleSystem.RTHandle depthTextureRT)
{
if (sssParameters == null || !frameSettings.enableSubsurfaceScattering)
return;

5
ScriptableRenderPipeline/HDRenderPipeline/HDRP/RenderPipeline/FrameSettings.cs


public bool enableMSAA = false;
public MSAASamples msaaSampleCount { get; private set; }
public bool enableShadowMask = false;
public bool enableShadowMask = true;
public LightLoopSettings lightLoopSettings = new LightLoopSettings();

aggregate.enableObjectMotionVectors = camera.cameraType != CameraType.Reflection && srcFrameSettings.enableObjectMotionVectors && renderPipelineSettings.supportMotionVectors;
aggregate.enableDBuffer = srcFrameSettings.enableDBuffer && renderPipelineSettings.supportDBuffer;
aggregate.enableAtmosphericScattering = srcFrameSettings.enableAtmosphericScattering;
// We must take care of the scene view fog flags in the editor
if (!CoreUtils.IsSceneViewFogEnabled(camera))
aggregate.enableAtmosphericScattering = false;
aggregate.enableRoughRefraction = srcFrameSettings.enableRoughRefraction;
aggregate.enableTransparentPostpass = srcFrameSettings.enableTransparentPostpass;
aggregate.enableDistortion = camera.cameraType != CameraType.Reflection && srcFrameSettings.enableDistortion;

120
ScriptableRenderPipeline/HDRenderPipeline/HDRP/RenderPipelineResources/BufferPyramid.cs


using System.Collections.Generic;
using System;
using System.Collections.Generic;
using UnityEngine.Rendering;
namespace UnityEngine.Experimental.Rendering.HDPipeline

RTHandle m_ColorPyramidBuffer;
List<RTHandle> m_ColorPyramidMips = new List<RTHandle>();
RTHandle m_DepthPyramidBuffer;
List<RTHandle> m_DepthPyramidMips = new List<RTHandle>();
public RTHandle colorPyramid { get { return m_ColorPyramidBuffer; } }
public RTHandle depthPyramid { get { return m_DepthPyramidBuffer; } }
List<RTHandleSystem.RTHandle> m_ColorPyramidMips = new List<RTHandleSystem.RTHandle>();
List<RTHandleSystem.RTHandle> m_DepthPyramidMips = new List<RTHandleSystem.RTHandle>();
BufferPyramidProcessor m_Processor;

return scale;
}
public void CreateBuffers()
{
m_ColorPyramidBuffer = RTHandle.Alloc(size => CalculatePyramidSize(size), filterMode: FilterMode.Trilinear, colorFormat: RenderTextureFormat.ARGBHalf, sRGB: false, useMipMap: true, autoGenerateMips: false, enableRandomWrite: true, name: "ColorPyramid");
m_DepthPyramidBuffer = RTHandle.Alloc(size => CalculatePyramidSize(size), filterMode: FilterMode.Trilinear, colorFormat: RenderTextureFormat.RGFloat, sRGB: false, useMipMap: true, autoGenerateMips: false, enableRandomWrite: true, name: "DepthPyramid"); // Need randomReadWrite because we downsample the first mip with a compute shader.
}
RTHandle.Release(m_ColorPyramidBuffer);
RTHandle.Release(m_DepthPyramidBuffer);
{
RTHandle.Release(rth);
}
RTHandles.Release(rth);
{
RTHandle.Release(rth);
}
RTHandles.Release(rth);
public int GetPyramidLodCount(HDCamera camera)
public int GetPyramidLodCount(Vector2Int size)
var minSize = Mathf.Min(camera.actualWidth, camera.actualHeight);
return Mathf.FloorToInt(Mathf.Log(minSize, 2f));
var minSize = Mathf.Min(size.x, size.y);
return Mathf.Max(0, Mathf.FloorToInt(Mathf.Log(minSize, 2f)));
}
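The reworked GetPyramidLodCount(Vector2Int) is just floor(log2(min(width, height))), clamped to a minimum of 0 so that degenerate targets yield zero LODs, which the Render*Pyramid methods below then treat as "skip". A worked example:

using System;

static class PyramidLodCountSketch
{
    static int GetPyramidLodCount(int width, int height)
    {
        int minSize = Math.Min(width, height);
        return Math.Max(0, (int)Math.Floor(Math.Log(minSize, 2.0)));
    }

    static void Main()
    {
        Console.WriteLine(GetPyramidLodCount(1920, 1080)); // 10 (2^10 = 1024 <= 1080)
        Console.WriteLine(GetPyramidLodCount(1, 1));       // 0 -> pyramid pass is skipped
    }
}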
Vector2Int CalculatePyramidMipSize(Vector2Int baseMipSize, int mipIndex)

return new Vector2Int((int)(pyramidSize * GetXRscale()), pyramidSize);
}
void UpdatePyramidMips(HDCamera camera, RenderTextureFormat format, List<RTHandle> mipList, int lodCount)
void UpdatePyramidMips(HDCamera camera, RenderTextureFormat format, List<RTHandleSystem.RTHandle> mipList, int lodCount)
{
int currentLodCount = mipList.Count;
if (lodCount > currentLodCount)

int mipIndexCopy = i + 1; // Don't remove this copy! It's important for the value to be correctly captured by the lambda.
RTHandle newMip = RTHandle.Alloc(size => CalculatePyramidMipSize(CalculatePyramidSize(size), mipIndexCopy), colorFormat: format, sRGB: false, enableRandomWrite: true, useMipMap: false, filterMode: FilterMode.Bilinear, name: string.Format("PyramidMip{0}", i));
var newMip = RTHandles.Alloc(size => CalculatePyramidMipSize(CalculatePyramidSize(size), mipIndexCopy), colorFormat: format, sRGB: false, enableRandomWrite: true, useMipMap: false, filterMode: FilterMode.Bilinear, name: string.Format("PyramidMip{0}", i));
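The "Don't remove this copy!" comment is about C# closure semantics: a lambda captures the variable i itself, not its value at that iteration, so every deferred size delegate handed to RTHandles.Alloc would otherwise see the final loop value. Copying into a local inside the loop gives each lambda its own value. Minimal standalone repro:

using System;
using System.Collections.Generic;

static class ClosureCaptureSketch
{
    static void Main()
    {
        var wrong = new List<Func<int>>();
        var right = new List<Func<int>>();

        for (int i = 0; i < 3; i++)
        {
            int copy = i;                 // the "mipIndexCopy" trick
            wrong.Add(() => i);           // captures the variable i itself
            right.Add(() => copy);        // captures a per-iteration copy
        }

        // Deferred evaluation, like the size delegates passed to RTHandles.Alloc:
        Console.WriteLine(string.Join(",", wrong.ConvertAll(f => f().ToString()))); // 3,3,3
        Console.WriteLine(string.Join(",", right.ConvertAll(f => f().ToString()))); // 0,1,2
    }
}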
public Vector2 GetPyramidToScreenScale(HDCamera camera)
public Vector2 GetPyramidToScreenScale(HDCamera camera, RTHandleSystem.RTHandle rth)
return new Vector2((float)camera.actualWidth / m_DepthPyramidBuffer.rt.width, (float)camera.actualHeight / m_DepthPyramidBuffer.rt.height);
return new Vector2((float)camera.actualWidth / rth.rt.width, (float)camera.actualHeight / rth.rt.height);
}
public void RenderDepthPyramid(

RTHandle depthTexture)
RTHandleSystem.RTHandle sourceDepthTexture,
RTHandleSystem.RTHandle targetDepthTexture)
int lodCount = GetPyramidLodCount(hdCamera);
UpdatePyramidMips(hdCamera, m_DepthPyramidBuffer.rt.format, m_DepthPyramidMips, lodCount);
int lodCount = Mathf.Min(
GetPyramidLodCount(targetDepthTexture.referenceSize),
GetPyramidLodCount(new Vector2Int(hdCamera.actualWidth, hdCamera.actualHeight))
);
if (lodCount == 0)
{
Debug.LogWarning("The target for the pyramid buffer has an invalid size. Skipping DepthPyramid calculation.");
return;
}
Vector2 scale = GetPyramidToScreenScale(hdCamera);
UpdatePyramidMips(hdCamera, targetDepthTexture.rt.format, m_DepthPyramidMips, lodCount);
Vector2 scale = GetPyramidToScreenScale(hdCamera, targetDepthTexture);
cmd.SetGlobalVector(HDShaderIDs._DepthPyramidSize, new Vector4(hdCamera.actualWidth, hdCamera.actualHeight, 1f / hdCamera.actualWidth, 1f / hdCamera.actualHeight));
cmd.SetGlobalVector(HDShaderIDs._DepthPyramidScale, new Vector4(scale.x, scale.y, lodCount, 0.0f));

depthTexture,
m_DepthPyramidBuffer,
sourceDepthTexture,
targetDepthTexture,
cmd.SetGlobalTexture(HDShaderIDs._DepthPyramidTexture, m_DepthPyramidBuffer);
cmd.SetGlobalTexture(HDShaderIDs._DepthPyramidTexture, targetDepthTexture);
}
public void RenderColorPyramid(

RTHandle colorTexture)
RTHandleSystem.RTHandle sourceColorTexture,
RTHandleSystem.RTHandle targetColorTexture)
int lodCount = GetPyramidLodCount(hdCamera);
UpdatePyramidMips(hdCamera, m_ColorPyramidBuffer.rt.format, m_ColorPyramidMips, lodCount);
int lodCount = Mathf.Min(
GetPyramidLodCount(targetColorTexture.referenceSize),
GetPyramidLodCount(new Vector2Int(hdCamera.actualWidth, hdCamera.actualHeight))
);
if (lodCount == 0)
{
Debug.LogWarning("The target for the pyramid buffer has an invalid size. Skipping ColorPyramid calculation.");
return;
}
Vector2 scale = GetPyramidToScreenScale(hdCamera);
UpdatePyramidMips(hdCamera, targetColorTexture.rt.format, m_ColorPyramidMips, lodCount);
Vector2 scale = GetPyramidToScreenScale(hdCamera, targetColorTexture);
cmd.SetGlobalVector(HDShaderIDs._ColorPyramidSize, new Vector4(hdCamera.actualWidth, hdCamera.actualHeight, 1f / hdCamera.actualWidth, 1f / hdCamera.actualHeight));
cmd.SetGlobalVector(HDShaderIDs._ColorPyramidScale, new Vector4(scale.x, scale.y, lodCount, 0.0f));

colorTexture,
m_ColorPyramidBuffer,
sourceColorTexture,
targetColorTexture,
cmd.SetGlobalTexture(HDShaderIDs._ColorPyramidTexture, m_ColorPyramidBuffer);
cmd.SetGlobalTexture(HDShaderIDs._ColorPyramidTexture, targetColorTexture);
}
public RTHandleSystem.RTHandle AllocColorRT(string id, int frameIndex, RTHandleSystem rtHandleSystem)
{
return rtHandleSystem.Alloc(
size => CalculatePyramidSize(size),
filterMode: FilterMode.Trilinear,
colorFormat: RenderTextureFormat.ARGBHalf,
sRGB: false,
useMipMap: true,
autoGenerateMips: false,
enableRandomWrite: true,
name: string.Format("ColorPyramid-{0}-{1}", id, frameIndex)
);
}
public RTHandleSystem.RTHandle AllocDepthRT(string id, int frameIndex, RTHandleSystem rtHandleSystem)
{
return rtHandleSystem.Alloc(
size => CalculatePyramidSize(size),
filterMode: FilterMode.Trilinear,
colorFormat: RenderTextureFormat.RGFloat,
sRGB: false,
useMipMap: true,
autoGenerateMips: false,
enableRandomWrite: true, // Need randomReadWrite because we downsample the first mip with a compute shader.
name: string.Format("DepthPyramid-{0}-{1}", id, frameIndex)
);
}
}
}
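AllocColorRT and AllocDepthRT take an explicit RTHandleSystem plus a frame index, which suggests they are meant to be handed to a history-buffer mechanism as per-frame factory delegates rather than called directly. A hypothetical sketch of that delegate-factory shape (none of these types or names are the actual HDRP API):

using System;
using System.Collections.Generic;

// Illustrative only: a tiny "history buffer" that calls a per-frame allocator delegate,
// mirroring how AllocColorRT(id, frameIndex, rtHandleSystem) above is shaped.
static class HistoryAllocatorSketch
{
    delegate string Allocator(string id, int frameIndex); // returns a fake handle name

    static List<string> AllocHistory(string id, int frameCount, Allocator alloc)
    {
        var handles = new List<string>();
        for (int frame = 0; frame < frameCount; frame++)
            handles.Add(alloc(id, frame)); // one buffer per frame in flight
        return handles;
    }

    static void Main()
    {
        Allocator allocColor = (id, frame) => $"ColorPyramid-{id}-{frame}";
        foreach (var h in AllocHistory("SSR", 2, allocColor))
            Console.WriteLine(h); // ColorPyramid-SSR-0, ColorPyramid-SSR-1
    }
}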

27
ScriptableRenderPipeline/HDRenderPipeline/HDRP/RenderPipelineResources/BufferPyramidProcessor.cs


public void RenderDepthPyramid(
int width, int height,
CommandBuffer cmd,
RTHandle sourceTexture,
RTHandle targetTexture,
List<RTHandle> mips,
RTHandleSystem.RTHandle sourceTexture,
RTHandleSystem.RTHandle targetTexture,
List<RTHandleSystem.RTHandle> mips,
int lodCount,
Vector2 scale
)

RTHandle src = targetTexture;
var src = targetTexture;
RTHandle dest = mips[i];
var dest = mips[i];
var srcMip = new RectInt(0, 0, width >> i, height >> i);
var dstMip = new RectInt(0, 0, srcMip.width >> 1, srcMip.height >> 1);

cmd.SetComputeTextureParam(m_DepthPyramidCS, kernel, _Result, dest);
cmd.SetComputeVectorParam(m_DepthPyramidCS, _SrcSize, new Vector4(
srcWorkMip.width, srcWorkMip.height,
(1.0f / srcWorkMip.width) * scale.x, (1.0f / srcWorkMip.height) * scale.y)
(1.0f / srcMip.width) * scale.x, (1.0f / srcMip.height) * scale.y)
);
cmd.DispatchCompute(

1
);
var dstMipWidthToCopy = Mathf.Min(dest.rt.width, dstWorkMip.width);
var dstMipHeightToCopy = Mathf.Min(dest.rt.height, dstWorkMip.height);
var dstMipWidthToCopy = Mathf.Min(targetTexture.rt.width >> (i + 1), dstWorkMip.width);
var dstMipHeightToCopy = Mathf.Min(targetTexture.rt.height >> (i + 1), dstWorkMip.height);
// If we could bind texture mips as UAV we could avoid this copy...(which moreover copies more than the needed viewport if not fullscreen)
cmd.CopyTexture(mips[i], 0, 0, 0, 0, dstMipWidthToCopy, dstMipHeightToCopy, targetTexture, 0, i + 1, 0, 0);

public void RenderColorPyramid(
HDCamera hdCamera,
CommandBuffer cmd,
RTHandle sourceTexture,
RTHandle targetTexture,
List<RTHandle> mips,
RTHandleSystem.RTHandle sourceTexture,
RTHandleSystem.RTHandle targetTexture,
List<RTHandleSystem.RTHandle> mips,
int lodCount,
Vector2 scale
)

var dest = mips[i];
var srcMip = new RectInt(0, 0, srcRect.width >> i, srcRect.height >> i);
//var dstMip = new RectInt(0, 0, srcMip.width >> 1, srcMip.height >> 1);
var srcWorkMip = new RectInt(
0,
0,

1
);
var dstMipWidthToCopy = Mathf.Min(dest.width, dstWorkMip.width);
var dstMipHeightToCopy = Mathf.Min(dest.height, dstWorkMip.height);
var dstMipWidthToCopy = Mathf.Min(targetTexture.width >> (i + 1), dstWorkMip.width);
var dstMipHeightToCopy = Mathf.Min(targetTexture.height >> (i + 1), dstWorkMip.height);
// If we could bind texture mips as UAV we could avoid this copy...(which moreover copies more than the needed viewport if not fullscreen)
cmd.CopyTexture(

3
ScriptableRenderPipeline/HDRenderPipeline/HDRP/RenderPipelineResources/CopyStencilBuffer.shader


#include "CoreRP/ShaderLibrary/Common.hlsl"
#include "CoreRP/ShaderLibrary/Packing.hlsl"
#include "../ShaderVariables.hlsl"
#include "../Lighting/LightDefinition.cs.hlsl"
int _StencilRef;
RW_TEXTURE2D(float, _HTile); // DXGI_FORMAT_R8_UINT is not supported by Unity

SubShader
{
Tags{ "RenderPipeline" = "HDRenderPipeline" }
Pass
{
Name "Pass 0 - Copy stencilRef to output"

22
ScriptableRenderPipeline/HDRenderPipeline/HDRP/ShaderVariables.hlsl


float4 _VBufferDepthDecodingParams; // See the call site for description
CBUFFER_END
CBUFFER_START(UnityLightingParameters)
// Buffer pyramid
float4 _ColorPyramidSize; // (x,y) = Actual Pixel Size, (z,w) = 1 / Actual Pixel Size
float4 _DepthPyramidSize; // (x,y) = Actual Pixel Size, (z,w) = 1 / Actual Pixel Size
float4 _ColorPyramidScale; // (x,y) = Screen Scale, z = lod count, w = unused
float4 _DepthPyramidScale; // (x,y) = Screen Scale, z = lod count, w = unused
// Screen space lighting
float _SSRefractionInvScreenWeightDistance; // Distance for screen space smoothstep with fallback
// Ambient occlusion
float4 _AmbientOcclusionParam; // xyz occlusion color, w directLightStrength
CBUFFER_END
// Rough refraction texture
// Color pyramid (width, height, lodcount, Unused)
TEXTURE2D(_ColorPyramidTexture);
// Depth pyramid (width, height, lodcount, Unused)
TEXTURE2D(_DepthPyramidTexture);
// Ambient occlusion texture
TEXTURE2D(_AmbientOcclusionTexture);
// Custom generated by HDRP, not from Unity Engine (passed in via HDCamera)
#if defined(USING_STEREO_MATRICES)

42
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Sky/SkyManager.cs


public class BuiltinSkyParameters
{
public Matrix4x4 pixelCoordToViewDirMatrix;
public Matrix4x4 invViewProjMatrix;
public Vector3 cameraPosWS;
public Vector4 screenSize;
public CommandBuffer commandBuffer;
public Light sunLight;
public RTHandle colorBuffer;
public RTHandle depthBuffer;
public HDCamera hdCamera;
public Matrix4x4 pixelCoordToViewDirMatrix;
public Matrix4x4 invViewProjMatrix;
public Vector3 cameraPosWS;
public Vector4 screenSize;
public CommandBuffer commandBuffer;
public Light sunLight;
public RTHandleSystem.RTHandle colorBuffer;
public RTHandleSystem.RTHandle depthBuffer;
public HDCamera hdCamera;
public DebugDisplaySettings debugSettings;

#if UNITY_EDITOR
if (camera.camera.cameraType == CameraType.Preview)
{
m_VisualSky.skySettings = m_DefaultPreviewSky;
m_VisualSky.skySettings = GetDefaultPreviewSkyInstance();
}
#endif

cmd.SetGlobalFloat(HDShaderIDs._SkyTextureMipCount, mipCount);
}
#if UNITY_EDITOR
ProceduralSky GetDefaultPreviewSkyInstance()
{
if (m_DefaultPreviewSky == null)
{
m_DefaultPreviewSky = ScriptableObject.CreateInstance<ProceduralSky>();
}
return m_DefaultPreviewSky;
}
#endif
public void Build(HDRenderPipelineAsset hdAsset, IBLFilterGGX iblFilterGGX)
{
m_BakingSkyRenderingContext = new SkyRenderingContext(iblFilterGGX, (int)hdAsset.renderPipelineSettings.lightLoopSettings.skyReflectionSize, false);

m_LightingOverrideVolumeStack = VolumeManager.instance.CreateStack();
m_LightingOverrideLayerMask = hdAsset.renderPipelineSettings.lightLoopSettings.skyLightingOverrideLayerMask;
#if UNITY_EDITOR
m_DefaultPreviewSky = ScriptableObject.CreateInstance<ProceduralSky>();
#endif
#if UNITY_EDITOR
CoreUtils.Destroy(m_DefaultPreviewSky);
#endif
CoreUtils.Destroy(m_StandardSkyboxMaterial);
CoreUtils.Destroy(m_BlitCubemapMaterial);
CoreUtils.Destroy(m_OpaqueAtmScatteringMaterial);

}
}
public void RenderSky(HDCamera camera, Light sunLight, RTHandle colorBuffer, RTHandle depthBuffer, DebugDisplaySettings debugSettings, CommandBuffer cmd)
public void RenderSky(HDCamera camera, Light sunLight, RTHandleSystem.RTHandle colorBuffer, RTHandleSystem.RTHandle depthBuffer, DebugDisplaySettings debugSettings, CommandBuffer cmd)
{
m_SkyRenderingContext.RenderSky(m_VisualSky, camera, sunLight, colorBuffer, depthBuffer, debugSettings, cmd);
}

Some files were not shown because too many files changed in this diff
