
Merge branch 'stacklit' into stacklit_vl_merge

# Conflicts:
#	ScriptableRenderPipeline/Core/CoreRP/ShaderLibrary/BSDF.hlsl
#	ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/Material/StackLit/StackLitUI.cs
#	ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/StackLit/StackLit.hlsl
#	ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/StackLit/StackLit.shader
#	ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/StackLit/StackLitData.hlsl
main
Stephane Laroche, 6 years ago
Current commit: 10f8090a
292 files changed, with 1654 insertions and 1220 deletions
  1. .gitmodules (8)
  2. CHANGELOG.md (10)
  3. CHANGELOG.md.meta (2)
  4. ImageTemplates/FailStamp.png.meta (5)
  5. ImageTemplates/LightweightPipeline/Scenes/001_SimpleCube.unity.png.meta (5)
  6. ImageTemplates/LightweightPipeline/Scenes/002_Camera_Clip.unity.png.meta (5)
  7. ImageTemplates/LightweightPipeline/Scenes/003_Camera_Ortho.unity.png.meta (5)
  8. ImageTemplates/LightweightPipeline/Scenes/004_Camera_TargetTexture.unity.png.meta (5)
  9. ImageTemplates/LightweightPipeline/Scenes/005_LitBakedEmission.unity.png.meta (5)
  10. ImageTemplates/LightweightPipeline/Scenes/006_LitShaderLightProbes.unity.png.meta (5)
  11. ImageTemplates/LightweightPipeline/Scenes/007_LitShaderMaps.unity.png.meta (5)
  12. ImageTemplates/LightweightPipeline/Scenes/008_LitShaderReflection.unity.png.meta (5)
  13. ImageTemplates/LightweightPipeline/Scenes/009_LightweightShading.unity.png.meta (5)
  14. ImageTemplates/LightweightPipeline/Scenes/010_MultiplePointLights.unity.png.meta (5)
  15. ImageTemplates/LightweightPipeline/Scenes/011_UnlitSprites.unity.png.meta (5)
  16. ImageTemplates/LightweightPipeline/Scenes/012_PBS_EnvironmentBRDF_Spheres.unity.png.meta (5)
  17. ImageTemplates/LightweightPipeline/Scenes/016_Lighting_Scene_Directional.unity.png.meta (5)
  18. ImageTemplates/LightweightPipeline/Scenes/017_Lighting_Scene_DirectionalBaked.unity.png.meta (5)
  19. ImageTemplates/LightweightPipeline/Scenes/018_Lighting_Scene_DirectionalBakedIndirect.unity.png.meta (5)
  20. ImageTemplates/LightweightPipeline/Scenes/019_Lighting_Scene_PointLights.unity.png.meta (5)
  21. ImageTemplates/LightweightPipeline/Scenes/020_Lighting_BasicDirectional.unity.png.meta (5)
  22. ImageTemplates/LightweightPipeline/Scenes/021_Lighting_BasicPoint.unity.png.meta (5)
  23. ImageTemplates/LightweightPipeline/Scenes/022_Lighting_BasicSpot.unity.png.meta (5)
  24. ImageTemplates/LightweightPipeline/Scenes/023_Lighting_Mixed.unity.png.meta (5)
  25. ImageTemplates/LightweightPipeline/Scenes/024_Shader_PBRvalidation_Specular.unity.png.meta (5)
  26. ImageTemplates/LightweightPipeline/Scenes/025_Shader_PBRvalidation_Metallic.unity.png.meta (5)
  27. ImageTemplates/LightweightPipeline/Scenes/026_Shader_PBRscene.unity.png.meta (5)
  28. ImageTemplates/LightweightPipeline/Scenes/027_PostProcessing.unity.png.meta (5)
  29. ImageTemplates/LightweightPipeline/Scenes/028_PostProcessing_Custom.unity.png.meta (5)
  30. ImageTemplates/LightweightPipeline/Scenes/029_Particles.unity.png.meta (5)
  31. ImageTemplates/LightweightPipeline/Scenes/031_Shader_GlossyEnvironmentSky.unity.png.meta (5)
  32. ImageTemplates/LightweightPipeline/Scenes/032_Shader_GlossyEnvironmentColor.unity.png.meta (5)
  33. ImageTemplates/LightweightPipeline/Scenes/033_Shader_HighlightsEnvironmentGradientSH.unity.png.meta (5)
  34. ImageTemplates/LightweightPipeline/Scenes/034_Shader_HighlightsEnvironmentGradientBaked.unity.png.meta (5)
  35. ImageTemplates/LightweightPipeline/Scenes/035_Shader_TerrainShaders.unity.png.meta (5)
  36. ImageTemplates/LightweightPipeline/Scenes/036_Lighting_Scene_DirectionalBakedDirectional.unity.png.meta (5)
  37. ImageTemplates/LightweightPipeline/Scenes/037_Particles.unity.png.meta (5)
  38. ImageTemplates/LightweightPipeline/Scenes/038_Lighting_DirectionalCookie.unity.png.meta (5)
  39. ImageTemplates/LightweightPipeline/Scenes/039_Lighting_SpotCookie.unity.png.meta (5)
  40. ImageTemplates/LightweightPipeline/Scenes/040_UpgradeScene.unity.png.meta (5)
  41. ImageTemplates/LightweightPipeline/Scenes/041_Lighting_BasicArea.unity.png.meta (5)
  42. ImageTemplates/LightweightPipeline/Scenes/042_Lighting_Scene_VertexLighting.unity.png.meta (5)
  43. ImageTemplates/LightweightPipeline/Scenes/043_Lighting_Mixed_ShadowMask.unity.png.meta (5)
  44. ImageTemplates/LightweightPipeline/Scenes/044_ReflectionProbe.unity.png.meta (5)
  45. LICENSE.md (32)
  46. ScriptableRenderPipeline/Core/CoreRP/ShaderLibrary/API/D3D11.hlsl (15)
  47. ScriptableRenderPipeline/Core/CoreRP/ShaderLibrary/API/GLCore.hlsl (11)
  48. ScriptableRenderPipeline/Core/CoreRP/ShaderLibrary/API/GLES2.hlsl (1)
  49. ScriptableRenderPipeline/Core/CoreRP/ShaderLibrary/API/GLES3.hlsl (1)
  50. ScriptableRenderPipeline/Core/CoreRP/ShaderLibrary/API/Metal.hlsl (15)
  51. ScriptableRenderPipeline/Core/CoreRP/ShaderLibrary/API/PSSL.hlsl (15)
  52. ScriptableRenderPipeline/Core/CoreRP/ShaderLibrary/API/Vulkan.hlsl (15)
  53. ScriptableRenderPipeline/Core/CoreRP/ShaderLibrary/API/XBoxOne.hlsl (15)
  54. ScriptableRenderPipeline/Core/CoreRP/ShaderLibrary/BSDF.hlsl (109)
  55. ScriptableRenderPipeline/Core/CoreRP/ShaderLibrary/Common.hlsl (14)
  56. ScriptableRenderPipeline/Core/CoreRP/Textures/TextureCache.cs (2)
  57. ScriptableRenderPipeline/Core/CoreRP/Textures/DepthBits.cs.meta (2)
  58. ScriptableRenderPipeline/Core/CoreRP/Utilities/CoreUtils.cs (49)
  59. ScriptableRenderPipeline/Core/LICENSE.md (32)
  60. ScriptableRenderPipeline/HDRenderPipeline/CHANGELOG.md (70)
  61. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Camera/HDCamera.cs (67)
  62. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Debug/DebugDisplay.cs (6)
  63. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Decal/DecalSystem.cs (14)
  64. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/HDAssetFactory.cs (8)
  65. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/Lighting/HDLightEditor.cs (31)
  66. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/Material/StackLit/StackLitUI.cs (550)
  67. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/RenderLoopSettings/GlobalLightLoopSettingsUI.cs (2)
  68. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/RenderLoopSettings/HDRenderPipelineUI.cs (2)
  69. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/RenderLoopSettings/SerializedGlobalLightLoopSettings.cs (2)
  70. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/RenderLoopSettings/SerializedHDRenderPipelineAsset.cs (2)
  71. ScriptableRenderPipeline/HDRenderPipeline/HDRP/HDCustomSamplerId.cs (4)
  72. ScriptableRenderPipeline/HDRenderPipeline/HDRP/HDRenderPipeline.cs (220)
  73. ScriptableRenderPipeline/HDRenderPipeline/HDRP/HDRenderPipelineAsset.cs (4)
  74. ScriptableRenderPipeline/HDRenderPipeline/HDRP/HDStringConstants.cs (44)
  75. ScriptableRenderPipeline/HDRenderPipeline/HDRP/HDUtils.cs (38)
  76. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Light/HDAdditionalLightData.cs (48)
  77. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightDefinition.cs (1)
  78. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightDefinition.cs.hlsl (5)
  79. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightLoop/GlobalLightLoopSettings.cs (3)
  80. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightLoop/LightLoop.cs (78)
  81. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightUtils.cs (16)
  82. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/HomogeneousDensityVolume.cs (2)
  83. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/VolumetricLighting.compute (11)
  84. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/VolumetricLighting.cs (360)
  85. ScriptableRenderPipeline/HDRenderPipeline/HDRP/MRTBufferManager.cs (8)
  86. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/Decal/DBufferManager.cs (8)
  87. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/GBufferManager.cs (4)
  88. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/Lit/Lit.hlsl (4)
  89. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/PreIntegratedFGD/PreIntegratedFGD.cs (11)
  90. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/StackLit/StackLit.hlsl (24)
  91. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/StackLit/StackLit.shader (109)
  92. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/StackLit/StackLitData.hlsl (300)
  93. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/StackLit/StackLitProperties.hlsl (84)
  94. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/SubsurfaceScattering/SubsurfaceScatteringManager.cs (22)
  95. ScriptableRenderPipeline/HDRenderPipeline/HDRP/RenderPipeline/FrameSettings.cs (5)
  96. ScriptableRenderPipeline/HDRenderPipeline/HDRP/RenderPipelineResources/BufferPyramid.cs (120)
  97. ScriptableRenderPipeline/HDRenderPipeline/HDRP/RenderPipelineResources/BufferPyramidProcessor.cs (27)
  98. ScriptableRenderPipeline/HDRenderPipeline/HDRP/RenderPipelineResources/HDRenderPipelineResources.asset (1)
  99. ScriptableRenderPipeline/HDRenderPipeline/HDRP/RenderPipelineResources/RenderPipelineResources.cs (4)
  100. ScriptableRenderPipeline/HDRenderPipeline/HDRP/ShaderConfig.cs (2)

.gitmodules (8)


[submodule "ShaderGraph"]
path = ShaderGraph
url = https://github.com/Unity-Technologies/ShaderGraph
[submodule "Tests/UTF_Core"]
path = Tests/UTF_Core
url=https://github.com/Unity-Technologies/UTF_Core.git
[submodule "Tests/UTF_Tests_HDRP"]
path = Tests/UTF_Tests_HDRP
url=https://github.com/Unity-Technologies/UTF_Tests_HDRP.git

CHANGELOG.md (10)


and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html).
## [Unreleased]
### Added
- Planar Reflection Probe support roughness (gaussian convolution of captured probe)
- Screen Space Refraction projection model (Proxy raycasting, HiZ raymarching)
- Screen Space Refraction settings as volume component
### Changed
- Depth and color pyramid are properly computed and sampled when the camera renders inside a viewport of a RTHandle.
- Forced Planar Probe update modes to (Realtime, Every Update, Mirror Camera)
- Removed Planar Probe mirror plane position and normal fields in inspector, always display mirror plane and normal gizmos
- Screen Space Refraction proxy model uses the proxy of the first environment light (Reflection probe/Planar probe) or the sky
## [0.1.6] - 2018-xx-yy

CHANGELOG.md.meta (2)


fileFormatVersion: 2
guid: d29df6d4fc5e8db47acccb638ce31e60
guid: ce3703c6cb43f154bac29c5ad0a09ba6
TextScriptImporter:
externalObjects: {}
userData:

ImageTemplates/FailStamp.png.meta (5)


serializedVersion: 2
filterMode: -1
aniso: -1
mipBias: -1
mipBias: -100
wrapU: -1
wrapV: -1
wrapW: -1

spriteMeshType: 1
alignment: 0
spritePivot: {x: 0.5, y: 0.5}
spritePixelsToUnits: 100
spritePixelsToUnits: 100
spriteGenerateFallbackPhysicsShape: 1
alphaUsage: 1
alphaIsTransparency: 0
spriteTessellationDetail: -1

The 40 LightweightPipeline scene template files listed above (items 5 through 44, ImageTemplates/LightweightPipeline/Scenes/001_SimpleCube.unity.png.meta through 044_ReflectionProbe.unity.png.meta, 5 changed lines each) carry the same texture importer settings change shown for ImageTemplates/FailStamp.png.meta.

LICENSE.md (32)


**Unity Companion Package License v1.0 ("_License_")**
Copyright © 2017 Unity Technologies ApS ("**_Unity_**")
Unity hereby grants to you a worldwide, non-exclusive, no-charge, and royalty-free copyright license to reproduce, prepare derivative works of, publicly display, publicly perform, sublicense, and distribute the software that is made available with this License ("**_Software_**"), subject to the following terms and conditions:
1. *Unity Companion Use Only*. Exercise of the license granted herein is limited to exercise for the creation, use, and/or distribution of applications, software, or other content pursuant to a valid Unity development engine software license ("**_Engine License_**"). That means while use of the Software is not limited to use in the software licensed under the Engine License, the Software may not be used for any purpose other than the creation, use, and/or distribution of Engine License-dependent applications, software, or other content. No other exercise of the license granted herein is permitted.
1. *No Modification of Engine License*. Neither this License nor any exercise of the license granted herein modifies the Engine License in any way.
1. *Ownership & Grant Back to You*.
3.1. You own your content. In this License, "derivative works" means derivatives of the Software itself--works derived only from the Software by you under this License (for example, modifying the code of the Software itself to improve its efficacy); “derivative works” of the Software do not include, for example, games, apps, or content that you create using the Software. You keep all right, title, and interest to your own content.
3.2. Unity owns its content. While you keep all right, title, and interest to your own content per the above, as between Unity and you, Unity will own all right, title, and interest to all intellectual property rights (including patent, trademark, and copyright) in the Software and derivative works of the Software, and you hereby assign and agree to assign all such rights in those derivative works to Unity.
3.3. You have a license to those derivative works. Subject to this License, Unity grants to you the same worldwide, non-exclusive, no-charge, and royalty-free copyright license to derivative works of the Software you create as is granted to you for the Software under this License.
1. *Trademarks*. You are not granted any right or license under this License to use any trademarks, service marks, trade names, products names, or branding of Unity or its affiliates ("**_Trademarks_**"). Descriptive uses of Trademarks are permitted; see, for example, Unity’s Branding Usage Guidelines at [https://unity3d.com/public-relations/brand](https://unity3d.com/public-relations/brand).
1. *Notices & Third-Party Rights*. This License, including the copyright notice above, must be provided in all substantial portions of the Software and derivative works thereof (or, if that is impracticable, in any other location where such notices are customarily placed). Further, if the Software is accompanied by a Unity "third-party notices" or similar file, you acknowledge and agree that software identified in that file is governed by those separate license terms.
1. *DISCLAIMER, LIMITATION OF LIABILITY*. THE SOFTWARE AND ANY DERIVATIVE WORKS THEREOF IS PROVIDED ON AN "AS IS" BASIS, AND IS PROVIDED WITHOUT WARRANTY OF ANY KIND, WHETHER EXPRESS OR IMPLIED, INCLUDING ANY WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, AND/OR NONINFRINGEMENT. IN NO EVENT SHALL ANY COPYRIGHT HOLDER OR AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES (WHETHER DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL, INCLUDING PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES, LOSS OF USE, DATA, OR PROFITS, AND BUSINESS INTERRUPTION), OR OTHER LIABILITY WHATSOEVER, WHETHER IN AN ACTION OF CONTRACT, TORT, OR OTHERWISE, ARISING FROM OR OUT OF, OR IN CONNECTION WITH, THE SOFTWARE OR ANY DERIVATIVE WORKS THEREOF OR THE USE OF OR OTHER DEALINGS IN SAME, EVEN WHERE ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
1. *USE IS ACCEPTANCE and License Versions*. Your receipt and use of the Software constitutes your acceptance of this License and its terms and conditions. Software released by Unity under this License may be modified or updated and the License with it; upon any such modification or update, you will comply with the terms of the updated License for any use of any of the Software under the updated License.
1. *Use in Compliance with Law and Termination*. Your exercise of the license granted herein will at all times be in compliance with applicable law and will not infringe any proprietary rights (including intellectual property rights); this License will terminate immediately on any breach by you of this License.
Copyright © 2018 Unity Technologies ApS
1. *Severability*. If any provision of this License is held to be unenforceable or invalid, that provision will be enforced to the maximum extent possible and the other provisions will remain in full force and effect.
Licensed under the Unity Companion License for Unity-dependent projects--see [Unity Companion License](http://www.unity3d.com/legal/licenses/Unity_Companion_License).
1. *Governing Law and Venue*. This License is governed by and construed in accordance with the laws of Denmark, except for its conflict of laws rules; the United Nations Convention on Contracts for the International Sale of Goods will not apply. If you reside (or your principal place of business is) within the United States, you and Unity agree to submit to the personal and exclusive jurisdiction of and venue in the state and federal courts located in San Francisco County, California concerning any dispute arising out of this License ("**_Dispute_**"). If you reside (or your principal place of business is) outside the United States, you and Unity agree to submit to the personal and exclusive jurisdiction of and venue in the courts located in Copenhagen, Denmark concerning any Dispute.
Unless expressly provided otherwise, the Software under this license is made available strictly on an “AS IS” BASIS WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED. Please review the license for details on these and other terms and conditions.

ScriptableRenderPipeline/Core/CoreRP/ShaderLibrary/API/D3D11.hlsl (15)


#define SAMPLE_DEPTH_TEXTURE(textureName, samplerName, coord2) SAMPLE_TEXTURE2D(textureName, samplerName, coord2).r
#define SAMPLE_DEPTH_TEXTURE_LOD(textureName, samplerName, coord2, lod) SAMPLE_TEXTURE2D_LOD(textureName, samplerName, coord2, lod).r
#define LOAD_TEXTURE2D(textureName, unCoord2) textureName.Load(int3(unCoord2, 0))
#define LOAD_TEXTURE2D_LOD(textureName, unCoord2, lod) textureName.Load(int3(unCoord2, lod))
#define LOAD_TEXTURE2D_MSAA(textureName, unCoord2, sampleIndex) textureName.Load(unCoord2, sampleIndex)
#define LOAD_TEXTURE2D_ARRAY(textureName, unCoord2, index) textureName.Load(int4(unCoord2, index, 0))
#define LOAD_TEXTURE2D_ARRAY_LOD(textureName, unCoord2, index, lod) textureName.Load(int4(unCoord2, index, lod))
#define LOAD_TEXTURE3D(textureName, unCoord3) textureName.Load(int4(unCoord3, 0))
#define LOAD_TEXTURE3D_LOD(textureName, unCoord3, lod) textureName.Load(int4(unCoord3, lod))
#define LOAD_TEXTURE2D(textureName, unCoord2) textureName.Load(int3(unCoord2, 0))
#define LOAD_TEXTURE2D_LOD(textureName, unCoord2, lod) textureName.Load(int3(unCoord2, lod))
#define LOAD_TEXTURE2D_MSAA(textureName, unCoord2, sampleIndex) textureName.Load(unCoord2, sampleIndex)
#define LOAD_TEXTURE2D_ARRAY(textureName, unCoord2, index) textureName.Load(int4(unCoord2, index, 0))
#define LOAD_TEXTURE2D_ARRAY_MSAA(textureName, unCoord2, index, sampleIndex) textureName.Load(int4(unCoord2, index, 0), sampleIndex)
#define LOAD_TEXTURE2D_ARRAY_LOD(textureName, unCoord2, index, lod) textureName.Load(int4(unCoord2, index, lod))
#define LOAD_TEXTURE3D(textureName, unCoord3) textureName.Load(int4(unCoord3, 0))
#define LOAD_TEXTURE3D_LOD(textureName, unCoord3, lod) textureName.Load(int4(unCoord3, lod))
#define PLATFORM_SUPPORT_GATHER
#define GATHER_TEXTURE2D(textureName, samplerName, coord2, offset) textureName.Gather(samplerName, coord2, offset)
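For context, here is a minimal usage sketch of the macros added in this hunk, showing how they expand under the D3D11 definitions above; the resource and function names below are hypothetical and not part of the commit.

// Assumes the D3D11.hlsl macro definitions above are in scope.
Texture2DMSArray<float4, 4> _ColorBufferMS;   // hypothetical MSAA texture array
Texture2D<float4>           _SourceTex;       // hypothetical single-sample texture
SamplerState                sampler_SourceTex;

float4 LoadOneMSAASample(uint2 pixelCoord, uint slice, uint sampleIndex)
{
    // Expands to _ColorBufferMS.Load(int4(pixelCoord, slice, 0), sampleIndex).
    return LOAD_TEXTURE2D_ARRAY_MSAA(_ColorBufferMS, pixelCoord, slice, sampleIndex);
}

float4 GatherRedQuad(float2 uv)
{
    // D3D11 defines PLATFORM_SUPPORT_GATHER, so the offset-taking variant is available;
    // this expands to _SourceTex.Gather(sampler_SourceTex, uv, int2(0, 0)).
    return GATHER_TEXTURE2D(_SourceTex, sampler_SourceTex, uv, int2(0, 0));
}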

ScriptableRenderPipeline/Core/CoreRP/ShaderLibrary/API/GLCore.hlsl (11)


#define SAMPLE_DEPTH_TEXTURE(textureName, samplerName, coord2) SAMPLE_TEXTURE2D(textureName, samplerName, coord2).r
#define SAMPLE_DEPTH_TEXTURE_LOD(textureName, samplerName, coord2, lod) SAMPLE_TEXTURE2D_LOD(textureName, samplerName, coord2, lod).r
#define LOAD_TEXTURE2D(textureName, unCoord2) textureName.Load(int3(unCoord2, 0))
#define LOAD_TEXTURE2D_LOD(textureName, unCoord2, lod) textureName.Load(int3(unCoord2, lod))
#define LOAD_TEXTURE2D_MSAA(textureName, unCoord2, sampleIndex) textureName.Load(unCoord2, sampleIndex)
#define LOAD_TEXTURE2D_ARRAY(textureName, unCoord2, index) textureName.Load(int4(unCoord2, index, 0))
#define LOAD_TEXTURE2D_ARRAY_LOD(textureName, unCoord2, index, lod) textureName.Load(int4(unCoord2, index, lod))
#define LOAD_TEXTURE2D(textureName, unCoord2) textureName.Load(int3(unCoord2, 0))
#define LOAD_TEXTURE2D_LOD(textureName, unCoord2, lod) textureName.Load(int3(unCoord2, lod))
#define LOAD_TEXTURE2D_MSAA(textureName, unCoord2, sampleIndex) textureName.Load(unCoord2, sampleIndex)
#define LOAD_TEXTURE2D_ARRAY(textureName, unCoord2, index) textureName.Load(int4(unCoord2, index, 0))
#define LOAD_TEXTURE2D_ARRAY_MSAA(textureName, unCoord2, index, sampleIndex) textureName.Load(int4(unCoord2, index, 0), sampleIndex)
#define LOAD_TEXTURE2D_ARRAY_LOD(textureName, unCoord2, index, lod) textureName.Load(int4(unCoord2, index, lod))
#if OPENGL4_1_SM5
#define GATHER_TEXTURE2D(textureName, samplerName, coord2) textureName.Gather(samplerName, coord2)

ScriptableRenderPipeline/Core/CoreRP/ShaderLibrary/API/GLES2.hlsl (1)


#define LOAD_TEXTURE2D_LOD(textureName, unCoord2, lod) half4(0, 0, 0, 0)
#define LOAD_TEXTURE2D_MSAA(textureName, unCoord2, sampleIndex) half4(0, 0, 0, 0)
#define LOAD_TEXTURE2D_ARRAY(textureName, unCoord2, index) half4(0, 0, 0, 0)
#define LOAD_TEXTURE2D_ARRAY_MSAA(textureName, unCoord2, index, sampleIndex) half4(0, 0, 0, 0)
#define LOAD_TEXTURE2D_ARRAY_LOD(textureName, unCoord2, index, lod) half4(0, 0, 0, 0)
// Gather not supported. Fallback to regular texture sampling.

ScriptableRenderPipeline/Core/CoreRP/ShaderLibrary/API/GLES3.hlsl (1)


#define LOAD_TEXTURE2D_LOD(textureName, unCoord2, lod) textureName.Load(int3(unCoord2, lod))
#define LOAD_TEXTURE2D_MSAA(textureName, unCoord2, sampleIndex) textureName.Load(unCoord2, sampleIndex)
#define LOAD_TEXTURE2D_ARRAY(textureName, unCoord2, index) textureName.Load(int4(unCoord2, index, 0))
#define LOAD_TEXTURE2D_ARRAY_MSAA(textureName, unCoord2, index, sampleIndex) textureName.Load(int4(unCoord2, index, 0), sampleIndex)
#define LOAD_TEXTURE2D_ARRAY_LOD(textureName, unCoord2, index, lod) textureName.Load(int4(unCoord2, index, lod))
#if GLES3_1_AEP

ScriptableRenderPipeline/Core/CoreRP/ShaderLibrary/API/Metal.hlsl (15)


#define SAMPLE_DEPTH_TEXTURE(textureName, samplerName, coord2) SAMPLE_TEXTURE2D(textureName, samplerName, coord2).r
#define SAMPLE_DEPTH_TEXTURE_LOD(textureName, samplerName, coord2, lod) SAMPLE_TEXTURE2D_LOD(textureName, samplerName, coord2, lod).r
#define LOAD_TEXTURE2D(textureName, unCoord2) textureName.Load(int3(unCoord2, 0))
#define LOAD_TEXTURE2D_LOD(textureName, unCoord2, lod) textureName.Load(int3(unCoord2, lod))
#define LOAD_TEXTURE2D_MSAA(textureName, unCoord2, sampleIndex) textureName.Load(unCoord2, sampleIndex)
#define LOAD_TEXTURE2D_ARRAY(textureName, unCoord2, index) textureName.Load(int4(unCoord2, index, 0))
#define LOAD_TEXTURE2D_ARRAY_LOD(textureName, unCoord2, index, lod) textureName.Load(int4(unCoord2, index, lod))
#define LOAD_TEXTURE3D(textureName, unCoord3) textureName.Load(int4(unCoord3, 0))
#define LOAD_TEXTURE3D_LOD(textureName, unCoord3, lod) textureName.Load(int4(unCoord3, lod))
#define LOAD_TEXTURE2D(textureName, unCoord2) textureName.Load(int3(unCoord2, 0))
#define LOAD_TEXTURE2D_LOD(textureName, unCoord2, lod) textureName.Load(int3(unCoord2, lod))
#define LOAD_TEXTURE2D_MSAA(textureName, unCoord2, sampleIndex) textureName.Load(unCoord2, sampleIndex)
#define LOAD_TEXTURE2D_ARRAY(textureName, unCoord2, index) textureName.Load(int4(unCoord2, index, 0))
#define LOAD_TEXTURE2D_ARRAY_MSAA(textureName, unCoord2, index, sampleIndex) textureName.Load(int4(unCoord2, index, 0), sampleIndex)
#define LOAD_TEXTURE2D_ARRAY_LOD(textureName, unCoord2, index, lod) textureName.Load(int4(unCoord2, index, lod))
#define LOAD_TEXTURE3D(textureName, unCoord3) textureName.Load(int4(unCoord3, 0))
#define LOAD_TEXTURE3D_LOD(textureName, unCoord3, lod) textureName.Load(int4(unCoord3, lod))
#define GATHER_TEXTURE2D(textureName, samplerName, coord2) textureName.Gather(samplerName, coord2)
#define GATHER_TEXTURE2D_ARRAY(textureName, samplerName, coord2, index) textureName.Gather(samplerName, float3(coord2, index))

ScriptableRenderPipeline/Core/CoreRP/ShaderLibrary/API/PSSL.hlsl (15)


#define SAMPLE_DEPTH_TEXTURE(textureName, samplerName, coord2) SAMPLE_TEXTURE2D(textureName, samplerName, coord2).r
#define SAMPLE_DEPTH_TEXTURE_LOD(textureName, samplerName, coord2, lod) SAMPLE_TEXTURE2D_LOD(textureName, samplerName, coord2, lod).r
#define LOAD_TEXTURE2D(textureName, unCoord2) textureName.Load(int3(unCoord2, 0))
#define LOAD_TEXTURE2D_LOD(textureName, unCoord2, lod) textureName.Load(int3(unCoord2, lod))
#define LOAD_TEXTURE2D_MSAA(textureName, unCoord2, sampleIndex) textureName.Load(unCoord2, sampleIndex)
#define LOAD_TEXTURE2D_ARRAY(textureName, unCoord2, index) textureName.Load(int4(unCoord2, index, 0))
#define LOAD_TEXTURE2D_ARRAY_LOD(textureName, unCoord2, index, lod) textureName.Load(int4(unCoord2, index, lod))
#define LOAD_TEXTURE3D(textureName, unCoord3) textureName.Load(int4(unCoord3, 0))
#define LOAD_TEXTURE3D_LOD(textureName, unCoord3, lod) textureName.Load(int4(unCoord3, lod))
#define LOAD_TEXTURE2D(textureName, unCoord2) textureName.Load(int3(unCoord2, 0))
#define LOAD_TEXTURE2D_LOD(textureName, unCoord2, lod) textureName.Load(int3(unCoord2, lod))
#define LOAD_TEXTURE2D_MSAA(textureName, unCoord2, sampleIndex) textureName.Load(unCoord2, sampleIndex)
#define LOAD_TEXTURE2D_ARRAY(textureName, unCoord2, index) textureName.Load(int4(unCoord2, index, 0))
#define LOAD_TEXTURE2D_ARRAY_MSAA(textureName, unCoord2, index, sampleIndex) textureName.Load(int4(unCoord2, index, 0), sampleIndex)
#define LOAD_TEXTURE2D_ARRAY_LOD(textureName, unCoord2, index, lod) textureName.Load(int4(unCoord2, index, lod))
#define LOAD_TEXTURE3D(textureName, unCoord3) textureName.Load(int4(unCoord3, 0))
#define LOAD_TEXTURE3D_LOD(textureName, unCoord3, lod) textureName.Load(int4(unCoord3, lod))
#define PLATFORM_SUPPORT_GATHER
#define GATHER_TEXTURE2D(textureName, samplerName, coord2, offset) textureName.Gather(samplerName, coord2, offset)

ScriptableRenderPipeline/Core/CoreRP/ShaderLibrary/API/Vulkan.hlsl (15)


#define SAMPLE_DEPTH_TEXTURE(textureName, samplerName, coord2) SAMPLE_TEXTURE2D(textureName, samplerName, coord2).r
#define SAMPLE_DEPTH_TEXTURE_LOD(textureName, samplerName, coord2, lod) SAMPLE_TEXTURE2D_LOD(textureName, samplerName, coord2, lod).r
#define LOAD_TEXTURE2D(textureName, unCoord2) textureName.Load(int3(unCoord2, 0))
#define LOAD_TEXTURE2D_LOD(textureName, unCoord2, lod) textureName.Load(int3(unCoord2, lod))
#define LOAD_TEXTURE2D_MSAA(textureName, unCoord2, sampleIndex) textureName.Load(unCoord2, sampleIndex)
#define LOAD_TEXTURE2D_ARRAY(textureName, unCoord2, index) textureName.Load(int4(unCoord2, index, 0))
#define LOAD_TEXTURE2D_ARRAY_LOD(textureName, unCoord2, index, lod) textureName.Load(int4(unCoord2, index, lod))
#define LOAD_TEXTURE3D(textureName, unCoord3) textureName.Load(int4(unCoord3, 0))
#define LOAD_TEXTURE3D_LOD(textureName, unCoord3, lod) textureName.Load(int4(unCoord3, lod))
#define LOAD_TEXTURE2D(textureName, unCoord2) textureName.Load(int3(unCoord2, 0))
#define LOAD_TEXTURE2D_LOD(textureName, unCoord2, lod) textureName.Load(int3(unCoord2, lod))
#define LOAD_TEXTURE2D_MSAA(textureName, unCoord2, sampleIndex) textureName.Load(unCoord2, sampleIndex)
#define LOAD_TEXTURE2D_ARRAY(textureName, unCoord2, index) textureName.Load(int4(unCoord2, index, 0))
#define LOAD_TEXTURE2D_ARRAY_MSAA(textureName, unCoord2, index, sampleIndex) textureName.Load(int4(unCoord2, index, 0), sampleIndex)
#define LOAD_TEXTURE2D_ARRAY_LOD(textureName, unCoord2, index, lod) textureName.Load(int4(unCoord2, index, lod))
#define LOAD_TEXTURE3D(textureName, unCoord3) textureName.Load(int4(unCoord3, 0))
#define LOAD_TEXTURE3D_LOD(textureName, unCoord3, lod) textureName.Load(int4(unCoord3, lod))
#define PLATFORM_SUPPORT_GATHER
#define GATHER_TEXTURE2D(textureName, samplerName, coord2, offset) textureName.Gather(samplerName, coord2, offset)

ScriptableRenderPipeline/Core/CoreRP/ShaderLibrary/API/XBoxOne.hlsl (15)


#define SAMPLE_DEPTH_TEXTURE(textureName, samplerName, coord2) SAMPLE_TEXTURE2D(textureName, samplerName, coord2).r
#define SAMPLE_DEPTH_TEXTURE_LOD(textureName, samplerName, coord2, lod) SAMPLE_TEXTURE2D_LOD(textureName, samplerName, coord2, lod).r
#define LOAD_TEXTURE2D(textureName, unCoord2) textureName.Load(int3(unCoord2, 0))
#define LOAD_TEXTURE2D_LOD(textureName, unCoord2, lod) textureName.Load(int3(unCoord2, lod))
#define LOAD_TEXTURE2D_MSAA(textureName, unCoord2, sampleIndex) textureName.Load(unCoord2, sampleIndex)
#define LOAD_TEXTURE2D_ARRAY(textureName, unCoord2, index) textureName.Load(int4(unCoord2, index, 0))
#define LOAD_TEXTURE2D_ARRAY_LOD(textureName, unCoord2, index, lod) textureName.Load(int4(unCoord2, index, lod))
#define LOAD_TEXTURE3D(textureName, unCoord3) textureName.Load(int4(unCoord3, 0))
#define LOAD_TEXTURE3D_LOD(textureName, unCoord3, lod) textureName.Load(int4(unCoord3, lod))
#define LOAD_TEXTURE2D(textureName, unCoord2) textureName.Load(int3(unCoord2, 0))
#define LOAD_TEXTURE2D_LOD(textureName, unCoord2, lod) textureName.Load(int3(unCoord2, lod))
#define LOAD_TEXTURE2D_MSAA(textureName, unCoord2, sampleIndex) textureName.Load(unCoord2, sampleIndex)
#define LOAD_TEXTURE2D_ARRAY(textureName, unCoord2, index) textureName.Load(int4(unCoord2, index, 0))
#define LOAD_TEXTURE2D_ARRAY_MSAA(textureName, unCoord2, index, sampleIndex) textureName.Load(int4(unCoord2, index, 0), sampleIndex)
#define LOAD_TEXTURE2D_ARRAY_LOD(textureName, unCoord2, index, lod) textureName.Load(int4(unCoord2, index, lod))
#define LOAD_TEXTURE3D(textureName, unCoord3) textureName.Load(int4(unCoord3, 0))
#define LOAD_TEXTURE3D_LOD(textureName, unCoord3, lod) textureName.Load(int4(unCoord3, lod))
#define PLATFORM_SUPPORT_GATHER
#define GATHER_TEXTURE2D(textureName, samplerName, coord2, offset) textureName.Gather(samplerName, coord2, offset)

ScriptableRenderPipeline/Core/CoreRP/ShaderLibrary/BSDF.hlsl (109)


real F_Schlick(real f0, real f90, real u)
{
real x = 1.0 - u;
real x = 1.0 - u;
real x2 = x * x;
real x5 = x * x2 * x2;
return (f90 - f0) * x5 + f0; // sub mul mul mul sub mad

real3 F_Schlick(real3 f0, real f90, real u)
{
real x = 1.0 - u;
real x = 1.0 - u;
real x2 = x * x;
real x5 = x * x2 * x2;
return f0 * (1.0 - x5) + (f90 * x5); // sub mul mul mul sub mul mad*3

// Does not handle TIR.
real F_Transm_Schlick(real f0, real f90, real u)
{
real x = 1.0 - u;
real x = 1.0 - u;
real x2 = x * x;
real x5 = x * x2 * x2;
return (1.0 - f90 * x5) - f0 * (1.0 - x5); // sub mul mul mul mad sub mad

// Does not handle TIR.
real3 F_Transm_Schlick(real3 f0, real f90, real u)
{
real x = 1.0 - u;
real x = 1.0 - u;
real x2 = x * x;
real x5 = x * x2 * x2;
return (1.0 - f90 * x5) - f0 * (1.0 - x5); // sub mul mul mul mad sub mad*3

}
// Fresnel dieletric / conductor
real3 F_FresnelConductor(real3 eta, real3 etak, real cosTheta)
// Note: etak2 = etak * etak (optimization for Artist Friendly Metallic Fresnel below)
// eta = eta_t / eta_i and etak = k_t / n_i
real3 F_FresnelConductor(real3 eta, real3 etak2, real cosTheta)
real3 etak2 = etak * etak;
real3 t0 = eta2 - etak2 - sinTheta2;
real3 a2plusb2 = sqrt(t0 * t0 + 4.0 * eta2 * etak2);

return 0.5 * (Rp + Rs);
}
// Conversion FO/IOR
TEMPLATE_2_REAL(IorToFresnel0, transmittedIor, incidentIor, return Sq((transmittedIor - incidentIor) / (transmittedIor + incidentIor)) )
// ior is a value between 1.0 and 3.0. 1.0 is air interface
real IorToFresnel0(real transmittedIor)
{
return IorToFresnel0(transmittedIor, 1.0);
}
// Assume air interface for top
// Note: We don't handle the case fresnel0 == 1
//real Fresnel0ToIor(real fresnel0)
//{
// real sqrtF0 = sqrt(fresnel0);
// return (1.0 + sqrtF0) / (1.0 - sqrtF0);
//}
TEMPLATE_1_REAL(Fresnel0ToIor, fresnel0, return ((1.0 + sqrt(fresnel0)) / (1.0 - sqrt(fresnel0))) )
// This function is a coarse approximation of computing fresnel0 for a different top than air (here clear coat of IOR 1.5) when we only have fresnel0 with air interface
// This function is equivalent to IorToFresnel0(Fresnel0ToIor(fresnel0), 1.5)
// mean
// real sqrtF0 = sqrt(fresnel0);
// return Sq(1.0 - 5.0 * sqrtF0) / Sq(5.0 - sqrtF0);
// Optimization: Fit of the function (3 mad) for range [0.04 (should return 0), 1 (should return 1)]
TEMPLATE_1_REAL(ConvertF0ForAirInterfaceToF0ForClearCoat15, fresnel0, return saturate(-0.0256868 + fresnel0 * (0.326846 + (0.978946 - 0.283835 * fresnel0) * fresnel0)))
// Artist Friendly Metallic Fresnel Ref: http://jcgt.org/published/0003/04/03/paper.pdf
real3 GetIorN(real3 f0, real3 edgeTint)
{
real3 sqrtF0 = sqrt(f0);
return lerp((1.0 - f0) / (1.0 + f0), (1.0 + sqrtF0) / (1.0 - sqrt(f0)), edgeTint);
}
real3 getIorK2(real3 f0, real3 n)
{
real3 nf0 = Sq(n + 1.0) * f0 - Sq(f0 - 1.0);
return nf0 / (1.0 - f0);
}
//-----------------------------------------------------------------------------
// Specular BRDF
//-----------------------------------------------------------------------------

real a2 = Sq(roughness);
real s = (NdotH * a2 - NdotH) * NdotH + 1.0;
real s = (NdotH * a2 - NdotH) * NdotH + 1.0;
return a2 / (s * s);
}

real DV_SmithJointGGX(real NdotH, real NdotL, real NdotV, real roughness, real partLambdaV)
{
real a2 = Sq(roughness);
real s = (NdotH * a2 - NdotH) * NdotH + 1.0;
real s = (NdotH * a2 - NdotH) * NdotH + 1.0;
real lambdaV = NdotL * partLambdaV;
real lambdaL = NdotV * sqrt((-NdotL * a2 + NdotL) * NdotL + a2);

// Inline D_GGXAniso() * V_SmithJointGGXAniso() together for better code generation.
real DV_SmithJointGGXAniso(real TdotH, real BdotH, real NdotH, real NdotV,
real TdotL, real BdotL, real NdotL,
real roughnessT, real roughnessB, real partLambdaV)
real TdotL, real BdotL, real NdotL,
real roughnessT, real roughnessB, real partLambdaV)
{
real a2 = roughnessT * roughnessB;
real3 v = real3(roughnessB * TdotH, roughnessT * BdotH, a2 * NdotH);

}
real DV_SmithJointGGXAniso(real TdotH, real BdotH, real NdotH,
real TdotV, real BdotV, real NdotV,
real TdotL, real BdotL, real NdotL,
real roughnessT, real roughnessB)
real TdotV, real BdotV, real NdotV,
real TdotL, real BdotL, real NdotL,
real roughnessT, real roughnessB)
roughnessT, roughnessB, partLambdaV);
roughnessT, roughnessB, partLambdaV);
}
//-----------------------------------------------------------------------------

real fd90 = 0.5 + (perceptualRoughness + perceptualRoughness * LdotV);
// Two schlick fresnel term
real lightScatter = F_Schlick(1.0, fd90, NdotL);
real viewScatter = F_Schlick(1.0, fd90, NdotV);
real viewScatter = F_Schlick(1.0, fd90, NdotV);
// Normalize the BRDF for polar view angles of up to (Pi/4).
// We use the worst case of (roughness = albedo = 1), and, for each view angle,

// Ref: Diffuse Lighting for GGX + Smith Microsurfaces, p. 113.
real3 DiffuseGGXNoPI(real3 albedo, real NdotV, real NdotL, real NdotH, real LdotV, real roughness)
{
real facing = 0.5 + 0.5 * LdotV; // (LdotH)^2
real rough = facing * (0.9 - 0.4 * facing) * (0.5 / NdotH + 1);
real facing = 0.5 + 0.5 * LdotV; // (LdotH)^2
real rough = facing * (0.9 - 0.4 * facing) * (0.5 / NdotH + 1);
real smooth = transmitL * transmitV * 1.05; // Normalize F_t over the hemisphere
real single = lerp(smooth, rough, roughness); // Rescaled by PI
real multiple = roughness * (0.1159 * PI); // Rescaled by PI
real smooth = transmitL * transmitV * 1.05; // Normalize F_t over the hemisphere
real single = lerp(smooth, rough, roughness); // Rescaled by PI
real multiple = roughness * (0.1159 * PI); // Rescaled by PI
return single + albedo * multiple;
}

// Note that we could save 2 cycles by inlining the multiplication by INV_PI.
return INV_PI * DiffuseGGXNoPI(albedo, NdotV, NdotL, NdotH, LdotV, roughness);
}
//-----------------------------------------------------------------------------
// Conversion FO/IOR
//-----------------------------------------------------------------------------
TEMPLATE_2_REAL(IorToFresnel0, transmittedIor, incidentIor, return Sq((transmittedIor - incidentIor) / (transmittedIor + incidentIor)) )
// ior is a value between 1.0 and 3.0. 1.0 is air interface
real IorToFresnel0(real transmittedIor)
{
return IorToFresnel0(transmittedIor, 1.0);
}
// Assume air interface for top
// Note: We don't handle the case fresnel0 == 1
//real Fresnel0ToIor(real fresnel0)
//{
// real sqrtF0 = sqrt(fresnel0);
// return (1.0 + sqrtF0) / (1.0 - sqrtF0);
//}
TEMPLATE_1_REAL(Fresnel0ToIor, fresnel0, return ((1.0 + sqrt(fresnel0)) / (1.0 - sqrt(fresnel0))) )
// This function is a coarse approximation of computing fresnel0 for a different top than air (here clear coat of IOR 1.5) when we only have fresnel0 with air interface
// This function is equivalent to IorToFresnel0(Fresnel0ToIor(fresnel0), 1.5)
// mean
// real sqrtF0 = sqrt(fresnel0);
// return Sq(1.0 - 5.0 * sqrtF0) / Sq(5.0 - sqrtF0);
// Optimization: Fit of the function (3 mad) for range [0.04 (should return 0), 1 (should return 1)]
TEMPLATE_1_REAL(ConvertF0ForAirInterfaceToF0ForClearCoat15, fresnel0, return saturate(-0.0256868 + fresnel0 * (0.326846 + (0.978946 - 0.283835 * fresnel0) * fresnel0)))
//-----------------------------------------------------------------------------
// Iridescence
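To make the conversion helpers moved in this hunk easier to follow, the normal-incidence Fresnel relations they implement can be written out (this restatement is not part of the diff). With transmitted and incident indices of refraction $n_t$ and $n_i$:

$$F_0 = \left(\frac{n_t - n_i}{n_t + n_i}\right)^2, \qquad n_t = n_i\,\frac{1 + \sqrt{F_0}}{1 - \sqrt{F_0}} \quad (F_0 \neq 1),$$

which correspond to IorToFresnel0 and Fresnel0ToIor (the latter assuming an air interface, $n_i = 1$). The Schlick terms at the top of the hunk evaluate

$$F(u) = F_0 + (F_{90} - F_0)\,(1 - u)^5,$$

and ConvertF0ForAirInterfaceToF0ForClearCoat15 is, per its own comment, a saturated cubic fit of IorToFresnel0(Fresnel0ToIor(fresnel0), 1.5) over $F_0 \in [0.04, 1]$.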

ScriptableRenderPipeline/Core/CoreRP/ShaderLibrary/Common.hlsl (14)


// Metal doesn't support high enough OpenGL version
#if defined(MIP_COUNT_SUPPORTED)
uint width, height, depth, mipCount;
width = height = depth = mipCount = 0;
tex.GetDimensions(width, height, depth, mipCount);
return mipCount;
uint mipLevel, width, height, mipCount;
mipLevel = width = height = mipCount = 0;
tex.GetDimensions(mipLevel, width, height, mipCount);
return mipCount;
#else
return 0;
#endif

// saturate(d) to clamp the output of the function to the [n, f] range.
// z = 1/c * (pow(c * (f - n) + 1, d) - 1) + n
// = 1/c * pow(c * (f - n) + 1, d) + n - 1/c
// = L * pow(M, d) + N
// = 1/c * exp2(d * log2(c * (f - n) + 1)) + (n - 1/c)
// = L * exp2(d * M) + N
// Use abs() to avoid the compiler warning.
return decodingParams.x * pow(abs(decodingParams.y), d) + decodingParams.z;
return decodingParams.x * exp2(d * decodingParams.y) + decodingParams.z;
}
// 'z' is the view-space Z position (linear depth).
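Restating the decode derivation from the comments above as a single formula (not part of the diff): with encoded value $d$, near and far planes $n$ and $f$, and encoding constant $c$, the logarithmic depth encoding inverts as

$$z = \frac{1}{c}\Bigl(\bigl(c\,(f-n)+1\bigr)^{d}-1\Bigr)+n = \underbrace{\tfrac{1}{c}}_{L}\,\exp_2\!\Bigl(d\,\underbrace{\log_2\bigl(c\,(f-n)+1\bigr)}_{M}\Bigr)+\underbrace{\Bigl(n-\tfrac{1}{c}\Bigr)}_{N},$$

so the new decodingParams pack $(L, M, N)$ and the changed return statement swaps the pow() evaluation for the cheaper exp2() form.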

ScriptableRenderPipeline/Core/CoreRP/Textures/TextureCache.cs (2)


{
case BuildTarget.iOS:
case BuildTarget.Android:
#if !UNITY_2018_2_OR_NEWER
#endif
case BuildTarget.WSAPlayer:
// Note: We return true on purpose even if Windows Store Apps are running on Desktop.
return true;

ScriptableRenderPipeline/Core/CoreRP/Textures/DepthBits.cs.meta (2)


fileFormatVersion: 2
guid: 32875dc85f620f54e817e767811b5c2e
guid: d063f57ca4b7cd346a14a1be20de65b4
MonoImporter:
externalObjects: {}
serializedVersion: 2

ScriptableRenderPipeline/Core/CoreRP/Utilities/CoreUtils.cs (49)


}
}
static Texture3D m_BlackVolumeTexture;
public static Texture3D blackVolumeTexture
{
get
{
if (m_BlackVolumeTexture == null)
{
Color[] colors = { Color.black };
m_BlackVolumeTexture = new Texture3D(1, 1, 1, TextureFormat.ARGB32, false);
m_BlackVolumeTexture.SetPixels(colors, 0);
m_BlackVolumeTexture.Apply();
}
return m_BlackVolumeTexture;
}
}
public static void ClearRenderTarget(CommandBuffer cmd, ClearFlag clearFlag, Color clearColor)
{
if (clearFlag != ClearFlag.None)

};
return mat;
}
public static bool HasFlag<T>(T mask, T flag) where T : IConvertible
{
return (mask.ToUInt32(null) & flag.ToUInt32(null)) != 0;
}
public static void SetKeyword(CommandBuffer cmd, string keyword, bool state)
{

{
string msg = "AR/VR devices are not supported, no rendering will occur";
DisplayUnsupportedMessage(msg);
}
}
// Returns 'true' if "Animated Materials" are enabled for the view associated with the given camera.
public static bool AreAnimatedMaterialsEnabled(Camera camera)

}
}
}
// TODO: how to handle reflection views? We don't know the parent window they are being rendered into,
// so we don't know whether we can animate them...
//

#endif
return animateMaterials;
}
public static bool IsSceneViewFogEnabled(Camera camera)
{
bool fogEnable = true;
#if UNITY_EDITOR
if (camera.cameraType == CameraType.SceneView)
{
fogEnable = false;
// Determine whether the "Animated Materials" checkbox is checked for the current view.
foreach (UnityEditor.SceneView sv in Resources.FindObjectsOfTypeAll(typeof(UnityEditor.SceneView)))
{
if (sv.camera == camera && sv.sceneViewState.showFog)
{
fogEnable = true;
break;
}
}
}
#endif
return fogEnable;
}
}
}

ScriptableRenderPipeline/Core/LICENSE.md (32)


**Unity Companion Package License v1.0 ("_License_")**
Copyright © 2017 Unity Technologies ApS ("**_Unity_**")
Unity hereby grants to you a worldwide, non-exclusive, no-charge, and royalty-free copyright license to reproduce, prepare derivative works of, publicly display, publicly perform, sublicense, and distribute the software that is made available with this License ("**_Software_**"), subject to the following terms and conditions:
1. *Unity Companion Use Only*. Exercise of the license granted herein is limited to exercise for the creation, use, and/or distribution of applications, software, or other content pursuant to a valid Unity development engine software license ("**_Engine License_**"). That means while use of the Software is not limited to use in the software licensed under the Engine License, the Software may not be used for any purpose other than the creation, use, and/or distribution of Engine License-dependent applications, software, or other content. No other exercise of the license granted herein is permitted.
1. *No Modification of Engine License*. Neither this License nor any exercise of the license granted herein modifies the Engine License in any way.
1. *Ownership & Grant Back to You*.
3.1. You own your content. In this License, "derivative works" means derivatives of the Software itself--works derived only from the Software by you under this License (for example, modifying the code of the Software itself to improve its efficacy); “derivative works” of the Software do not include, for example, games, apps, or content that you create using the Software. You keep all right, title, and interest to your own content.
3.2. Unity owns its content. While you keep all right, title, and interest to your own content per the above, as between Unity and you, Unity will own all right, title, and interest to all intellectual property rights (including patent, trademark, and copyright) in the Software and derivative works of the Software, and you hereby assign and agree to assign all such rights in those derivative works to Unity.
3.3. You have a license to those derivative works. Subject to this License, Unity grants to you the same worldwide, non-exclusive, no-charge, and royalty-free copyright license to derivative works of the Software you create as is granted to you for the Software under this License.
1. *Trademarks*. You are not granted any right or license under this License to use any trademarks, service marks, trade names, products names, or branding of Unity or its affiliates ("**_Trademarks_**"). Descriptive uses of Trademarks are permitted; see, for example, Unity’s Branding Usage Guidelines at [https://unity3d.com/public-relations/brand](https://unity3d.com/public-relations/brand).
1. *Notices & Third-Party Rights*. This License, including the copyright notice above, must be provided in all substantial portions of the Software and derivative works thereof (or, if that is impracticable, in any other location where such notices are customarily placed). Further, if the Software is accompanied by a Unity "third-party notices" or similar file, you acknowledge and agree that software identified in that file is governed by those separate license terms.
1. *DISCLAIMER, LIMITATION OF LIABILITY*. THE SOFTWARE AND ANY DERIVATIVE WORKS THEREOF IS PROVIDED ON AN "AS IS" BASIS, AND IS PROVIDED WITHOUT WARRANTY OF ANY KIND, WHETHER EXPRESS OR IMPLIED, INCLUDING ANY WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, AND/OR NONINFRINGEMENT. IN NO EVENT SHALL ANY COPYRIGHT HOLDER OR AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES (WHETHER DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL, INCLUDING PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES, LOSS OF USE, DATA, OR PROFITS, AND BUSINESS INTERRUPTION), OR OTHER LIABILITY WHATSOEVER, WHETHER IN AN ACTION OF CONTRACT, TORT, OR OTHERWISE, ARISING FROM OR OUT OF, OR IN CONNECTION WITH, THE SOFTWARE OR ANY DERIVATIVE WORKS THEREOF OR THE USE OF OR OTHER DEALINGS IN SAME, EVEN WHERE ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
1. *USE IS ACCEPTANCE and License Versions*. Your receipt and use of the Software constitutes your acceptance of this License and its terms and conditions. Software released by Unity under this License may be modified or updated and the License with it; upon any such modification or update, you will comply with the terms of the updated License for any use of any of the Software under the updated License.
1. *Use in Compliance with Law and Termination*. Your exercise of the license granted herein will at all times be in compliance with applicable law and will not infringe any proprietary rights (including intellectual property rights); this License will terminate immediately on any breach by you of this License.
Render Pipeline Core copyright © 2018 Unity Technologies ApS
1. *Severability*. If any provision of this License is held to be unenforceable or invalid, that provision will be enforced to the maximum extent possible and the other provisions will remain in full force and effect.
Licensed under the Unity Companion License for Unity-dependent projects--see [Unity Companion License](http://www.unity3d.com/legal/licenses/Unity_Companion_License).
1. *Governing Law and Venue*. This License is governed by and construed in accordance with the laws of Denmark, except for its conflict of laws rules; the United Nations Convention on Contracts for the International Sale of Goods will not apply. If you reside (or your principal place of business is) within the United States, you and Unity agree to submit to the personal and exclusive jurisdiction of and venue in the state and federal courts located in San Francisco County, California concerning any dispute arising out of this License ("**_Dispute_**"). If you reside (or your principal place of business is) outside the United States, you and Unity agree to submit to the personal and exclusive jurisdiction of and venue in the courts located in Copenhagen, Denmark concerning any Dispute.
Unless expressly provided otherwise, the Software under this license is made available strictly on an “AS IS” BASIS WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED. Please review the license for details on these and other terms and conditions.

70
ScriptableRenderPipeline/HDRenderPipeline/CHANGELOG.md


All notable changes to this package will be documented in this file.
## [2018.2 undecided]
### Improvements
- Add a shader variant stripper used when building a player. Saves shader compile time.
- Disable per-object culling that was executed in C++ in HD even though it was not used (optimization)
- Enable texture streaming debugging (was not working before 2018.2)
### Changed, Removals and deprecations
- Removed GlobalLightLoopSettings.maxPlanarReflectionProbes and instead use value of GlobalLightLoopSettings.planarReflectionProbeCacheSize
## [2018.1 undecided]
### Improvements
- Configure the volumetric lighting code path to be on by default
- Trigger a build exception when trying to build an unsupported platform
- Introduce the VolumetricLightingController component, which can (and should) be placed on the camera, and allows one to control the near and the far plane of the V-Buffer (volumetric "froxel" buffer) along with the depth distribution (from logarithmic to linear)
The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/)
and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html).
### Changed, Removals and deprecations
- Remove Resource folder of PreIntegratedFGD and add the resource to RenderPipeline Asset
- Default number of planar reflections changed from 4 to 2
## [0.1.6] - 2018-xx-yy
### Bug fixes
- Fix ConvertPhysicalLightIntensityToLightIntensity() function used when creating a light from script to match HDLightEditor behavior
- Fix numerical issues with the default value of mean free path of volumetric fog
- Fix the bug preventing decals from coexisting with density volumes
### Changelog starting
## [2018.1.0f2]
Started Changelog
### Improvements
- Screen Space Refraction projection model (Proxy raycasting, HiZ raymarching)
- Screen Space Refraction settings as volume component
- Added buffered frame history per camera
- Port Global Density Volumes to the Interpolation Volume System.
- Optimize ImportanceSampleLambert() to not require the tangent frame.
- Generalize SampleVBuffer() to handle different sampling and reconstruction methods.
- Improve the quality of volumetric lighting reprojection.
- Optimize Morton Order code in the Subsurface Scattering pass.
- Planar Reflection Probes support roughness (Gaussian convolution of the captured probe)
- Use an atlas instead of a texture array for cluster transparent decals
- Add a debug view to visualize the decal atlas
- Only store decal textures in the atlas if the decal is visible; debounce the out-of-memory decal atlas warning.
- Add manipulator gizmo on decal to improve authoring workflow
- Add a minimal StackLit material (work in progress, this version can be used as template to add new material)
### Changed, Removals and deprecations
- EnableShadowMask in FrameSettings (but shadowMaskSupport is still disabled by default)
- Forced Planar Probe update modes to (Realtime, Every Update, Mirror Camera)
- Removed Planar Probe mirror plane position and normal fields in inspector, always display mirror plane and normal gizmos
- Screen Space Refraction proxy model uses the proxy of the first environment light (Reflection probe/Planar probe) or the sky
- Moved RTHandle static methods to RTHandles
- Renamed RTHandle to RTHandleSystem.RTHandle
- Move code for PreIntegratedFGD (Lit.shader) into its dedicated folder so it can be shared with other materials
- Move code for LTCArea (Lit.shader) into its dedicated folder so it can be shared with other materials
### Bug fixes
- Fix fog flags in Scene view so they are now taken into account
- Fix sky in preview windows that was disappearing after loading a new level
- Fix numerical issues in IntersectRayAABB().
- Fix alpha blending of volumetric lighting with transparent objects.
- Fix the near plane of the V-Buffer causing out-of-bounds look-ups in the clustered data structure.
- Depth and color pyramid are properly computed and sampled when the camera renders inside a viewport of a RTHandle.
- Fix decal atlas debug view to work correctly when shadow atlas view is also enabled
## [2018.1.0b13]
...

67
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Camera/HDCamera.cs


HDAdditionalCameraData m_AdditionalCameraData;
BufferedRTHandleSystem m_HistoryRTSystem = new BufferedRTHandleSystem();
public HDCamera(Camera cam)
{
camera = cam;

// Unfortunately, sometimes (like in the HDCameraEditor) HDUtils.hdrpSettings can be null because of scripts that change the current pipeline...
m_msaaSamples = HDUtils.hdrpSettings != null ? HDUtils.hdrpSettings.msaaSampleCount : MSAASamples.None;
RTHandle.SetReferenceSize(m_ActualWidth, m_ActualHeight, frameSettings.enableMSAA, m_msaaSamples);
RTHandles.SetReferenceSize(m_ActualWidth, m_ActualHeight, frameSettings.enableMSAA, m_msaaSamples);
m_HistoryRTSystem.SetReferenceSize(m_ActualWidth, m_ActualHeight, frameSettings.enableMSAA, m_msaaSamples);
m_HistoryRTSystem.Swap();
int maxWidth = RTHandle.maxWidth;
int maxHeight = RTHandle.maxHeight;
int maxWidth = RTHandles.maxWidth;
int maxHeight = RTHandles.maxHeight;
m_CameraScaleBias.x = (float)m_ActualWidth / maxWidth;
m_CameraScaleBias.y = (float)m_ActualHeight / maxHeight;

// Warning: different views can use the same camera!
public long GetViewID()
{
if (camera.cameraType == CameraType.Game)
{
long viewID = camera.GetInstanceID();
// Make it positive.
viewID += (-(long)int.MinValue) + 1;
Debug.Assert(viewID > 0);
return viewID;
}
else
{
return 0;
}
long viewID = camera.GetInstanceID();
// Make it positive.
viewID += (-(long)int.MinValue) + 1;
return viewID;
}
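The offset applied in GetViewID, viewID += (-(long)int.MinValue) + 1, shifts any 32-bit instance ID into a strictly positive 64-bit range: int.MinValue (-2147483648) maps to 1 and int.MaxValue maps to 4294967296, so Debug.Assert(viewID > 0) always holds for game cameras. An isolated sketch of just that arithmetic (illustrative only):

// viewID is always >= 1 for any 32-bit instanceID, since the offset is 2147483649.
static long ToPositiveViewID(int instanceID)
{
    long viewID = instanceID;
    viewID += (-(long)int.MinValue) + 1;
    return viewID;
}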
public void Reset()

return hdcam;
}
public static void ClearAll()
{
foreach (var cam in s_Cameras)
cam.Value.ReleaseHistoryBuffer();
s_Cameras.Clear();
s_Cleanup.Clear();
}
// Look for any cameras that haven't been used in the last frame and remove them from the pool.
public static void CleanUnused()
{

{
if (kvp.Value.m_LastFrameActive != frameCheck)
if (kvp.Value.m_LastFrameActive < frameCheck)
{
var hdCam = s_Cameras[cam];
if (hdCam.m_HistoryRTSystem != null)
{
hdCam.m_HistoryRTSystem.Dispose();
hdCam.m_HistoryRTSystem = null;
}
}
s_Cleanup.Clear();
}

cmd.SetGlobalMatrixArray(HDShaderIDs._InvViewMatrixStereo, invViewStereo);
cmd.SetGlobalMatrixArray(HDShaderIDs._InvProjMatrixStereo, invProjStereo);
cmd.SetGlobalMatrixArray(HDShaderIDs._InvViewProjMatrixStereo, invViewProjStereo);
}
public RTHandleSystem.RTHandle GetPreviousFrameRT(int id)
{
return m_HistoryRTSystem.GetFrameRT(id, 1);
}
public RTHandleSystem.RTHandle GetCurrentFrameRT(int id)
{
return m_HistoryRTSystem.GetFrameRT(id, 0);
}
// Allocate the history buffer frames and return the current frame
public RTHandleSystem.RTHandle AllocHistoryFrameRT(int id, Func<string, int, RTHandleSystem, RTHandleSystem.RTHandle> allocator)
{
m_HistoryRTSystem.AllocBuffer(id, (rts, i) => allocator(camera.name, i, rts), 2);
return m_HistoryRTSystem.GetFrameRT(id, 0);
}
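Together, GetCurrentFrameRT, GetPreviousFrameRT and AllocHistoryFrameRT form the per-camera history API: AllocHistoryFrameRT registers a double-buffered target once, and after the Swap() in Update, frame 0 is the current target and frame 1 the previous one. A hedged usage sketch (hdCamera is an HDCamera instance; the id constant and the allocator body are illustrative, and the instance Alloc is assumed to take the same named arguments as the static RTHandles.Alloc calls elsewhere in this diff):

// Hypothetical history id; real ids live in HDCameraFrameHistoryType.
const int k_MyHistoryId = 100;

var current = hdCamera.AllocHistoryFrameRT(k_MyHistoryId,
    (name, frameIndex, rtHandleSystem) => rtHandleSystem.Alloc(
        Vector2.one, filterMode: FilterMode.Point,
        colorFormat: RenderTextureFormat.ARGBHalf, sRGB: false,
        name: string.Format("{0}_MyHistory{1}", name, frameIndex)));
var previous = hdCamera.GetPreviousFrameRT(k_MyHistoryId);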
void ReleaseHistoryBuffer()
{
m_HistoryRTSystem.ReleaseAll();
}
}
}

6
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Debug/DebugDisplay.cs


public void UpdateMaterials()
{
//if (mipMapDebugSettings.debugMipMapMode != 0)
// Texture.SetStreamingTextureMaterialDebugProperties();
#if UNITY_2018_2_OR_NEWER
if (mipMapDebugSettings.debugMipMapMode != 0)
Texture.SetStreamingTextureMaterialDebugProperties();
#endif
}
public bool DebugNeedsExposure()

14
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Decal/DecalSystem.cs


private Texture2DAtlas m_Atlas = null;
public bool m_AllocationSuccess = true;
public bool m_PrevAllocationSuccess = true;
public Texture2DAtlas Atlas
{

{
if (m_NumResults == 0)
return;
// only add if anything in this decal set is visible.
AddToTextureList(ref instance.m_TextureList);
int instanceCount = 0;
int batchCount = 0;
Matrix4x4[] decalToWorldBatch = null;

// updates textures, texture atlas indices and blend value
public void UpdateCachedMaterialData()
{
//instance.m_AllocationSuccess = true;
pair.Value.InitializeMaterialValues();
pair.Value.AddToTextureList(ref m_TextureList);
pair.Value.InitializeMaterialValues();
}
}

AddTexture(cmd, textureScaleBias);
}
if(!m_AllocationSuccess) // still failed to allocate, decal atlas size needs to increase
if(!m_AllocationSuccess && m_PrevAllocationSuccess) // still failed to allocate, decal atlas size needs to increase, debounce so that we don't spam the console with warnings
m_PrevAllocationSuccess = m_AllocationSuccess;
}
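The m_AllocationSuccess / m_PrevAllocationSuccess pair implements a simple debounce: the warning is emitted only on the frame where allocation flips from succeeding to failing, not on every frame while it keeps failing. The same pattern in isolation (minimal sketch, hypothetical names):

bool m_PrevSuccess = true;

void ReportAllocation(bool success)
{
    // Warn once per failure episode: only on the true -> false transition.
    if (!success && m_PrevSuccess)
        Debug.LogWarning("Decal atlas is out of space; increase the atlas size.");
    m_PrevSuccess = success;
}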
public void CreateDrawData()

{
using (new ProfilingSample(cmd, "Display Decal Atlas", CustomSamplerId.DisplayDebugDecalsAtlas.GetSampler()))
{
HDUtils.BlitQuad(cmd, Atlas.AtlasTexture, new Vector4(1,1,0,0), new Vector4(width / hdCamera.actualWidth, overlaySize / hdCamera.actualHeight, x / hdCamera.actualWidth, y / hdCamera.actualHeight), (int)debugDisplaySettings.decalsDebugSettings.m_MipLevel, true);
cmd.SetViewport(new Rect(x, y, overlaySize, overlaySize));
HDUtils.BlitQuad(cmd, Atlas.AtlasTexture, new Vector4(1, 1, 0 ,0), new Vector4(1, 1, 0, 0), (int)debugDisplaySettings.decalsDebugSettings.m_MipLevel, true);
HDUtils.NextOverlayCoord(ref x, ref y, overlaySize, overlaySize, hdCamera.actualWidth);
}
}

8
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/HDAssetFactory.cs


newAsset.opaqueAtmosphericScattering = Load<Shader>(HDRenderPipelinePath + "Sky/OpaqueAtmosphericScattering.shader");
newAsset.hdriSky = Load<Shader>(HDRenderPipelinePath + "Sky/HDRISky/HDRISky.shader");
newAsset.proceduralSky = Load<Shader>(HDRenderPipelinePath + "Sky/ProceduralSky/ProceduralSky.shader");
// Skybox/Cubemap is a builtin shader, must use Shader.Find to access it. It is fine because we are in the editor
newAsset.skyboxCubemap = Shader.Find("Skybox/Cubemap");
// Material
newAsset.preIntegratedFGD = Load<Shader>(HDRenderPipelinePath + "Material/PreIntegratedFGD.shader");
// Skybox/Cubemap is a builtin shader, must use Shader.Find to access it. It is fine because we are in the editor
newAsset.skyboxCubemap = Shader.Find("Skybox/Cubemap");
// Shadow
newAsset.shadowClearShader = Load<Shader>(CorePath + "Shadow/ShadowClear.shader");

31
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/Lighting/HDLightEditor.cs


public SerializedProperty directionalIntensity;
public SerializedProperty punctualIntensity;
public SerializedProperty areaIntensity;
public SerializedProperty enableSpotReflector;
public SerializedProperty spotInnerPercent;
public SerializedProperty lightDimmer;
public SerializedProperty fadeDistance;

public SerializedProperty spotLightShape;
public SerializedProperty enableSpotReflector;
public SerializedProperty shapeWidth;
public SerializedProperty shapeHeight;
public SerializedProperty aspectRatio;

directionalIntensity = o.Find(x => x.directionalIntensity),
punctualIntensity = o.Find(x => x.punctualIntensity),
areaIntensity = o.Find(x => x.areaIntensity),
enableSpotReflector = o.Find(x => x.enableSpotReflector),
spotInnerPercent = o.Find(x => x.m_InnerSpotPercent),
lightDimmer = o.Find(x => x.lightDimmer),
fadeDistance = o.Find(x => x.fadeDistance),

spotLightShape = o.Find(x => x.spotLightShape),
enableSpotReflector = o.Find(x => x.enableSpotReflector),
spotLightShape = o.Find(x => x.spotLightShape),
shapeWidth = o.Find(x => x.shapeWidth),
shapeHeight = o.Find(x => x.shapeHeight),
aspectRatio = o.Find(x => x.aspectRatio),

if (spotLightShape == SpotLightShape.Cone)
{
settings.DrawSpotAngle();
EditorGUILayout.PropertyField(m_AdditionalLightData.enableSpotReflector, s_Styles.enableSpotReflector);
EditorGUILayout.Slider(m_AdditionalLightData.spotInnerPercent, 0f, 100f, s_Styles.spotInnerPercent);
}
// TODO : replace with angle and ratio

EditorGUILayout.PropertyField(m_AdditionalLightData.enableSpotReflector, s_Styles.enableSpotReflector);
EditorGUILayout.Slider(m_AdditionalLightData.aspectRatio, 0.05f, 20.0f, s_Styles.aspectRatioPyramid);
}
else if (spotLightShape == SpotLightShape.Box)

}
else if (spotLightShape == SpotLightShape.Pyramid)
{
var aspectRatio = m_AdditionalLightData.aspectRatio.floatValue;
// Since the smallest angle is equal to the FOV, and we don't care about the angle order, simply make sure the aspect ratio is > 1
if ( aspectRatio < 1f ) aspectRatio = 1f/aspectRatio;
var angleA = settings.spotAngle.floatValue * Mathf.Deg2Rad;
var halfAngle = angleA * 0.5f; // half of the smallest angle
var length = Mathf.Tan(halfAngle); // half length of the smallest side of the rectangle
length *= aspectRatio; // half length of the biggest side of the rectangle
halfAngle = Mathf.Atan(length); // half of the biggest angle
var angleB = halfAngle * 2f;
float angleA, angleB;
LightUtils.CalculateAnglesForPyramid( m_AdditionalLightData.aspectRatio.floatValue, settings.spotAngle.floatValue,
out angleA, out angleB);
settings.intensity.floatValue = LightUtils.ConvertFrustrumLightIntensity(m_AdditionalLightData.punctualIntensity.floatValue, angleA, angleB );
}
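The replaced block shows the math that LightUtils.CalculateAnglesForPyramid now wraps: the spot angle is the smaller of the pyramid's two apex angles, and the larger one is recovered by scaling the tangent of the half angle by the aspect ratio. A standalone sketch of that derivation (hypothetical helper mirroring the old inline code, pure math only):

using System;

static class PyramidAngles
{
    // spotAngleDegrees is the smallest apex angle; both outputs are in radians.
    public static void Calculate(float aspectRatio, float spotAngleDegrees, out float angleA, out float angleB)
    {
        // Only the ratio of the two sides matters, so force aspectRatio >= 1.
        if (aspectRatio < 1f)
            aspectRatio = 1f / aspectRatio;

        angleA = spotAngleDegrees * (float)Math.PI / 180f;   // smallest apex angle
        float halfSide = (float)Math.Tan(angleA * 0.5f);     // half length of the smallest side
        halfSide *= aspectRatio;                             // half length of the biggest side
        angleB = 2f * (float)Math.Atan(halfSide);            // biggest apex angle
    }
}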

case LightShape.Point:
case LightShape.Spot:
EditorGUILayout.PropertyField(m_AdditionalLightData.punctualIntensity, s_Styles.punctualIntensity);
// Only display reflector option if it makes sense
if (m_LightShape == LightShape.Spot)
{
var spotLightShape = (SpotLightShape)m_AdditionalLightData.spotLightShape.enumValueIndex;
if (spotLightShape == SpotLightShape.Cone || spotLightShape == SpotLightShape.Pyramid)
EditorGUILayout.PropertyField(m_AdditionalLightData.enableSpotReflector, s_Styles.enableSpotReflector);
}
break;
case LightShape.Rectangle:

550
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/Material/StackLit/StackLitUI.cs


using System;
class StackLitGUI : BaseUnlitGUI
class StackLitGUI : BaseMaterialGUI
protected static class Styles
protected static class StylesStackLit
public static string InputsText = "Inputs";
public static GUIContent doubleSidedNormalModeText = new GUIContent("Normal mode", "This will modify the normal based on the selected mode. Mirror: Mirror the normal with the vertex normal plane, Flip: Flip the normal");
public static GUIContent useLocalPlanarMapping = new GUIContent("Use Local Planar Mapping", "Use local space for planar/triplanar mapping instead of world space");
};
public static GUIContent baseColorText = new GUIContent("Base Color + Opacity", "Albedo (RGB) and Opacity (A)");
#region Strings
protected const string k_DoubleSidedNormalMode = "_DoubleSidedNormalMode";
// Scalar scale factors for metallic and the two lobes' perceptual smoothness.
public static GUIContent metallicText = new GUIContent("Metallic", "Metallic scale factor");
public static GUIContent smoothnessAText = new GUIContent("Primary Lobe Smoothness", "Primary lobe smoothness scale factor");
public static GUIContent smoothnessBText = new GUIContent("Secondary Lobe Smoothness", "Secondary lobe smoothness scale factor");
public static GUIContent lobeMixText = new GUIContent("Lobe Mixing", "Lobe mixing factor");
protected const string k_UVBase = "_UVBase";
public static GUIContent smoothnessARemappingText = new GUIContent("Primary Lobe Smoothness Remapping", "Primary lobe smoothness remapping");
public static GUIContent smoothnessBRemappingText = new GUIContent("Secondary Lobe Smoothness Remapping", "Secondary lobe smoothness remapping");
public static GUIContent maskMapASText = new GUIContent("Primary mask map - M(R), AO(G), D(B), S1(A)", "Primary mask map");
public static GUIContent maskMapBSText = new GUIContent("Secondary mask Map - (R), (G), (B), S2(A)", "Secondary mask map");
// Base
protected const string k_BaseColor = "_BaseColor";
protected const string k_BaseColorMap = "_BaseColorMap";
protected const string k_BaseColorMapUV = "_BaseColorMapUV";
protected const string k_Metallic = "_Metallic";
protected const string k_MetallicMap = "_MetallicMap";
protected const string k_MetallicMapUV = "_MetallicMapUV";
protected const string k_MetallicRemap = "_MetallicRemap";
protected const string k_MetallicRemapInverted = "_MetallicRemapInverted";
protected const string k_MetallicRange = "_MetallicRange";
public static GUIContent normalMapText = new GUIContent("Normal Map", "Normal Map (BC7/BC5/DXT5(nm))");
protected const string k_Smoothness1 = "_SmoothnessA";
protected const string k_Smoothness1Map = "_SmoothnessAMap";
protected const string k_Smoothness1MapUV = "_SmoothnessAMapUV";
protected const string k_Smoothness1Remap = "_SmoothnessARemap";
protected const string k_Smoothness1RemapInverted = "_SmoothnessARemapInverted";
protected const string k_Smoothness1Range = "_SmoothnessARange";
public static GUIContent UVBaseMappingText = new GUIContent("UV mapping usage", "");
protected const string k_NormalMap = "_NormalMap";
protected const string k_NormalMapUV = "_NormalMapUV";
protected const string k_NormalScale = "_NormalScale";
public static GUIContent anisotropyText = new GUIContent("Anisotropy", "Anisotropy scale factor");
public static GUIContent coatEnableText = new GUIContent("Clear Coat Enable", "Clear Coat Enable");
public static GUIContent coatSmoothnessText = new GUIContent("Clear Coat Smoothness", "Clear Coat Smoothness");
public static GUIContent coatIorText = new GUIContent("Clear Coat IOR", "Clear Coat IOR");
public static GUIContent coatThicknessText = new GUIContent("Clear Coat Thickness", "Clear Coat Thickness");
public static GUIContent coatExtinctionText = new GUIContent("Clear Coat Extinction", "Clear Coat Beer-Lambert Extinction");
// Emissive
protected const string k_EmissiveColor = "_EmissiveColor";
protected const string k_EmissiveColorMap = "_EmissiveColorMap";
protected const string k_EmissiveColorMapUV = "_EmissiveColorMapUV";
protected const string k_EmissiveIntensity = "_EmissiveIntensity";
protected const string k_AlbedoAffectEmissive = "_AlbedoAffectEmissive";
// Emissive
public static string emissiveLabelText = "Emissive Inputs";
public static GUIContent emissiveText = new GUIContent("Emissive Color", "Emissive");
public static GUIContent emissiveIntensityText = new GUIContent("Emissive Intensity", "Emissive");
public static GUIContent albedoAffectEmissiveText = new GUIContent("Albedo Affect Emissive", "Specifies whether or not the emissive color is multiplied by the albedo.");
// SSS
protected const string k_DiffusionProfile = "_DiffusionProfile";
protected const string k_SubsurfaceMask = "_SubsurfaceMask";
protected const string k_SubsurfaceMaskMap = "_SubsurfaceMaskMap";
protected const string k_SubsurfaceMaskMapUV = "_SubsurfaceMaskMapUV";
protected const string k_SubsurfaceMaskRemap = "_SubsurfaceMaskRemap";
protected const string k_SubsurfaceMaskRemapInverted = "_SubsurfaceMaskRemapInverted";
protected const string k_SubsurfaceMaskRange = "_SubsurfaceMaskRange";
}
// Translucency
protected const string k_Thickness = "_Thickness";
protected const string k_ThicknessMap = "_ThicknessMap";
protected const string k_ThicknessMapUV = "_ThicknessMapUV";
protected const string k_ThicknessRemap = "_ThicknessRemap";
protected const string k_ThicknessRemapInverted = "_ThicknessRemapInverted";
protected const string k_ThicknessRange = "_ThicknessRange";
public enum DoubleSidedNormalMode
{
Flip,
Mirror,
None
}
// Second Lobe.
protected const string k_Smoothness2 = "_SmoothnessB";
protected const string k_Smoothness2Map = "_SmoothnessBMap";
protected const string k_Smoothness2MapUV = "_SmoothnessBMapUV";
protected const string k_Smoothness2Remap = "_SmoothnessBRemap";
protected const string k_Smoothness2RemapInverted = "_SmoothnessBRemapInverted";
protected const string k_Smoothness2Range = "_SmoothnessBRange";
protected const string k_LobeMix = "_LobeMix";
public enum UVBaseMapping
{
UV0,
UV1,
UV2,
UV3,
Planar,
Triplanar
}
//// transparency params
//protected MaterialProperty transmissionEnable = null;
//protected const string kTransmissionEnable = "_TransmissionEnable";
protected MaterialProperty doubleSidedNormalMode = null;
protected const string kDoubleSidedNormalMode = "_DoubleSidedNormalMode";
//protected MaterialProperty ior = null;
//protected const string kIor = "_Ior";
//protected MaterialProperty transmittanceColor = null;
//protected const string kTransmittanceColor = "_TransmittanceColor";
//protected MaterialProperty transmittanceColorMap = null;
//protected const string kTransmittanceColorMap = "_TransmittanceColorMap";
//protected MaterialProperty atDistance = null;
//protected const string kATDistance = "_ATDistance";
//protected MaterialProperty thicknessMultiplier = null;
//protected const string kThicknessMultiplier = "_ThicknessMultiplier";
//protected MaterialProperty refractionModel = null;
//protected const string kRefractionModel = "_RefractionModel";
//protected MaterialProperty refractionSSRayModel = null;
//protected const string kRefractionSSRayModel = "_RefractionSSRayModel";
#endregion
// Example UV mapping mask. TODO: could have one for multiple maps, and a channel mask for scalars
protected MaterialProperty UVBase = null;
protected const string kUVBase = "_UVBase";
protected MaterialProperty UVMappingMask = null;
protected const string kUVMappingMask = "_UVMappingMask"; // hidden, see enum material property drawer in .shader
// Add the properties into an array.
private readonly GroupProperty _baseMaterialProperties = null;
private readonly GroupProperty _materialProperties = null;
public StackLitGUI()
{
_baseMaterialProperties = new GroupProperty(this, "_BaseMaterial", new BaseProperty[]
{
// JFFTODO: Find the proper condition, and proper way to display this.
new Property(this, k_DoubleSidedNormalMode, "Normal mode", "This will modify the normal based on the selected mode. Mirror: Mirror the normal with the vertex normal plane, Flip: Flip the normal.", false),
});
protected MaterialProperty baseColor = null;
protected const string kBaseColor = "_BaseColor";
protected MaterialProperty baseColorMap = null;
protected const string kBaseColorMap = "_BaseColorMap";
_materialProperties = new GroupProperty(this, "_Material", new BaseProperty[]
{
new GroupProperty(this, "_Standard", "Standard", new BaseProperty[]
{
new TextureProperty(this, k_BaseColorMap, k_BaseColor, "Base Color + Opacity", "Albedo (RGB) and Opacity (A)", true, false),
new TextureProperty(this, k_MetallicMap, k_Metallic, "Metallic", "Metallic", false, false),
new TextureProperty(this, k_Smoothness1Map, k_Smoothness1, "Smoothness", "Smoothness", false, false),
// TODO: Special case for normal maps.
new TextureProperty(this, k_NormalMap, k_NormalScale, "Normal TODO", "Normal Map", false, false, true),
protected const string kMetallic = "_Metallic";
protected MaterialProperty metallic = null;
//new TextureProperty(this, k_BaseColorMap, k_BaseColor, "Dielectric IoR", "Index of Refraction for Dielectric", false),
}),
// Primary lobe smoothness
protected MaterialProperty smoothnessA = null;
protected const string kSmoothnessA = "_SmoothnessA";
protected MaterialProperty smoothnessARemapMin = null;
protected const string kSmoothnessARemapMin = "_SmoothnessARemapMin";
protected MaterialProperty smoothnessARemapMax = null;
protected const string kSmoothnessARemapMax = "_SmoothnessARemapMax";
protected const string klobeMix = "_LobeMix";
protected MaterialProperty lobeMix = null;
new GroupProperty(this, "_Emissive", "Emissive", new BaseProperty[]
{
new TextureProperty(this, k_EmissiveColorMap, k_EmissiveColor, "Emissive Color", "Emissive", true, false),
new Property(this, k_EmissiveIntensity, "Emissive Intensity", "Emissive", false),
new Property(this, k_AlbedoAffectEmissive, "Albedo Affect Emissive", "Specifies whether or not the emissive color is multiplied by the albedo.", false),
}),
// Secondary lobe smoothness
protected MaterialProperty smoothnessB = null;
protected const string kSmoothnessB = "_SmoothnessB";
protected MaterialProperty smoothnessBRemapMin = null;
protected const string kSmoothnessBRemapMin = "_SmoothnessBRemapMin";
protected MaterialProperty smoothnessBRemapMax = null;
protected const string kSmoothnessBRemapMax = "_SmoothnessBRemapMax";
//new GroupProperty(this, "_Coat", "Coat", new BaseProperty[]
//{
// new TextureProperty(this, k_BaseColorMap, k_BaseColor, "SmoothnessCoat", "smoothnessCoat", false, false),
// new TextureProperty(this, k_BaseColorMap, k_BaseColor, "Index Of Refraction", "iorCoat", false, false),
// new TextureProperty(this, k_BaseColorMap, k_BaseColor, "Normal", "normal Coat", false, false),
//}),
new GroupProperty(this, "_SSS", "Sub-Surface Scattering", new BaseProperty[]
{
new DiffusionProfileProperty(this, k_DiffusionProfile, "Diffusion Profile", "A profile determines the shape of the SSS/transmission filter.", false),
new TextureProperty(this, k_SubsurfaceMaskMap, k_SubsurfaceMask, "Subsurface mask map (R)", "Determines the strength of the subsurface scattering effect.", false, false),
}/*, _ => _materialId == MaterialId.SubSurfaceScattering*/),
// Two mask maps for the two smoothnesses
protected MaterialProperty maskMapA = null;
protected const string kMaskMapA = "_MaskMapA";
protected MaterialProperty maskMapB = null;
protected const string kMaskMapB = "_MaskMapB";
new GroupProperty(this, "_Lobe2", "Second Specular Lobe", new BaseProperty[]
{
new TextureProperty(this, k_Smoothness2Map, k_Smoothness2, "Smoothness2", "Smoothness2", false, false),
new Property(this, k_LobeMix, "Lobe Mix", "Lobe Mix", false),
}),
protected MaterialProperty normalScale = null;
protected const string kNormalScale = "_NormalScale";
protected MaterialProperty normalMap = null;
protected const string kNormalMap = "_NormalMap";
//new GroupProperty(this, "_Anisotropy", "Anisotropy", new BaseProperty[]
//{
// new TextureProperty(this, k_BaseColorMap, k_BaseColor, "Anisotropy Strength", "anisotropy strength", false),
// new TextureProperty(this, k_BaseColorMap, k_BaseColor, "Rotation", "rotation", false),
// new TextureProperty(this, k_BaseColorMap, k_BaseColor, "Tangent", "tangent", false),
//}),
// Anisotropy
protected MaterialProperty anisotropy = null;
protected const string kAnisotropy = "_Anisotropy";
new GroupProperty(this, "_Transmission", "Transmission", new BaseProperty[]
{
new DiffusionProfileProperty(this, k_DiffusionProfile, "Diffusion Profile", "A profile determines the shape of the SSS/transmission filter.", false),
new TextureProperty(this, k_ThicknessMap, k_Thickness, "Thickness", "If subsurface scattering is enabled, low values allow some light to be transmitted through the object.", false),
}),
// Clear Coat
protected MaterialProperty coatEnable = null;
protected const string kCoatEnable = "_CoatEnable";
//new GroupProperty(this, "_Iridescence", "Iridescence", new BaseProperty[]
//{
// new TextureProperty(this, k_BaseColorMap, k_BaseColor, "Index of Refraction", "Index of Refraction for Iridescence", false),
// new TextureProperty(this, k_BaseColorMap, k_BaseColor, "Thickness", "Thickness", false),
//}),
protected MaterialProperty coatSmoothness = null;
protected const string kCoatSmoothness = "_CoatSmoothness";
protected MaterialProperty coatIor = null;
protected const string kCoatIor = "_CoatIor";
protected MaterialProperty coatThickness = null;
protected const string kCoatThickness = "_CoatThickness";
protected MaterialProperty coatExtinction = null;
protected const string kCoatExtinction = "_CoatExtinction";
//new GroupProperty(this, "_Glint", "Glint", new BaseProperty[]
//{
// new TextureProperty(this, k_BaseColorMap, k_BaseColor, "Density", "Density:", false),
// new TextureProperty(this, k_BaseColorMap, k_BaseColor, "Tint", "Tint", false),
//}),
});
}
protected MaterialProperty emissiveColor = null;
protected const string kEmissiveColor = "_EmissiveColor";
protected MaterialProperty emissiveColorMap = null;
protected const string kEmissiveColorMap = "_EmissiveColorMap";
protected MaterialProperty emissiveIntensity = null;
protected const string kEmissiveIntensity = "_EmissiveIntensity";
protected MaterialProperty albedoAffectEmissive = null;
protected const string kAlbedoAffectEmissive = "_AlbedoAffectEmissive";
protected override bool ShouldEmissionBeEnabled(Material material)
{
return material.GetFloat(k_EmissiveIntensity) > 0.0f;
}
doubleSidedNormalMode = FindProperty(kDoubleSidedNormalMode, props);
_baseMaterialProperties.OnFindProperty(props);
override protected void FindMaterialProperties(MaterialProperty[] props)
protected override void FindMaterialProperties(MaterialProperty[] props)
UVBase = FindProperty(kUVBase, props);
UVMappingMask = FindProperty(kUVMappingMask, props);
baseColor = FindProperty(kBaseColor, props);
baseColorMap = FindProperty(kBaseColorMap, props);
metallic = FindProperty(kMetallic, props);
smoothnessA = FindProperty(kSmoothnessA, props);
smoothnessARemapMin = FindProperty(kSmoothnessARemapMin, props);
smoothnessARemapMax = FindProperty(kSmoothnessARemapMax, props);
smoothnessB = FindProperty(kSmoothnessB, props);
smoothnessBRemapMin = FindProperty(kSmoothnessBRemapMin, props);
smoothnessBRemapMax = FindProperty(kSmoothnessBRemapMax, props);
lobeMix = FindProperty(klobeMix, props);
maskMapA = FindProperty(kMaskMapA, props);
maskMapB = FindProperty(kMaskMapB, props);
normalMap = FindProperty(kNormalMap, props);
normalScale = FindProperty(kNormalScale, props);
anisotropy = FindProperty(kAnisotropy, props);
// Clear Coat
coatEnable = FindProperty(kCoatEnable, props);
coatSmoothness = FindProperty(kCoatSmoothness, props);
coatIor = FindProperty(kCoatIor, props);
coatThickness = FindProperty(kCoatThickness, props);
coatExtinction = FindProperty(kCoatExtinction, props);
emissiveColor = FindProperty(kEmissiveColor, props);
emissiveColorMap = FindProperty(kEmissiveColorMap, props);
emissiveIntensity = FindProperty(kEmissiveIntensity, props);
albedoAffectEmissive = FindProperty(kAlbedoAffectEmissive, props);
//base.FindMaterialProperties(props);
_materialProperties.OnFindProperty(props);
EditorGUI.indentLevel++;
// This follows the double sided option, see BaseUnlitUI.BaseMaterialPropertiesGUI()
// Don't put anything between base.BaseMaterialPropertiesGUI(); above and this:
if (doubleSidedEnable.floatValue > 0.0f)
{
EditorGUI.indentLevel++;
m_MaterialEditor.ShaderProperty(doubleSidedNormalMode, Styles.doubleSidedNormalModeText);
EditorGUI.indentLevel--;
}
//TODO: m_MaterialEditor.ShaderProperty(enableMotionVectorForVertexAnimation, StylesBaseUnlit.enableMotionVectorForVertexAnimationText);
//refs to this ?
EditorGUI.indentLevel--;
_baseMaterialProperties.OnGUI();
EditorGUILayout.LabelField(Styles.InputsText, EditorStyles.boldLabel);
EditorGUI.indentLevel++;
m_MaterialEditor.TexturePropertySingleLine(Styles.baseColorText, baseColorMap, baseColor);
m_MaterialEditor.ShaderProperty(metallic, Styles.metallicText);
// maskMaps and smoothness rescaling controls:
if(maskMapA.textureValue == null)
{
m_MaterialEditor.ShaderProperty(smoothnessA, Styles.smoothnessAText);
}
else
{
float remapMin = smoothnessARemapMin.floatValue;
float remapMax = smoothnessARemapMax.floatValue;
EditorGUI.BeginChangeCheck();
EditorGUILayout.MinMaxSlider(Styles.smoothnessARemappingText, ref remapMin, ref remapMax, 0.0f, 1.0f);
if (EditorGUI.EndChangeCheck())
{
smoothnessARemapMin.floatValue = remapMin;
smoothnessARemapMax.floatValue = remapMax;
}
}
if(maskMapB.textureValue == null)
{
m_MaterialEditor.ShaderProperty(smoothnessB, Styles.smoothnessBText);
}
else
{
float remapMin = smoothnessBRemapMin.floatValue;
float remapMax = smoothnessBRemapMax.floatValue;
EditorGUI.BeginChangeCheck();
EditorGUILayout.MinMaxSlider(Styles.smoothnessBRemappingText, ref remapMin, ref remapMax, 0.0f, 1.0f);
if (EditorGUI.EndChangeCheck())
{
smoothnessBRemapMin.floatValue = remapMin;
smoothnessBRemapMax.floatValue = remapMax;
}
}
m_MaterialEditor.ShaderProperty(lobeMix, Styles.lobeMixText);
m_MaterialEditor.TexturePropertySingleLine(Styles.maskMapASText, maskMapA);
m_MaterialEditor.TexturePropertySingleLine(Styles.maskMapBSText, maskMapB);
// Normal map:
m_MaterialEditor.TexturePropertySingleLine(Styles.normalMapText, normalMap, normalScale);
m_MaterialEditor.ShaderProperty(anisotropy, Styles.anisotropyText);
// Clear Coat
m_MaterialEditor.ShaderProperty(coatEnable, Styles.coatEnableText);
m_MaterialEditor.ShaderProperty(coatSmoothness, Styles.coatSmoothnessText);
m_MaterialEditor.ShaderProperty(coatIor, Styles.coatIorText);
m_MaterialEditor.ShaderProperty(coatThickness, Styles.coatThicknessText);
m_MaterialEditor.ShaderProperty(coatExtinction, Styles.coatExtinctionText);
// UV Mapping:
EditorGUILayout.Space();
EditorGUI.BeginChangeCheck(); // UV mapping selection
m_MaterialEditor.ShaderProperty(UVBase, Styles.UVBaseMappingText);
UVBaseMapping uvBaseMapping = (UVBaseMapping)UVBase.floatValue;
float X, Y, Z, W;
X = (uvBaseMapping == UVBaseMapping.UV0) ? 1.0f : 0.0f;
Y = (uvBaseMapping == UVBaseMapping.UV1) ? 1.0f : 0.0f;
Z = (uvBaseMapping == UVBaseMapping.UV2) ? 1.0f : 0.0f;
W = (uvBaseMapping == UVBaseMapping.UV3) ? 1.0f : 0.0f;
UVMappingMask.colorValue = new Color(X, Y, Z, W);
//TODO:
//if ((uvBaseMapping == UVBaseMapping.Planar) || (uvBaseMapping == UVBaseMapping.Triplanar))
//if (GUILayout.Button("Generate All Properties"))
// m_MaterialEditor.ShaderProperty(TexWorldScale, Styles.texWorldScaleText);
// Debug.Log(_materialProperties.ToShaderPropertiesStringInternal());
m_MaterialEditor.TextureScaleOffsetProperty(baseColorMap);
if (EditorGUI.EndChangeCheck()) // ...UV mapping selection
{
}
EditorGUI.indentLevel--; // inputs
EditorGUILayout.Space();
// Surface type:
var surfaceTypeValue = (SurfaceType)surfaceType.floatValue;
if (surfaceTypeValue == SurfaceType.Transparent)
{
EditorGUILayout.Space();
EditorGUILayout.LabelField(StylesBaseUnlit.TransparencyInputsText, EditorStyles.boldLabel);
++EditorGUI.indentLevel;
DoDistortionInputsGUI();
--EditorGUI.indentLevel;
}
// TODO: see DoEmissiveGUI( ) in LitUI.cs: custom uvmapping for emissive
EditorGUILayout.Space();
EditorGUILayout.LabelField(Styles.emissiveLabelText, EditorStyles.boldLabel);
EditorGUI.indentLevel++;
m_MaterialEditor.TexturePropertySingleLine(Styles.emissiveText, emissiveColorMap, emissiveColor);
m_MaterialEditor.ShaderProperty(emissiveIntensity, Styles.emissiveIntensityText);
m_MaterialEditor.ShaderProperty(albedoAffectEmissive, Styles.albedoAffectEmissiveText);
EditorGUI.indentLevel--;
_materialProperties.OnGUI();
}
protected override void MaterialPropertiesAdvanceGUI(Material material)

protected override void VertexAnimationPropertiesGUI()
{
protected override bool ShouldEmissionBeEnabled(Material mat)
protected override void SetupMaterialKeywordsAndPassInternal(Material material)
return mat.GetFloat(kEmissiveIntensity) > 0.0f;
SetupMaterialKeywordsAndPass(material);
protected override void SetupMaterialKeywordsAndPassInternal(Material material)
protected static void SetupTextureMaterialProperty(Material material, string basePropertyName)
SetupMaterialKeywordsAndPass(material);
string useMapPropertyName = basePropertyName + "UseMap";
string mapPropertyName = basePropertyName + "Map";
string remapPropertyName = basePropertyName + "Remap";
string invertPropertyName = basePropertyName + "RemapInverted";
string rangePropertyName = basePropertyName + "Range";
string channelPropertyName = basePropertyName + "MapChannel";
string channelMaskPropertyName = basePropertyName + "MapChannelMask";
if (material.GetTexture(mapPropertyName))
{
Vector4 rangeVector = material.GetVector(remapPropertyName);
if (material.GetFloat(invertPropertyName) > 0.0f)
{
float s = rangeVector.x;
rangeVector.x = rangeVector.y;
rangeVector.y = s;
}
material.SetFloat(useMapPropertyName, 1.0f);
material.SetVector(rangePropertyName, rangeVector);
int channel = (int)material.GetFloat(channelPropertyName);
switch (channel)
{
case 0:
material.SetVector(channelMaskPropertyName, new Vector4(1.0f, 0.0f, 0.0f, 0.0f));
break;
case 1:
material.SetVector(channelMaskPropertyName, new Vector4(0.0f, 1.0f, 0.0f, 0.0f));
break;
case 2:
material.SetVector(channelMaskPropertyName, new Vector4(0.0f, 0.0f, 1.0f, 0.0f));
break;
case 3:
material.SetVector(channelMaskPropertyName, new Vector4(0.0f, 0.0f, 0.0f, 1.0f));
break;
}
}
else
{
material.SetFloat(useMapPropertyName, 0.0f);
material.SetVector(rangePropertyName, new Vector4(0.0f, 1.0f, 0.0f, 0.0f));
material.SetVector(channelMaskPropertyName, new Vector4(1.0f, 0.0f, 0.0f, 0.0f));
}
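SetupTextureMaterialProperty relies purely on a naming convention: for a base property such as "_Metallic" it reads and writes "_MetallicUseMap", "_MetallicMap", "_MetallicRemap", "_MetallicRemapInverted", "_MetallicRange", "_MetallicMapChannel" and "_MetallicMapChannelMask", matching the k_* constants declared earlier in this file. A small sketch of just the name derivation and the channel-to-mask table (hypothetical helpers, no Material access):

// Property names derived from a base name, in the order the method above uses them.
static string[] DerivedPropertyNames(string baseName)
{
    return new[]
    {
        baseName + "UseMap", baseName + "Map", baseName + "Remap",
        baseName + "RemapInverted", baseName + "Range",
        baseName + "MapChannel", baseName + "MapChannelMask"
    };
}

// Channel index -> one-hot mask, matching the switch above (0 = R, 1 = G, 2 = B, 3 = A).
static Vector4 ChannelMask(int channel)
{
    return new Vector4(channel == 0 ? 1f : 0f, channel == 1 ? 1f : 0f,
                       channel == 2 ? 1f : 0f, channel == 3 ? 1f : 0f);
}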
static public void SetupMaterialKeywordsAndPass(Material material)
public static void SetupMaterialKeywordsAndPass(Material material)
{
//TODO see BaseLitUI.cs:SetupBaseLitKeywords (stencil etc)
SetupBaseUnlitKeywords(material);

if (doubleSidedEnable)
{
DoubleSidedNormalMode doubleSidedNormalMode = (DoubleSidedNormalMode)material.GetFloat(kDoubleSidedNormalMode);
BaseLitGUI.DoubleSidedNormalMode doubleSidedNormalMode = (BaseLitGUI.DoubleSidedNormalMode)material.GetFloat(k_DoubleSidedNormalMode);
case DoubleSidedNormalMode.Mirror: // Mirror mode (in tangent space)
case BaseLitGUI.DoubleSidedNormalMode.Mirror: // Mirror mode (in tangent space)
case DoubleSidedNormalMode.Flip: // Flip mode (in tangent space)
case BaseLitGUI.DoubleSidedNormalMode.Flip: // Flip mode (in tangent space)
case DoubleSidedNormalMode.None: // None mode (in tangent space)
case BaseLitGUI.DoubleSidedNormalMode.None: // None mode (in tangent space)
//NOTE: For SSS in forward and split lighting, obviously we don't have a gbuffer pass,
//TODO: stencil state, displacement, wind, depthoffset, tesselation
//TODO: stencil state, displacement, wind, depthoffset, tessellation
CoreUtils.SetKeyword(material, "_NORMALMAP", material.GetTexture(kNormalMap));
CoreUtils.SetKeyword(material, "_MASKMAPA", material.GetTexture(kMaskMapA));
CoreUtils.SetKeyword(material, "_MASKMAPB", material.GetTexture(kMaskMapB));
CoreUtils.SetKeyword(material, "_NORMALMAP", material.GetTexture(k_NormalMap));
bool needUV2 = (UVBaseMapping)material.GetFloat(kUVBase) == UVBaseMapping.UV2;
bool needUV3 = (UVBaseMapping)material.GetFloat(kUVBase) == UVBaseMapping.UV3;
SetupTextureMaterialProperty(material, k_Metallic);
SetupTextureMaterialProperty(material, k_Smoothness1);
SetupTextureMaterialProperty(material, k_Smoothness2);
SetupTextureMaterialProperty(material, k_SubsurfaceMask);
SetupTextureMaterialProperty(material, k_Thickness);
if (needUV3)
// Check if we are using specific UVs.
TextureProperty.UVMapping[] uvIndices = new[]
material.DisableKeyword("_REQUIRE_UV2");
material.EnableKeyword("_REQUIRE_UV3");
}
else if (needUV2)
{
material.EnableKeyword("_REQUIRE_UV2");
material.DisableKeyword("_REQUIRE_UV3");
}
else
(TextureProperty.UVMapping)material.GetFloat(k_BaseColorMapUV),
(TextureProperty.UVMapping)material.GetFloat(k_MetallicMapUV),
(TextureProperty.UVMapping)material.GetFloat(k_NormalMapUV),
(TextureProperty.UVMapping)material.GetFloat(k_Smoothness1MapUV),
(TextureProperty.UVMapping)material.GetFloat(k_Smoothness2MapUV),
(TextureProperty.UVMapping)material.GetFloat(k_EmissiveColorMapUV),
(TextureProperty.UVMapping)material.GetFloat(k_SubsurfaceMaskMapUV),
(TextureProperty.UVMapping)material.GetFloat(k_ThicknessMapUV),
};
bool requireUv2 = false;
bool requireUv3 = false;
bool requireTriplanar = false;
for (int i = 0; i < uvIndices.Length; ++i)
material.DisableKeyword("_REQUIRE_UV2");
material.DisableKeyword("_REQUIRE_UV3");
requireUv2 = requireUv2 || uvIndices[i] == TextureProperty.UVMapping.UV2;
requireUv3 = requireUv3 || uvIndices[i] == TextureProperty.UVMapping.UV3;
requireTriplanar = requireTriplanar || uvIndices[i] == TextureProperty.UVMapping.Triplanar;
CoreUtils.SetKeyword(material, "_EMISSIVE_COLOR_MAP", material.GetTexture(kEmissiveColorMap));
bool clearCoatEnabled = material.HasProperty(kCoatEnable) && (material.GetFloat(kCoatEnable) > 0.0f);
bool anisotropyEnabled = material.HasProperty(kAnisotropy) && (material.GetFloat(kAnisotropy) != 0.0f);
// TODO: When we have a map, also test for the map to enable the feature. (This scheme doesn't allow enabling from
// a neutral value though; better to still have a flag and uncheck it in UI code when reaching the neutral
// value, and re-enable it otherwise).
// Note that we don't use the materialId (cf Lit.shader) mechanism in the UI
CoreUtils.SetKeyword(material, "_MATERIAL_FEATURE_ANISOTROPY", anisotropyEnabled);
CoreUtils.SetKeyword(material, "_MATERIAL_FEATURE_CLEAR_COAT", clearCoatEnabled);
//CoreUtils.SetKeyword(material, "_USE_UV2", requireUv2);
//CoreUtils.SetKeyword(material, "_USE_UV3", requireUv3);
CoreUtils.SetKeyword(material, "_USE_TRIPLANAR", requireTriplanar);
}
}
} // namespace UnityEditor

2
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/RenderLoopSettings/GlobalLightLoopSettingsUI.cs


EditorGUILayout.PropertyField(d.planarReflectionCacheCompressed, _.GetContent("Compress Planar Reflection Probe Cache"));
EditorGUILayout.PropertyField(d.planarReflectionCubemapSize, _.GetContent("Planar Reflection Texture Size"));
EditorGUILayout.PropertyField(d.planarReflectionProbeCacheSize, _.GetContent("Planar Probe Cache Size"));
EditorGUILayout.PropertyField(d.maxPlanarReflectionProbes, _.GetContent("Max Planar Probe Per Frame"));
d.maxPlanarReflectionProbes.intValue = Mathf.Max(1, d.maxPlanarReflectionProbes.intValue);
--EditorGUI.indentLevel;
}

2
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/RenderLoopSettings/HDRenderPipelineUI.cs


{
EditorGUILayout.PropertyField(d.renderPipelineResources, _.GetContent("Render Pipeline Resources|Set of resources that need to be loaded when creating stand alone"));
EditorGUILayout.PropertyField(d.diffusionProfileSettings, _.GetContent("Diffusion Profile Settings"));
EditorGUILayout.PropertyField(d.allowShaderVariantStripping, _.GetContent("Enable Shader Variant Stripping (experimental)"));
}
}
}

2
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/RenderLoopSettings/SerializedGlobalLightLoopSettings.cs


public SerializedProperty planarReflectionProbeCacheSize;
public SerializedProperty planarReflectionCubemapSize;
public SerializedProperty planarReflectionCacheCompressed;
public SerializedProperty maxPlanarReflectionProbes;
public SerializedProperty skyReflectionSize;
public SerializedProperty skyLightingOverrideLayerMask;

planarReflectionProbeCacheSize = root.Find((GlobalLightLoopSettings s) => s.planarReflectionProbeCacheSize);
planarReflectionCubemapSize = root.Find((GlobalLightLoopSettings s) => s.planarReflectionTextureSize);
planarReflectionCacheCompressed = root.Find((GlobalLightLoopSettings s) => s.planarReflectionCacheCompressed);
maxPlanarReflectionProbes = root.Find((GlobalLightLoopSettings s) => s.maxPlanarReflectionProbes);
skyReflectionSize = root.Find((GlobalLightLoopSettings s) => s.skyReflectionSize);
skyLightingOverrideLayerMask = root.Find((GlobalLightLoopSettings s) => s.skyLightingOverrideLayerMask);

2
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/RenderLoopSettings/SerializedHDRenderPipelineAsset.cs


public SerializedProperty renderPipelineResources;
public SerializedProperty diffusionProfileSettings;
public SerializedProperty allowShaderVariantStripping;
public SerializedRenderPipelineSettings renderPipelineSettings;
public SerializedFrameSettings defaultFrameSettings;

renderPipelineResources = serializedObject.FindProperty("m_RenderPipelineResources");
diffusionProfileSettings = serializedObject.Find((HDRenderPipelineAsset s) => s.diffusionProfileSettings);
allowShaderVariantStripping = serializedObject.Find((HDRenderPipelineAsset s) => s.allowShaderVariantStripping);
renderPipelineSettings = new SerializedRenderPipelineSettings(serializedObject.Find((HDRenderPipelineAsset a) => a.renderPipelineSettings));
defaultFrameSettings = new SerializedFrameSettings(serializedObject.FindProperty("m_FrameSettings"));

4
ScriptableRenderPipeline/HDRenderPipeline/HDRP/HDCustomSamplerId.cs


TransparentDepthPostpass,
ObjectsVelocity,
CameraVelocity,
GaussianPyramidColor,
PyramidDepth,
ColorPyramid,
DepthPyramid,
PostProcessing,
RenderDebug,
ClearBuffers,

220
ScriptableRenderPipeline/HDRenderPipeline/HDRP/HDRenderPipeline.cs


};
readonly HDRenderPipelineAsset m_Asset;
public HDRenderPipelineAsset asset { get { return m_Asset; } }
DiffusionProfileSettings m_InternalSSSAsset;
public DiffusionProfileSettings diffusionProfileSettings

RenderTargetIdentifier[] m_MRTCache2 = new RenderTargetIdentifier[2];
// 'm_CameraColorBuffer' does not contain diffuse lighting of SSS materials until the SSS pass. It is stored within 'm_CameraSssDiffuseLightingBuffer'.
RTHandle m_CameraColorBuffer;
RTHandle m_CameraSssDiffuseLightingBuffer;
RTHandleSystem.RTHandle m_CameraColorBuffer;
RTHandleSystem.RTHandle m_CameraSssDiffuseLightingBuffer;
RTHandle m_CameraDepthStencilBuffer;
RTHandle m_CameraDepthBufferCopy;
RTHandle m_CameraStencilBufferCopy;
RTHandleSystem.RTHandle m_CameraDepthStencilBuffer;
RTHandleSystem.RTHandle m_CameraDepthBufferCopy;
RTHandleSystem.RTHandle m_CameraStencilBufferCopy;
RTHandle m_VelocityBuffer;
RTHandle m_DeferredShadowBuffer;
RTHandle m_AmbientOcclusionBuffer;
RTHandle m_DistortionBuffer;
RTHandleSystem.RTHandle m_VelocityBuffer;
RTHandleSystem.RTHandle m_DeferredShadowBuffer;
RTHandleSystem.RTHandle m_AmbientOcclusionBuffer;
RTHandleSystem.RTHandle m_DistortionBuffer;
// The pass "SRPDefaultUnlit" is a fallback to legacy unlit rendering and is required to support Unity 2D + Unity UI that render in the scene.
ShaderPassName[] m_ForwardAndForwardOnlyPassNames = { new ShaderPassName(), new ShaderPassName(), HDShaderPassNames.s_SRPDefaultUnlitName };

int m_CurrentHeight;
// Use to detect frame changes
int m_FrameCount;
uint m_FrameCount;
float m_LastTime, m_Time;
public int GetCurrentShadowCount() { return m_LightLoop.GetCurrentShadowCount(); }

public DebugDisplaySettings debugDisplaySettings { get { return m_DebugDisplaySettings; } }
static DebugDisplaySettings s_NeutralDebugDisplaySettings = new DebugDisplaySettings();
DebugDisplaySettings m_CurrentDebugDisplaySettings;
RTHandle m_DebugColorPickerBuffer;
RTHandle m_DebugFullScreenTempBuffer;
RTHandleSystem.RTHandle m_DebugColorPickerBuffer;
RTHandleSystem.RTHandle m_DebugFullScreenTempBuffer;
bool m_FullScreenDebugPushed;
bool m_ValidAPI; // False by default means we render normally, true means we don't render anything

// Initial state of the RTHandle system.
// Tells the system that we will require MSAA or not so that we can avoid wasteful render texture allocation.
// TODO: Might want to initialize to at least the window resolution to avoid unnecessary re-allocations in the player
RTHandle.Initialize(1, 1, m_Asset.renderPipelineSettings.supportMSAA, m_Asset.renderPipelineSettings.msaaSampleCount);
RTHandles.Initialize(1, 1, m_Asset.renderPipelineSettings.supportMSAA, m_Asset.renderPipelineSettings.msaaSampleCount);
if(!m_Asset.renderPipelineSettings.supportForwardOnly)
m_GbufferManager.CreateBuffers();

m_BufferPyramid.CreateBuffers();
m_CameraColorBuffer = RTHandle.Alloc(Vector2.one, filterMode: FilterMode.Point, colorFormat: RenderTextureFormat.ARGBHalf, sRGB : false, enableRandomWrite: true, enableMSAA: true, name : "CameraColor");
m_CameraSssDiffuseLightingBuffer = RTHandle.Alloc(Vector2.one, filterMode: FilterMode.Point, colorFormat: RenderTextureFormat.RGB111110Float, sRGB: false, enableRandomWrite: true, enableMSAA: true, name: "CameraSSSDiffuseLighting");
m_CameraColorBuffer = RTHandles.Alloc(Vector2.one, filterMode: FilterMode.Point, colorFormat: RenderTextureFormat.ARGBHalf, sRGB : false, enableRandomWrite: true, enableMSAA: true, name : "CameraColor");
m_CameraSssDiffuseLightingBuffer = RTHandles.Alloc(Vector2.one, filterMode: FilterMode.Point, colorFormat: RenderTextureFormat.RGB111110Float, sRGB: false, enableRandomWrite: true, enableMSAA: true, name: "CameraSSSDiffuseLighting");
m_CameraDepthStencilBuffer = RTHandle.Alloc(Vector2.one, depthBufferBits: DepthBits.Depth24, colorFormat: RenderTextureFormat.Depth, filterMode: FilterMode.Point, bindTextureMS: true, enableMSAA: true, name: "CameraDepthStencil");
m_CameraDepthStencilBuffer = RTHandles.Alloc(Vector2.one, depthBufferBits: DepthBits.Depth24, colorFormat: RenderTextureFormat.Depth, filterMode: FilterMode.Point, bindTextureMS: true, enableMSAA: true, name: "CameraDepthStencil");
m_CameraDepthBufferCopy = RTHandle.Alloc(Vector2.one, depthBufferBits: DepthBits.Depth24, colorFormat: RenderTextureFormat.Depth, filterMode: FilterMode.Point, bindTextureMS: true, enableMSAA: true, name: "CameraDepthStencilCopy");
m_CameraDepthBufferCopy = RTHandles.Alloc(Vector2.one, depthBufferBits: DepthBits.Depth24, colorFormat: RenderTextureFormat.Depth, filterMode: FilterMode.Point, bindTextureMS: true, enableMSAA: true, name: "CameraDepthStencilCopy");
m_CameraStencilBufferCopy = RTHandle.Alloc(Vector2.one, depthBufferBits: DepthBits.None, colorFormat: RenderTextureFormat.R8, sRGB: false, filterMode: FilterMode.Point, enableMSAA: true, name: "CameraStencilCopy"); // DXGI_FORMAT_R8_UINT is not supported by Unity
m_CameraStencilBufferCopy = RTHandles.Alloc(Vector2.one, depthBufferBits: DepthBits.None, colorFormat: RenderTextureFormat.R8, sRGB: false, filterMode: FilterMode.Point, enableMSAA: true, name: "CameraStencilCopy"); // DXGI_FORMAT_R8_UINT is not supported by Unity
m_AmbientOcclusionBuffer = RTHandle.Alloc(Vector2.one, filterMode: FilterMode.Bilinear, colorFormat: RenderTextureFormat.R8, sRGB: false, enableRandomWrite: true, name: "AmbientOcclusion");
m_AmbientOcclusionBuffer = RTHandles.Alloc(Vector2.one, filterMode: FilterMode.Bilinear, colorFormat: RenderTextureFormat.R8, sRGB: false, enableRandomWrite: true, name: "AmbientOcclusion");
m_VelocityBuffer = RTHandle.Alloc(Vector2.one, filterMode: FilterMode.Point, colorFormat: Builtin.GetVelocityBufferFormat(), sRGB: Builtin.GetVelocityBufferSRGBFlag(), enableMSAA: true, name: "Velocity");
m_VelocityBuffer = RTHandles.Alloc(Vector2.one, filterMode: FilterMode.Point, colorFormat: Builtin.GetVelocityBufferFormat(), sRGB: Builtin.GetVelocityBufferSRGBFlag(), enableMSAA: true, name: "Velocity");
m_DistortionBuffer = RTHandle.Alloc(Vector2.one, filterMode: FilterMode.Point, colorFormat: Builtin.GetDistortionBufferFormat(), sRGB: Builtin.GetDistortionBufferSRGBFlag(), name: "Distortion");
m_DistortionBuffer = RTHandles.Alloc(Vector2.one, filterMode: FilterMode.Point, colorFormat: Builtin.GetDistortionBufferFormat(), sRGB: Builtin.GetDistortionBufferSRGBFlag(), name: "Distortion");
m_DeferredShadowBuffer = RTHandle.Alloc(Vector2.one, filterMode: FilterMode.Point, colorFormat: RenderTextureFormat.ARGB32, sRGB: false, enableRandomWrite: true, name: "DeferredShadow");
m_DeferredShadowBuffer = RTHandles.Alloc(Vector2.one, filterMode: FilterMode.Point, colorFormat: RenderTextureFormat.ARGB32, sRGB: false, enableRandomWrite: true, name: "DeferredShadow");
m_DebugColorPickerBuffer = RTHandle.Alloc(Vector2.one, filterMode: FilterMode.Point, colorFormat: RenderTextureFormat.ARGBHalf, sRGB: false, name: "DebugColorPicker");
m_DebugFullScreenTempBuffer = RTHandle.Alloc(Vector2.one, filterMode: FilterMode.Point, colorFormat: RenderTextureFormat.ARGBHalf, sRGB: false, name: "DebugFullScreen");
m_DebugColorPickerBuffer = RTHandles.Alloc(Vector2.one, filterMode: FilterMode.Point, colorFormat: RenderTextureFormat.ARGBHalf, sRGB: false, name: "DebugColorPicker");
m_DebugFullScreenTempBuffer = RTHandles.Alloc(Vector2.one, filterMode: FilterMode.Point, colorFormat: RenderTextureFormat.ARGBHalf, sRGB: false, name: "DebugFullScreen");
}
}

m_DbufferManager.DestroyBuffers();
m_BufferPyramid.DestroyBuffers();
RTHandle.Release(m_CameraColorBuffer);
RTHandle.Release(m_CameraSssDiffuseLightingBuffer);
RTHandles.Release(m_CameraColorBuffer);
RTHandles.Release(m_CameraSssDiffuseLightingBuffer);
RTHandle.Release(m_CameraDepthStencilBuffer);
RTHandle.Release(m_CameraDepthBufferCopy);
RTHandle.Release(m_CameraStencilBufferCopy);
RTHandles.Release(m_CameraDepthStencilBuffer);
RTHandles.Release(m_CameraDepthBufferCopy);
RTHandles.Release(m_CameraStencilBufferCopy);
RTHandle.Release(m_AmbientOcclusionBuffer);
RTHandle.Release(m_VelocityBuffer);
RTHandle.Release(m_DistortionBuffer);
RTHandle.Release(m_DeferredShadowBuffer);
RTHandles.Release(m_AmbientOcclusionBuffer);
RTHandles.Release(m_VelocityBuffer);
RTHandles.Release(m_DistortionBuffer);
RTHandles.Release(m_DeferredShadowBuffer);
RTHandle.Release(m_DebugColorPickerBuffer);
RTHandle.Release(m_DebugFullScreenTempBuffer);
RTHandles.Release(m_DebugColorPickerBuffer);
RTHandles.Release(m_DebugFullScreenTempBuffer);
HDCamera.CleanUnused();
}
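Outside the diff: a minimal sketch of the allocate/release pairing that the change above moves onto the static RTHandles entry points. The argument pattern is mirrored from the allocations in this diff; the class, field, and method names here are hypothetical, and the namespace is assumed from the era's SRP Core.

using UnityEngine;
using UnityEngine.Experimental.Rendering; // assumed namespace for RTHandles / RTHandleSystem

class RTHandleLifecycleSketch
{
    // Hypothetical field: every RTHandles.Alloc is expected to be paired with an RTHandles.Release.
    RTHandleSystem.RTHandle m_ExampleColorBuffer;

    void AllocBuffers()
    {
        // Same argument pattern as the CameraColor allocation above.
        m_ExampleColorBuffer = RTHandles.Alloc(Vector2.one, filterMode: FilterMode.Point,
            colorFormat: RenderTextureFormat.ARGBHalf, sRGB: false,
            enableRandomWrite: true, enableMSAA: true, name: "ExampleColor");
    }

    void ReleaseBuffers()
    {
        RTHandles.Release(m_ExampleColorBuffer);
        m_ExampleColorBuffer = null;
    }
}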

bool IsSupportedPlatform()
{
// Note: If you add a new platform in this function, think about adding support for it when building the player in HDRPCustomBuildProcessor.cs
if (!SystemInfo.supportsComputeShaders)
return false;

}
// Warning: (resolutionChanged == false) if you open a new Editor tab of the same size!
m_VolumetricLightingSystem.ResizeVBuffer(hdCamera, hdCamera.actualWidth, hdCamera.actualHeight);
m_VolumetricLightingSystem.ResizeVBufferAndUpdateProperties(hdCamera, m_FrameCount);
// update recorded window resolution
m_CurrentWidth = hdCamera.actualWidth;

m_DbufferManager.PushGlobalParams(cmd, m_FrameSettings);
m_VolumetricLightingSystem.PushGlobalParams(hdCamera, cmd);
m_VolumetricLightingSystem.PushGlobalParams(hdCamera, cmd, m_FrameCount);
var ssrefraction = VolumeManager.instance.stack.GetComponent<ScreenSpaceRefraction>()
var ssRefraction = VolumeManager.instance.stack.GetComponent<ScreenSpaceRefraction>()
ssrefraction.PushShaderParameters(cmd);
ssRefraction.PushShaderParameters(cmd);
var previousDepthPyramidRT = hdCamera.GetPreviousFrameRT((int)HDCameraFrameHistoryType.DepthPyramid);
if (previousDepthPyramidRT != null)
{
cmd.SetGlobalTexture(HDShaderIDs._DepthPyramidTexture, previousDepthPyramidRT);
cmd.SetGlobalVector(HDShaderIDs._DepthPyramidSize, new Vector4(
previousDepthPyramidRT.referenceSize.x,
previousDepthPyramidRT.referenceSize.y,
1f / previousDepthPyramidRT.referenceSize.x,
1f / previousDepthPyramidRT.referenceSize.y
));
cmd.SetGlobalVector(HDShaderIDs._DepthPyramidScale, new Vector4(
previousDepthPyramidRT.referenceSize.x / (float)previousDepthPyramidRT.rt.width,
previousDepthPyramidRT.referenceSize.y / (float)previousDepthPyramidRT.rt.height,
Mathf.Log(Mathf.Min(previousDepthPyramidRT.rt.width, previousDepthPyramidRT.rt.height), 2),
0.0f
));
}
var previousColorPyramidRT = hdCamera.GetPreviousFrameRT((int)HDCameraFrameHistoryType.ColorPyramid);
if (previousColorPyramidRT != null)
{
cmd.SetGlobalTexture(HDShaderIDs._ColorPyramidTexture, previousColorPyramidRT);
cmd.SetGlobalVector(HDShaderIDs._ColorPyramidSize, new Vector4(
previousColorPyramidRT.referenceSize.x,
previousColorPyramidRT.referenceSize.y,
1f / previousColorPyramidRT.referenceSize.x,
1f / previousColorPyramidRT.referenceSize.y
));
cmd.SetGlobalVector(HDShaderIDs._ColorPyramidScale, new Vector4(
previousColorPyramidRT.referenceSize.x / (float)previousColorPyramidRT.rt.width,
previousColorPyramidRT.referenceSize.y / (float)previousColorPyramidRT.rt.height,
Mathf.Log(Mathf.Min(previousColorPyramidRT.rt.width, previousColorPyramidRT.rt.height), 2),
0.0f
));
}
}
}
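For reference, a small sketch (helper names are hypothetical) of how the two vectors pushed above are packed, assuming referenceSize is the camera-visible area inside the possibly larger pyramid render texture:

using UnityEngine;

static class PyramidParamPackingSketch
{
    // _DepthPyramidSize / _ColorPyramidSize: (w, h, 1/w, 1/h) of the visible reference size.
    public static Vector4 PackSize(Vector2Int referenceSize)
    {
        return new Vector4(referenceSize.x, referenceSize.y,
                           1f / referenceSize.x, 1f / referenceSize.y);
    }

    // _DepthPyramidScale / _ColorPyramidScale: xy rescales UVs into the used sub-rect of the RT,
    // z is log2(min(rtWidth, rtHeight)), i.e. an upper bound on the usable mip count.
    public static Vector4 PackScale(Vector2Int referenceSize, int rtWidth, int rtHeight)
    {
        return new Vector4(referenceSize.x / (float)rtWidth,
                           referenceSize.y / (float)rtHeight,
                           Mathf.Log(Mathf.Min(rtWidth, rtHeight), 2f),
                           0f);
    }
}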

return m_LightLoop.GetFeatureVariantsEnabled();
}
RTHandle GetDepthTexture()
RTHandleSystem.RTHandle GetDepthTexture()
{
return NeedDepthBufferCopy() ? m_CameraDepthBufferCopy : m_CameraDepthStencilBuffer;
}

// Therefore, outside of the Play Mode we update the time at 60 fps,
// and in the Play Mode we rely on 'Time.frameCount'.
float t = Time.realtimeSinceStartup;
int c = Time.frameCount;
uint c = (uint)Time.frameCount;
bool newFrame;

DecalSystem.instance.EndCull();
m_DbufferManager.vsibleDecalCount = DecalSystem.m_DecalsVisibleThisFrame;
DecalSystem.instance.UpdateCachedMaterialData(); // textures, alpha or fade distances could've changed
DecalSystem.instance.UpdateTextureAtlas(cmd); // as this is only used for transparent pass, would've been nice not to have to do this if no transparent renderers are visible
DecalSystem.instance.UpdateTextureAtlas(cmd); // as this is only used for transparent pass, would've been nice not to have to do this if no transparent renderers are visible, needs to happen after CreateDrawData
}
}
renderContext.SetupCameraProperties(camera, m_FrameSettings.enableStereo);

// TODO: Try to arrange code so we can trigger this call earlier and use async compute here to run sky convolution during other passes (once we move convolution shader to compute).
UpdateSkyEnvironment(hdCamera, cmd);
RenderPyramidDepth(hdCamera, cmd, renderContext, FullScreenDebugMode.DepthPyramid);
RenderDepthPyramid(hdCamera, cmd, renderContext, FullScreenDebugMode.DepthPyramid);
StopStereoRendering(renderContext, hdCamera.camera);

}
}
{
// Set fog parameters for volumetric lighting.
var visualEnv = VolumeManager.instance.stack.GetComponent<VisualEnvironment>();
visualEnv.PushFogShaderParameters(cmd, m_FrameSettings);
}
m_VolumetricLightingSystem.VolumeVoxelizationPass(densityVolumes, hdCamera, cmd, m_FrameSettings);
m_VolumetricLightingSystem.VolumeVoxelizationPass(densityVolumes, hdCamera, cmd, m_FrameSettings, m_FrameCount);
m_VolumetricLightingSystem.VolumetricLightingPass(hdCamera, cmd, m_FrameSettings);
m_VolumetricLightingSystem.VolumetricLightingPass(hdCamera, cmd, m_FrameSettings, m_FrameCount);
RenderDeferredLighting(hdCamera, cmd);

RenderForward(m_CullResults, hdCamera, renderContext, cmd, ForwardPass.PreRefraction);
RenderForwardError(m_CullResults, hdCamera, renderContext, cmd, ForwardPass.PreRefraction);
RenderGaussianPyramidColor(hdCamera, cmd, renderContext, true);
RenderColorPyramid(hdCamera, cmd, renderContext, true);
// Render all types of transparent forward (unlit, lit, complex (hair...)) to keep the sorting between transparent objects.
RenderForward(m_CullResults, hdCamera, renderContext, cmd, ForwardPass.Transparent);

RenderTransparentDepthPostpass(m_CullResults, hdCamera, renderContext, cmd, ForwardPass.Transparent);
RenderGaussianPyramidColor(hdCamera, cmd, renderContext, false);
RenderColorPyramid(hdCamera, cmd, renderContext, false);
AccumulateDistortion(m_CullResults, hdCamera, renderContext, cmd);
RenderDistortion(cmd, m_Asset.renderPipelineResources, hdCamera);

m_DebugScreenSpaceTracingData.GetData(m_DebugScreenSpaceTracingDataArray);
var data = m_DebugScreenSpaceTracingDataArray[0];
m_CurrentDebugDisplaySettings.screenSpaceTracingDebugData = data;
// Assign -1 in the tracing model to notify that we took the data.
// When debugging in forward, we only want the first time the pixel is drawn
data.tracingModel = (Lit.RefractionSSRayModel)(-1);
m_DebugScreenSpaceTracingDataArray[0] = data;
m_DebugScreenSpaceTracingData.SetData(m_DebugScreenSpaceTracingDataArray);
}
} // For each camera
}

using (new ProfilingSample(cmd, "ApplyDistortion", CustomSamplerId.ApplyDistortion.GetSampler()))
{
Vector2 pyramidScale = m_BufferPyramid.GetPyramidToScreenScale(hdCamera);
var colorPyramidRT = hdCamera.GetPreviousFrameRT((int)HDCameraFrameHistoryType.ColorPyramid);
var pyramidScale = m_BufferPyramid.GetPyramidToScreenScale(hdCamera, colorPyramidRT);
// Need to account for the fact that the Gaussian pyramid is actually rendered inside the camera viewport in a square texture, so we multiply by the PyramidToScreen scale
var size = new Vector4(hdCamera.screenSize.x, hdCamera.screenSize.y, pyramidScale.x / hdCamera.screenSize.x, pyramidScale.y / hdCamera.screenSize.y);

cmd.SetComputeTextureParam(m_applyDistortionCS, m_applyDistortionKernel, HDShaderIDs._ColorPyramidTexture, m_BufferPyramid.colorPyramid);
cmd.SetComputeTextureParam(m_applyDistortionCS, m_applyDistortionKernel, HDShaderIDs._ColorPyramidTexture, colorPyramidRT);
cmd.SetComputeTextureParam(m_applyDistortionCS, m_applyDistortionKernel, HDShaderIDs._CameraColorTexture, m_CameraColorBuffer);
cmd.SetComputeVectorParam(m_applyDistortionCS, HDShaderIDs._Size, size);

void RenderSky(HDCamera hdCamera, CommandBuffer cmd)
{
// Rendering the sky is the first time in the frame where we need fog parameters so we push them here for the whole frame.
visualEnv.PushFogShaderParameters(cmd, m_FrameSettings);
if (visualEnv.fogType != FogType.None || m_VolumetricLightingSystem.preset != VolumetricLightingSystem.VolumetricLightingPreset.Off)
if (visualEnv.fogType != FogType.None)
m_SkyManager.RenderOpaqueAtmosphericScattering(cmd);
}

var camera = hdCamera.camera;
m_LightLoop.RenderForward(camera, cmd, pass == ForwardPass.Opaque);
var debugScreenSpaceTracing = m_CurrentDebugDisplaySettings.fullScreenDebugMode == FullScreenDebugMode.ScreenSpaceTracing;
if (pass == ForwardPass.Opaque)
{

var passNames = m_FrameSettings.enableForwardRenderingOnly
? m_ForwardAndForwardOnlyPassNames
: m_ForwardOnlyPassNames;
var debugSSTThisPass = debugScreenSpaceTracing && (m_CurrentDebugDisplaySettings.lightingDebugSettings.debugLightingMode == DebugLightingMode.ScreenSpaceTracingReflection);
if (debugSSTThisPass)
{
cmd.SetGlobalBuffer(HDShaderIDs._DebugScreenSpaceTracingData, m_DebugScreenSpaceTracingData);
cmd.SetRandomWriteTarget(7, m_DebugScreenSpaceTracingData);
}
if (debugSSTThisPass)
cmd.ClearRandomWriteTargets();
// Assign debug data
if (m_CurrentDebugDisplaySettings.fullScreenDebugMode == FullScreenDebugMode.ScreenSpaceTracing
&& pass == ForwardPass.Transparent)
{
cmd.SetGlobalBuffer(HDShaderIDs._DebugScreenSpaceTracingData, m_DebugScreenSpaceTracingData);
cmd.SetRandomWriteTarget(1, m_DebugScreenSpaceTracingData);
}
RenderTransparentRenderList(cullResults, camera, renderContext, cmd, m_AllTransparentPassNames, m_currentRendererConfigurationBakedLighting, pass == ForwardPass.PreRefraction ? HDRenderQueue.k_RenderQueue_PreRefraction : HDRenderQueue.k_RenderQueue_Transparent);
if (m_CurrentDebugDisplaySettings.fullScreenDebugMode == FullScreenDebugMode.ScreenSpaceTracing
&& pass == ForwardPass.Transparent)
var debugSSTThisPass = debugScreenSpaceTracing && (m_CurrentDebugDisplaySettings.lightingDebugSettings.debugLightingMode == DebugLightingMode.ScreenSpaceTracingRefraction);
if (debugSSTThisPass)
cmd.ClearRandomWriteTargets();
cmd.SetGlobalBuffer(HDShaderIDs._DebugScreenSpaceTracingData, m_DebugScreenSpaceTracingData);
cmd.SetRandomWriteTarget(7, m_DebugScreenSpaceTracingData);
RenderTransparentRenderList(cullResults, camera, renderContext, cmd, m_AllTransparentPassNames, m_currentRendererConfigurationBakedLighting, pass == ForwardPass.PreRefraction ? HDRenderQueue.k_RenderQueue_PreRefraction : HDRenderQueue.k_RenderQueue_Transparent);
if (debugSSTThisPass)
cmd.ClearRandomWriteTargets();
}
}
}
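A condensed, hedged sketch of the debug read-back pattern used above: bind the compute buffer as a UAV for the duration of the pass, then fetch it on the CPU and mark the sample as consumed. The struct and class names here are stand-ins, not the HDRP types.

using UnityEngine;
using UnityEngine.Rendering;

class ScreenSpaceTracingDebugSketch
{
    struct DebugData { public int tracingModel; } // stand-in for the real debug payload

    static readonly int s_BufferId = Shader.PropertyToID("_DebugScreenSpaceTracingData");
    ComputeBuffer m_Buffer = new ComputeBuffer(1, sizeof(int)); // call Release() on teardown
    DebugData[] m_Readback = new DebugData[1];

    void BindForForwardPass(CommandBuffer cmd)
    {
        cmd.SetGlobalBuffer(s_BufferId, m_Buffer);
        cmd.SetRandomWriteTarget(7, m_Buffer); // UAV slot 7, as in the diff
        // ... draw the forward renderers here ...
        cmd.ClearRandomWriteTargets();
    }

    void ReadBackAfterExecution()
    {
        m_Buffer.GetData(m_Readback);          // only valid once the command buffer has executed
        var data = m_Readback[0];
        data.tracingModel = -1;                // -1 signals that the data has been consumed
        m_Readback[0] = data;
        m_Buffer.SetData(m_Readback);
    }
}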

}
}
void RenderGaussianPyramidColor(HDCamera hdCamera, CommandBuffer cmd, ScriptableRenderContext renderContext, bool isPreRefraction)
void RenderColorPyramid(HDCamera hdCamera, CommandBuffer cmd, ScriptableRenderContext renderContext, bool isPreRefraction)
{
if (isPreRefraction)
{

return;
}
using (new ProfilingSample(cmd, "Gaussian Pyramid Color", CustomSamplerId.GaussianPyramidColor.GetSampler()))
m_BufferPyramid.RenderColorPyramid(hdCamera, cmd, renderContext, m_CameraColorBuffer);
var cameraRT = hdCamera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.ColorPyramid)
?? hdCamera.AllocHistoryFrameRT((int)HDCameraFrameHistoryType.ColorPyramid, m_BufferPyramid.AllocColorRT);
using (new ProfilingSample(cmd, "Color Pyramid", CustomSamplerId.ColorPyramid.GetSampler()))
m_BufferPyramid.RenderColorPyramid(hdCamera, cmd, renderContext, m_CameraColorBuffer, cameraRT);
Vector2 pyramidScale = m_BufferPyramid.GetPyramidToScreenScale(hdCamera);
PushFullScreenDebugTextureMip(cmd, m_BufferPyramid.colorPyramid, m_BufferPyramid.GetPyramidLodCount(hdCamera), new Vector4(pyramidScale.x, pyramidScale.y, 0.0f, 0.0f), hdCamera, isPreRefraction ? FullScreenDebugMode.PreRefractionColorPyramid : FullScreenDebugMode.FinalColorPyramid);
Vector2 pyramidScale = m_BufferPyramid.GetPyramidToScreenScale(hdCamera, cameraRT);
PushFullScreenDebugTextureMip(cmd, cameraRT, m_BufferPyramid.GetPyramidLodCount(new Vector2Int(hdCamera.actualWidth, hdCamera.actualHeight)), new Vector4(pyramidScale.x, pyramidScale.y, 0.0f, 0.0f), hdCamera, isPreRefraction ? FullScreenDebugMode.PreRefractionColorPyramid : FullScreenDebugMode.FinalColorPyramid);
void RenderPyramidDepth(HDCamera hdCamera, CommandBuffer cmd, ScriptableRenderContext renderContext, FullScreenDebugMode debugMode)
void RenderDepthPyramid(HDCamera hdCamera, CommandBuffer cmd, ScriptableRenderContext renderContext, FullScreenDebugMode debugMode)
using (new ProfilingSample(cmd, "Pyramid Depth", CustomSamplerId.PyramidDepth.GetSampler()))
m_BufferPyramid.RenderDepthPyramid(hdCamera, cmd, renderContext, GetDepthTexture());
var cameraRT = hdCamera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.DepthPyramid)
?? hdCamera.AllocHistoryFrameRT((int)HDCameraFrameHistoryType.DepthPyramid, m_BufferPyramid.AllocDepthRT);
using (new ProfilingSample(cmd, "Depth Pyramid", CustomSamplerId.DepthPyramid.GetSampler()))
m_BufferPyramid.RenderDepthPyramid(hdCamera, cmd, renderContext, GetDepthTexture(), cameraRT);
Vector2 pyramidScale = m_BufferPyramid.GetPyramidToScreenScale(hdCamera);
PushFullScreenDebugTextureMip(cmd, m_BufferPyramid.depthPyramid, m_BufferPyramid.GetPyramidLodCount(hdCamera), new Vector4(pyramidScale.x, pyramidScale.y, 0.0f, 0.0f), hdCamera, debugMode);
Vector2 pyramidScale = m_BufferPyramid.GetPyramidToScreenScale(hdCamera, cameraRT);
PushFullScreenDebugTextureMip(cmd, cameraRT, m_BufferPyramid.GetPyramidLodCount(new Vector2Int(hdCamera.actualWidth, hdCamera.actualHeight)), new Vector4(pyramidScale.x, pyramidScale.y, 0.0f, 0.0f), hdCamera, debugMode);
}
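The two pyramid passes above now fetch their target from the camera's per-frame history, allocating it on first use. A language-level sketch of that get-or-allocate pattern (a generic container, not the HDCamera API):

using System;
using System.Collections.Generic;

// Conceptual stand-in for GetCurrentFrameRT(...) ?? AllocHistoryFrameRT(...):
// return the resource registered for a history slot, allocating it the first time it is requested.
class HistorySlotCache<T> where T : class
{
    readonly Dictionary<int, T> m_Slots = new Dictionary<int, T>();

    public T GetOrAlloc(int slot, Func<T> allocator)
    {
        T resource;
        if (!m_Slots.TryGetValue(slot, out resource))
        {
            resource = allocator();
            m_Slots.Add(slot, resource);
        }
        return resource;
    }
}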
void RenderPostProcess(HDCamera hdcamera, CommandBuffer cmd, PostProcessLayer layer)

}
}
public void PushColorPickerDebugTexture(CommandBuffer cmd, RTHandle textureID, HDCamera hdCamera)
public void PushColorPickerDebugTexture(CommandBuffer cmd, RTHandleSystem.RTHandle textureID, HDCamera hdCamera)
{
if (m_CurrentDebugDisplaySettings.colorPickerDebugSettings.colorPickerMode != ColorPickerDebugMode.None)
{

}
}
public void PushFullScreenDebugTexture(CommandBuffer cmd, RTHandle textureID, HDCamera hdCamera, FullScreenDebugMode debugMode)
public void PushFullScreenDebugTexture(CommandBuffer cmd, RTHandleSystem.RTHandle textureID, HDCamera hdCamera, FullScreenDebugMode debugMode)
{
if (debugMode == m_CurrentDebugDisplaySettings.fullScreenDebugMode)
{

}
void PushFullScreenDebugTextureMip(CommandBuffer cmd, RTHandle texture, int lodCount, Vector4 scaleBias, HDCamera hdCamera, FullScreenDebugMode debugMode)
void PushFullScreenDebugTextureMip(CommandBuffer cmd, RTHandleSystem.RTHandle texture, int lodCount, Vector4 scaleBias, HDCamera hdCamera, FullScreenDebugMode debugMode)
{
if (debugMode == m_CurrentDebugDisplaySettings.fullScreenDebugMode)
{

// (i.e. we have performed a flip, so we need to flip the input texture)
m_DebugFullScreen.SetFloat(HDShaderIDs._RequireToFlipInputTexture, hdCamera.camera.cameraType != CameraType.SceneView ? 1.0f : 0.0f);
m_DebugFullScreen.SetBuffer(HDShaderIDs._DebugScreenSpaceTracingData, m_DebugScreenSpaceTracingData);
m_DebugFullScreen.SetTexture(HDShaderIDs._DepthPyramidTexture, m_BufferPyramid.depthPyramid);
m_DebugFullScreen.SetTexture(HDShaderIDs._DepthPyramidTexture, hdCamera.GetPreviousFrameRT((int)HDCameraFrameHistoryType.DepthPyramid));
HDUtils.DrawFullScreen(cmd, hdCamera, m_DebugFullScreen, (RenderTargetIdentifier)BuiltinRenderTextureType.CameraTarget);
PushColorPickerDebugTexture(cmd, (RenderTargetIdentifier)BuiltinRenderTextureType.CameraTarget, hdCamera);

4
ScriptableRenderPipeline/HDRenderPipeline/HDRP/HDRenderPipelineAsset.cs


{
return new ReflectionSystemParameters
{
maxPlanarReflectionProbes = renderPipelineSettings.lightLoopSettings.maxPlanarReflectionProbes,
maxPlanarReflectionProbes = renderPipelineSettings.lightLoopSettings.planarReflectionProbeCacheSize,
planarReflectionProbeSize = renderPipelineSettings.lightLoopSettings.planarReflectionTextureSize
};
}

{
return renderPipelineSettings;
}
public bool allowShaderVariantStripping = true;
[SerializeField]
public DiffusionProfileSettings diffusionProfileSettings;

44
ScriptableRenderPipeline/HDRenderPipeline/HDRP/HDStringConstants.cs


public static readonly int _Source4 = Shader.PropertyToID("_Source4");
public static readonly int _Result1 = Shader.PropertyToID("_Result1");
public static readonly int _AtmosphericScatteringType = Shader.PropertyToID("_AtmosphericScatteringType");
public static readonly int _AmbientProbeCoeffs = Shader.PropertyToID("_AmbientProbeCoeffs");
public static readonly int _GlobalExtinction = Shader.PropertyToID("_GlobalExtinction");
public static readonly int _GlobalScattering = Shader.PropertyToID("_GlobalScattering");
public static readonly int _GlobalAsymmetry = Shader.PropertyToID("_GlobalAsymmetry");
public static readonly int _CornetteShanksConstant = Shader.PropertyToID("_CornetteShanksConstant");
public static readonly int _VBufferResolution = Shader.PropertyToID("_VBufferResolution");
public static readonly int _VBufferSliceCount = Shader.PropertyToID("_VBufferSliceCount");
public static readonly int _VBufferDepthEncodingParams = Shader.PropertyToID("_VBufferDepthEncodingParams");
public static readonly int _VBufferDepthDecodingParams = Shader.PropertyToID("_VBufferDepthDecodingParams");
public static readonly int _VBufferCoordToViewDirWS = Shader.PropertyToID("_VBufferCoordToViewDirWS");
public static readonly int _VBufferDensity = Shader.PropertyToID("_VBufferDensity");
public static readonly int _VBufferLighting = Shader.PropertyToID("_VBufferLighting");
public static readonly int _VBufferLightingIntegral = Shader.PropertyToID("_VBufferLightingIntegral");
public static readonly int _VBufferLightingHistory = Shader.PropertyToID("_VBufferLightingHistory");
public static readonly int _VBufferLightingFeedback = Shader.PropertyToID("_VBufferLightingFeedback");
public static readonly int _VBufferSampleOffset = Shader.PropertyToID("_VBufferSampleOffset");
public static readonly int _VolumeBounds = Shader.PropertyToID("_VolumeBounds");
public static readonly int _VolumeData = Shader.PropertyToID("_VolumeData");
public static readonly int _NumVisibleDensityVolumes = Shader.PropertyToID("_NumVisibleDensityVolumes");
public static readonly int _AtmosphericScatteringType = Shader.PropertyToID("_AtmosphericScatteringType");
public static readonly int _AmbientProbeCoeffs = Shader.PropertyToID("_AmbientProbeCoeffs");
public static readonly int _GlobalExtinction = Shader.PropertyToID("_GlobalExtinction");
public static readonly int _GlobalScattering = Shader.PropertyToID("_GlobalScattering");
public static readonly int _GlobalAsymmetry = Shader.PropertyToID("_GlobalAsymmetry");
public static readonly int _CornetteShanksConstant = Shader.PropertyToID("_CornetteShanksConstant");
public static readonly int _VBufferResolution = Shader.PropertyToID("_VBufferResolution");
public static readonly int _VBufferSliceCount = Shader.PropertyToID("_VBufferSliceCount");
public static readonly int _VBufferDepthEncodingParams = Shader.PropertyToID("_VBufferDepthEncodingParams");
public static readonly int _VBufferDepthDecodingParams = Shader.PropertyToID("_VBufferDepthDecodingParams");
public static readonly int _VBufferPrevResolution = Shader.PropertyToID("_VBufferPrevResolution");
public static readonly int _VBufferPrevSliceCount = Shader.PropertyToID("_VBufferPrevSliceCount");
public static readonly int _VBufferPrevDepthEncodingParams = Shader.PropertyToID("_VBufferPrevDepthEncodingParams");
public static readonly int _VBufferPrevDepthDecodingParams = Shader.PropertyToID("_VBufferPrevDepthDecodingParams");
public static readonly int _VBufferCoordToViewDirWS = Shader.PropertyToID("_VBufferCoordToViewDirWS");
public static readonly int _VBufferDensity = Shader.PropertyToID("_VBufferDensity");
public static readonly int _VBufferLighting = Shader.PropertyToID("_VBufferLighting");
public static readonly int _VBufferLightingIntegral = Shader.PropertyToID("_VBufferLightingIntegral");
public static readonly int _VBufferLightingHistory = Shader.PropertyToID("_VBufferLightingHistory");
public static readonly int _VBufferLightingFeedback = Shader.PropertyToID("_VBufferLightingFeedback");
public static readonly int _VBufferSampleOffset = Shader.PropertyToID("_VBufferSampleOffset");
public static readonly int _VolumeBounds = Shader.PropertyToID("_VolumeBounds");
public static readonly int _VolumeData = Shader.PropertyToID("_VolumeData");
public static readonly int _NumVisibleDensityVolumes = Shader.PropertyToID("_NumVisibleDensityVolumes");
}
}

38
ScriptableRenderPipeline/HDRenderPipeline/HDRP/HDUtils.cs


return Matrix4x4.Transpose(worldToViewMatrix.transpose * viewSpaceRasterTransform);
}
private static void SetViewportAndClear(CommandBuffer cmd, HDCamera camera, RTHandle buffer, ClearFlag clearFlag, Color clearColor)
private static void SetViewportAndClear(CommandBuffer cmd, HDCamera camera, RTHandleSystem.RTHandle buffer, ClearFlag clearFlag, Color clearColor)
{
// Clearing a partial viewport currently does not go through the hardware clear.
// Instead it goes through a quad rendered with a specific shader.

// This set of RenderTarget management methods is supposed to be used when rendering into a camera dependent render texture.
// This will automatically set the viewport based on the camera size and the RTHandle scaling info.
public static void SetRenderTarget(CommandBuffer cmd, HDCamera camera, RTHandle buffer, ClearFlag clearFlag, Color clearColor, int miplevel = 0, CubemapFace cubemapFace = CubemapFace.Unknown, int depthSlice = 0)
public static void SetRenderTarget(CommandBuffer cmd, HDCamera camera, RTHandleSystem.RTHandle buffer, ClearFlag clearFlag, Color clearColor, int miplevel = 0, CubemapFace cubemapFace = CubemapFace.Unknown, int depthSlice = 0)
public static void SetRenderTarget(CommandBuffer cmd, HDCamera camera, RTHandle buffer, ClearFlag clearFlag = ClearFlag.None, int miplevel = 0, CubemapFace cubemapFace = CubemapFace.Unknown, int depthSlice = 0)
public static void SetRenderTarget(CommandBuffer cmd, HDCamera camera, RTHandleSystem.RTHandle buffer, ClearFlag clearFlag = ClearFlag.None, int miplevel = 0, CubemapFace cubemapFace = CubemapFace.Unknown, int depthSlice = 0)
public static void SetRenderTarget(CommandBuffer cmd, HDCamera camera, RTHandle colorBuffer, RTHandle depthBuffer, int miplevel = 0, CubemapFace cubemapFace = CubemapFace.Unknown, int depthSlice = 0)
public static void SetRenderTarget(CommandBuffer cmd, HDCamera camera, RTHandleSystem.RTHandle colorBuffer, RTHandleSystem.RTHandle depthBuffer, int miplevel = 0, CubemapFace cubemapFace = CubemapFace.Unknown, int depthSlice = 0)
public static void SetRenderTarget(CommandBuffer cmd, HDCamera camera, RTHandle colorBuffer, RTHandle depthBuffer, ClearFlag clearFlag, int miplevel = 0, CubemapFace cubemapFace = CubemapFace.Unknown, int depthSlice = 0)
public static void SetRenderTarget(CommandBuffer cmd, HDCamera camera, RTHandleSystem.RTHandle colorBuffer, RTHandleSystem.RTHandle depthBuffer, ClearFlag clearFlag, int miplevel = 0, CubemapFace cubemapFace = CubemapFace.Unknown, int depthSlice = 0)
public static void SetRenderTarget(CommandBuffer cmd, HDCamera camera, RTHandle colorBuffer, RTHandle depthBuffer, ClearFlag clearFlag, Color clearColor, int miplevel = 0, CubemapFace cubemapFace = CubemapFace.Unknown, int depthSlice = 0)
public static void SetRenderTarget(CommandBuffer cmd, HDCamera camera, RTHandleSystem.RTHandle colorBuffer, RTHandleSystem.RTHandle depthBuffer, ClearFlag clearFlag, Color clearColor, int miplevel = 0, CubemapFace cubemapFace = CubemapFace.Unknown, int depthSlice = 0)
public static void SetRenderTarget(CommandBuffer cmd, HDCamera camera, RenderTargetIdentifier[] colorBuffers, RTHandle depthBuffer)
public static void SetRenderTarget(CommandBuffer cmd, HDCamera camera, RenderTargetIdentifier[] colorBuffers, RTHandleSystem.RTHandle depthBuffer)
public static void SetRenderTarget(CommandBuffer cmd, HDCamera camera, RenderTargetIdentifier[] colorBuffers, RTHandle depthBuffer, ClearFlag clearFlag = ClearFlag.None)
public static void SetRenderTarget(CommandBuffer cmd, HDCamera camera, RenderTargetIdentifier[] colorBuffers, RTHandleSystem.RTHandle depthBuffer, ClearFlag clearFlag = ClearFlag.None)
public static void SetRenderTarget(CommandBuffer cmd, HDCamera camera, RenderTargetIdentifier[] colorBuffers, RTHandle depthBuffer, ClearFlag clearFlag, Color clearColor)
public static void SetRenderTarget(CommandBuffer cmd, HDCamera camera, RenderTargetIdentifier[] colorBuffers, RTHandleSystem.RTHandle depthBuffer, ClearFlag clearFlag, Color clearColor)
{
cmd.SetRenderTarget(colorBuffers, depthBuffer);
SetViewport(cmd, camera, depthBuffer);

// When we render using a camera whose viewport is smaller than the RTHandles reference size (and thus smaller than the RT actual size), we need to set it explicitly (otherwise, native code will set the viewport at the size of the RT)
// For auto-scaled RTs (like for example a half-resolution RT), we need to scale this viewport accordingly.
// For non-scaled RTs we just do nothing; the native code will set the viewport to the size of the RT anyway.
public static void SetViewport(CommandBuffer cmd, HDCamera camera, RTHandle target)
public static void SetViewport(CommandBuffer cmd, HDCamera camera, RTHandleSystem.RTHandle target)
{
if (target.useScaling)
{

cmd.DrawProcedural(Matrix4x4.identity, GetBlitMaterial(), bilinear ? 2 : 3, MeshTopology.Quads, 4, 1, s_PropertyBlock);
}
public static void BlitTexture(CommandBuffer cmd, RTHandle source, RTHandle destination, Vector4 scaleBias, float mipLevel, bool bilinear)
public static void BlitTexture(CommandBuffer cmd, RTHandleSystem.RTHandle source, RTHandleSystem.RTHandle destination, Vector4 scaleBias, float mipLevel, bool bilinear)
{
s_PropertyBlock.SetTexture(HDShaderIDs._BlitTexture, source);
s_PropertyBlock.SetVector(HDShaderIDs._BlitScaleBias, scaleBias);

// It means that we can end up rendering inside a partial viewport for one of these "camera space" renderings.
// In this case, we need to make sure that when we blit from one such camera texture to another, we only blit the necessary portion corresponding to the camera viewport.
// Here, both source and destination are camera-scaled.
public static void BlitCameraTexture(CommandBuffer cmd, HDCamera camera, RTHandle source, RTHandle destination, float mipLevel = 0.0f, bool bilinear = false)
public static void BlitCameraTexture(CommandBuffer cmd, HDCamera camera, RTHandleSystem.RTHandle source, RTHandleSystem.RTHandle destination, float mipLevel = 0.0f, bool bilinear = false)
{
// Will set the correct camera viewport as well.
SetRenderTarget(cmd, camera, destination);

// This case, both source and destination are camera-scaled but we want to override the scale/bias parameter.
public static void BlitCameraTexture(CommandBuffer cmd, HDCamera camera, RTHandle source, RTHandle destination, Vector4 scaleBias, float mipLevel = 0.0f, bool bilinear = false)
public static void BlitCameraTexture(CommandBuffer cmd, HDCamera camera, RTHandleSystem.RTHandle source, RTHandleSystem.RTHandle destination, Vector4 scaleBias, float mipLevel = 0.0f, bool bilinear = false)
{
// Will set the correct camera viewport as well.
SetRenderTarget(cmd, camera, destination);

public static void BlitCameraTexture(CommandBuffer cmd, HDCamera camera, RTHandle source, RTHandle destination, Rect destViewport, float mipLevel = 0.0f, bool bilinear = false)
public static void BlitCameraTexture(CommandBuffer cmd, HDCamera camera, RTHandleSystem.RTHandle source, RTHandleSystem.RTHandle destination, Rect destViewport, float mipLevel = 0.0f, bool bilinear = false)
{
SetRenderTarget(cmd, camera, destination);
cmd.SetViewport(destViewport);

// This particular case is for blitting a camera-scaled texture into a non scaling texture. So we setup the full viewport (implicit in cmd.Blit) but have to scale the input UVs.
public static void BlitCameraTexture(CommandBuffer cmd, HDCamera camera, RTHandle source, RenderTargetIdentifier destination)
public static void BlitCameraTexture(CommandBuffer cmd, HDCamera camera, RTHandleSystem.RTHandle source, RenderTargetIdentifier destination)
public static void BlitCameraTexture(CommandBuffer cmd, HDCamera camera, RenderTargetIdentifier source, RTHandle destination)
public static void BlitCameraTexture(CommandBuffer cmd, HDCamera camera, RenderTargetIdentifier source, RTHandleSystem.RTHandle destination)
{
// Will set the correct camera viewport as well.
SetRenderTarget(cmd, camera, destination);
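Illustrative only, assuming the blit shader applies uv' = uv * scaleBias.xy + scaleBias.zw: the scale/bias remaps the destination's [0,1] UVs into the camera-visible sub-rect of a camera-scaled source RT.

using UnityEngine;

static class BlitScaleBiasSketch
{
    // Hypothetical helper: scale = visible viewport / actual RT size, no offset.
    public static Vector4 ComputeScaleBias(Vector2Int cameraViewport, Vector2Int rtSize)
    {
        return new Vector4(cameraViewport.x / (float)rtSize.x,
                           cameraViewport.y / (float)rtSize.y,
                           0f, 0f);
    }
}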

// These methods should be used to render full screen triangles sampling auto-scaling RTs.
// This will set the proper viewport and UV scale.
public static void DrawFullScreen( CommandBuffer commandBuffer, HDCamera camera, Material material,
RTHandle colorBuffer,
RTHandleSystem.RTHandle colorBuffer,
MaterialPropertyBlock properties = null, int shaderPassId = 0)
{
HDUtils.SetRenderTarget(commandBuffer, camera, colorBuffer);

public static void DrawFullScreen( CommandBuffer commandBuffer, HDCamera camera, Material material,
RTHandle colorBuffer, RTHandle depthStencilBuffer,
RTHandleSystem.RTHandle colorBuffer, RTHandleSystem.RTHandle depthStencilBuffer,
MaterialPropertyBlock properties = null, int shaderPassId = 0)
{
HDUtils.SetRenderTarget(commandBuffer, camera, colorBuffer, depthStencilBuffer);

public static void DrawFullScreen( CommandBuffer commandBuffer, HDCamera camera, Material material,
RenderTargetIdentifier[] colorBuffers, RTHandle depthStencilBuffer,
RenderTargetIdentifier[] colorBuffers, RTHandleSystem.RTHandle depthStencilBuffer,
MaterialPropertyBlock properties = null, int shaderPassId = 0)
{
HDUtils.SetRenderTarget(commandBuffer, camera, colorBuffers, depthStencilBuffer);

48
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Light/HDAdditionalLightData.cs


public float punctualIntensity = 600.0f; // Lights default to 600 lumen, i.e. ~48 candela
public float areaIntensity = 200.0f; // Lights default to 200 lumen to better match point lights
// Only for Spotlight; should be hidden for other light types
public bool enableSpotReflector = false;
[Range(0.0f, 100.0f)]
public float m_InnerSpotPercent = 0.0f; // To display this field in the UI this need to be public

// Only for Spotlight; should be hidden for other light types
public SpotLightShape spotLightShape = SpotLightShape.Cone;
// Only for Spotlight; should be hidden for other light types
public bool enableSpotReflector = false;
// Only for Rectangle/Line/box projector lights
public float shapeWidth = 0.5f;

switch (light.type)
{
case LightType.Directional:
light.intensity = directionalIntensity;
light.intensity = Mathf.Max(0, directionalIntensity);
light.intensity = LightUtils.ConvertPointLightIntensity(punctualIntensity);
light.intensity = LightUtils.ConvertPointLightIntensity(Mathf.Max(0, punctualIntensity));
// Spot should use a conversion which takes the angle into account, so that the intensity varies with the angle.
// This is not easy to manipulate for the lighting artist, so we simply consider any spot light as an occluded point light. So reuse the same code.
light.intensity = LightUtils.ConvertPointLightIntensity(punctualIntensity);
// TODO: What to do with box shape ?
// var spotLightShape = (SpotLightShape)m_AdditionalspotLightShape.enumValueIndex;
if (enableSpotReflector)
{
if (spotLightShape == SpotLightShape.Cone)
{
light.intensity = LightUtils.ConvertSpotLightIntensity(Mathf.Max(0, punctualIntensity), light.spotAngle * Mathf.Deg2Rad, true);
}
else if (spotLightShape == SpotLightShape.Pyramid)
{
float angleA, angleB;
LightUtils.CalculateAnglesForPyramid(aspectRatio, light.spotAngle,
out angleA, out angleB);
light.intensity = LightUtils.ConvertFrustrumLightIntensity(Mathf.Max(0, punctualIntensity), angleA, angleB);
}
else // Box shape, fallback to punctual light.
{
light.intensity = LightUtils.ConvertPointLightIntensity(Mathf.Max(0, punctualIntensity));
}
}
else
{
// Spot should use a conversion which takes the angle into account, so that the intensity varies with the angle.
// This is not easy to manipulate for the lighting artist, so we simply consider any spot light as an occluded point light. So reuse the same code.
light.intensity = LightUtils.ConvertPointLightIntensity(Mathf.Max(0, punctualIntensity));
// TODO: What to do with box shape ?
// var spotLightShape = (SpotLightShape)m_AdditionalspotLightShape.enumValueIndex;
}
break;
}

light.intensity = LightUtils.ConvertRectLightIntensity(areaIntensity, shapeWidth, shapeHeight);
light.intensity = LightUtils.ConvertRectLightIntensity(Mathf.Max(0, areaIntensity), shapeWidth, shapeHeight);
light.intensity = LightUtils.CalculateLineLightIntensity(areaIntensity, shapeWidth);
light.intensity = LightUtils.CalculateLineLightIntensity(Mathf.Max(0, areaIntensity), shapeWidth);
}
}
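The conversions driving the switch above follow the standard solid-angle relations between luminous flux (lumen) and intensity; a hedged sketch (not copied from LightUtils, the formulas are the textbook ones):

using UnityEngine;

static class IntensityConversionSketch
{
    // Point light: flux spread over the full sphere, 4*pi steradians.
    public static float PointLumenToCandela(float lumen)
    {
        return lumen / (4f * Mathf.PI);
    }

    // Cone spot with reflector: flux confined to the cone's solid angle 2*pi*(1 - cos(angle/2)).
    public static float SpotConeLumenToCandela(float lumen, float spotAngleRad)
    {
        return lumen / (2f * Mathf.PI * (1f - Mathf.Cos(spotAngleRad * 0.5f)));
    }

    // Pyramid spot: the solid angle of a rectangular pyramid is 4*asin(sin(a/2)*sin(b/2)).
    public static float PyramidLumenToCandela(float lumen, float angleA, float angleB)
    {
        return lumen / (4f * Mathf.Asin(Mathf.Sin(angleA * 0.5f) * Mathf.Sin(angleB * 0.5f)));
    }

    // Rectangular area light: lumen to luminance over the emitting area (pi * w * h).
    public static float RectLumenToLuminance(float lumen, float width, float height)
    {
        return lumen / (Mathf.PI * width * height);
    }

    // Line light, matching the comment in LightUtils.cs below: radiance = power / (length * 4*pi).
    public static float LineLumenToIntensity(float lumen, float lineLength)
    {
        return lumen / (4f * Mathf.PI * lineLength);
    }
}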

1
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightDefinition.cs


public float weight;
public float multiplier;
public Vector3 sampleDirectionDiscardWS;
// Sampling properties
public int envIndex;
};

5
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightDefinition.cs.hlsl


float3 boxSideFadeNegative;
float weight;
float multiplier;
float3 sampleDirectionDiscardWS;
int envIndex;
};

float GetMultiplier(EnvLightData value)
{
return value.multiplier;
}
float3 GetSampleDirectionDiscardWS(EnvLightData value)
{
return value.sampleDirectionDiscardWS;
}
int GetEnvIndex(EnvLightData value)
{

3
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightLoop/GlobalLightLoopSettings.cs


public int pointCookieSize = 128;
public int cubeCookieTexArraySize = 16;
public int reflectionProbeCacheSize = 4;
public int reflectionProbeCacheSize = 2;
public int maxPlanarReflectionProbes = 128;
public SkyResolution skyReflectionSize = SkyResolution.SkyResolution256;
public LayerMask skyLightingOverrideLayerMask = 0;
}

78
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightLoop/LightLoop.cs


m_lightList = new LightList();
m_lightList.Allocate();
m_Env2DCaptureVP.Clear();
for (int i = 0, c = Mathf.Max(1, hdAsset.renderPipelineSettings.lightLoopSettings.maxPlanarReflectionProbes); i < c; ++i)
for (int i = 0, c = Mathf.Max(1, hdAsset.renderPipelineSettings.lightLoopSettings.planarReflectionProbeCacheSize); i < c; ++i)
m_Env2DCaptureVP.Add(Matrix4x4.identity);
m_DirectionalLightDatas = new ComputeBuffer(k_MaxDirectionalLightsOnScreen, System.Runtime.InteropServices.Marshal.SizeOf(typeof(DirectionalLightData)));

var capturePosition = Vector3.zero;
var influenceToWorld = probe.influenceToWorld;
var sampleDirectionDiscardWS = Vector3.zero;
// 31 bits index, 1 bit cache type
var envIndex = -1;
if (probe.planarReflectionProbe != null)

// We transform it to object space by translating the capturePosition
var vp = gpuProj * gpuView * Matrix4x4.Translate(capturePosition);
m_Env2DCaptureVP[fetchIndex] = vp;
sampleDirectionDiscardWS = captureRotation * Vector3.forward;
}
else if (probe.reflectionProbe != null)
{

envLightData.blendDistanceNegative = probe.blendDistanceNegative;
envLightData.boxSideFadePositive = probe.boxSideFadePositive;
envLightData.boxSideFadeNegative = probe.boxSideFadeNegative;
envLightData.sampleDirectionDiscardWS = sampleDirectionDiscardWS;
envLightData.influenceRight = influenceToWorld.GetColumn(0).normalized;
envLightData.influenceUp = influenceToWorld.GetColumn(1).normalized;

public void UpdateCullingParameters(ref ScriptableCullingParameters cullingParams)
{
m_ShadowMgr.UpdateCullingParameters( ref cullingParams );
// In HDRP we don't need per object light/probe info so we disable the native code that handles it.
#if UNITY_2018_2_OR_NEWER
cullingParams.cullingFlags |= CullFlag.DisablePerObjectCulling;
#endif
}
public bool IsBakedShadowMaskLight(Light light)

var stereoEnabled = m_FrameSettings.enableStereo;
Vector3 camPosWS = camera.transform.position;
var worldToView = WorldToCamera(camera);
var rightEyeWorldToView = Matrix4x4.identity;
if (stereoEnabled)
{
worldToView = WorldToViewStereo(camera, Camera.StereoscopicEye.Left);
rightEyeWorldToView = WorldToViewStereo(camera, Camera.StereoscopicEye.Right);
}
// Note: Lights with null intensity/color are culled by the C++ code, no need to test for that here
if (cullResults.visibleLights.Count != 0 || cullResults.visibleReflectionProbes.Count != 0)
{

// 2. Go through all lights, convert them to GPU format.
// Simultaneously create data for culling (LightVolumeData and SFiniteLightBound)
Vector3 camPosWS = camera.transform.position;
var worldToView = WorldToCamera(camera);
var rightEyeWorldToView = Matrix4x4.identity;
if (stereoEnabled)
{
worldToView = WorldToViewStereo(camera, Camera.StereoscopicEye.Left);
rightEyeWorldToView = WorldToViewStereo(camera, Camera.StereoscopicEye.Right);
}
for (int sortIndex = 0; sortIndex < sortCount; ++sortIndex)
{

}
}
// Inject density volumes into the clustered data structure for efficient look up.
m_densityVolumeCount = densityVolumes.bounds != null ? densityVolumes.bounds.Count : 0;
Matrix4x4 worldToViewCR = worldToView;
if (ShaderConfig.s_CameraRelativeRendering != 0)
{
// The OBBs are camera-relative, the matrix is not. Fix it.
worldToViewCR.SetColumn(3, new Vector4(0, 0, 0, 1));
}
for (int i = 0, n = m_densityVolumeCount; i < n; i++)
{
// Density volumes are not lights and therefore should not affect light classification.
LightFeatureFlags featureFlags = 0;
AddBoxVolumeDataAndBound(densityVolumes.bounds[i], LightCategory.DensityVolume, featureFlags, worldToViewCR);
}
m_lightCount = m_lightList.lights.Count + m_lightList.envLights.Count + m_densityVolumeCount;
Debug.Assert(m_lightCount == m_lightList.bounds.Count);
Debug.Assert(m_lightCount == m_lightList.lightVolumes.Count);
int decalDatasCount = Math.Min(DecalSystem.m_DecalDatasCount, k_MaxDecalsOnScreen);
if (decalDatasCount > 0)

m_lightCount += decalDatasCount;
}
// Inject density volumes into the clustered data structure for efficient look up.
m_densityVolumeCount = densityVolumes.bounds != null ? densityVolumes.bounds.Count : 0;
Matrix4x4 worldToViewCR = worldToView;
if (ShaderConfig.s_CameraRelativeRendering != 0)
{
// The OBBs are camera-relative, the matrix is not. Fix it.
worldToViewCR.SetColumn(3, new Vector4(0, 0, 0, 1));
}
for (int i = 0, n = m_densityVolumeCount; i < n; i++)
{
// Density volumes are not lights and therefore should not affect light classification.
LightFeatureFlags featureFlags = 0;
AddBoxVolumeDataAndBound(densityVolumes.bounds[i], LightCategory.DensityVolume, featureFlags, worldToViewCR);
}
m_lightCount = m_lightList.lights.Count + m_lightList.envLights.Count + m_densityVolumeCount + decalDatasCount;
Debug.Assert(m_lightCount == m_lightList.bounds.Count);
Debug.Assert(m_lightCount == m_lightList.lightVolumes.Count);
if (stereoEnabled)
{
// TODO: Proper decal + stereo cull management

m_lightList.bounds.AddRange(m_lightList.rightEyeBounds);
m_lightList.lightVolumes.AddRange(m_lightList.rightEyeLightVolumes);
}
UpdateDataBuffers();
return m_enableBakeShadowMask;

// XRTODO: If possible, we could generate a non-oblique stereo projection
// matrix. It's ok if it's not the exact same matrix, as long as it encompasses
// the same FOV as the original projection matrix (which would mean padding each half
// of the frustum with the max half-angle). We don't need the light information in
// real projection space. We just use screen space to figure out what is proximal
// to a cluster or tile.
// Once we generate this non-oblique projection matrix, it can be shared across both eyes (un-array)

public bool outputSplitLighting;
}
public void RenderDeferredDirectionalShadow(HDCamera hdCamera, RTHandle deferredShadowRT, RenderTargetIdentifier depthTexture, CommandBuffer cmd)
public void RenderDeferredDirectionalShadow(HDCamera hdCamera, RTHandleSystem.RTHandle deferredShadowRT, RenderTargetIdentifier depthTexture, CommandBuffer cmd)
{
if (m_CurrentSunLight == null || m_CurrentSunLight.GetComponent<AdditionalShadowData>() == null || m_CurrentSunLightShadowIndex < 0)
{

16
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightUtils.cs


//radiance = power / (length * (4 * Pi)).
return intensity / (4.0f * Mathf.PI * lineWidth);
}
public static void CalculateAnglesForPyramid(float aspectRatio, float spotAngle, out float angleA, out float angleB)
{
// Since the smallest angle is equal to the FOV, and we don't care about the angle order, simply make sure the aspect ratio is > 1
if (aspectRatio < 1.0f)
aspectRatio = 1.0f / aspectRatio;
angleA = spotAngle * Mathf.Deg2Rad;
var halfAngle = angleA * 0.5f; // half of the smallest angle
var length = Mathf.Tan(halfAngle); // half length of the smallest side of the rectangle
length *= aspectRatio; // half length of the biggest side of the rectangle
halfAngle = Mathf.Atan(length); // half of the biggest angle
angleB = halfAngle * 2.0f;
}
}
}
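A worked usage example for the helper above (numbers are illustrative, not from the diff): with aspectRatio = 2 and spotAngle = 45 degrees, the smallest angle stays 45 degrees, the half length is tan(22.5 deg) ~= 0.414, scaled by 2 to ~= 0.828, and the widest angle becomes 2 * atan(0.828) ~= 79.3 degrees.

float angleA, angleB;
LightUtils.CalculateAnglesForPyramid(2.0f, 45.0f, out angleA, out angleB);
// angleA ~= 0.785 rad (45.0 deg), angleB ~= 1.384 rad (79.3 deg)
Debug.Log(string.Format("angleA = {0:F1} deg, angleB = {1:F1} deg",
    angleA * Mathf.Rad2Deg, angleB * Mathf.Rad2Deg));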

2
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/HomogeneousDensityVolume.cs


private void OnEnable()
{
DensityVolumeManager.manager.RegisterVolume(this);
DensityVolumeManager.manager.DeRegisterVolume(this);
}
private void Update()

11
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/VolumetricLighting.compute


float4 reprojValue = SampleVBuffer(TEXTURE3D_PARAM(_VBufferLightingHistory, s_linear_clamp_sampler),
centerWS,
_PrevViewProjMatrix,
_VBufferResolution,
_VBufferSliceCount.xy,
_VBufferDepthEncodingParams,
_VBufferDepthDecodingParams,
_VBufferPrevResolution,
_VBufferPrevSliceCount.xy,
_VBufferPrevDepthEncodingParams,
_VBufferPrevDepthDecodingParams,
false, false, true);
// Compute the exponential moving average over 'n' frames:

// Store the feedback for the voxel.
// TODO: dynamic lights (which update their position, rotation, cookie or shadow at runtime)
// do not support reprojection and should neither read nor write to the history buffer.
// to the history buffer. This will cause them to alias, but it is the only way
// to prevent ghosting.
// This will cause them to alias, but it is the only way to prevent ghosting.
_VBufferLightingFeedback[voxelCoord] = float4(blendedRadiance, centerTransmInt);

360
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/VolumetricLighting.cs


[Serializable]
public struct DensityVolumeParameters
{
public Color albedo; // Single scattering albedo [0, 1]. Alpha is ignored
public float meanFreePath; // In meters [1, inf]. Should be chromatic - this is an optimization!
public float asymmetry; // Only used if (isLocal == false)
public Color albedo; // Single scattering albedo: [0, 1]. Alpha is ignored
public float meanFreePath; // In meters: [1, 1000000]. Should be chromatic - this is an optimization!
public float asymmetry; // Controls the phase function: [-1, 1]
public void Constrain()
{

Normal,
Ultra,
Count
}
class VBuffer
} // enum VolumetricLightingPreset
[Serializable]
public struct ControllerParameters
{
public float vBufferNearPlane; // Distance in meters
public float vBufferFarPlane; // Distance in meters
public float depthSliceDistributionUniformity; // Controls the exponential depth distribution: [0, 1]
} // struct ControllerParameters
public class VBuffer
public struct Parameters
{
public Vector4 resolution;
public Vector2 sliceCount;
public Vector4 depthEncodingParams;
public Vector4 depthDecodingParams;
public Parameters(int w, int h, int d, ControllerParameters controlParams)
{
resolution = new Vector4(w, h, 1.0f / w, 1.0f / h);
sliceCount = new Vector2(d, 1.0f / d);
depthEncodingParams = Vector4.zero; // C# doesn't allow function calls before all members have been initialized
depthDecodingParams = Vector4.zero; // C# doesn't allow function calls before all members have been initialized
Update(controlParams);
}
public void Update(ControllerParameters controlParams)
{
float n = controlParams.vBufferNearPlane;
float f = controlParams.vBufferFarPlane;
float c = 2 - 2 * controlParams.depthSliceDistributionUniformity; // remap [0, 1] -> [2, 0]
depthEncodingParams = ComputeLogarithmicDepthEncodingParams(n, f, c);
depthDecodingParams = ComputeLogarithmicDepthDecodingParams(n, f, c);
}
} // struct Parameters
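For reference, a conceptual sketch of the logarithmic slice distribution behind the packed encoding/decoding vectors, where c = 2 - 2 * depthSliceDistributionUniformity as in Update() above. This is the underlying mapping only, not HDRP's packed-parameter form.

using UnityEngine;

static class LogDepthDistributionSketch
{
    // Map a view-space distance z in [near, far] to a normalized slice coordinate d in [0, 1].
    public static float Encode(float z, float near, float far, float c)
    {
        c = Mathf.Max(c, 0.001f); // avoid NaNs when the distribution is fully uniform
        return Mathf.Log(c * (z - near) + 1f, 2f) / Mathf.Log(c * (far - near) + 1f, 2f);
    }

    // Inverse mapping: normalized slice coordinate d back to a view-space distance.
    public static float Decode(float d, float near, float far, float c)
    {
        c = Mathf.Max(c, 0.001f);
        return (Mathf.Pow(2f, d * Mathf.Log(c * (far - near) + 1f, 2f)) - 1f) / c + near;
    }
}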
const int k_NumFrames = 2; // Double-buffer history and feedback
const int k_NumBuffers = 4; // See the list below
long m_ViewID = -1; // -1 is invalid; positive for Game Views, 0 otherwise
RenderTexture[] m_Textures = null;
RenderTargetIdentifier[] m_Identifiers = null;
long m_ViewID = -1; // (m_ViewID > 0) if valid
RenderTexture[] m_Textures = null;
RenderTargetIdentifier[] m_Identifiers = null;
Parameters[] m_Params = null; // For the current and the previous frame
public long GetViewID()
{
return m_ViewID;
}
public bool IsValid()
{
return m_ViewID > 0 && m_Textures != null && m_Textures[0] != null;
}
public Parameters GetParameters(uint frameIndex)
{
return m_Params[frameIndex & 1];
}
public void SetParameters(Parameters parameters, uint frameIndex)
{
m_Params[frameIndex & 1] = parameters;
}
Debug.Assert(m_ViewID >= 0);
Debug.Assert(IsValid());
Debug.Assert(m_ViewID >= 0);
Debug.Assert(IsValid());
public RenderTargetIdentifier GetLightingHistoryBuffer() // From the previous frame
public RenderTargetIdentifier GetLightingHistoryBuffer(uint frameIndex) // From the previous frame
Debug.Assert(m_ViewID > 0); // Game View only
return m_Identifiers[k_IndexHistory + (Time.renderedFrameCount & 1)];
Debug.Assert(IsValid());
return m_Identifiers[k_IndexHistory + (frameIndex & 1)];
public RenderTargetIdentifier GetLightingFeedbackBuffer() // For the next frame
public RenderTargetIdentifier GetLightingFeedbackBuffer(uint frameIndex) // For the next frame
Debug.Assert(m_ViewID > 0); // Game View only
return m_Identifiers[k_IndexFeedback - (Time.renderedFrameCount & 1)];
Debug.Assert(IsValid());
return m_Identifiers[k_IndexFeedback - (frameIndex & 1)];
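The indexing above is a two-slot ping-pong keyed on the frame index; a minimal sketch (relative indices, base offsets omitted): history and feedback pick opposite slots and swap every frame, so this frame's feedback write becomes next frame's history read.

static int HistorySlot(uint frameIndex)  { return (int)(frameIndex & 1); }       // read from
static int FeedbackSlot(uint frameIndex) { return (int)(1 - (frameIndex & 1)); } // write to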
public void Create(long viewID, int w, int h, int d)
public void Create(long viewID, int w, int h, int d, ControllerParameters controlParams)
Debug.Assert(viewID >= 0);
Debug.Assert(viewID > 0);
// Only Game Views need history and feedback buffers.
bool isGameView = viewID > 0;
int n = isGameView ? 4 : 2;
m_Textures = new RenderTexture[n];
m_Identifiers = new RenderTargetIdentifier[n];
m_Textures = new RenderTexture[k_NumBuffers];
m_Identifiers = new RenderTargetIdentifier[k_NumBuffers];
m_Params = new Parameters[k_NumFrames];
for (int i = 0; i < n; i++)
for (int i = 0; i < k_NumBuffers; i++)
m_Textures[i] = new RenderTexture(w, h, 0, RenderTextureFormat.ARGBHalf, RenderTextureReadWrite.Linear);
m_Textures[i].hideFlags = HideFlags.HideAndDontSave;
m_Textures[i].filterMode = FilterMode.Trilinear; // Custom
m_Textures[i].dimension = TextureDimension.Tex3D; // TODO: request the thick 3D tiling layout

m_Identifiers[i] = new RenderTargetIdentifier(m_Textures[i]);
}
// Start with the same parameters for both frames. Then incrementally update them.
Parameters parameters = new Parameters(w, h, d, controlParams);
m_Params[0] = parameters;
m_Params[1] = parameters;
}
public void Destroy()

for (int i = 0, n = m_Textures.Length; i < n; i++)
for (int i = 0; i < k_NumBuffers; i++)
{
if (m_Textures[i] != null)
{

m_ViewID = -1;
m_Textures = null;
m_Identifiers = null;
}
public void GetResolution(ref int w, ref int h, ref int d)
{
Debug.Assert(m_Textures != null);
Debug.Assert(m_Textures[0] != null);
Debug.Assert(m_Identifiers != null);
w = m_Textures[0].width;
h = m_Textures[0].height;
d = m_Textures[0].volumeDepth;
}
public long GetViewID()
{
return m_ViewID;
}
public bool IsValid()
{
return m_ViewID >= 0 && m_Textures != null && m_Textures[0] != null;
m_Params = null;
ComputeShader m_VolumeVoxelizationCS = null;
ComputeShader m_VolumetricLightingCS = null;
static ComputeShader m_VolumeVoxelizationCS = null;
static ComputeShader m_VolumetricLightingCS = null;
List<VBuffer> m_VBuffers = null;
List<OrientedBBox> m_VisibleVolumeBounds = null;
List<DensityVolumeData> m_VisibleVolumeData = null;
public const int k_MaxVisibleVolumeCount = 512;
List<VBuffer> m_VBuffers = null;
List<OrientedBBox> m_VisibleVolumeBounds = null;
List<DensityVolumeData> m_VisibleVolumeData = null;
public const int k_MaxVisibleVolumeCount = 512;
static ComputeBuffer s_VisibleVolumeBoundsBuffer = null;
static ComputeBuffer s_VisibleVolumeDataBuffer = null;
float m_VBufferNearPlane = 0.5f; // Distance in meters; dynamic modifications not handled by reprojection
float m_VBufferFarPlane = 64.0f; // Distance in meters; dynamic modifications not handled by reprojection
const float k_LogScale = 0.5f; // Tweak constant, controls the logarithmic depth distribution
static ComputeBuffer s_VisibleVolumeBoundsBuffer = null;
static ComputeBuffer s_VisibleVolumeDataBuffer = null;
public void Build(HDRenderPipelineAsset asset)
{

CoreUtils.SafeRelease(s_VisibleVolumeDataBuffer);
}
public void ResizeVBuffer(HDCamera camera, int screenWidth, int screenHeight)
public void ResizeVBufferAndUpdateProperties(HDCamera camera, uint frameIndex)
var visualEnvironment = VolumeManager.instance.stack.GetComponent<VisualEnvironment>();
if (visualEnvironment == null || visualEnvironment.fogType != FogType.Volumetric) return;
long viewID = camera.GetViewID();
var controller = camera.camera.GetComponent<VolumetricLightingController>();
Debug.Assert(viewID >= 0);
if (camera.camera.cameraType == CameraType.SceneView)
{
// HACK: since it's not possible to add a component to a scene camera,
// we take one from the "main" camera (if present).
Camera mainCamera = Camera.main;
if (mainCamera != null)
{
controller = mainCamera.GetComponent<VolumetricLightingController>();
}
}
if (controller == null) return;
int screenWidth = (int)camera.screenSize.x;
int screenHeight = (int)camera.screenSize.y;
long viewID = camera.GetViewID();
Debug.Assert(viewID > 0);
int w = 0, h = 0, d = 0;
ComputeVBufferResolutionAndScale(preset, screenWidth, screenHeight, ref w, ref h, ref d);

if (vBuffer != null)
{
int width = 0, height = 0, depth = 0;
vBuffer.GetResolution(ref width, ref height, ref depth);
VBuffer.Parameters frameParams = vBuffer.GetParameters(frameIndex);
if (w == width && h == height && d == depth)
if (w == frameParams.resolution.x &&
h == frameParams.resolution.y &&
d == frameParams.sliceCount.x)
// Everything matches, nothing to do here.
// The resolution matches.
// Depth parameters may have changed, so update those.
frameParams.Update(controller.parameters);
vBuffer.SetParameters(frameParams, frameIndex);
return;
}
}

m_VBuffers.Add(vBuffer);
}
vBuffer.Create(viewID, w, h, d);
vBuffer.Create(viewID, w, h, d, controller.parameters);
Debug.Assert(viewID >= 0);
Debug.Assert(viewID > 0);
VBuffer vBuffer = null;

// Since a single voxel corresponds to a tile (e.g. 8x8) of pixels,
// the VBuffer can potentially extend past the boundaries of the viewport.
// The function returns the fraction of the {width, height} of the VBuffer visible on screen.
// Note: for performance reasons, scale is unused (implicitly 1). The error is typically under 1%.
// Note: for performance reasons, the scale is unused (implicitly 1). The error is typically under 1%.
static Vector2 ComputeVBufferResolutionAndScale(VolumetricLightingPreset preset,
int screenWidth, int screenHeight,
ref int w, ref int h, ref int d)

float n = nearPlane;
float f = farPlane;
depthParams.x = Mathf.Log(c, 2) * (1.0f / Mathf.Log(c * (f - n) + 1, 2));
c = Mathf.Max(c, 0.001f); // Avoid NaNs
depthParams.x = Mathf.Log(c, 2) * depthParams.y;
depthParams.z = n - 1.0f / c; // Same
depthParams.w = 0.0f;

float n = nearPlane;
float f = farPlane;
c = Mathf.Max(c, 0.001f); // Avoid NaNs
depthParams.y = c * (f - n) + 1;
depthParams.y = Mathf.Log(c * (f - n) + 1, 2);
depthParams.z = n - 1.0f / c; // Same
depthParams.w = 0.0f;

return (1.0f / (4.0f * Mathf.PI)) * 1.5f * (1.0f - g * g) / (2.0f + g * g);
}
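The value returned above is the angle-independent part of the Cornette-Shanks phase function; a hedged sketch of how it combines with the angle-dependent factor (standard formulation, not copied from the HDRP shader code):

using UnityEngine;

static class CornetteShanksSketch
{
    // 3 * (1 - g^2) / (8 * pi * (2 + g^2)), i.e. the constant returned by the method above.
    public static float PhasePartConstant(float g)
    {
        return (1.0f / (4.0f * Mathf.PI)) * 1.5f * (1.0f - g * g) / (2.0f + g * g);
    }

    // Full Cornette-Shanks phase: constant * (1 + cos^2(theta)) / (1 + g^2 - 2*g*cos(theta))^(3/2).
    public static float Phase(float g, float cosTheta)
    {
        float anglePart = (1.0f + cosTheta * cosTheta)
                        / Mathf.Pow(1.0f + g * g - 2.0f * g * cosTheta, 1.5f);
        return PhasePartConstant(g) * anglePart;
    }
}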
public void PushGlobalParams(HDCamera camera, CommandBuffer cmd)
public void PushGlobalParams(HDCamera camera, CommandBuffer cmd, uint frameIndex)
// Modify the near plane.
// Warning: it can screw up the reprojection. However, we have to do it in order for clustered lighting to work correctly.
m_VBufferNearPlane = camera.camera.nearClipPlane;
var visualEnvironment = VolumeManager.instance.stack.GetComponent<VisualEnvironment>();
if (visualEnvironment.fogType != FogType.Volumetric) return;
VBuffer vBuffer = FindVBuffer(camera.GetViewID());
Debug.Assert(vBuffer != null);
// VisualEnvironment sets global fog parameters: _GlobalAsymmetry, _GlobalScattering, _GlobalExtinction.
int w = 0, h = 0, d = 0;
vBuffer.GetResolution(ref w, ref h, ref d);
VBuffer vBuffer = FindVBuffer(camera.GetViewID());
if (vBuffer == null)
{
// Set the neutral black texture.
cmd.SetGlobalTexture(HDShaderIDs._VBufferLighting, CoreUtils.blackVolumeTexture);
return;
}
// Get the interpolated asymmetry value.
var fog = VolumeManager.instance.stack.GetComponent<VolumetricFog>();

cmd.SetGlobalVector( HDShaderIDs._VBufferResolution, new Vector4(w, h, 1.0f / w, 1.0f / h));
cmd.SetGlobalVector( HDShaderIDs._VBufferSliceCount, new Vector4(d, 1.0f / d));
cmd.SetGlobalVector( HDShaderIDs._VBufferDepthEncodingParams, ComputeLogarithmicDepthEncodingParams(m_VBufferNearPlane, m_VBufferFarPlane, k_LogScale));
cmd.SetGlobalVector( HDShaderIDs._VBufferDepthDecodingParams, ComputeLogarithmicDepthDecodingParams(m_VBufferNearPlane, m_VBufferFarPlane, k_LogScale));
cmd.SetGlobalTexture(HDShaderIDs._VBufferLighting, vBuffer.GetLightingIntegralBuffer());
var currFrameParams = vBuffer.GetParameters(frameIndex);
var prevFrameParams = vBuffer.GetParameters(frameIndex - 1);
cmd.SetGlobalVector( HDShaderIDs._VBufferResolution, currFrameParams.resolution);
cmd.SetGlobalVector( HDShaderIDs._VBufferSliceCount, currFrameParams.sliceCount);
cmd.SetGlobalVector( HDShaderIDs._VBufferDepthEncodingParams, currFrameParams.depthEncodingParams);
cmd.SetGlobalVector( HDShaderIDs._VBufferDepthDecodingParams, currFrameParams.depthDecodingParams);
cmd.SetGlobalVector( HDShaderIDs._VBufferPrevResolution, prevFrameParams.resolution);
cmd.SetGlobalVector( HDShaderIDs._VBufferPrevSliceCount, prevFrameParams.sliceCount);
cmd.SetGlobalVector( HDShaderIDs._VBufferPrevDepthEncodingParams, prevFrameParams.depthEncodingParams);
cmd.SetGlobalVector( HDShaderIDs._VBufferPrevDepthDecodingParams, prevFrameParams.depthDecodingParams);
cmd.SetGlobalTexture(HDShaderIDs._VBufferLighting, vBuffer.GetLightingIntegralBuffer());
}
public DensityVolumeList PrepareVisibleDensityVolumeList(HDCamera camera, CommandBuffer cmd)

if (preset == VolumetricLightingPreset.Off) return densityVolumes;
var visualEnvironment = VolumeManager.instance.stack.GetComponent<VisualEnvironment>();
VBuffer vBuffer = FindVBuffer(camera.GetViewID());
if (vBuffer == null) return densityVolumes;
using (new ProfilingSample(cmd, "Prepare Visible Density Volume List"))
{

m_VisibleVolumeData.Clear();
// Collect all visible finite volume data, and upload it to the GPU.
HomogeneousDensityVolume[] volumes = Object.FindObjectsOfType(typeof(HomogeneousDensityVolume)) as HomogeneousDensityVolume[];
HomogeneousDensityVolume[] volumes = DensityVolumeManager.manager.GetAllVolumes();
// Only test active finite volumes.
if (volume.enabled)
{
// TODO: cache these?
var obb = OrientedBBox.Create(volume.transform);
// TODO: cache these?
var obb = OrientedBBox.Create(volume.transform);
// Handle camera-relative rendering.
obb.center -= camOffset;
// Handle camera-relative rendering.
obb.center -= camOffset;
// Frustum cull on the CPU for now. TODO: do it on the GPU.
if (GeometryUtils.Overlap(obb, camera.frustum, 6, 8))
{
// TODO: cache these?
var data = volume.parameters.GetData();
// Frustum cull on the CPU for now. TODO: do it on the GPU.
if (GeometryUtils.Overlap(obb, camera.frustum, 6, 8))
{
// TODO: cache these?
var data = volume.parameters.GetData();
m_VisibleVolumeBounds.Add(obb);
m_VisibleVolumeData.Add(data);
}
}

// Fill the struct with pointers in order to share the data with the light loop.
densityVolumes.bounds = m_VisibleVolumeBounds;
densityVolumes.density = m_VisibleVolumeData;
return densityVolumes;

public void VolumeVoxelizationPass(DensityVolumeList densityVolumes, HDCamera camera, CommandBuffer cmd, FrameSettings settings)
public void VolumeVoxelizationPass(DensityVolumeList densityVolumes, HDCamera camera, CommandBuffer cmd, FrameSettings settings, uint frameIndex)
var visualEnvironment = VolumeManager.instance.stack.GetComponent<VisualEnvironment>();
if (visualEnvironment.fogType != FogType.Volumetric) return;
VBuffer vBuffer = FindVBuffer(camera.GetViewID());
if (vBuffer == null) return;
using (new ProfilingSample(cmd, "Volume Voxelization"))
{
int numVisibleVolumes = m_VisibleVolumeBounds.Count;

// Clear the render target instead of running the shader.
// Note: the clear must take the global fog into account!
// Use the workaround by running the full shader with 0 density.
VBuffer vBuffer = FindVBuffer(camera.GetViewID());
Debug.Assert(vBuffer != null);
int w = 0, h = 0, d = 0;
vBuffer.GetResolution(ref w, ref h, ref d);
float vFoV = camera.camera.fieldOfView * Mathf.Deg2Rad;
Vector4 resolution = new Vector4(w, h, 1.0f / w, 1.0f / h);
Matrix4x4 transform = HDUtils.ComputePixelCoordToWorldSpaceViewDirectionMatrix(vFoV, resolution, camera.viewMatrix, false);
var frameParams = vBuffer.GetParameters(frameIndex);
Vector4 resolution = frameParams.resolution;
float vFoV = camera.camera.fieldOfView * Mathf.Deg2Rad;
// Compose the matrix which allows us to compute the world space view direction.
Matrix4x4 transform = HDUtils.ComputePixelCoordToWorldSpaceViewDirectionMatrix(vFoV, resolution, camera.viewMatrix, false);
cmd.SetComputeTextureParam(m_VolumeVoxelizationCS, kernel, HDShaderIDs._VBufferDensity, vBuffer.GetDensityBuffer());
cmd.SetComputeBufferParam( m_VolumeVoxelizationCS, kernel, HDShaderIDs._VolumeBounds, s_VisibleVolumeBoundsBuffer);

cmd.SetComputeMatrixParam( m_VolumeVoxelizationCS, HDShaderIDs._VBufferCoordToViewDirWS, transform);
cmd.SetComputeIntParam( m_VolumeVoxelizationCS, HDShaderIDs._NumVisibleDensityVolumes, numVisibleVolumes);
int w = (int)resolution.x;
int h = (int)resolution.y;
// The shader defines GROUP_SIZE_1D = 8.
cmd.DispatchCompute(m_VolumeVoxelizationCS, kernel, (w + 7) / 8, (h + 7) / 8, 1);
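
The dispatch above rounds the pixel count up to whole 8x8 thread groups: (w + 7) / 8 in integer arithmetic is a ceiling division, so edge tiles that are only partially covered still get a group. A small plain-C# sketch of that rounding (hypothetical helper, not an HDRP API):

static class DispatchMath
{
    // With GROUP_SIZE_1D = 8, (pixels + 7) / 8 in integer math equals ceil(pixels / 8).
    public static int GroupCount(int pixels, int groupSize = 8)
    {
        return (pixels + groupSize - 1) / groupSize;
    }
    // e.g. GroupCount(1920) == 240, GroupCount(1921) == 241
}
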

return coords;
}
public void VolumetricLightingPass(HDCamera camera, CommandBuffer cmd, FrameSettings settings)
public void VolumetricLightingPass(HDCamera camera, CommandBuffer cmd, FrameSettings settings, uint frameIndex)
var visualEnvironment = VolumeManager.instance.stack.GetComponent<VisualEnvironment>();
if (visualEnvironment.fogType != FogType.Volumetric)
{
// Clear the render target instead of running the shader.
// CoreUtils.SetRenderTarget(cmd, vBuffer.GetDensityBuffer(), ClearFlag.Color, CoreUtils.clearColorAllBlack);
// return;
// Clearing 3D textures does not seem to work!
// Use the workaround by running the full shader with 0 density.
}
var visualEnvironment = VolumeManager.instance.stack.GetComponent<VisualEnvironment>();
if (visualEnvironment.fogType != FogType.Volumetric) return;
VBuffer vBuffer = FindVBuffer(camera.GetViewID());
Debug.Assert(vBuffer != null);
VBuffer vBuffer = FindVBuffer(camera.GetViewID());
if (vBuffer == null) return;
using (new ProfilingSample(cmd, "Volumetric Lighting"))
{
// Only available in the Play Mode because all the frame counters in the Edit Mode are broken.
bool enableClustered = settings.lightLoopSettings.enableTileAndCluster;
bool enableReprojection = Application.isPlaying && camera.camera.cameraType == CameraType.Game;

: "VolumetricLightingBruteforce");
}
int w = 0, h = 0, d = 0;
vBuffer.GetResolution(ref w, ref h, ref d);
var frameParams = vBuffer.GetParameters(frameIndex);
Vector4 resolution = frameParams.resolution;
float vFoV = camera.camera.fieldOfView * Mathf.Deg2Rad;
Vector4 resolution = new Vector4(w, h, 1.0f / w, 1.0f / h);
Matrix4x4 transform = HDUtils.ComputePixelCoordToWorldSpaceViewDirectionMatrix(vFoV, resolution, camera.viewMatrix, false);
Vector2[] xySeq = GetHexagonalClosePackedSpheres7();

// | x | x | x | x | x | x | x |
float[] zSeq = {7.0f/14.0f, 3.0f/14.0f, 11.0f/14.0f, 5.0f/14.0f, 9.0f/14.0f, 1.0f/14.0f, 13.0f/14.0f};
int rfc = Time.renderedFrameCount;
int sampleIndex = rfc % 7;
Vector4 offset = new Vector4(xySeq[sampleIndex].x, xySeq[sampleIndex].y, zSeq[sampleIndex], rfc);
int sampleIndex = (int)frameIndex % 7;
// TODO: should we somehow reorder offsets in Z based on the offset in XY? S.t. the samples more evenly cover the domain.
// Currently, we assume that they are completely uncorrelated, but maybe we should correlate them somehow.
Vector4 offset = new Vector4(xySeq[sampleIndex].x, xySeq[sampleIndex].y, zSeq[sampleIndex], frameIndex);
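
The jitter above cycles through a 7-sample sequence: XY comes from the hexagonal close packing of 7 spheres and Z from the odd multiples of 1/14, indexed by frameIndex modulo 7 so seven consecutive frames cover a froxel fairly evenly before the pattern repeats. A plain-C# sketch of the Z part (hypothetical helper, not an HDRP API):

static class JitterSequence
{
    // Same Z sequence as zSeq above: the odd multiples of 1/14 in a shuffled order.
    static readonly float[] zSeq = { 7f/14f, 3f/14f, 11f/14f, 5f/14f, 9f/14f, 1f/14f, 13f/14f };

    public static float GetZJitter(uint frameIndex)
    {
        return zSeq[(int)(frameIndex % 7)];
    }
}
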
// Get the interpolated asymmetry value.
var fog = VolumeManager.instance.stack.GetComponent<VolumetricFog>();

cmd.SetComputeTextureParam(m_VolumetricLightingCS, kernel, HDShaderIDs._VBufferLightingIntegral, vBuffer.GetLightingIntegralBuffer()); // Write
if (enableReprojection)
{
cmd.SetComputeTextureParam(m_VolumetricLightingCS, kernel, HDShaderIDs._VBufferLightingFeedback, vBuffer.GetLightingFeedbackBuffer()); // Write
cmd.SetComputeTextureParam(m_VolumetricLightingCS, kernel, HDShaderIDs._VBufferLightingHistory, vBuffer.GetLightingHistoryBuffer()); // Read
cmd.SetComputeTextureParam(m_VolumetricLightingCS, kernel, HDShaderIDs._VBufferLightingFeedback, vBuffer.GetLightingFeedbackBuffer(frameIndex)); // Write
cmd.SetComputeTextureParam(m_VolumetricLightingCS, kernel, HDShaderIDs._VBufferLightingHistory, vBuffer.GetLightingHistoryBuffer(frameIndex)); // Read
int w = (int)resolution.x;
int h = (int)resolution.y;
// The shader defines GROUP_SIZE_1D = 8.
cmd.DispatchCompute(m_VolumetricLightingCS, kernel, (w + 7) / 8, (h + 7) / 8, 1);

8
ScriptableRenderPipeline/HDRenderPipeline/HDRP/MRTBufferManager.cs


{
protected int m_BufferCount;
protected RenderTargetIdentifier[] m_RTIDs;
protected RTHandle[] m_RTs;
protected RTHandleSystem.RTHandle[] m_RTs;
protected int[] m_TextureShaderIDs;
public int bufferCount { get { return m_BufferCount; } }

m_BufferCount = maxBufferCount;
m_RTIDs = new RenderTargetIdentifier[maxBufferCount];
m_RTs = new RTHandle[maxBufferCount];
m_RTs = new RTHandleSystem.RTHandle[maxBufferCount];
m_TextureShaderIDs = new int[maxBufferCount];
}

return m_RTIDs;
}
public RTHandle GetBuffer(int index)
public RTHandleSystem.RTHandle GetBuffer(int index)
{
Debug.Assert(index < m_BufferCount);
return m_RTs[index];

{
for (int i = 0; i < m_BufferCount; ++i)
{
RTHandle.Release(m_RTs[i]);
RTHandles.Release(m_RTs[i]);
m_RTs[i] = null;
}
}
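
The change above is essentially a rename of the render-target handle API: fields become RTHandleSystem.RTHandle and allocation/release go through the RTHandles static class. A minimal sketch of that pairing, based only on the calls visible in this diff (the namespace and argument choices are assumptions):

using UnityEngine;
using UnityEngine.Experimental.Rendering;

class ExampleBufferOwner
{
    RTHandleSystem.RTHandle m_Buffer;

    public void Allocate()
    {
        // Scale-driven allocation relative to the camera target, mirroring the managers above.
        m_Buffer = RTHandles.Alloc(Vector2.one, filterMode: FilterMode.Point,
                                   colorFormat: RenderTextureFormat.ARGB32,
                                   sRGB: true, name: "ExampleBuffer");
    }

    public void Release()
    {
        RTHandles.Release(m_Buffer);
        m_Buffer = null;
    }
}
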

8
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/Decal/DBufferManager.cs


{
public int vsibleDecalCount { get; set; }
RTHandle m_HTile;
RTHandleSystem.RTHandle m_HTile;
public DBufferManager()
: base(Decal.GetMaterialDBufferCount())

for (int dbufferIndex = 0; dbufferIndex < m_BufferCount; ++dbufferIndex)
{
m_RTs[dbufferIndex] = RTHandle.Alloc(Vector2.one, colorFormat: rtFormat[dbufferIndex], sRGB: sRGBFlags[dbufferIndex], filterMode: FilterMode.Point, name: string.Format("DBuffer{0}", dbufferIndex));
m_RTs[dbufferIndex] = RTHandles.Alloc(Vector2.one, colorFormat: rtFormat[dbufferIndex], sRGB: sRGBFlags[dbufferIndex], filterMode: FilterMode.Point, name: string.Format("DBuffer{0}", dbufferIndex));
m_HTile = RTHandle.Alloc(size => new Vector2Int((size.x + 7) / 8, (size.y + 7) / 8), filterMode: FilterMode.Point, colorFormat: RenderTextureFormat.R8, sRGB: false, enableRandomWrite: true, name: "DBufferHTile"); // Enable UAV
m_HTile = RTHandles.Alloc(size => new Vector2Int((size.x + 7) / 8, (size.y + 7) / 8), filterMode: FilterMode.Point, colorFormat: RenderTextureFormat.R8, sRGB: false, enableRandomWrite: true, name: "DBufferHTile"); // Enable UAV
RTHandle.Release(m_HTile);
RTHandles.Release(m_HTile);
}
public void ClearTargets(CommandBuffer cmd, HDCamera camera)

4
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/GBufferManager.cs


for (int gbufferIndex = 0; gbufferIndex < m_GBufferCount; ++gbufferIndex)
{
m_RTs[gbufferIndex] = RTHandle.Alloc(Vector2.one, colorFormat: rtFormat[gbufferIndex], sRGB: sRGBFlags[gbufferIndex], filterMode: FilterMode.Point, name: string.Format("GBuffer{0}", gbufferIndex));
m_RTs[gbufferIndex] = RTHandles.Alloc(Vector2.one, colorFormat: rtFormat[gbufferIndex], sRGB: sRGBFlags[gbufferIndex], filterMode: FilterMode.Point, name: string.Format("GBuffer{0}", gbufferIndex));
m_RTIDs[gbufferIndex] = m_RTs[gbufferIndex].nameID;
m_TextureShaderIDs[gbufferIndex] = HDShaderIDs._GBufferTexture[gbufferIndex];
m_RTIDsNoShadowMask[gbufferIndex] = HDShaderIDs._GBufferTexture[gbufferIndex];

{
m_RTs[m_GBufferCount] = RTHandle.Alloc(Vector2.one, colorFormat: Builtin.GetShadowMaskBufferFormat(), sRGB: Builtin.GetShadowMaskSRGBFlag(), filterMode: FilterMode.Point, name: "GBufferShadowMask");
m_RTs[m_GBufferCount] = RTHandles.Alloc(Vector2.one, colorFormat: Builtin.GetShadowMaskBufferFormat(), sRGB: Builtin.GetShadowMaskSRGBFlag(), filterMode: FilterMode.Point, name: "GBufferShadowMask");
m_RTIDs[m_GBufferCount] = new RenderTargetIdentifier(m_RTs[m_GBufferCount]);
m_TextureShaderIDs[m_GBufferCount] = HDShaderIDs._ShadowMaskTexture;
}

4
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/Lit/Lit.hlsl


float roughness = PerceptualRoughnessToRoughness(preLightData.iblPerceptualRoughness);
R = lerp(R, preLightData.iblR, saturate(smoothstep(0, 1, roughness * roughness)));
float3 sampleDirectionDiscardWS = lightData.sampleDirectionDiscardWS;
if (dot(sampleDirectionDiscardWS, R) < 0) // Used by planar reflection to early-reject the opposite plane's reflection; neutral for reflection probes
return lighting;
float3 F = preLightData.specularFGD;
float iblMipLevel = PerceptualRoughnessToMipmapLevel(preLightData.iblPerceptualRoughness);

11
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/PreIntegratedFGD/PreIntegratedFGD.cs


int m_refCounting;
// For image based lighting
Material m_InitPreFGD;
Material m_PreIntegratedFGDMaterial;
RenderTexture m_PreIntegratedFGD;
PreIntegratedFGD()

if (m_refCounting == 0)
{
m_InitPreFGD = CoreUtils.CreateEngineMaterial("Hidden/HDRenderPipeline/PreIntegratedFGD");
var hdrp = GraphicsSettings.renderPipelineAsset as HDRenderPipelineAsset;
m_PreIntegratedFGDMaterial = CoreUtils.CreateEngineMaterial(hdrp.renderPipelineResources.preIntegratedFGD);
m_PreIntegratedFGD = new RenderTexture(128, 128, 0, RenderTextureFormat.ARGB2101010, RenderTextureReadWrite.Linear);
m_PreIntegratedFGD.hideFlags = HideFlags.HideAndDontSave;

if (m_isInit)
return;
using (new ProfilingSample(cmd, "Init PreFGD"))
using (new ProfilingSample(cmd, "PreIntegratedFGD Material Generation"))
CoreUtils.DrawFullScreen(cmd, m_InitPreFGD, new RenderTargetIdentifier(m_PreIntegratedFGD));
CoreUtils.DrawFullScreen(cmd, m_PreIntegratedFGDMaterial, new RenderTargetIdentifier(m_PreIntegratedFGD));
}
m_isInit = true;

if (m_refCounting == 0)
{
CoreUtils.Destroy(m_InitPreFGD);
CoreUtils.Destroy(m_PreIntegratedFGDMaterial);
CoreUtils.Destroy(m_PreIntegratedFGD);
m_isInit = false;

24
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/StackLit/StackLit.hlsl


//#include "../SubsurfaceScattering/SubsurfaceScattering.hlsl"
//#include "CoreRP/ShaderLibrary/VolumeRendering.hlsl"
//NEWLITTODO : wire up CBUFFERs for ambient occlusion, and other uniforms and samplers used:
//
// We need this for AO, Depth/Color pyramids, LTC lights data, FGD pre-integrated data.
//

// This function is similar to ApplyDebugToSurfaceData but for BSDFData
//
// NOTE:
// This will be available and used in ShaderPassForward.hlsl since in StackLit.shader,
// just before including the core code of the pass (ShaderPassForward.hlsl) we include
// Material.hlsl (or Lighting.hlsl which includes it) which in turn includes us,
// StackLit.shader, via the #if defined(UNITY_MATERIAL_*) glue mechanism.
//
void ApplyDebugToBSDFData(inout BSDFData bsdfData)

// this can also be used in case of debug lighting modes like specular only
//NEWLITTODO
//bool overrideSpecularColor = _DebugLightingSpecularColor.x != 0.0;

//-----------------------------------------------------------------------------
// PreLightData
//
// Make sure we respect naming conventions to reuse ShaderPassForward as is,
// ie struct (even if opaque to the ShaderPassForward) name is PreLightData,
// GetPreLightData prototype.

float NdotL = dot(N, L);
//float LdotV = dot(L, V);
// color and attenuation are outputted by EvaluateLight:
float3 color;
float attenuation;
EvaluateLight_Directional(lightLoopContext, posInput, lightData, bakeLightingData, N, L, color, attenuation);

return lighting;
}
// NEWLITTODO: For a reference rendering option for area light, like LIT_DISPLAY_REFERENCE_AREA option in eg EvaluateBSDF_<area light type> :
//#include "LitReference.hlsl"
//-----------------------------------------------------------------------------

// Steps are:
// -Calculate influence weights from intersection with the proxies.
// Since the weights are influence blending weights, we can correctly
// use our lobe weight and mix them.
// -Fudge the sampling direction to dampen boundary artefacts.
// -Do early discard for planar reflections.

// (second part of the split-sum approx.,
// and common to all Env. Lights. using the same BSDF and
// we only have GGX thus only one FGD map for now)
// -Multiply the two split sum terms together for each lobe
// and lerp them and/or add them.
// Note: using influenceShapeType and projectionShapeType instead of (lightData|proxyData).shapeType allows compiler optimizations in case the type is known (like for sky)

109
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/StackLit/StackLit.shader


// Reminder: colors here are in linear space but the UI (color picker) does the sRGB to linear conversion
// Be careful, do not change the name here to _Color. It will conflict with the "fake" parameters (see end of properties) required for GI.
_BaseColor("BaseColor", Color) = (1,1,1,1)
_BaseColorMap("BaseColorMap", 2D) = "white" {}
_BaseColorMap("BaseColor Map", 2D) = "white" {}
[HideInInspector] _BaseColorMapShow("BaseColor Map Show", Float) = 0
_BaseColorMapUV("BaseColor Map UV", Float) = 0.0
_BaseColorMapUVLocal("BaseColorMap UV Local", Float) = 0.0
[HideInInspector] _MetallicMapShow("Metallic Map Show", Float) = 0
_MetallicMap("Metallic Map", 2D) = "black" {}
_MetallicUseMap("Metallic Use Map", Float) = 0
_MetallicMapUV("Metallic Map UV", Float) = 0.0
_MetallicMapUVLocal("Metallic Map UV Local", Float) = 0.0
_MetallicMapChannel("Metallic Map Channel", Float) = 0.0
_MetallicMapChannelMask("Metallic Map Channel Mask", Vector) = (1, 0, 0, 0)
_MetallicRemap("Metallic Remap", Vector) = (0, 1, 0, 0)
[ToggleUI] _MetallicRemapInverted("Invert Metallic Remap", Float) = 0.0
[HideInInspector] _MetallicRange("Metallic Range", Vector) = (0, 1, 0, 0)
[HideInInspector] _SmoothnessAMapShow("SmoothnessA Map Show", Float) = 0
_SmoothnessARemapMin("SmoothnessARemapMin", Float) = 0.0
_SmoothnessARemapMax("SmoothnessARemapMax", Float) = 1.0
_SmoothnessAMap("SmoothnessA Map", 2D) = "white" {}
_SmoothnessAUseMap("SmoothnessA Use Map", Float) = 0
_SmoothnessAMapUV("SmoothnessA Map UV", Float) = 0.0
_SmoothnessAMapUVLocal("_SmoothnessA Map UV Local", Float) = 0.0
_SmoothnessAMapChannel("SmoothnessA Map Channel", Float) = 0.0
_SmoothnessAMapChannelMask("SmoothnessA Map Channel Mask", Vector) = (1, 0, 0, 0)
_SmoothnessARemap("SmoothnessA Remap", Vector) = (0, 1, 0, 0)
[ToggleUI] _SmoothnessARemapInverted("Invert SmoothnessA Remap", Float) = 0.0
[HideInInspector] _SmoothnessARange("SmoothnessA Range", Vector) = (0, 1, 0, 0)
[HideInInspector] _SmoothnessBMapShow("SmoothnessB Map Show", Float) = 0
_SmoothnessBRemapMin("SmoothnessBRemapMin", Float) = 0.0
_SmoothnessBRemapMax("SmoothnessBRemapMax", Float) = 1.0
_LobeMix("lobeMix", Range(0.0, 1.0)) = 0
_MaskMapA("MaskMapA", 2D) = "white" {}
_MaskMapB("MaskMapB", 2D) = "white" {}
_SmoothnessBMap("SmoothnessB Map", 2D) = "white" {}
_SmoothnessBUseMap("SmoothnessB Use Map", Float) = 0
_SmoothnessBMapUV("SmoothnessB Map UV", Float) = 0.0
_SmoothnessBMapUVLocal("SmoothnessB Map UV Local", Float) = 0.0
_SmoothnessBMapChannel("SmoothnessB Map Channel", Float) = 0.0
_SmoothnessBMapChannelMask("SmoothnessB Map Channel Mask", Vector) = (1, 0, 0, 0)
_SmoothnessBRemap("SmoothnessB Remap", Vector) = (0, 1, 0, 0)
[ToggleUI] _SmoothnessBRemapInverted("Invert SmoothnessB Remap", Float) = 0.0
[HideInInspector] _SmoothnessBRange("SmoothnessB Range", Vector) = (0, 1, 0, 0)
_LobeMix("Lobe Mix", Range(0.0, 1.0)) = 0
// TODO: TangentMap, AnisotropyMap and CoatIorMap (SmoothnessMap ?)

_CoatThickness("Coat Thickness", Range(0.0, 0.99)) = 0.0
_CoatExtinction("Coat Extinction Coefficient", Color) = (1,1,1,1) // in thickness^-1 units
[HideInInspector] _NormalMapShow("NormalMap Show", Float) = 0.0
_NormalScale("_NormalScale", Range(0.0, 2.0)) = 1
_NormalMapUV("NormalMapUV", Float) = 0.0
_NormalScale("Normal Scale", Range(0.0, 2.0)) = 1
[Enum(UV0, 0, UV1, 1, UV2, 2, UV3, 3, Planar, 4, Triplanar, 5)] _UVBase("UV Set for base", Float) = 0
[HideInInspector] _UVMappingMask("_UVMappingMask", Color) = (1, 0, 0, 0)
[HideInInspector] _EmissiveColorMapShow("Emissive Color Map Show", Float) = 0.0
_EmissiveColor("Emissive Color", Color) = (1, 1, 1)
_EmissiveColorMap("Emissive Color Map", 2D) = "white" {}
_EmissiveColorMapUV("Emissive Color Map UV", Range(0.0, 1.0)) = 0
_EmissiveColorMapUVLocal("Emissive Color Map UV Local", Float) = 0.0
_EmissiveIntensity("Emissive Intensity", Float) = 0
[ToggleUI] _AlbedoAffectEmissive("Albedo Affect Emissive", Float) = 0.0
[HideInInspector] _SubsurfaceMaskMapShow("Subsurface Mask Map Show", Float) = 0
_SubsurfaceMask("Subsurface Mask", Range(0.0, 1.0)) = 1.0
_SubsurfaceMaskMap("Subsurface Mask Map", 2D) = "black" {}
_SubsurfaceMaskUseMap("Subsurface Mask Use Map", Float) = 0
_SubsurfaceMaskMapUV("Subsurface Mask Map UV", Float) = 0.0
_SubsurfaceMaskMapChannel("Subsurface Mask Map Channel", Float) = 0.0
_SubsurfaceMaskMapChannelMask("Subsurface Mask Map Channel Mask", Vector) = (1, 0, 0, 0)
_SubsurfaceMaskRemap("Subsurface Mask Remap", Vector) = (0, 1, 0, 0)
[ToggleUI] _SubsurfaceMaskRemapInverted("Invert Subsurface Mask Remap", Float) = 0.0
[HideInInspector] _SubsurfaceMaskRange("Subsurface Mask Range", Vector) = (0, 1, 0, 0)
_EmissiveColor("EmissiveColor", Color) = (1, 1, 1)
_EmissiveColorMap("EmissiveColorMap", 2D) = "white" {}
_EmissiveIntensity("EmissiveIntensity", Float) = 0
[ToggleUI] _AlbedoAffectEmissive("Albedo Affect Emissive", Float) = 0.0
[HideInInspector] _ThicknessMapShow("Thickness Show", Float) = 0
_Thickness("Thickness", Range(0.0, 1.0)) = 1.0
_ThicknessMap("Thickness Map", 2D) = "black" {}
_ThicknessUseMap("Thickness Use Map", Float) = 0
_ThicknessMapUV("Thickness Map UV", Float) = 0.0
_ThicknessMapChannel("Thickness Map Channel", Float) = 0.0
_ThicknessMapChannelMask("Thickness Map Channel Mask", Vector) = (1, 0, 0, 0)
_ThicknessRemap("Thickness Remap", Vector) = (0, 1, 0, 0)
[ToggleUI] _ThicknessRemapInverted("Invert Thickness Remap", Float) = 0.0
[HideInInspector] _ThicknessRange("Thickness Range", Vector) = (0, 1, 0, 0)
_DistortionVectorMap("DistortionVectorMap", 2D) = "black" {}
[ToggleUI] _DistortionEnable("Enable Distortion", Float) = 0.0

_AlphaCutoff("Alpha Cutoff", Range(0.0, 1.0)) = 0.5
_TransparentSortPriority("_TransparentSortPriority", Float) = 0
// Stencil state
[HideInInspector] _StencilRef("_StencilRef", Int) = 2 // StencilLightingUsage.RegularLighting (fixed at compile time)
[HideInInspector] _StencilWriteMask("_StencilWriteMask", Int) = 7 // StencilMask.Lighting (fixed at compile time)

[ToggleUI] _EnableFogOnTransparent("Enable Fog", Float) = 1.0
[ToggleUI] _EnableBlendModePreserveSpecularLighting("Enable Blend Mode Preserve Specular Lighting", Float) = 1.0
// Sections show values.
[HideInInspector] _StandardShow("_StandardShow", Float) = 0.0
[HideInInspector] _EmissiveShow("_EmissiveShow", Float) = 0.0
[HideInInspector] _CoatShow("_CoatShow", Float) = 0.0
[HideInInspector] _SSSShow("_SSSShow", Float) = 0.0
[HideInInspector] _Lobe2Show("_Lobe2Show", Float) = 0.0
[HideInInspector] _AnisotropyShow("_AnisotropyShow", Float) = 0.0
[HideInInspector] _TransmissionShow("_TransmissionShow", Float) = 0.0
[HideInInspector] _IridescenceShow("_IridescenceShow", Float) = 0.0
[HideInInspector] _GlintShow("_GlintShow", Float) = 0.0
// Caution: C# code in BaseLitUI.cs calls LightmapEmissionFlagsProperty() which assumes that there is an existing "_EmissionColor"
// value that exists to identify whether GI emission needs to be enabled.
// In our case we don't use such a mechanism but need to keep the code quiet. We declare the value and always enable it.

#pragma shader_feature _DOUBLESIDED_ON
#pragma shader_feature _NORMALMAP_TANGENT_SPACE
#pragma shader_feature _ _REQUIRE_UV2 _REQUIRE_UV3
#pragma shader_feature _USE_UV2
#pragma shader_feature _USE_UV3
#pragma shader_feature _USE_TRIPLANAR
// ...TODO: for surface gradient framework eg see litdata.hlsl,
// but we need it right away for toggle with LayerTexCoord mapping so we might need them
// from the Frag input right away. See also ShaderPass/StackLitSharePass.hlsl.
#pragma shader_feature _NORMALMAP

ZWrite [_ZWrite]
Cull [_CullModeForward]
//
// NOTE: For _CullModeForward, see BaseLitUI and the handling of TransparentBackfaceEnable:
// Basically, we need to use it to support a TransparentBackface pass before this pass
// (and it should be placed just before this one) for separate backface and frontface rendering,
// eg for "hair shader style" approximate sorting, see eg Thorsten Scheuermann writeups on this:

//
// See Lit.shader and the order of the passes after a DistortionVectors, we have:
// TransparentDepthPrepass, TransparentBackface, Forward, TransparentDepthPostpass
HLSLPROGRAM

300
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/StackLit/StackLitData.hlsl


#include "CoreRP/ShaderLibrary/Sampling/SampleUVMapping.hlsl"
#include "../MaterialUtilities.hlsl"
//-----------------------------------------------------------------------------
// Texture Mapping (think of LayerTexCoord as simply TexCoordMappings,
// ie no more layers here - cf Lit materials)
//-----------------------------------------------------------------------------
////-----------------------------------------------------------------------------
//// Texture Mapping (think of LayerTexCoord as simply TexCoordMappings,
//// ie no more layers here - cf Lit materials)
////-----------------------------------------------------------------------------
// For easier copying of code for now use a LayerTexCoord wrapping struct.
// We don't have details yet.
////
//// For easier copying of code for now use a LayerTexCoord wrapping struct.
//// We don't have details yet.
////
//// NEWLITTODO: Eventually, we could quickly share GetBuiltinData of LitBuiltinData.hlsl
//// in our GetSurfaceAndBuiltinData( ) here, since we will use the LayerTexCoord identifier,
//// and an identical ComputeLayerTexCoord( ) prototype
////
//struct LayerTexCoord
//{
// UVMapping base;
// NEWLITTODO: Eventually, we could quickly share GetBuiltinData of LitBuiltinData.hlsl
// in our GetSurfaceAndBuiltinData( ) here, since we will use the LayerTexCoord identifier,
// and an identical ComputeLayerTexCoord( ) prototype
// // Store information that will be share by all UVMapping
// float3 vertexNormalWS; // TODO: store also object normal map for object triplanar
//};
struct LayerTexCoord
{
UVMapping base;
UVMapping details;
//// Want to use only one sampler for normalmap/bentnormalmap either we use OS or TS. And either we have normal map or bent normal or both.
////
//// Note (compared to Lit shader):
////
//// We don't have a layered material with which we are sharing code here like the LayeredLit shader, but we can also save a couple of
//// samplers later if we use bentnormals.
////
//// _IDX suffix is meaningless here, could use the name SAMPLER_NORMALMAP_ID instead of SAMPLER_NORMALMAP_IDX and replace all
//// indirect #ifdef _NORMALMAP_TANGENT_SPACE_IDX #ifdef and _NORMALMAP_IDX tests with the more direct
//// shader_feature keywords _NORMALMAP_TANGENT_SPACE and _NORMALMAP.
////
//// (Originally in the LayeredLit shader, shader_feature keywords like _NORMALMAP become _NORMALMAP0 but since files are shared,
//// LitDataIndividualLayer will use a generic _NORMALMAP_IDX defined before its inclusion by the client LitData or LayeredLitData.
//// That way, LitDataIndividualLayer supports multiple inclusions)
// Store information that will be shared by all UVMapping
float3 vertexNormalWS; // TODO: store also object normal map for object triplanar
};
// Want to use only one sampler for normalmap/bentnormalmap either we use OS or TS. And either we have normal map or bent normal or both.
//
// Note (compared to Lit shader):
//
// We don't have a layered material with which we are sharing code here like the LayeredLit shader, but we can also save a couple of
// samplers later if we use bentnormals.
//
// _IDX suffix is meaningless here, could use the name SAMPLER_NORMALMAP_ID instead of SAMPLER_NORMALMAP_IDX and replace all
// indirect #ifdef _NORMALMAP_TANGENT_SPACE_IDX #ifdef and _NORMALMAP_IDX tests with the more direct
// shader_feature keywords _NORMALMAP_TANGENT_SPACE and _NORMALMAP.
//
// (Originally in the LayeredLit shader, shader_feature keywords like _NORMALMAP become _NORMALMAP0 but since files are shared,
// LitDataIndividualLayer will use a generic _NORMALMAP_IDX defined before its inclusion by the client LitData or LayeredLitData.
// That way, LitDataIndividualLayer supports multiple inclusions)
//
//
#ifdef _NORMALMAP_TANGENT_SPACE
#if defined(_NORMALMAP)
#define SAMPLER_NORMALMAP_ID sampler_NormalMap

//#endif
#endif
void ComputeLayerTexCoord( // Uv related parameters
float2 texCoord0, float2 texCoord1, float2 texCoord2, float2 texCoord3, float4 uvMappingMask,
// scale and bias for base
float2 texScale, float2 texBias,
// mapping type and output
int mappingType, inout LayerTexCoord layerTexCoord)
{
//TODO: Planar, Triplanar, detail map, surface_gradient.
// Handle uv0, uv1, uv2, uv3 based on _UVMappingMask weight (exclusive 0..1)
float2 uvBase = uvMappingMask.x * texCoord0 +
uvMappingMask.y * texCoord1 +
uvMappingMask.z * texCoord2 +
uvMappingMask.w * texCoord3;
// Copy data in uvmapping fields: used by generic sampling code (see especially SampleUVMappingNormalInternal.hlsl)
layerTexCoord.base.mappingType = mappingType;
layerTexCoord.base.normalWS = layerTexCoord.vertexNormalWS;
// Apply tiling options
layerTexCoord.base.uv = uvBase * texScale + texBias;
}
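
ComputeLayerTexCoord above selects one of the four UV sets with an exclusive 0/1 mapping mask and then applies the texture's scale/bias (the _ST values). A plain-C# sketch of that selection per component (hypothetical helper):

static class UVSelect
{
    // mask and uv both have 4 entries (uv0..uv3); exactly one mask entry is 1, the rest are 0.
    public static float SelectUVComponent(float[] mask, float[] uv, float scale, float bias)
    {
        float result = 0.0f;
        for (int i = 0; i < 4; i++)
            result += mask[i] * uv[i];
        return result * scale + bias;
    }
}
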
float3 GetNormalTS(FragInputs input, LayerTexCoord layerTexCoord, float3 detailNormalTS, float detailMask)
float3 GetNormalTS(FragInputs input, float2 texCoord, float3 detailNormalTS, float detailMask)
{
// TODO: different spaces (eg #ifdef _NORMALMAP_TANGENT_SPACE #elif object space, SURFACE_GRADIENT, etc.)
// and use detail map

// Note we don't use the _NORMALMAP_IDX mechanism of the Lit shader, since we don't have "layers", we can
normalTS = SAMPLE_UVMAPPING_NORMALMAP(_NormalMap, SAMPLER_NORMALMAP_ID, layerTexCoord.base, _NormalScale);
normalTS = float3(0.0, 0.0, 1.0); //normalTS = SAMPLE_UVMAPPING_NORMALMAP(_NormalMap, SAMPLER_NORMALMAP_ID, texCoord, _NormalScale);
#else
normalTS = float3(0.0, 0.0, 1.0);
#endif

// This may be called directly by the tessellation (domain) shader, thus all parts regarding surface gradients must be done
// in functions taking FragInputs as a parameter
// layerTexCoord must have been initialized to 0 outside of this function
void GetLayerTexCoord(float2 texCoord0, float2 texCoord1, float2 texCoord2, float2 texCoord3,
float3 positionWS, float3 vertexNormalWS, inout LayerTexCoord layerTexCoord)
//-----------------------------------------------------------------------------
// Texture Mapping
//-----------------------------------------------------------------------------
#define TEXCOORD_INDEX_UV0 (0)
#define TEXCOORD_INDEX_UV1 (1)
#define TEXCOORD_INDEX_UV2 (2)
#define TEXCOORD_INDEX_UV3 (3)
#define TEXCOORD_INDEX_PLANAR_XY (4)
#define TEXCOORD_INDEX_PLANAR_YZ (5)
#define TEXCOORD_INDEX_PLANAR_ZX (6)
#define TEXCOORD_INDEX_TRIPLANAR (7)
#define TEXCOORD_INDEX_COUNT (TEXCOORD_INDEX_TRIPLANAR) // Triplanar is not considered as having a mapping
struct TextureUVMapping
layerTexCoord.vertexNormalWS = vertexNormalWS;
// TODO:
//layerTexCoord.triplanarWeights = ComputeTriplanarWeights(vertexNormalWS);
float2 texcoords[TEXCOORD_INDEX_COUNT][2];
#ifdef _USE_TRIPLANAR
float3 triplanarWeights[2];
#endif
#ifdef SURFACE_GRADIENT
// float3 vertexTangentWS[4];
// float3 vertexBitangentWS[4];
#endif
};
int mappingType = UV_MAPPING_UVSET;
float4 SampleTexture2DPlanar(TEXTURE2D_ARGS(textureName, samplerName), float textureNameUV, float textureNameUVLocal, float4 textureNameST, TextureUVMapping uvMapping)
{
return SAMPLE_TEXTURE2D(textureName, samplerName, (uvMapping.texcoords[textureNameUV][textureNameUVLocal] * textureNameST.xy + textureNameST.zw));
}
//TODO: _MAPPING_PLANAR, _MAPPING_TRIPLANAR
// If we use triplanar on any of the properties, then we enable the triplanar path
#ifdef _USE_TRIPLANAR
float4 SampleTexture2DTriplanar(TEXTURE2D_ARGS(textureName, samplerName), float textureNameUV, float textureNameUVLocal, float4 textureNameST, TextureUVMapping uvMapping)
{
if (textureNameUV == TEXCOORD_INDEX_TRIPLANAR)
{
float4 val = float4(0.0, 0.0, 0.0, 0.0);
// Be sure that the compiler is aware that we don't use UV1 to UV3 for main layer so it can optimize code
ComputeLayerTexCoord( texCoord0, texCoord1, texCoord2, texCoord3, _UVMappingMask, /* TODO _UVDetailsMappingMask, */
_BaseColorMap_ST.xy, _BaseColorMap_ST.zw, /* TODO _DetailMap_ST.xy, _DetailMap_ST.zw, 1.0, _LinkDetailsWithBase,
/* TODO positionWS, _TexWorldScale, */
mappingType, layerTexCoord);
if (uvMapping.triplanarWeights[textureNameUVLocal].x > 0.0)
val += uvMapping.triplanarWeights[textureNameUVLocal].x * SAMPLE_TEXTURE2D(textureName, samplerName, (uvMapping.texcoords[TEXCOORD_INDEX_PLANAR_YZ][textureNameUVLocal] * textureNameST.xy + textureNameST.zw));
if (uvMapping.triplanarWeights[textureNameUVLocal].y > 0.0)
val += uvMapping.triplanarWeights[textureNameUVLocal].y * SAMPLE_TEXTURE2D(textureName, samplerName, (uvMapping.texcoords[TEXCOORD_INDEX_PLANAR_ZX][textureNameUVLocal] * textureNameST.xy + textureNameST.zw));
if (uvMapping.triplanarWeights[textureNameUVLocal].z > 0.0)
val += uvMapping.triplanarWeights[textureNameUVLocal].z * SAMPLE_TEXTURE2D(textureName, samplerName, (uvMapping.texcoords[TEXCOORD_INDEX_PLANAR_XY][textureNameUVLocal] * textureNameST.xy + textureNameST.zw));
return val;
}
else
{
return SampleTexture2DPlanar(TEXTURE2D_PARAM(textureName, samplerName), textureNameUV, textureNameUVLocal, textureNameST, uvMapping);
}
// This is called only in this file
// layerTexCoord must have been initialized to 0 outside of this function
void GetLayerTexCoord(FragInputs input, inout LayerTexCoord layerTexCoord)
#define SAMPLE_TEXTURE2D_SCALE_BIAS(name) SampleTexture2DTriplanar(name, sampler##name, name##UV, name##UVLocal, name##_ST, uvMapping)
#else
#define SAMPLE_TEXTURE2D_SCALE_BIAS(name) SampleTexture2DPlanar(name, sampler##name, name##UV, name##UVLocal, name##_ST, uvMapping)
#endif // _USE_TRIPLANAR
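
SampleTexture2DTriplanar above blends three planar projections (YZ, ZX, XY) of the same texture, weighted by the per-axis triplanar weights computed from the vertex normal, and skips projections with zero weight. The same blend expressed as plain C# for clarity (the sample values and weights stand in for the texture fetches and ComputeTriplanarWeights):

static class TriplanarBlend
{
    public static float Blend(float sampleYZ, float sampleZX, float sampleXY,
                              float wx, float wy, float wz)
    {
        float result = 0.0f;
        if (wx > 0.0f) result += wx * sampleYZ; // X-facing surfaces use the YZ projection
        if (wy > 0.0f) result += wy * sampleZX; // Y-facing surfaces use the ZX projection
        if (wz > 0.0f) result += wz * sampleXY; // Z-facing surfaces use the XY projection
        return result;
    }
}
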
void InitializeMappingData(FragInputs input, out TextureUVMapping uvMapping)
// TODO: SURFACE_GRADIENT
//#ifdef SURFACE_GRADIENT
//GenerateLayerTexCoordBasisTB(input, layerTexCoord);
//#endif
float3 position = GetAbsolutePositionWS(input.positionWS);
float2 uvXZ;
float2 uvXY;
float2 uvZY;
// Build the texcoords array.
uvMapping.texcoords[TEXCOORD_INDEX_UV0][0] = uvMapping.texcoords[TEXCOORD_INDEX_UV0][1] = input.texCoord0.xy;
uvMapping.texcoords[TEXCOORD_INDEX_UV1][0] = uvMapping.texcoords[TEXCOORD_INDEX_UV1][1] = input.texCoord1.xy;
uvMapping.texcoords[TEXCOORD_INDEX_UV2][0] = uvMapping.texcoords[TEXCOORD_INDEX_UV2][1] = input.texCoord2.xy;
uvMapping.texcoords[TEXCOORD_INDEX_UV3][0] = uvMapping.texcoords[TEXCOORD_INDEX_UV3][1] = input.texCoord3.xy;
// planar/triplanar
GetTriplanarCoordinate(position, uvXZ, uvXY, uvZY);
uvMapping.texcoords[TEXCOORD_INDEX_PLANAR_XY][0] = uvXY;
uvMapping.texcoords[TEXCOORD_INDEX_PLANAR_YZ][0] = uvZY;
uvMapping.texcoords[TEXCOORD_INDEX_PLANAR_ZX][0] = uvXZ;
// If we use local planar mapping, convert to local space
position = TransformWorldToObject(position);
GetTriplanarCoordinate(position, uvXZ, uvXY, uvZY);
uvMapping.texcoords[TEXCOORD_INDEX_PLANAR_XY][1] = uvXY;
uvMapping.texcoords[TEXCOORD_INDEX_PLANAR_YZ][1] = uvZY;
uvMapping.texcoords[TEXCOORD_INDEX_PLANAR_ZX][1] = uvXZ;
#ifdef _USE_TRIPLANAR
float3 vertexNormal = input.worldToTangent[2].xyz;
uvMapping.triplanarWeights[0] = ComputeTriplanarWeights(vertexNormal);
// If we use local planar mapping, convert to local space
vertexNormal = TransformWorldToObjectDir(vertexNormal);
uvMapping.triplanarWeights[1] = ComputeTriplanarWeights(vertexNormal);
#endif
// Normal mapping with surface gradient
#ifdef SURFACE_GRADIENT
float3 vertexNormalWS = input.worldToTangent[2];
uvMapping.vertexTangentWS[0] = input.worldToTangent[0];
uvMapping.vertexBitangentWS[0] = input.worldToTangent[1];
GetLayerTexCoord( input.texCoord0, input.texCoord1, input.texCoord2, input.texCoord3,
input.positionWS, input.worldToTangent[2].xyz, layerTexCoord);
float3 dPdx = ddx_fine(input.positionWS);
float3 dPdy = ddy_fine(input.positionWS);
float3 sigmaX = dPdx - dot(dPdx, vertexNormalWS) * vertexNormalWS;
float3 sigmaY = dPdy - dot(dPdy, vertexNormalWS) * vertexNormalWS;
//float flipSign = dot(sigmaY, cross(vertexNormalWS, sigmaX) ) ? -1.0 : 1.0;
float flipSign = dot(dPdy, cross(vertexNormalWS, dPdx)) < 0.0 ? -1.0 : 1.0; // gives same as the commented out line above
SurfaceGradientGenBasisTB(vertexNormalWS, sigmaX, sigmaY, flipSign, input.texCoord1, uvMapping.vertexTangentWS[1], uvMapping.vertexBitangentWS[1]);
SurfaceGradientGenBasisTB(vertexNormalWS, sigmaX, sigmaY, flipSign, input.texCoord2, uvMapping.vertexTangentWS[2], uvMapping.vertexBitangentWS[2]);
SurfaceGradientGenBasisTB(vertexNormalWS, sigmaX, sigmaY, flipSign, input.texCoord3, uvMapping.vertexTangentWS[3], uvMapping.vertexBitangentWS[3]);
#endif // SURFACE_GRADIENT
// ...Texture Mapping
// GetSurfaceAndBuiltinData
// cf with
// LitBuiltinData.hlsl:GetBuiltinData()
ApplyDoubleSidedFlipOrMirror(input); // Apply double sided flip on the vertex normal.
LayerTexCoord layerTexCoord;
ZERO_INITIALIZE(LayerTexCoord, layerTexCoord);
GetLayerTexCoord(input, layerTexCoord);
TextureUVMapping uvMapping;
InitializeMappingData(input, uvMapping);
// -------------------------------------------------------------
// Surface Data:

//float3 bentNormalTS;
//float3 bentNormalWS;
//float alpha = SAMPLE_TEXTURE2D(_BaseColorMap, sampler_BaseColorMap, baseColorMapUv).a * _BaseColor.a;
float alpha = SAMPLE_UVMAPPING_TEXTURE2D(_BaseColorMap, sampler_BaseColorMap, layerTexCoord.base).a * _BaseColor.a;
float alpha = SAMPLE_TEXTURE2D_SCALE_BIAS(_BaseColorMap).a * _BaseColor.a;
#ifdef _ALPHATEST_ON
//NEWLITTODO: Once we include those passes in the main StackLit.shader, add handling of CUTOFF_TRANSPARENT_DEPTH_PREPASS and _POSTPASS
// and the related properties (in the .shader) and uniforms (in the StackLitProperties file) _AlphaCutoffPrepass, _AlphaCutoffPostpass

float3 detailNormalTS = float3(0.0, 0.0, 0.0);
float detailMask = 0.0;
//TODO remove the following and use fetching macros that use uvmapping :
//float2 baseColorMapUv = TRANSFORM_TEX(input.texCoord0, _BaseColorMap);
//surfaceData.baseColor = SAMPLE_TEXTURE2D(_BaseColorMap, sampler_BaseColorMap, baseColorMapUv).rgb * _BaseColor.rgb;
surfaceData.baseColor = SAMPLE_UVMAPPING_TEXTURE2D(_BaseColorMap, sampler_BaseColorMap, layerTexCoord.base).rgb * _BaseColor.rgb;
surfaceData.baseColor = SAMPLE_TEXTURE2D_SCALE_BIAS(_BaseColorMap).rgb * _BaseColor.rgb;
//surfaceData.normalWS = float3(0.0, 0.0, 0.0);
normalTS = GetNormalTS(input, layerTexCoord, detailNormalTS, detailMask);
normalTS = float3(0, 0, 1); // GetNormalTS(input, texcoords[_NormalMapUV], detailNormalTS, detailMask);
#if defined(_MASKMAPA)
surfaceData.perceptualSmoothnessA = SAMPLE_UVMAPPING_TEXTURE2D(_MaskMapA, sampler_MaskMapA, layerTexCoord.base).a;
surfaceData.perceptualSmoothnessA = lerp(_SmoothnessARemapMin, _SmoothnessARemapMax, surfaceData.perceptualSmoothnessA);
#else
surfaceData.perceptualSmoothnessA = _SmoothnessA;
#endif
surfaceData.perceptualSmoothnessA = dot(SAMPLE_TEXTURE2D_SCALE_BIAS(_SmoothnessAMap), _SmoothnessAMapChannelMask);
surfaceData.perceptualSmoothnessA = lerp(_SmoothnessARange.x, _SmoothnessARange.y, surfaceData.perceptualSmoothnessA);
surfaceData.perceptualSmoothnessA = lerp(_SmoothnessA, surfaceData.perceptualSmoothnessA, _SmoothnessAUseMap);
surfaceData.perceptualSmoothnessB = dot(SAMPLE_TEXTURE2D_SCALE_BIAS(_SmoothnessBMap), _SmoothnessBMapChannelMask);
surfaceData.perceptualSmoothnessB = lerp(_SmoothnessBRange.x, _SmoothnessBRange.y, surfaceData.perceptualSmoothnessB);
surfaceData.perceptualSmoothnessB = lerp(_SmoothnessB, surfaceData.perceptualSmoothnessB, _SmoothnessBUseMap);
#if defined(_MASKMAPB)
surfaceData.perceptualSmoothnessB = SAMPLE_UVMAPPING_TEXTURE2D(_MaskMapB, sampler_MaskMapB, layerTexCoord.base).a;
surfaceData.perceptualSmoothnessB = lerp(_SmoothnessBRemapMin, _SmoothnessBRemapMax, surfaceData.perceptualSmoothnessB);
#else
surfaceData.perceptualSmoothnessB = _SmoothnessB;
#endif
// MaskMapA is RGBA: Metallic, Ambient Occlusion (Optional), detail Mask (Optional), Smoothness
#ifdef _MASKMAPA
surfaceData.metallic = SAMPLE_UVMAPPING_TEXTURE2D(_MaskMapA, sampler_MaskMapA, layerTexCoord.base).r;
#else
surfaceData.metallic = 1.0;
#endif
surfaceData.metallic *= _Metallic;
surfaceData.metallic = dot(SAMPLE_TEXTURE2D_SCALE_BIAS(_MetallicMap), _MetallicMapChannelMask);
surfaceData.metallic = lerp(_MetallicRange.x, _MetallicRange.y, surfaceData.metallic);
surfaceData.metallic = lerp(_Metallic, surfaceData.metallic, _MetallicUseMap);
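
The metallic block above (and both smoothness blocks) follow the same pattern: sample the map, pick one channel via a dot with the channel mask, remap it into the [range.x, range.y] interval, then select between the constant and the mapped value with the *_UseMap toggle. A plain-C# sketch of that remap (hypothetical helper):

static class RemapSketch
{
    public static float RemapMapped(float sampledChannel, float rangeMin, float rangeMax,
                                    float constantValue, float useMap)
    {
        float remapped = rangeMin + (rangeMax - rangeMin) * sampledChannel; // lerp(range.x, range.y, s)
        return constantValue + (remapped - constantValue) * useMap;         // lerp(constant, remapped, useMap)
    }
}
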
// TODO: As we add features, or-set the flags eg MATERIALFEATUREFLAGS_LIT_* with #ifdef
// on corresponding _MATERIAL_FEATURE_* shader_feature keywords (set by UI) so the compiler
// knows the value of surfaceData.materialFeatures.
surfaceData.materialFeatures = MATERIALFEATUREFLAGS_LIT_STANDARD;

GetNormalWS(input, V, normalTS, surfaceData.normalWS); // MaterialUtilities.hlsl
surfaceData.baseColor = GetTextureDataDebug(_DebugMipMapMode, layerTexCoord.base.uv, _BaseColorMap, _BaseColorMap_TexelSize, _BaseColorMap_MipInfo, surfaceData.baseColor);
surfaceData.baseColor = GetTextureDataDebug(_DebugMipMapMode, texcoords[_BaseColorMapUV], _BaseColorMap, _BaseColorMap_TexelSize, _BaseColorMap_MipInfo, surfaceData.baseColor);
surfaceData.metallic = 0;
}
#endif

// -------------------------------------------------------------
// NEWLITTODO: for all BuiltinData, might need to just refactor and use a common function like that
// contained in LitBuiltinData.hlsl
builtinData.opacity = alpha;

// Emissive Intensity is only used here, but is part of BuiltinData to enforce UI parameters as we want the users to fill one color and one intensity
builtinData.emissiveIntensity = _EmissiveIntensity; // We still store intensity here so we can reuse it with debug code
builtinData.emissiveColor = _EmissiveColor * builtinData.emissiveIntensity * lerp(float3(1.0, 1.0, 1.0), surfaceData.baseColor.rgb, _AlbedoAffectEmissive);
#ifdef _EMISSIVE_COLOR_MAP
builtinData.emissiveColor *= SAMPLE_TEXTURE2D(_EmissiveColorMap, sampler_EmissiveColorMap, TRANSFORM_TEX(input.texCoord0, _EmissiveColorMap)).rgb;
#endif
builtinData.emissiveColor *= SAMPLE_TEXTURE2D_SCALE_BIAS(_EmissiveColorMap).rgb;
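
The emissive term above is the emissive color times the intensity, optionally tinted by the base color via the _AlbedoAffectEmissive toggle, then modulated by the emissive map sample. Per channel, as a plain-C# sketch (hypothetical helper):

static class EmissiveSketch
{
    public static float EmissiveChannel(float emissiveColor, float intensity,
                                        float baseColor, float albedoAffectEmissive,
                                        float mapSample)
    {
        float albedoTint = 1.0f + (baseColor - 1.0f) * albedoAffectEmissive; // lerp(1, baseColor, toggle)
        return emissiveColor * intensity * albedoTint * mapSample;
    }
}
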
//NEWLITTODO: shader feature SHADOWS_SHADOWMASK not there yet.
builtinData.shadowMask0 = 0.0;
builtinData.shadowMask1 = 0.0;
builtinData.shadowMask2 = 0.0;

#endif
builtinData.depthOffset = 0.0;
}

84
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/StackLit/StackLitProperties.hlsl


TEXTURE2D(_DistortionVectorMap);
SAMPLER(sampler_DistortionVectorMap);
TEXTURE2D(_EmissiveColorMap);
SAMPLER(sampler_EmissiveColorMap);
TEXTURE2D(_MaskMapA);
SAMPLER(sampler_MaskMapA);
TEXTURE2D(_MetallicMap);
SAMPLER(sampler_MetallicMap);
TEXTURE2D(_SmoothnessAMap);
SAMPLER(sampler_SmoothnessAMap);
TEXTURE2D(_SmoothnessBMap);
SAMPLER(sampler_SmoothnessBMap);
TEXTURE2D(_MaskMapB);
SAMPLER(sampler_MaskMapB);
TEXTURE2D(_SubsurfaceMaskMap);
SAMPLER(sampler_SubsurfaceMaskMap);
TEXTURE2D(_ThicknessMap);
SAMPLER(sampler_ThicknessMap);
TEXTURE2D(_EmissiveColorMap);
SAMPLER(sampler_EmissiveColorMap);
TEXTURE2D(_NormalMap);
SAMPLER(sampler_NormalMap);

float4 _BaseColorMap_ST;
float4 _BaseColorMap_TexelSize;
float4 _BaseColorMap_MipInfo;
float _BaseColorMapUV;
float _BaseColorMapUVLocal;
float _MetallicUseMap;
float _MetallicMapUV;
float _MetallicMapUVLocal;
float4 _MetallicMap_ST;
float4 _MetallicMap_TexelSize;
float4 _MetallicMap_MipInfo;
float4 _MetallicMapChannelMask;
float4 _MetallicRange;
float _SmoothnessARemapMin;
float _SmoothnessARemapMax;
float _SmoothnessAUseMap;
float _SmoothnessAMapUV;
float _SmoothnessAMapUVLocal;
float4 _SmoothnessAMap_ST;
float4 _SmoothnessAMap_TexelSize;
float4 _SmoothnessAMap_MipInfo;
float4 _SmoothnessAMapChannelMask;
float4 _SmoothnessARange;
float _SmoothnessBRemapMin;
float _SmoothnessBRemapMax;
float _SmoothnessBUseMap;
float _SmoothnessBMapUV;
float _SmoothnessBMapUVLocal;
float4 _SmoothnessBMap_ST;
float4 _SmoothnessBMap_TexelSize;
float4 _SmoothnessBMap_MipInfo;
float4 _SmoothnessBMapChannelMask;
float4 _SmoothnessBRange;
float _LobeMix;
float _Anisotropy;

float4 _CoatExtinction;
float _NormalScale;
float4 _UVMappingMask;
float _NormalMapUV;
float4 _NormalMap_ST;
float4 _NormalMap_TexelSize;
float4 _NormalMap_MipInfo;
float4 _EmissiveColorMap_TexelSize;
float4 _EmissiveColorMap_MipInfo;
float _EmissiveColorMapUV;
float _EmissiveColorMapUVLocal;
float _SubsurfaceMask;
float _SubsurfaceMaskUseMap;
float _SubsurfaceMaskMapUV;
float4 _SubsurfaceMaskMap_ST;
float4 _SubsurfaceMaskMap_TexelSize;
float4 _SubsurfaceMaskMap_MipInfo;
float4 _SubsurfaceMaskMapChannelMask;
float4 _SubsurfaceMaskRange;
float _Thickness;
float _ThicknessUseMap;
float _ThicknessMapUV;
float4 _ThicknessMap_ST;
float4 _ThicknessMap_TexelSize;
float4 _ThicknessMap_MipInfo;
float4 _ThicknessMapChannelMask;
float4 _ThicknessRange;
float _AlphaCutoff;
float4 _DoubleSidedConstants;

float _DistortionBlurScale;
float _DistortionBlurRemapMin;
float _DistortionBlurRemapMax;
// Caution: C# code in BaseLitUI.cs calls LightmapEmissionFlagsProperty() which assumes that there is an existing "_EmissionColor"
// value that exists to identify whether GI emission needs to be enabled.

22
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/SubsurfaceScattering/SubsurfaceScatteringManager.cs


public int sssBufferCount { get { return k_MaxSSSBuffer; } }
RTHandle[] m_ColorMRTs = new RTHandle[k_MaxSSSBuffer];
RTHandleSystem.RTHandle[] m_ColorMRTs = new RTHandleSystem.RTHandle[k_MaxSSSBuffer];
bool[] m_ExternalBuffer = new bool[k_MaxSSSBuffer];
// Disney SSS Model

RTHandle m_HTile;
RTHandleSystem.RTHandle m_HTile;
// End Disney SSS Model
// Jimenez SSS Model

// Jimenez needs an extra buffer and Disney needs one on some platforms
RTHandle m_CameraFilteringBuffer;
RTHandleSystem.RTHandle m_CameraFilteringBuffer;
// This is used to be able to read the stencil value in a compute shader
Material m_CopyStencilForSplitLighting;

{
// In case of full forward we must allocate the render target for forward SSS (or reuse one already existing)
// TODO: Provide a way to reuse a render target
m_ColorMRTs[0] = RTHandle.Alloc(Vector2.one, filterMode: FilterMode.Point, colorFormat: RenderTextureFormat.ARGB32, sRGB: true, name: "SSSBuffer");
m_ColorMRTs[0] = RTHandles.Alloc(Vector2.one, filterMode: FilterMode.Point, colorFormat: RenderTextureFormat.ARGB32, sRGB: true, name: "SSSBuffer");
m_ExternalBuffer[0] = false;
}
else

if (ShaderConfig.k_UseDisneySSS == 0 || NeedTemporarySubsurfaceBuffer())
{
// Caution: must be same format as m_CameraSssDiffuseLightingBuffer
m_CameraFilteringBuffer = RTHandle.Alloc(Vector2.one, filterMode: FilterMode.Point, colorFormat: RenderTextureFormat.RGB111110Float, sRGB: false, enableRandomWrite: true, enableMSAA: true, name: "SSSCameraFiltering"); // Enable UAV
m_CameraFilteringBuffer = RTHandles.Alloc(Vector2.one, filterMode: FilterMode.Point, colorFormat: RenderTextureFormat.RGB111110Float, sRGB: false, enableRandomWrite: true, enableMSAA: true, name: "SSSCameraFiltering"); // Enable UAV
m_HTile = RTHandle.Alloc(size => new Vector2Int((size.x + 7) / 8, (size.y + 7) / 8), filterMode: FilterMode.Point, colorFormat: RenderTextureFormat.R8, sRGB: false, enableRandomWrite: true, name: "SSSHtile"); // Enable UAV
m_HTile = RTHandles.Alloc(size => new Vector2Int((size.x + 7) / 8, (size.y + 7) / 8), filterMode: FilterMode.Point, colorFormat: RenderTextureFormat.R8, sRGB: false, enableRandomWrite: true, name: "SSSHtile"); // Enable UAV
public RTHandle GetSSSBuffer(int index)
public RTHandleSystem.RTHandle GetSSSBuffer(int index)
{
Debug.Assert(index < sssBufferCount);
return m_ColorMRTs[index];

{
if (!m_ExternalBuffer[i])
{
RTHandle.Release(m_ColorMRTs[i]);
RTHandles.Release(m_ColorMRTs[i]);
RTHandle.Release(m_CameraFilteringBuffer);
RTHandle.Release(m_HTile);
RTHandles.Release(m_CameraFilteringBuffer);
RTHandles.Release(m_HTile);
}
public void PushGlobalParams(CommandBuffer cmd, DiffusionProfileSettings sssParameters, FrameSettings frameSettings)

// Combines specular lighting and diffuse lighting with subsurface scattering.
public void SubsurfaceScatteringPass(HDCamera hdCamera, CommandBuffer cmd, DiffusionProfileSettings sssParameters, FrameSettings frameSettings,
RTHandle colorBufferRT, RTHandle diffuseBufferRT, RTHandle depthStencilBufferRT, RTHandle depthTextureRT)
RTHandleSystem.RTHandle colorBufferRT, RTHandleSystem.RTHandle diffuseBufferRT, RTHandleSystem.RTHandle depthStencilBufferRT, RTHandleSystem.RTHandle depthTextureRT)
{
if (sssParameters == null || !frameSettings.enableSubsurfaceScattering)
return;

5
ScriptableRenderPipeline/HDRenderPipeline/HDRP/RenderPipeline/FrameSettings.cs


public bool enableMSAA = false;
public MSAASamples msaaSampleCount { get; private set; }
public bool enableShadowMask = false;
public bool enableShadowMask = true;
public LightLoopSettings lightLoopSettings = new LightLoopSettings();

aggregate.enableObjectMotionVectors = camera.cameraType != CameraType.Reflection && srcFrameSettings.enableObjectMotionVectors && renderPipelineSettings.supportMotionVectors;
aggregate.enableDBuffer = srcFrameSettings.enableDBuffer && renderPipelineSettings.supportDBuffer;
aggregate.enableAtmosphericScattering = srcFrameSettings.enableAtmosphericScattering;
// We must take care of the scene view fog flags in the editor
if (!CoreUtils.IsSceneViewFogEnabled(camera))
aggregate.enableAtmosphericScattering = false;
aggregate.enableRoughRefraction = srcFrameSettings.enableRoughRefraction;
aggregate.enableTransparentPostpass = srcFrameSettings.enableTransparentPostpass;
aggregate.enableDistortion = camera.cameraType != CameraType.Reflection && srcFrameSettings.enableDistortion;

120
ScriptableRenderPipeline/HDRenderPipeline/HDRP/RenderPipelineResources/BufferPyramid.cs


using System;
using System.Collections.Generic;
using UnityEngine.Rendering;
namespace UnityEngine.Experimental.Rendering.HDPipeline

RTHandle m_ColorPyramidBuffer;
List<RTHandle> m_ColorPyramidMips = new List<RTHandle>();
RTHandle m_DepthPyramidBuffer;
List<RTHandle> m_DepthPyramidMips = new List<RTHandle>();
public RTHandle colorPyramid { get { return m_ColorPyramidBuffer; } }
public RTHandle depthPyramid { get { return m_DepthPyramidBuffer; } }
List<RTHandleSystem.RTHandle> m_ColorPyramidMips = new List<RTHandleSystem.RTHandle>();
List<RTHandleSystem.RTHandle> m_DepthPyramidMips = new List<RTHandleSystem.RTHandle>();
BufferPyramidProcessor m_Processor;

return scale;
}
public void CreateBuffers()
{
m_ColorPyramidBuffer = RTHandle.Alloc(size => CalculatePyramidSize(size), filterMode: FilterMode.Trilinear, colorFormat: RenderTextureFormat.ARGBHalf, sRGB: false, useMipMap: true, autoGenerateMips: false, enableRandomWrite: true, name: "ColorPyramid");
m_DepthPyramidBuffer = RTHandle.Alloc(size => CalculatePyramidSize(size), filterMode: FilterMode.Trilinear, colorFormat: RenderTextureFormat.RGFloat, sRGB: false, useMipMap: true, autoGenerateMips: false, enableRandomWrite: true, name: "DepthPyramid"); // Need randomReadWrite because we downsample the first mip with a compute shader.
}
RTHandle.Release(m_ColorPyramidBuffer);
RTHandle.Release(m_DepthPyramidBuffer);
{
RTHandle.Release(rth);
}
RTHandles.Release(rth);
{
RTHandle.Release(rth);
}
RTHandles.Release(rth);
public int GetPyramidLodCount(HDCamera camera)
public int GetPyramidLodCount(Vector2Int size)
var minSize = Mathf.Min(camera.actualWidth, camera.actualHeight);
return Mathf.FloorToInt(Mathf.Log(minSize, 2f));
var minSize = Mathf.Min(size.x, size.y);
return Mathf.Max(0, Mathf.FloorToInt(Mathf.Log(minSize, 2f)));
}
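
GetPyramidLodCount above now derives the mip count from an explicit size and clamps it: floor(log2(min(width, height))), never negative, so a degenerate render target yields zero LODs and the callers further down can early-out with a warning. A plain-C# sketch of that computation (hypothetical helper):

using System;

static class PyramidMath
{
    public static int LodCount(int width, int height)
    {
        int minSize = Math.Min(width, height);
        if (minSize <= 0) return 0;                     // degenerate target: no mips
        return (int)Math.Floor(Math.Log(minSize, 2.0)); // floor(log2(min dimension))
    }
}
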
Vector2Int CalculatePyramidMipSize(Vector2Int baseMipSize, int mipIndex)

return new Vector2Int((int)(pyramidSize * GetXRscale()), pyramidSize);
}
void UpdatePyramidMips(HDCamera camera, RenderTextureFormat format, List<RTHandle> mipList, int lodCount)
void UpdatePyramidMips(HDCamera camera, RenderTextureFormat format, List<RTHandleSystem.RTHandle> mipList, int lodCount)
{
int currentLodCount = mipList.Count;
if (lodCount > currentLodCount)

int mipIndexCopy = i + 1; // Don't remove this copy! It's important for the value to be correctly captured by the lambda.
RTHandle newMip = RTHandle.Alloc(size => CalculatePyramidMipSize(CalculatePyramidSize(size), mipIndexCopy), colorFormat: format, sRGB: false, enableRandomWrite: true, useMipMap: false, filterMode: FilterMode.Bilinear, name: string.Format("PyramidMip{0}", i));
var newMip = RTHandles.Alloc(size => CalculatePyramidMipSize(CalculatePyramidSize(size), mipIndexCopy), colorFormat: format, sRGB: false, enableRandomWrite: true, useMipMap: false, filterMode: FilterMode.Bilinear, name: string.Format("PyramidMip{0}", i));
public Vector2 GetPyramidToScreenScale(HDCamera camera)
public Vector2 GetPyramidToScreenScale(HDCamera camera, RTHandleSystem.RTHandle rth)
return new Vector2((float)camera.actualWidth / m_DepthPyramidBuffer.rt.width, (float)camera.actualHeight / m_DepthPyramidBuffer.rt.height);
return new Vector2((float)camera.actualWidth / rth.rt.width, (float)camera.actualHeight / rth.rt.height);
}
public void RenderDepthPyramid(

RTHandle depthTexture)
RTHandleSystem.RTHandle sourceDepthTexture,
RTHandleSystem.RTHandle targetDepthTexture)
int lodCount = GetPyramidLodCount(hdCamera);
UpdatePyramidMips(hdCamera, m_DepthPyramidBuffer.rt.format, m_DepthPyramidMips, lodCount);
int lodCount = Mathf.Min(
GetPyramidLodCount(targetDepthTexture.referenceSize),
GetPyramidLodCount(new Vector2Int(hdCamera.actualWidth, hdCamera.actualHeight))
);
if (lodCount == 0)
{
Debug.LogWarning("The target for the pyramid buffer has an invalid size. Skipping DepthPyramid calculation.");
return;
}
Vector2 scale = GetPyramidToScreenScale(hdCamera);
UpdatePyramidMips(hdCamera, targetDepthTexture.rt.format, m_DepthPyramidMips, lodCount);
Vector2 scale = GetPyramidToScreenScale(hdCamera, targetDepthTexture);
cmd.SetGlobalVector(HDShaderIDs._DepthPyramidSize, new Vector4(hdCamera.actualWidth, hdCamera.actualHeight, 1f / hdCamera.actualWidth, 1f / hdCamera.actualHeight));
cmd.SetGlobalVector(HDShaderIDs._DepthPyramidScale, new Vector4(scale.x, scale.y, lodCount, 0.0f));

depthTexture,
m_DepthPyramidBuffer,
sourceDepthTexture,
targetDepthTexture,
cmd.SetGlobalTexture(HDShaderIDs._DepthPyramidTexture, m_DepthPyramidBuffer);
cmd.SetGlobalTexture(HDShaderIDs._DepthPyramidTexture, targetDepthTexture);
}
public void RenderColorPyramid(

RTHandle colorTexture)
RTHandleSystem.RTHandle sourceColorTexture,
RTHandleSystem.RTHandle targetColorTexture)
int lodCount = GetPyramidLodCount(hdCamera);
UpdatePyramidMips(hdCamera, m_ColorPyramidBuffer.rt.format, m_ColorPyramidMips, lodCount);
int lodCount = Mathf.Min(
GetPyramidLodCount(targetColorTexture.referenceSize),
GetPyramidLodCount(new Vector2Int(hdCamera.actualWidth, hdCamera.actualHeight))
);
if (lodCount == 0)
{
Debug.LogWarning("The target for the pyramid buffer has an invalid size. Skipping ColorPyramid calculation.");
return;
}
Vector2 scale = GetPyramidToScreenScale(hdCamera);
UpdatePyramidMips(hdCamera, targetColorTexture.rt.format, m_ColorPyramidMips, lodCount);
Vector2 scale = GetPyramidToScreenScale(hdCamera, targetColorTexture);
cmd.SetGlobalVector(HDShaderIDs._ColorPyramidSize, new Vector4(hdCamera.actualWidth, hdCamera.actualHeight, 1f / hdCamera.actualWidth, 1f / hdCamera.actualHeight));
cmd.SetGlobalVector(HDShaderIDs._ColorPyramidScale, new Vector4(scale.x, scale.y, lodCount, 0.0f));

colorTexture,
m_ColorPyramidBuffer,
sourceColorTexture,
targetColorTexture,
cmd.SetGlobalTexture(HDShaderIDs._ColorPyramidTexture, m_ColorPyramidBuffer);
cmd.SetGlobalTexture(HDShaderIDs._ColorPyramidTexture, targetColorTexture);
}
public RTHandleSystem.RTHandle AllocColorRT(string id, int frameIndex, RTHandleSystem rtHandleSystem)
{
return rtHandleSystem.Alloc(
size => CalculatePyramidSize(size),
filterMode: FilterMode.Trilinear,
colorFormat: RenderTextureFormat.ARGBHalf,
sRGB: false,
useMipMap: true,
autoGenerateMips: false,
enableRandomWrite: true,
name: string.Format("ColorPyramid-{0}-{1}", id, frameIndex)
);
}
public RTHandleSystem.RTHandle AllocDepthRT(string id, int frameIndex, RTHandleSystem rtHandleSystem)
{
return rtHandleSystem.Alloc(
size => CalculatePyramidSize(size),
filterMode: FilterMode.Trilinear,
colorFormat: RenderTextureFormat.RGFloat,
sRGB: false,
useMipMap: true,
autoGenerateMips: false,
enableRandomWrite: true, // Need randomReadWrite because we downsample the first mip with a compute shader.
name: string.Format("DepthPyramid-{0}-{1}", id, frameIndex)
);
}
}
}

27
ScriptableRenderPipeline/HDRenderPipeline/HDRP/RenderPipelineResources/BufferPyramidProcessor.cs


public void RenderDepthPyramid(
int width, int height,
CommandBuffer cmd,
RTHandle sourceTexture,
RTHandle targetTexture,
List<RTHandle> mips,
RTHandleSystem.RTHandle sourceTexture,
RTHandleSystem.RTHandle targetTexture,
List<RTHandleSystem.RTHandle> mips,
int lodCount,
Vector2 scale
)

RTHandle src = targetTexture;
var src = targetTexture;
RTHandle dest = mips[i];
var dest = mips[i];
var srcMip = new RectInt(0, 0, width >> i, height >> i);
var dstMip = new RectInt(0, 0, srcMip.width >> 1, srcMip.height >> 1);

cmd.SetComputeTextureParam(m_DepthPyramidCS, kernel, _Result, dest);
cmd.SetComputeVectorParam(m_DepthPyramidCS, _SrcSize, new Vector4(
srcWorkMip.width, srcWorkMip.height,
(1.0f / srcWorkMip.width) * scale.x, (1.0f / srcWorkMip.height) * scale.y)
(1.0f / srcMip.width) * scale.x, (1.0f / srcMip.height) * scale.y)
);
cmd.DispatchCompute(

1
);
var dstMipWidthToCopy = Mathf.Min(dest.rt.width, dstWorkMip.width);
var dstMipHeightToCopy = Mathf.Min(dest.rt.height, dstWorkMip.height);
var dstMipWidthToCopy = Mathf.Min(targetTexture.rt.width >> (i + 1), dstWorkMip.width);
var dstMipHeightToCopy = Mathf.Min(targetTexture.rt.height >> (i + 1), dstWorkMip.height);
// If we could bind texture mips as UAV we could avoid this copy...(which moreover copies more than the needed viewport if not fullscreen)
cmd.CopyTexture(mips[i], 0, 0, 0, 0, dstMipWidthToCopy, dstMipHeightToCopy, targetTexture, 0, i + 1, 0, 0);

public void RenderColorPyramid(
HDCamera hdCamera,
CommandBuffer cmd,
RTHandle sourceTexture,
RTHandle targetTexture,
List<RTHandle> mips,
RTHandleSystem.RTHandle sourceTexture,
RTHandleSystem.RTHandle targetTexture,
List<RTHandleSystem.RTHandle> mips,
int lodCount,
Vector2 scale
)

var dest = mips[i];
var srcMip = new RectInt(0, 0, srcRect.width >> i, srcRect.height >> i);
//var dstMip = new RectInt(0, 0, srcMip.width >> 1, srcMip.height >> 1);
var srcWorkMip = new RectInt(
0,
0,

1
);
var dstMipWidthToCopy = Mathf.Min(dest.width, dstWorkMip.width);
var dstMipHeightToCopy = Mathf.Min(dest.height, dstWorkMip.height);
var dstMipWidthToCopy = Mathf.Min(targetTexture.width >> (i + 1), dstWorkMip.width);
var dstMipHeightToCopy = Mathf.Min(targetTexture.height >> (i + 1), dstWorkMip.height);
// If we could bind texture mips as UAV we could avoid this copy...(which moreover copies more than the needed viewport if not fullscreen)
cmd.CopyTexture(

1
ScriptableRenderPipeline/HDRenderPipeline/HDRP/RenderPipelineResources/HDRenderPipelineResources.asset


hdriSky: {fileID: 4800000, guid: 9bd32a6ece529fd4f9408b8d7e00c10d, type: 3}
proceduralSky: {fileID: 4800000, guid: ec63f47fd265df243a7b1d40f9ef7fe7, type: 3}
skyboxCubemap: {fileID: 103, guid: 0000000000000000f000000000000000, type: 0}
preIntegratedFGD: {fileID: 4800000, guid: 123f13d52852ef547b2962de4bd9eaad, type: 3}
encodeBC6HCS: {fileID: 7200000, guid: aa922d239de60304f964e24488559eeb, type: 3}
cubeToPanoShader: {fileID: 4800000, guid: 595434cc3b6405246b6cd3086d0b6f7d, type: 3}
blitCubeTextureFace: {fileID: 4800000, guid: d850d0a2481878d4bbf17e5126b04163, type: 3}

4
ScriptableRenderPipeline/HDRenderPipeline/HDRP/RenderPipelineResources/RenderPipelineResources.cs


public Shader opaqueAtmosphericScattering;
public Shader hdriSky;
public Shader proceduralSky;
// Material
public Shader preIntegratedFGD;
// Utilities / Core
public ComputeShader encodeBC6HCS;

2
ScriptableRenderPipeline/HDRenderPipeline/HDRP/ShaderConfig.cs


{
CameraRelativeRendering = 1, // Rendering sets the origin of the world to the position of the primary (scene view) camera
UseDisneySSS = 1, // Allows choosing between Burley Normalized Diffusion (multiple scattering + fixed-direction single scattering) or the Jimenez diffusion approximation (multiple scattering only - more blurry) for subsurface scattering
VolumetricLightingPreset = 0 // 0 = disabled, 1 = normal, 2 = ultra
VolumetricLightingPreset = 1 // 0 = disabled, 1 = normal, 2 = ultra
};
// Note: #define can't be used in include files from C# so we chose this way to configure both C# and hlsl

Some files were not shown because too many files have changed in this diff.
