
Merge branch 'master' into LightweightPipeline

Felipe Lira committed 7 years ago
Current commit: e45df6b6
397 files changed, with 3,711 insertions and 3,999 deletions
  1. 45
      ImageTemplates/HDRenderPipeline/Scenes/1xxx_Materials/1101_Unlit.unity.png.meta
  2. 45
      ImageTemplates/HDRenderPipeline/Scenes/1xxx_Materials/1102_Unlit_Distortion.unity.png.meta
  3. 45
      ImageTemplates/HDRenderPipeline/Scenes/1xxx_Materials/1103_Unlit_Distortion_DepthTest.unity.png.meta
  4. 999
      ImageTemplates/HDRenderPipeline/Scenes/1xxx_Materials/1201_Lit_Features.unity.png
  5. 45
      ImageTemplates/HDRenderPipeline/Scenes/1xxx_Materials/1201_Lit_Features.unity.png.meta
  6. 45
      ImageTemplates/HDRenderPipeline/Scenes/1xxx_Materials/1202_Lit_DoubleSideNormalMode.unity.png.meta
  7. 45
      ImageTemplates/HDRenderPipeline/Scenes/1xxx_Materials/1203_Lit_Transparent.unity.png.meta
  8. 45
      ImageTemplates/HDRenderPipeline/Scenes/1xxx_Materials/1204_Lit_Transparent_Fog.unity.png.meta
  9. 45
      ImageTemplates/HDRenderPipeline/Scenes/1xxx_Materials/1205_Lit_Transparent_Refraction.unity.png.meta
  10. 45
      ImageTemplates/HDRenderPipeline/Scenes/1xxx_Materials/1206_Lit_Transparent_Distortion.unity.png.meta
  11. 45
      ImageTemplates/HDRenderPipeline/Scenes/1xxx_Materials/1207_Lit_Displacement.unity.png.meta
  12. 999
      ImageTemplates/HDRenderPipeline/Scenes/1xxx_Materials/1208_Lit_Displacement_POM.unity.png
  13. 45
      ImageTemplates/HDRenderPipeline/Scenes/1xxx_Materials/1208_Lit_Displacement_POM.unity.png.meta
  14. 4
      SampleScenes/Common/Scripts/CompressBC6HAndDisplay.cs
  15. 588
      SampleScenes/HDTest/BasicProfiling.unity
  16. 115
      SampleScenes/HDTest/ShadowsTest.unity
  17. 13
      SampleScenes/HDTest/SkyFogTest.unity
  18. 4
      ScriptableRenderPipeline/Core/Camera/FreeCamera.cs
  19. 24
      ScriptableRenderPipeline/Core/CoreUtils.cs
  20. 15
      ScriptableRenderPipeline/Core/Debugging/DebugMenuManager.cs
  21. 106
      ScriptableRenderPipeline/Core/Editor/CoreEditorUtils.cs
  22. 14
      ScriptableRenderPipeline/Core/Editor/CoreShaderIncludePaths.cs
  23. 2
      ScriptableRenderPipeline/Core/EncodeBC6H.cs
  24. 4
      ScriptableRenderPipeline/Core/EncodeBC6H.cs.meta
  25. 6
      ScriptableRenderPipeline/Core/Resources/EncodeBC6H.compute.meta
  26. 5
      ScriptableRenderPipeline/Core/ShaderLibrary/BC6H.hlsl
  27. 69
      ScriptableRenderPipeline/Core/ShaderLibrary/BSDF.hlsl
  28. 25
      ScriptableRenderPipeline/Core/ShaderLibrary/Color.hlsl
  29. 173
      ScriptableRenderPipeline/Core/ShaderLibrary/Common.hlsl
  30. 15
      ScriptableRenderPipeline/Core/ShaderLibrary/CommonLighting.hlsl
  31. 49
      ScriptableRenderPipeline/Core/ShaderLibrary/CommonMaterial.hlsl
  32. 6
      ScriptableRenderPipeline/Core/ShaderLibrary/ImageBasedLighting.hlsl
  33. 30
      ScriptableRenderPipeline/Core/ShaderLibrary/Macros.hlsl
  34. 2
      ScriptableRenderPipeline/Core/ShaderLibrary/NormalSurfaceGradient.hlsl
  35. 6
      ScriptableRenderPipeline/Core/ShaderLibrary/Packing.hlsl
  36. 8
      ScriptableRenderPipeline/Core/ShaderLibrary/Shadow/Shadow.hlsl
  37. 6
      ScriptableRenderPipeline/Core/ShaderLibrary/Tessellation.hlsl
  38. 2
      ScriptableRenderPipeline/Core/ShaderLibrary/VolumeRendering.hlsl
  39. 146
      ScriptableRenderPipeline/Core/TextureCache.cs
  40. 410
      ScriptableRenderPipeline/HDRenderPipeline/Debug/DebugDisplay.cs
  41. 50
      ScriptableRenderPipeline/HDRenderPipeline/Debug/DebugDisplay.cs.hlsl
  42. 2
      ScriptableRenderPipeline/HDRenderPipeline/Debug/DebugDisplay.hlsl
  43. 11
      ScriptableRenderPipeline/HDRenderPipeline/Debug/DebugViewMaterialGBuffer.shader
  44. 8
      ScriptableRenderPipeline/HDRenderPipeline/Debug/DebugViewTiles.shader
  45. 26
      ScriptableRenderPipeline/HDRenderPipeline/Editor/HDAssetFactory.cs
  46. 1
      ScriptableRenderPipeline/HDRenderPipeline/Editor/HDRenderPipelineInspector.Styles.cs
  47. 19
      ScriptableRenderPipeline/HDRenderPipeline/Editor/HDRenderPipelineInspector.cs
  48. 8
      ScriptableRenderPipeline/HDRenderPipeline/Editor/HDRenderPipelineMenuItems.cs
  49. 3
      ScriptableRenderPipeline/HDRenderPipeline/HDCustomSamplerId.cs
  50. 454
      ScriptableRenderPipeline/HDRenderPipeline/HDRenderPipeline.cs
  51. 16
      ScriptableRenderPipeline/HDRenderPipeline/HDRenderPipelineAsset.asset
  52. 3
      ScriptableRenderPipeline/HDRenderPipeline/HDRenderPipelineAsset.asset.meta
  53. 13
      ScriptableRenderPipeline/HDRenderPipeline/HDRenderPipelineAsset.cs
  54. 40
      ScriptableRenderPipeline/HDRenderPipeline/HDUtils.cs
  55. 8
      ScriptableRenderPipeline/HDRenderPipeline/Lighting/Deferred.shader
  56. 14
      ScriptableRenderPipeline/HDRenderPipeline/Lighting/Editor/HDLightEditor.Styles.cs
  57. 319
      ScriptableRenderPipeline/HDRenderPipeline/Lighting/Editor/HDLightEditor.cs
  58. 15
      ScriptableRenderPipeline/HDRenderPipeline/Lighting/LightDefinition.cs
  59. 20
      ScriptableRenderPipeline/HDRenderPipeline/Lighting/LightDefinition.cs.hlsl
  60. 6
      ScriptableRenderPipeline/HDRenderPipeline/Lighting/Lighting.hlsl
  61. 4
      ScriptableRenderPipeline/HDRenderPipeline/Lighting/LightLoop/LightLoopDef.hlsl
  62. 93
      ScriptableRenderPipeline/HDRenderPipeline/Lighting/LightLoop/LightLoop.hlsl
  63. 20
      ScriptableRenderPipeline/HDRenderPipeline/Lighting/LightLoop/LightLoop.cs.hlsl
  64. 983
      ScriptableRenderPipeline/HDRenderPipeline/Lighting/LightLoop/LightLoop.cs
  65. 4
      ScriptableRenderPipeline/HDRenderPipeline/Lighting/LightLoop/scrbound.compute
  66. 4
      ScriptableRenderPipeline/HDRenderPipeline/Lighting/LightLoop/materialflags.compute
  67. 2
      ScriptableRenderPipeline/HDRenderPipeline/Lighting/LightLoop/lightlistbuild.compute
  68. 6
      ScriptableRenderPipeline/HDRenderPipeline/Lighting/LightLoop/lightlistbuild-clustered.compute
  69. 6
      ScriptableRenderPipeline/HDRenderPipeline/Lighting/LightLoop/lightlistbuild-bigtile.compute
  70. 4
      ScriptableRenderPipeline/HDRenderPipeline/Lighting/LightLoop/builddispatchindirect.compute
  71. 4
      ScriptableRenderPipeline/HDRenderPipeline/Lighting/LightLoop/SortingComputeUtils.hlsl
  72. 6
      ScriptableRenderPipeline/HDRenderPipeline/Lighting/LightLoop/ShadowContext.hlsl
  73. 10
      ScriptableRenderPipeline/HDRenderPipeline/Lighting/LightLoop/Shadow.hlsl
  74. 2
      ScriptableRenderPipeline/HDRenderPipeline/Lighting/LightLoop/LightingConvexHullUtils.hlsl
  75. 7
      ScriptableRenderPipeline/HDRenderPipeline/Lighting/LightLoop/Deferred.compute
  76. 4
      ScriptableRenderPipeline/HDRenderPipeline/Lighting/LightLoop/ClusteredUtils.hlsl
  77. 71
      ScriptableRenderPipeline/HDRenderPipeline/Material/LayeredLit/Editor/LayeredLitUI.cs
  78. 2
      ScriptableRenderPipeline/HDRenderPipeline/Material/LayeredLit/LayeredLitData.hlsl
  79. 19
      ScriptableRenderPipeline/HDRenderPipeline/Material/Lit/Editor/BaseLitUI.cs
  80. 54
      ScriptableRenderPipeline/HDRenderPipeline/Material/Lit/Editor/LitUI.cs
  81. 21
      ScriptableRenderPipeline/HDRenderPipeline/Material/Lit/Lit.cs
  82. 47
      ScriptableRenderPipeline/HDRenderPipeline/Material/Lit/Lit.cs.hlsl
  83. 687
      ScriptableRenderPipeline/HDRenderPipeline/Material/Lit/Lit.hlsl
  84. 30
      ScriptableRenderPipeline/HDRenderPipeline/Material/Lit/Lit.shader
  85. 2
      ScriptableRenderPipeline/HDRenderPipeline/Material/Lit/LitData.hlsl
  86. 2
      ScriptableRenderPipeline/HDRenderPipeline/Material/Lit/LitReference.hlsl
  87. 33
      ScriptableRenderPipeline/HDRenderPipeline/Material/Lit/LitTessellation.shader
  88. 4
      ScriptableRenderPipeline/HDRenderPipeline/Material/Lit/Resources/CopyStencilBuffer.shader
  89. 104
      ScriptableRenderPipeline/HDRenderPipeline/Material/Lit/Resources/SubsurfaceScattering.compute
  90. 15
      ScriptableRenderPipeline/HDRenderPipeline/Material/Lit/Resources/SubsurfaceScattering.shader
  91. 4
      ScriptableRenderPipeline/HDRenderPipeline/Material/Lit/SubsurfaceScatteringSettings.cs
  92. 32
      ScriptableRenderPipeline/HDRenderPipeline/Material/Unlit/Editor/BaseUnlitUI.cs
  93. 3
      ScriptableRenderPipeline/HDRenderPipeline/Material/Unlit/Editor/UnlitUI.cs
  94. 2
      ScriptableRenderPipeline/HDRenderPipeline/RenderPipelineResources/CameraMotionVectors.shader
  95. 26
      ScriptableRenderPipeline/HDRenderPipeline/RenderPipelineResources/DefaultHDMaterial.mat
  96. 1
      ScriptableRenderPipeline/HDRenderPipeline/RenderPipelineResources/HDRenderPipelineResources.asset
  97. 3
      ScriptableRenderPipeline/HDRenderPipeline/RenderPipelineResources/RenderPipelineResources.cs
  98. 7
      ScriptableRenderPipeline/HDRenderPipeline/SceneSettings/SceneSettings.cs
  99. 5
      ScriptableRenderPipeline/HDRenderPipeline/ShaderConfig.cs
  100. 1
      ScriptableRenderPipeline/HDRenderPipeline/ShaderConfig.cs.hlsl

45
ImageTemplates/HDRenderPipeline/Scenes/1xxx_Materials/1101_Unlit.unity.png.meta


serializedVersion: 5
mipmaps:
mipMapMode: 0
enableMipMap: 1
enableMipMap: 0
sRGBTexture: 1
linearTexture: 0
fadeOut: 0

filterMode: -1
aniso: -1
mipBias: -1
wrapU: -1
wrapV: -1
wrapW: -1
nPOTScale: 1
wrapU: 1
wrapV: 1
wrapW: 1
nPOTScale: 0
lightmap: 0
compressionQuality: 50
spriteMode: 0

spritePivot: {x: 0.5, y: 0.5}
spriteBorder: {x: 0, y: 0, z: 0, w: 0}
spritePixelsToUnits: 100
alphaUsage: 1
alphaUsage: 0
alphaIsTransparency: 0
spriteTessellationDetail: -1
textureType: 0

platformSettings:
- serializedVersion: 2
buildTarget: DefaultTexturePlatform
maxTextureSize: 2048
resizeAlgorithm: 0
textureFormat: -1
textureCompression: 0
compressionQuality: 50
crunchedCompression: 0
allowsAlphaSplitting: 0
overridden: 0
androidETC2FallbackOverride: 0
- serializedVersion: 2
buildTarget: Standalone
maxTextureSize: 2048
resizeAlgorithm: 0
textureFormat: -1
textureCompression: 0
compressionQuality: 50
crunchedCompression: 0
allowsAlphaSplitting: 0
overridden: 0
androidETC2FallbackOverride: 0
- serializedVersion: 2
buildTarget: iPhone
maxTextureSize: 2048
resizeAlgorithm: 0
textureFormat: -1
textureCompression: 0
compressionQuality: 50
crunchedCompression: 0
allowsAlphaSplitting: 0
overridden: 0
androidETC2FallbackOverride: 0
- serializedVersion: 2
buildTarget: Windows Store Apps
maxTextureSize: 2048
resizeAlgorithm: 0
textureFormat: -1

45
ImageTemplates/HDRenderPipeline/Scenes/1xxx_Materials/1102_Unlit_Distortion.unity.png.meta


(Identical texture importer .meta changes as shown above for 1101_Unlit.unity.png.meta.)

45
ImageTemplates/HDRenderPipeline/Scenes/1xxx_Materials/1103_Unlit_Distortion_DepthTest.unity.png.meta


(Identical texture importer .meta changes as shown above for 1101_Unlit.unity.png.meta.)

999
ImageTemplates/HDRenderPipeline/Scenes/1xxx_Materials/1201_Lit_Features.unity.png
File diff too large to display

45
ImageTemplates/HDRenderPipeline/Scenes/1xxx_Materials/1201_Lit_Features.unity.png.meta


(Identical texture importer .meta changes as shown above for 1101_Unlit.unity.png.meta.)

45
ImageTemplates/HDRenderPipeline/Scenes/1xxx_Materials/1202_Lit_DoubleSideNormalMode.unity.png.meta


(Identical texture importer .meta changes as shown above for 1101_Unlit.unity.png.meta.)

45
ImageTemplates/HDRenderPipeline/Scenes/1xxx_Materials/1203_Lit_Transparent.unity.png.meta


(Identical texture importer .meta changes as shown above for 1101_Unlit.unity.png.meta.)

45
ImageTemplates/HDRenderPipeline/Scenes/1xxx_Materials/1204_Lit_Transparent_Fog.unity.png.meta


(Identical texture importer .meta changes as shown above for 1101_Unlit.unity.png.meta.)

45
ImageTemplates/HDRenderPipeline/Scenes/1xxx_Materials/1205_Lit_Transparent_Refraction.unity.png.meta


(Identical texture importer .meta changes as shown above for 1101_Unlit.unity.png.meta.)

45
ImageTemplates/HDRenderPipeline/Scenes/1xxx_Materials/1206_Lit_Transparent_Distortion.unity.png.meta


(Identical texture importer .meta changes as shown above for 1101_Unlit.unity.png.meta.)

45
ImageTemplates/HDRenderPipeline/Scenes/1xxx_Materials/1207_Lit_Displacement.unity.png.meta


(Identical texture importer .meta changes as shown above for 1101_Unlit.unity.png.meta.)

999
ImageTemplates/HDRenderPipeline/Scenes/1xxx_Materials/1208_Lit_Displacement_POM.unity.png
File diff too large to display

45
ImageTemplates/HDRenderPipeline/Scenes/1xxx_Materials/1208_Lit_Displacement_POM.unity.png.meta


(Identical texture importer .meta changes as shown above for 1101_Unlit.unity.png.meta.)

4
SampleScenes/Common/Scripts/CompressBC6HAndDisplay.cs


m_EncodeBC6H = new EncodeBC6H(m_BC6HShader);
}
void OnPreRender(CommandBuffer cmb)
void EncodeBC6HTest(CommandBuffer cmb)
{
if (m_SourceTexture == null
|| m_SourceMaterial == null

void Update()
{
var cmd = new CommandBuffer { name = "EncodeBC6H Compress" };
OnPreRender(cmd);
EncodeBC6HTest(cmd);
Graphics.ExecuteCommandBuffer(cmd);
}
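For context, the new Update() records into a temporary CommandBuffer and submits it immediately. Below is a minimal, self-contained sketch of that pattern (illustrative only, not the file above): EncodeBC6HTest is stubbed out, and the explicit Release() call is an assumption this hunk does not show.

using UnityEngine;
using UnityEngine.Rendering;

// Illustrative sketch of the per-frame record-and-execute pattern used by the new Update().
// EncodeBC6HTest is a stand-in for the renamed method above; the Release() call is an
// assumption added here to avoid leaking the native buffer every frame.
public class CommandBufferPerFrameExample : MonoBehaviour
{
    void EncodeBC6HTest(CommandBuffer cmd)
    {
        // Record GPU work here (e.g. the BC6H compression dispatches).
    }

    void Update()
    {
        var cmd = new CommandBuffer { name = "EncodeBC6H Compress" };
        EncodeBC6HTest(cmd);                // record commands
        Graphics.ExecuteCommandBuffer(cmd); // submit immediately on the main thread
        cmd.Release();
    }
}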

588
SampleScenes/HDTest/BasicProfiling.unity
File diff too large to display

115
SampleScenes/HDTest/ShadowsTest.unity


m_ReflectionIntensity: 1
m_CustomReflection: {fileID: 0}
m_Sun: {fileID: 0}
m_IndirectSpecularColor: {r: 0, g: 0, b: 0, a: 1}
m_IndirectSpecularColor: {r: 0.12544414, g: 0.13212782, b: 0.11914396, a: 1}
--- !u!157 &3
LightmapSettings:
m_ObjectHideFlags: 0

m_EnableBakedLightmaps: 0
m_EnableRealtimeLightmaps: 0
m_LightmapEditorSettings:
serializedVersion: 9
serializedVersion: 10
m_TextureWidth: 1024
m_TextureHeight: 1024
m_AtlasSize: 1024
m_AO: 0
m_AOMaxDistance: 1
m_CompAOExponent: 1

m_PVRFilteringAtrousPositionSigmaDirect: 0.5
m_PVRFilteringAtrousPositionSigmaIndirect: 2
m_PVRFilteringAtrousPositionSigmaAO: 1
m_ShowResolutionOverlay: 1
m_LightingDataAsset: {fileID: 112000002, guid: e5562e1a270c3994d826092db897528c,
type: 2}
m_UseShadowmask: 1

m_EditorClassIdentifier:
m_CommonSettings: {fileID: 11400000, guid: 075f395cb6ba2534196f2ce83c32e633, type: 2}
m_SkySettings: {fileID: 11400000, guid: 622aa06566c087b41ac0edc80769f45f, type: 2}
m_SsaoSettings: {fileID: 0}
--- !u!4 &8862570
Transform:
m_ObjectHideFlags: 0

m_Icon: {fileID: 0}
m_NavMeshLayer: 0
m_StaticEditorFlags: 0
m_IsActive: 1
m_IsActive: 0
--- !u!124 &25518111
Behaviour:
m_ObjectHideFlags: 0

m_OcclusionCulling: 1
m_StereoConvergence: 10
m_StereoSeparation: 0.022
m_StereoMirrorMode: 0
--- !u!4 &25518113
Transform:
m_ObjectHideFlags: 0

m_Icon: {fileID: 0}
m_NavMeshLayer: 0
m_StaticEditorFlags: 0
m_IsActive: 1
m_IsActive: 0
--- !u!4 &38548579
Transform:
m_ObjectHideFlags: 0

m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 38548578}
m_Enabled: 1
m_ExtensionPropertyValues: []
--- !u!124 &38548581
Behaviour:
m_ObjectHideFlags: 0

m_OcclusionCulling: 1
m_StereoConvergence: 10
m_StereoSeparation: 0.022
m_StereoMirrorMode: 0
--- !u!1001 &43913312
Prefab:
m_ObjectHideFlags: 0

m_Icon: {fileID: 0}
m_NavMeshLayer: 0
m_StaticEditorFlags: 0
m_IsActive: 1
m_IsActive: 0
--- !u!81 &240603308
AudioListener:
m_ObjectHideFlags: 0

m_Enabled: 1
m_ExtensionPropertyValues: []
--- !u!124 &240603309
Behaviour:
m_ObjectHideFlags: 0

m_OcclusionCulling: 1
m_StereoConvergence: 10
m_StereoSeparation: 0.022
m_StereoMirrorMode: 0
--- !u!4 &240603311
Transform:
m_ObjectHideFlags: 0

m_Script: {fileID: 11500000, guid: 7a68c43fe1f2a47cfa234b5eeaa98012, type: 3}
m_Name:
m_EditorClassIdentifier:
m_innerSpotPercent: 0
m_Version: 1
m_InnerSpotPercent: 0
archetype: 0
lightTypeExtent: 0
lightLength: 0
lightWidth: 0
shapeLength: 0
shapeWidth: 0
aspectRatio: 1
shapeRadius: 0
useOldInspector: 0
featuresFoldout: 1
showAdditionalSettings: 1
--- !u!108 &367594608
Light:
m_ObjectHideFlags: 0

m_Icon: {fileID: 0}
m_NavMeshLayer: 0
m_StaticEditorFlags: 0
m_IsActive: 1
m_IsActive: 0
--- !u!4 &401402600
Transform:
m_ObjectHideFlags: 0

m_OcclusionCulling: 1
m_StereoConvergence: 10
m_StereoSeparation: 0.022
m_StereoMirrorMode: 0
--- !u!1001 &454647942
Prefab:
m_ObjectHideFlags: 0

m_MotionVectors: 1
m_LightProbeUsage: 1
m_ReflectionProbeUsage: 1
m_RenderingLayerMask: 4294967295
m_Materials:
- {fileID: 2100000, guid: 948836267934e104294e03adad5c7bf7, type: 2}
m_StaticBatchInfo:

m_MotionVectors: 1
m_LightProbeUsage: 1
m_ReflectionProbeUsage: 1
m_RenderingLayerMask: 4294967295
m_Materials:
- {fileID: 2100000, guid: 948836267934e104294e03adad5c7bf7, type: 2}
m_StaticBatchInfo:

m_Script: {fileID: 11500000, guid: 7a68c43fe1f2a47cfa234b5eeaa98012, type: 3}
m_Name:
m_EditorClassIdentifier:
m_innerSpotPercent: 0
m_Version: 1
m_InnerSpotPercent: 0
archetype: 0
lightTypeExtent: 0
lightLength: 0
lightWidth: 0
shapeLength: 0
shapeWidth: 0
aspectRatio: 1
shapeRadius: 0
useOldInspector: 0
featuresFoldout: 1
showAdditionalSettings: 1
--- !u!108 &886044557
Light:
m_ObjectHideFlags: 0

m_Icon: {fileID: 0}
m_NavMeshLayer: 0
m_StaticEditorFlags: 0
m_IsActive: 1
m_IsActive: 0
--- !u!4 &912444934
Transform:
m_ObjectHideFlags: 0

m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 912444933}
m_Enabled: 1
m_ExtensionPropertyValues: []
--- !u!124 &912444936
Behaviour:
m_ObjectHideFlags: 0

m_OcclusionCulling: 1
m_StereoConvergence: 10
m_StereoSeparation: 0.022
m_StereoMirrorMode: 0
--- !u!1001 &935997991
Prefab:
m_ObjectHideFlags: 0

m_Icon: {fileID: 0}
m_NavMeshLayer: 0
m_StaticEditorFlags: 0
m_IsActive: 1
m_IsActive: 0
--- !u!4 &1175480045
Transform:
m_ObjectHideFlags: 0

m_OcclusionCulling: 1
m_StereoConvergence: 10
m_StereoSeparation: 0.022
m_StereoMirrorMode: 0
--- !u!1001 &1188504521
Prefab:
m_ObjectHideFlags: 0

m_Icon: {fileID: 0}
m_NavMeshLayer: 0
m_StaticEditorFlags: 0
m_IsActive: 1
m_IsActive: 0
--- !u!4 &1232280518
Transform:
m_ObjectHideFlags: 0

m_OcclusionCulling: 1
m_StereoConvergence: 10
m_StereoSeparation: 0.022
m_StereoMirrorMode: 0
--- !u!1001 &1238924665
Prefab:
m_ObjectHideFlags: 0

m_Icon: {fileID: 0}
m_NavMeshLayer: 0
m_StaticEditorFlags: 0
m_IsActive: 1
m_IsActive: 0
--- !u!4 &1239387815
Transform:
m_ObjectHideFlags: 0

m_OcclusionCulling: 1
m_StereoConvergence: 10
m_StereoSeparation: 0.022
m_StereoMirrorMode: 0
--- !u!1001 &1291944202
Prefab:
m_ObjectHideFlags: 0

m_Script: {fileID: 11500000, guid: 7a68c43fe1f2a47cfa234b5eeaa98012, type: 3}
m_Name:
m_EditorClassIdentifier:
m_innerSpotPercent: 0
m_Version: 1
m_InnerSpotPercent: 0
archetype: 0
lightTypeExtent: 0
lightLength: 0
lightWidth: 0
shapeLength: 0
shapeWidth: 0
aspectRatio: 1
shapeRadius: 0
useOldInspector: 0
featuresFoldout: 1
showAdditionalSettings: 1
--- !u!108 &1395086339
Light:
m_ObjectHideFlags: 0

m_Icon: {fileID: 0}
m_NavMeshLayer: 0
m_StaticEditorFlags: 0
m_IsActive: 1
m_IsActive: 0
--- !u!4 &1524083540
Transform:
m_ObjectHideFlags: 0

m_OcclusionCulling: 1
m_StereoConvergence: 10
m_StereoSeparation: 0.022
m_StereoMirrorMode: 0
--- !u!1001 &1532183583
Prefab:
m_ObjectHideFlags: 0

m_Icon: {fileID: 0}
m_NavMeshLayer: 0
m_StaticEditorFlags: 0
m_IsActive: 1
m_IsActive: 0
--- !u!4 &1558117774
Transform:
m_ObjectHideFlags: 0

m_OcclusionCulling: 1
m_StereoConvergence: 10
m_StereoSeparation: 0.022
m_StereoMirrorMode: 0
--- !u!1001 &1568431069
Prefab:
m_ObjectHideFlags: 0

m_Icon: {fileID: 0}
m_NavMeshLayer: 0
m_StaticEditorFlags: 0
m_IsActive: 1
m_IsActive: 0
--- !u!4 &1641371013
Transform:
m_ObjectHideFlags: 0

m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 1641371012}
m_Enabled: 1
m_ExtensionPropertyValues: []
--- !u!124 &1641371015
Behaviour:
m_ObjectHideFlags: 0

m_OcclusionCulling: 1
m_StereoConvergence: 10
m_StereoSeparation: 0.022
m_StereoMirrorMode: 0
--- !u!1001 &1642984832
Prefab:
m_ObjectHideFlags: 0

m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 1769631028}
m_Enabled: 1
m_ExtensionPropertyValues: []
--- !u!124 &1769631030
Behaviour:
m_ObjectHideFlags: 0

m_OcclusionCulling: 1
m_StereoConvergence: 10
m_StereoSeparation: 0.022
m_StereoMirrorMode: 0
--- !u!4 &1769631032
Transform:
m_ObjectHideFlags: 0

m_Icon: {fileID: 0}
m_NavMeshLayer: 0
m_StaticEditorFlags: 0
m_IsActive: 1
m_IsActive: 0
--- !u!4 &1909327634
Transform:
m_ObjectHideFlags: 0

m_OcclusionCulling: 1
m_StereoConvergence: 10
m_StereoSeparation: 0.022
m_StereoMirrorMode: 0
--- !u!1001 &1935664259
Prefab:
m_ObjectHideFlags: 0

m_Icon: {fileID: 0}
m_NavMeshLayer: 0
m_StaticEditorFlags: 0
m_IsActive: 1
m_IsActive: 0
--- !u!4 &2007753896
Transform:
m_ObjectHideFlags: 0

m_OcclusionCulling: 1
m_StereoConvergence: 10
m_StereoSeparation: 0.022
m_StereoMirrorMode: 0
--- !u!1001 &2018192138
Prefab:
m_ObjectHideFlags: 0

m_Icon: {fileID: 0}
m_NavMeshLayer: 0
m_StaticEditorFlags: 0
m_IsActive: 1
m_IsActive: 0
--- !u!4 &2095847156
Transform:
m_ObjectHideFlags: 0

m_OcclusionCulling: 1
m_StereoConvergence: 10
m_StereoSeparation: 0.022
m_StereoMirrorMode: 0
--- !u!1001 &2097384754
Prefab:
m_ObjectHideFlags: 0

m_Script: {fileID: 11500000, guid: 7a68c43fe1f2a47cfa234b5eeaa98012, type: 3}
m_Name:
m_EditorClassIdentifier:
m_innerSpotPercent: 0
m_Version: 1
m_InnerSpotPercent: 0
archetype: 0
lightTypeExtent: 0
lightLength: 0
lightWidth: 0
shapeLength: 0
shapeWidth: 0
aspectRatio: 1
shapeRadius: 0
useOldInspector: 0
featuresFoldout: 1
showAdditionalSettings: 1
--- !u!108 &2102352250
Light:
m_ObjectHideFlags: 0

13
SampleScenes/HDTest/SkyFogTest.unity


m_ReflectionIntensity: 1
m_CustomReflection: {fileID: 0}
m_Sun: {fileID: 0}
m_IndirectSpecularColor: {r: 0.11442507, g: 0.09270345, b: 0.05664562, a: 1}
m_IndirectSpecularColor: {r: 0.26041162, g: 0.2929252, b: 0.32061547, a: 1}
--- !u!157 &3
LightmapSettings:
m_ObjectHideFlags: 0

m_EnableBakedLightmaps: 1
m_EnableRealtimeLightmaps: 1
m_LightmapEditorSettings:
serializedVersion: 9
serializedVersion: 10
m_TextureWidth: 1024
m_TextureHeight: 1024
m_AtlasSize: 1024
m_AO: 0
m_AOMaxDistance: 1
m_CompAOExponent: 1

m_PVRFilteringAtrousPositionSigmaDirect: 0.5
m_PVRFilteringAtrousPositionSigmaIndirect: 2
m_PVRFilteringAtrousPositionSigmaAO: 1
m_ShowResolutionOverlay: 1
m_LightingDataAsset: {fileID: 0}
m_UseShadowmask: 1
--- !u!196 &4

m_Script: {fileID: 11500000, guid: 7a68c43fe1f2a47cfa234b5eeaa98012, type: 3}
m_Name:
m_EditorClassIdentifier:
m_Version: 1
m_InnerSpotPercent: 0
lightDimmer: 1
fadeDistance: 10000

spotLightShape: 0
shapeLength: 0.5
shapeWidth: 0.5
aspectRatio: 1
shapeRadius: 0
maxSmoothness: 1
applyRangeAttenuation: 1

m_Icon: {fileID: 0}
m_NavMeshLayer: 0
m_StaticEditorFlags: 0
m_IsActive: 1
m_IsActive: 0
--- !u!81 &1276802630
AudioListener:
m_ObjectHideFlags: 0

m_MotionVectors: 1
m_LightProbeUsage: 1
m_ReflectionProbeUsage: 1
m_RenderingLayerMask: 4294967295
m_Materials:
- {fileID: 2100000, guid: 948836267934e104294e03adad5c7bf7, type: 2}
m_StaticBatchInfo:

4
ScriptableRenderPipeline/Core/Camera/FreeCamera.cs


void Update()
{
// If the debug menu is running, we don't want to conflict with its inputs.
if(DebugMenuManager.instance.menuUI.isEnabled)
return;
float inputRotateAxisX = 0.0f;
float inputRotateAxisY = 0.0f;
if (Input.GetMouseButton(1))

24
ScriptableRenderPipeline/Core/CoreUtils.cs


public static class CoreUtils
{
// Data useful for various cubemap processes.
// Ref: https://msdn.microsoft.com/en-us/library/windows/desktop/bb204881(v=vs.85).aspx
static public readonly Vector3[] lookAtList =
{
new Vector3(1.0f, 0.0f, 0.0f),
new Vector3(-1.0f, 0.0f, 0.0f),
new Vector3(0.0f, 1.0f, 0.0f),
new Vector3(0.0f, -1.0f, 0.0f),
new Vector3(0.0f, 0.0f, 1.0f),
new Vector3(0.0f, 0.0f, -1.0f),
};
static public readonly Vector3[] upVectorList =
{
new Vector3(0.0f, 1.0f, 0.0f),
new Vector3(0.0f, 1.0f, 0.0f),
new Vector3(0.0f, 0.0f, -1.0f),
new Vector3(0.0f, 0.0f, 1.0f),
new Vector3(0.0f, 1.0f, 0.0f),
new Vector3(0.0f, 1.0f, 0.0f),
};
// Note: Color.black has its alpha channel set to 1. Most of the time we want alpha set to 0, since we use black to clear render targets.
public static Color clearColorAllBlack { get { return new Color(0f, 0f, 0f, 0f); } }

public static void ClearCubemap(CommandBuffer cmd, RenderTargetIdentifier buffer, Color clearColor)
{
for(int i = 0; i < 6; ++i)
SetRenderTarget(cmd, buffer, ClearFlag.Color, clearColorAllBlack, 0, (CubemapFace)i);
SetRenderTarget(cmd, buffer, ClearFlag.Color, clearColor, 0, (CubemapFace)i);
}
// Draws a full screen triangle as a faster alternative to drawing a full screen quad.
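The ClearCubemap hunk replaces the hard-coded clearColorAllBlack with the caller-supplied clearColor, so the color passed in is actually honored. A standalone sketch of the equivalent behaviour, using raw CommandBuffer calls rather than the repository's own SetRenderTarget helper:

using UnityEngine;
using UnityEngine.Rendering;

// Standalone illustration of the fixed behaviour: every cubemap face is cleared with the
// color passed by the caller. Uses plain CommandBuffer calls, not CoreUtils helpers.
public static class CubemapClearExample
{
    public static void ClearCubemap(CommandBuffer cmd, RenderTargetIdentifier cubemap, Color clearColor)
    {
        for (int face = 0; face < 6; ++face)
        {
            cmd.SetRenderTarget(cubemap, 0, (CubemapFace)face);
            cmd.ClearRenderTarget(false, true, clearColor); // clear color only, leave depth alone
        }
    }
}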

15
ScriptableRenderPipeline/Core/Debugging/DebugMenuManager.cs


m_DebugMenuStateDirty = true;
}
public void RemoveDebugItem(string debugPanelName, string name)
{
DebugPanel debugPanel = GetDebugPanel(debugPanelName);
if (debugPanel != null)
{
DebugItem item = debugPanel.GetDebugItem(name);
if (item != null)
debugPanel.RemoveDebugItem(item);
}
m_DebugMenuStateDirty = true;
}
public void SetDebugMenuState(DebugMenuState state)
{
m_DebugMenuStateDirty = true;

106
ScriptableRenderPipeline/Core/Editor/CoreEditorUtils.cs


using System;
using System.Collections.Generic;
using System.Linq;
using System.Linq.Expressions;
using System.Text;
using UnityEngine;

public static class CoreEditorUtils
{
// GUIContent cache utilities
static Dictionary<string, GUIContent> s_GUIContentCache = new Dictionary<string, GUIContent>();
public static GUIContent GetContent(string textAndTooltip)
{
if (string.IsNullOrEmpty(textAndTooltip))
return GUIContent.none;
GUIContent content;
if (!s_GUIContentCache.TryGetValue(textAndTooltip, out content))
{
var s = textAndTooltip.Split('|');
content = new GUIContent(s[0]);
if (s.Length > 1 && !string.IsNullOrEmpty(s[1]))
content.tooltip = s[1];
s_GUIContentCache.Add(textAndTooltip, content);
}
return content;
}
// Serialization helpers
public static string FindProperty<T, TValue>(Expression<Func<T, TValue>> expr)
{

return state;
}
public static bool DrawHeaderToggle(string title, SerializedProperty group, SerializedProperty activeField, Action<Vector2> contextAction = null)
{
var backgroundRect = GUILayoutUtility.GetRect(1f, 17f);
var labelRect = backgroundRect;
labelRect.xMin += 16f;
labelRect.xMax -= 20f;
var toggleRect = backgroundRect;
toggleRect.y += 2f;
toggleRect.width = 13f;
toggleRect.height = 13f;
// Background rect should be full-width
backgroundRect.xMin = 0f;
backgroundRect.width += 4f;
// Background
float backgroundTint = EditorGUIUtility.isProSkin ? 0.1f : 1f;
EditorGUI.DrawRect(backgroundRect, new Color(backgroundTint, backgroundTint, backgroundTint, 0.2f));
// Title
using (new EditorGUI.DisabledScope(!activeField.boolValue))
EditorGUI.LabelField(labelRect, GetContent(title), EditorStyles.boldLabel);
// Active checkbox
activeField.serializedObject.Update();
activeField.boolValue = GUI.Toggle(toggleRect, activeField.boolValue, GUIContent.none, CoreEditorStyles.smallTickbox);
activeField.serializedObject.ApplyModifiedProperties();
// Context menu
var menuIcon = EditorGUIUtility.isProSkin
? CoreEditorStyles.paneOptionsIconDark
: CoreEditorStyles.paneOptionsIconLight;
var menuRect = new Rect(labelRect.xMax + 4f, labelRect.y + 4f, menuIcon.width, menuIcon.height);
if (contextAction != null)
GUI.DrawTexture(menuRect, menuIcon);
// Handle events
var e = Event.current;
if (e.type == EventType.MouseDown)
{
if (contextAction != null && menuRect.Contains(e.mousePosition))
{
contextAction(new Vector2(menuRect.x, menuRect.yMax));
e.Use();
}
else if (labelRect.Contains(e.mousePosition))
{
if (e.button == 0)
group.isExpanded = !group.isExpanded;
else if (contextAction != null)
contextAction(e.mousePosition);
e.Use();
}
}
return group.isExpanded;
}
}
public static T[] GetAdditionalData<T>(params UnityEngine.Object[] targets)
where T : Component
{
// Handles multi-selection
var data = targets.Cast<Component>()
.Select(t => t.GetComponent<T>())
.ToArray();
for (int i = 0; i < data.Length; i++)
{
if (data[i] == null)
data[i] = Undo.AddComponent<T>(((Component)targets[i]).gameObject);
}
return data;
}
}
}
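As a usage note for the GUIContent cache above: GetContent splits its argument on '|' into label text and tooltip and caches the result. A hypothetical call site follows; the property name, class name, and the namespace that brings CoreEditorUtils into scope are assumptions for illustration.

using UnityEditor;
using UnityEngine;

// Hypothetical inspector snippet showing the "Text|Tooltip" convention consumed by GetContent().
// CoreEditorUtils is assumed to be reachable via the appropriate namespace import.
static class LightSectionDrawerExample
{
    static readonly GUIContent k_Intensity =
        CoreEditorUtils.GetContent("Intensity|Overall intensity of the light.");

    public static void Draw(SerializedProperty intensityProp)
    {
        EditorGUILayout.PropertyField(intensityProp, k_Intensity);
    }
}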

14
ScriptableRenderPipeline/Core/Editor/CoreShaderIncludePaths.cs


[ShaderIncludePath]
public static string[] GetPaths()
{
return new[]
var srpMarker = Directory.GetFiles(Application.dataPath, "SRPMARKER", SearchOption.AllDirectories).FirstOrDefault();
var paths = new string[srpMarker == null ? 1 : 2];
var index = 0;
if (srpMarker != null)
"Assets/ScriptableRenderPipeline/ScriptableRenderPipeline/Core",
Path.GetFullPath("Packages/com.unity.render-pipelines.core")
};
var rootPath = Directory.GetParent(srpMarker).ToString();
paths[index] = Path.Combine(rootPath, "ScriptableRenderPipeline/Core");
index++;
}
paths[index] = Path.GetFullPath("Packages/com.unity.render-pipelines.core");
return paths;
}
}
}
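Because the old and new lines of this hunk are interleaved, here is the new GetPaths() pieced together as a readable sketch (an assumed reconstruction, not the verbatim file): it looks for an "SRPMARKER" file to locate an in-project SRP checkout and always appends the core package path as the last entry.

using System.IO;
using System.Linq;
using UnityEngine;

// Assumed reconstruction of the new shader-include path resolution.
public static class CoreShaderIncludePathsSketch
{
    public static string[] GetPaths()
    {
        var srpMarker = Directory.GetFiles(Application.dataPath, "SRPMARKER", SearchOption.AllDirectories).FirstOrDefault();
        var paths = new string[srpMarker == null ? 1 : 2];
        var index = 0;
        if (srpMarker != null)
        {
            // An in-project checkout was found: include its Core folder first.
            var rootPath = Directory.GetParent(srpMarker).ToString();
            paths[index] = Path.Combine(rootPath, "ScriptableRenderPipeline/Core");
            index++;
        }
        // The core package path is always the final entry.
        paths[index] = Path.GetFullPath("Packages/com.unity.render-pipelines.core");
        return paths;
    }
}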

2
ScriptableRenderPipeline/Core/EncodeBC6H.cs


}
var startSlice = 6 * targetArrayIndex;
for (var mip = fromMip; mip <= toMip; ++mip)
for (var mip = actualFromMip; mip <= actualToMip; ++mip)
{
var rtMip = Mathf.Clamp(mip, actualFromMip, actualToMip);
for (var faceId = 0; faceId < 6; ++faceId)

4
ScriptableRenderPipeline/Core/EncodeBC6H.cs.meta


fileFormatVersion: 2
guid: ef7e375d470b6404a9e355690703502b
timeCreated: 1507290672
licenseType: Pro
guid: 379274e2dbcfbc247acccd1aab4243ee
MonoImporter:
externalObjects: {}
serializedVersion: 2

6
ScriptableRenderPipeline/Core/Resources/EncodeBC6H.compute.meta


fileFormatVersion: 2
guid: b69b95b3420fd904e8530b79f665a1f8
timeCreated: 1507123133
licenseType: Pro
guid: aa922d239de60304f964e24488559eeb
currentAPIMask: 4
currentAPIMask: 8196
userData:
assetBundleName:
assetBundleVariant:

5
ScriptableRenderPipeline/Core/ShaderLibrary/BC6H.hlsl


// Ref: https://github.com/knarkowicz/GPURealTimeBC6H/blob/master/bin/compress.hlsl
// Doc: https://msdn.microsoft.com/en-us/library/windows/desktop/hh308952(v=vs.85).aspx
#include "Common.hlsl"
// Measure compression error
float CalcMSLE(float3 a, float3 b)
{

// compute endpoints (min/max RGB bbox)
float3 blockMin = texels[ 0 ];
float3 blockMax = texels[ 0 ];
for ( uint i = 1; i < 16; ++i )
uint i;
for (i = 1; i < 16; ++i )
{
blockMin = min( blockMin, texels[ i ] );
blockMax = max( blockMax, texels[ i ] );

69
ScriptableRenderPipeline/Core/ShaderLibrary/BSDF.hlsl


// Specular BRDF
//-----------------------------------------------------------------------------
// With analytical lights (not image based lights) we clamp the minimum roughness in the NDF to avoid numerical instability.
#define UNITY_MIN_ROUGHNESS 0.002
float ClampRoughnessForAnalyticalLights(float roughness)
{
return max(roughness, UNITY_MIN_ROUGHNESS);
}
float a2 = roughness * roughness;
float f = (NdotH * a2 - NdotH) * NdotH + 1.0;
return a2 / (f * f);
float a2 = Sq(roughness);
float s = (NdotH * a2 - NdotH) * NdotH + 1.0;
return a2 / (s * s);
}
float D_GGX(float NdotH, float roughness)

// tan²(theta) = (1 - cos²(theta)) / cos²(theta) = 1 / cos²(theta) - 1.
// Assume that (VdotH > 0), i.e. (acos(LdotV) < Pi).
float a2 = roughness * roughness;
float z2 = NdotV * NdotV;
return 1 / (0.5 + 0.5 * sqrt(1.0 + a2 * (1.0 / z2 - 1.0)));
return 1.0 / (0.5 + 0.5 * sqrt(1.0 + Sq(roughness) * (1.0 / Sq(NdotV) - 1.0)));
}
// Ref: Understanding the Masking-Shadowing Function in Microfacet-Based BRDFs, p. 12.

// Precompute part of lambdaV
float GetSmithJointGGXPartLambdaV(float NdotV, float roughness)
{
float a2 = roughness * roughness;
float a2 = Sq(roughness);
return sqrt((-NdotV * a2 + NdotV) * NdotV + a2);
}

{
float a2 = roughness * roughness;
float a2 = Sq(roughness);
// Original formulation:
// lambda_v = (-1 + sqrt(a2 * (1 - NdotL2) / NdotL2 + 1)) * 0.5

// Inline D_GGX() * V_SmithJointGGX() together for better code generation.
float DV_SmithJointGGX(float NdotH, float NdotL, float NdotV, float roughness, float partLambdaV)
{
float a2 = roughness * roughness;
float f = (NdotH * a2 - NdotH) * NdotH + 1.0;
float2 D = float2(a2, f * f); // Fraction without the constant (1/Pi)
float a2 = Sq(roughness);
float s = (NdotH * a2 - NdotH) * NdotH + 1.0;
float2 G = float2(1, lambdaV + lambdaL); // Fraction without the constant (0.5)
float2 D = float2(a2, s * s); // Fraction without the multiplier (1/Pi)
float2 G = float2(1, lambdaV + lambdaL); // Fraction without the multiplier (1/2)
return (INV_PI * 0.5) * (D.x * G.x) / (D.y * G.y);
}

// roughnessB -> roughness in bitangent direction
float D_GGXAnisoNoPI(float TdotH, float BdotH, float NdotH, float roughnessT, float roughnessB)
{
float aT2 = roughnessT * roughnessT;
float aB2 = roughnessB * roughnessB;
float a2 = roughnessT * roughnessB;
float3 v = float3(roughnessB * TdotH, roughnessT * BdotH, a2 * NdotH);
float s = dot(v, v);
float f = TdotH * TdotH / aT2 + BdotH * BdotH / aB2 + NdotH * NdotH;
return 1.0 / (roughnessT * roughnessB * f * f);
return a2 * Sq(a2 / s);
}
float D_GGXAniso(float TdotH, float BdotH, float NdotH, float roughnessT, float roughnessB)

float GetSmithJointGGXAnisoPartLambdaV(float TdotV, float BdotV, float NdotV, float roughnessT, float roughnessB)
{
float aT2 = roughnessT * roughnessT;
float aB2 = roughnessB * roughnessB;
return sqrt(aT2 * TdotV * TdotV + aB2 * BdotV * BdotV + NdotV * NdotV);
return length(float3(roughnessT * TdotV, roughnessB * BdotV, NdotV));
}
// Note: V = G / (4 * NdotL * NdotV)

float aT2 = roughnessT * roughnessT;
float aB2 = roughnessB * roughnessB;
float lambdaL = NdotV * sqrt(aT2 * TdotL * TdotL + aB2 * BdotL * BdotL + NdotL * NdotL);
float lambdaL = NdotV * length(float3(roughnessT * TdotL, roughnessB * BdotL, NdotL));
return 0.5 / (lambdaV + lambdaL);
}

}
// Inline D_GGXAniso() * V_SmithJointGGXAniso() together for better code generation.
float DV_SmithJointGGXAniso(float TdotH, float BdotH, float NdotH,
float TdotV, float BdotV, float NdotV,
float DV_SmithJointGGXAniso(float TdotH, float BdotH, float NdotH, float NdotV,
float aT2 = roughnessT * roughnessT;
float aB2 = roughnessB * roughnessB;
float f = TdotH * TdotH / aT2 + BdotH * BdotH / aB2 + NdotH * NdotH;
float2 D = float2(1, roughnessT * roughnessB * f * f); // Fraction without the constant (1/Pi)
float a2 = roughnessT * roughnessB;
float3 v = float3(roughnessB * TdotH, roughnessT * BdotH, a2 * NdotH);
float s = dot(v, v);
float lambdaL = NdotV * sqrt(aT2 * TdotL * TdotL + aB2 * BdotL * BdotL + NdotL * NdotL);
float lambdaL = NdotV * length(float3(roughnessT * TdotL, roughnessB * BdotL, NdotL));
float2 G = float2(1, lambdaV + lambdaL); // Fraction without the constant (0.5)
float2 D = float2(a2 * a2 * a2, s * s); // Fraction without the multiplier (1/Pi)
float2 G = float2(1, lambdaV + lambdaL); // Fraction without the multiplier (1/2)
return (INV_PI * 0.5) * (D.x * G.x) / (D.y * G.y);
}

float roughnessT, float roughnessB)
{
float partLambdaV = GetSmithJointGGXAnisoPartLambdaV(TdotV, BdotV, NdotV, roughnessT, roughnessB);
return DV_SmithJointGGXAniso(TdotH, BdotH, NdotH,
TdotV, BdotV, NdotV,
TdotL, BdotL, NdotL,
return DV_SmithJointGGXAniso(TdotH, BdotH, NdotH, NdotV, TdotL, BdotL, NdotL,
roughnessT, roughnessB, partLambdaV);
}
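For reference, the old and new code in this hunk evaluate the same GGX normal distribution functions; as the comments note, the 1/Pi factor is kept out of the per-term fractions and multiplied in at the end, and Sq(x) denotes x². In the isotropic case, with α the (clamped) roughness,

$$D_{\mathrm{GGX}}(N\!\cdot\!H,\alpha)=\frac{\alpha^{2}}{\pi\left[(N\!\cdot\!H)^{2}(\alpha^{2}-1)+1\right]^{2}},$$

and in the anisotropic case the rewritten form a²·(a²/‖v‖²)² with v = (α_b T·H, α_t B·H, α_t α_b N·H) and a² = α_t α_b is algebraically equal to the earlier expression,

$$D_{\mathrm{aniso}}(H)=\frac{1}{\pi\,\alpha_t\alpha_b}\cdot\frac{1}{\left[\dfrac{(T\cdot H)^{2}}{\alpha_t^{2}}+\dfrac{(B\cdot H)^{2}}{\alpha_b^{2}}+(N\cdot H)^{2}\right]^{2}}.$$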

25
ScriptableRenderPipeline/Core/ShaderLibrary/Color.hlsl


return dot(linearRgb, float3(0.2126729f, 0.7151522f, 0.0721750f));
}
// This function takes an RGB color (ideally provided in sRGB space)
// and returns a YCoCg color in [0..1] space for 8-bit storage (an offset is applied inside the function)
#define CHROMA_BIAS (0.5 * 256.0 / 255.0)
#define YCOCG_CHROMA_BIAS (128.0 / 255.0)
YCoCg.y = dot(rgb, float3(0.5, 0.0, -0.5)) + CHROMA_BIAS;
YCoCg.z = dot(rgb, float3(-0.25, 0.5, -0.25)) + CHROMA_BIAS;
YCoCg.y = dot(rgb, float3(0.5, 0.0, -0.5)) + YCOCG_CHROMA_BIAS;
YCoCg.z = dot(rgb, float3(-0.25, 0.5, -0.25)) + YCOCG_CHROMA_BIAS;
return YCoCg;
}

float Y = YCoCg.x;
float Co = YCoCg.y - CHROMA_BIAS;
float Cg = YCoCg.z - CHROMA_BIAS;
float Co = YCoCg.y - YCOCG_CHROMA_BIAS;
float Cg = YCoCg.z - YCOCG_CHROMA_BIAS;
float3 rgb;
rgb.r = Y + Co - Cg;

return rgb;
}
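Note that the renamed constant keeps its exact value, 0.5 · 256/255 = 128/255 ≈ 0.50196, so only the name and the written form change. With b = YCOCG_CHROMA_BIAS, the conversions in this hunk are

$$C_o=\tfrac12 R-\tfrac12 B+b,\qquad C_g=-\tfrac14 R+\tfrac12 G-\tfrac14 B+b,\qquad R=Y+(C_o-b)-(C_g-b).$$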
// The following function can be used to reconstruct the chroma component for a checkerboard YCoCg pattern
// Reference: The Compact YCoCg Frame Buffer
float YCoCgCheckBoardEdgeFilter(float centerLum, float2 a0, float2 a1, float2 a2, float2 a3)
{
float4 lum = float4(a0.x, a1.x, a2.x, a3.x);
// Optimize: float4 w = 1.0 - step(30.0 / 255.0, abs(lum - centerLum));
float4 w = 1.0 - saturate((abs(lum.xxxx - centerLum) - 30.0 / 255.0) * HALF_MAX);
float W = w.x + w.y + w.z + w.w;
// handle the special case where all the weights are zero.
return (W == 0.0) ? a0.y : (w.x * a0.y + w.y* a1.y + w.z* a2.y + w.w * a3.y) / W;
}
#endif // UNITY_COLOR_INCLUDED

173
ScriptableRenderPipeline/Core/ShaderLibrary/Common.hlsl


// ----------------------------------------------------------------------------
#ifndef INTRINSIC_BITFIELD_EXTRACT
// unsigned integer bit field extract implementation
// Unsigned integer bit field extraction.
// Note that the intrinsic itself generates a vector instruction.
// Wrap this function with WaveReadFirstLane() to get scalar output.
uint mask = 0xFFFFFFFFu >> (32u - numBits);
uint mask = UINT_MAX >> (32u - numBits);
bool IsBitSet(uint data, uint bitPos)
bool IsBitSet(uint data, uint offset)
{
return BitFieldExtract(data, 1u, offset) != 0;
}
void SetBit(inout uint data, uint offset)
{
data |= 1u << offset;
}
void ClearBit(inout uint data, uint offset)
return BitFieldExtract(data, 1u, bitPos) != 0;
data &= ~(1u << offset);
}
void ToggleBit(inout uint data, uint offset)
{
data ^= 1u << offset;
}
#ifndef INTRINSIC_WAVEREADFIRSTLANE

#endif // INTRINSIC_CUBEMAP_FACE_ID
// ----------------------------------------------------------------------------
// Common math definition and fastmath function
// Common math functions
#define PI 3.14159265359
#define TWO_PI 6.28318530718
#define FOUR_PI 12.56637061436
#define INV_PI 0.31830988618
#define INV_TWO_PI 0.15915494309
#define INV_FOUR_PI 0.07957747155
#define HALF_PI 1.57079632679
#define INV_HALF_PI 0.636619772367
#define INFINITY asfloat(0x7F800000)
#define LOG2_E 1.44269504089
#define FLT_EPSILON 1.192092896e-07 // Smallest positive number, such that 1.0 + FLT_EPSILON != 1.0
#define FLT_MIN 1.175494351e-38 // Minimum representable positive floating-point number
#define FLT_MAX 3.402823466e+38 // Maximum representable floating-point number
#define HFLT_MIN 0.00006103515625 // 2^14 it is the same for 10, 11 and 16bit float. ref: https://www.khronos.org/opengl/wiki/Small_Float_Formats
float DegToRad(float deg)
{

// Using pow often results in a warning like this:
// "pow(f, e) will not work for negative f, use abs(f) or conditionally handle negative values if you expect them"
// PositivePow removes this warning when you know the value is positive, and avoids inf/NaN.
TEMPLATE_2_FLT(PositivePow, base, power, return pow(max(abs(base), FLT_EPSILON), power))
TEMPLATE_2_FLT(PositivePow, base, power, return pow(max(abs(base), FLT_EPS), power))
// Computes (FastSign(s) * x) using 2x VALU.
// See the comment about FastSign() below.
float FastMulBySignOf(float s, float x, bool ignoreNegZero = true)
{
if (ignoreNegZero)
{
return (s >= 0) ? x : -x;
}
else
{
uint negZero = 0x80000000u;
uint signBit = negZero & asuint(s);
return asfloat(signBit ^ asuint(x));
}
}
// Ref: https://twitter.com/SebAaltonen/status/878250919879639040
// 2 mads (mad_sat and mad), faster than regular sign
float FastSign(float x)
// Returns -1 for negative numbers and 1 for positive numbers.
// 0 can be handled in 2 different ways.
// The IEEE floating point standard defines 0 as signed: +0 and -0.
// However, mathematics typically treats 0 as unsigned.
// Therefore, we treat -0 as +0 by default: FastSign(+0) = FastSign(-0) = 1.
// If (ignoreNegZero = false), FastSign(-0, false) = -1.
// Note that the sign() function in HLSL implements signum, which returns 0 for 0.
float FastSign(float s, bool ignoreNegZero = true)
return saturate(x * FLT_MAX) * 2.0 - 1.0;
return FastMulBySignOf(s, 1.0, ignoreNegZero);
}
// Orthonormalizes the tangent frame using the Gram-Schmidt process.

// Z buffer to linear depth.
// Correctly handles oblique view frustums. Only valid for projection matrices!
// Ref: An Efficient Depth Linearization Method for Oblique View Frustums, Eq. 6.
float LinearEyeDepth(float2 positionSS, float depthRaw, float4 invProjParam)
float LinearEyeDepth(float2 positionNDC, float deviceDepth, float4 invProjParam)
float4 positionCS = float4(positionSS * 2.0 - 1.0, depthRaw, 1.0);
float4 positionCS = float4(positionNDC * 2.0 - 1.0, deviceDepth, 1.0);
}
// Z buffer to linear depth.
// Correctly handles oblique view frustums.
// Typically, this is the cheapest variant, provided you've already computed 'positionWS'.
float LinearEyeDepth(float3 positionWS, float4x4 viewProjMatrix)
{
return mul(viewProjMatrix, float4(positionWS, 1.0)).w;
}
// ----------------------------------------------------------------------------

// (position = positionCS) => (clipSpaceTransform = use default)
// (position = positionVS) => (clipSpaceTransform = UNITY_MATRIX_P)
// (position = positionWS) => (clipSpaceTransform = UNITY_MATRIX_VP)
float2 ComputeScreenSpacePosition(float3 position, float4x4 clipSpaceTransform = k_identity4x4)
float2 ComputeNormalizedDeviceCoordinates(float3 position, float4x4 clipSpaceTransform = k_identity4x4)
float2 positionSS = positionCS.xy * (rcp(positionCS.w) * 0.5) + 0.5;
float2 positionNDC = positionCS.xy * (rcp(positionCS.w) * 0.5) + 0.5;
positionSS.y = 1.0 - positionSS.y;
positionNDC.y = 1.0 - positionNDC.y;
return positionSS;
return positionNDC;
float4 ComputeClipSpacePosition(float2 positionSS, float depthRaw)
float4 ComputeClipSpacePosition(float2 positionNDC, float deviceDepth)
positionSS.y = 1.0 - positionSS.y;
positionNDC.y = 1.0 - positionNDC.y;
return float4(positionSS * 2.0 - 1.0, depthRaw, 1.0);
return float4(positionNDC * 2.0 - 1.0, deviceDepth, 1.0);
float3 ComputeViewSpacePosition(float2 positionSS, float depthRaw, float4x4 invProjMatrix)
float3 ComputeViewSpacePosition(float2 positionNDC, float deviceDepth, float4x4 invProjMatrix)
float4 positionCS = ComputeClipSpacePosition(positionSS, depthRaw);
float4 positionCS = ComputeClipSpacePosition(positionNDC, deviceDepth);
float4 positionVS = mul(invProjMatrix, positionCS);
// The view space uses a right-handed coordinate system.
positionVS.z = -positionVS.z;

float3 ComputeWorldSpacePosition(float2 positionSS, float depthRaw, float4x4 invViewProjMatrix)
float3 ComputeWorldSpacePosition(float2 positionNDC, float deviceDepth, float4x4 invViewProjMatrix)
float4 positionCS = ComputeClipSpacePosition(positionSS, depthRaw);
float4 positionCS = ComputeClipSpacePosition(positionNDC, deviceDepth);
float4 hpositionWS = mul(invViewProjMatrix, positionCS);
return hpositionWS.xyz / hpositionWS.w;
}
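The reconstruction helpers above follow the standard inverse view-projection path. After the platform-dependent flip of the y coordinate, with positionNDC in [0, 1)² and deviceDepth read from the depth buffer,

$$\mathrm{positionCS}=\big(2\,\mathrm{positionNDC}-1,\ \mathrm{deviceDepth},\ 1\big),\qquad
\mathrm{positionWS}=\frac{\big(M_{VP}^{-1}\,\mathrm{positionCS}\big)_{xyz}}{\big(M_{VP}^{-1}\,\mathrm{positionCS}\big)_{w}},$$

and the linear (view-space) depth recovered afterwards is $\mathrm{linearDepth}=\big(M_{VP}\,(\mathrm{positionWS},1)\big)_{w}$.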

struct PositionInputs
{
// Normalize screen position (offset by 0.5)
float2 positionSS;
// Unormalize screen position (offset by 0.5)
uint2 unPositionSS;
uint2 unTileCoord;
float depthRaw; // raw depth from depth buffer
float depthVS;
float3 positionWS;
float3 positionWS; // World space position (could be camera-relative)
float2 positionNDC; // Normalized screen UVs : [0, 1) (with the half-pixel offset)
uint2 positionSS; // Screen space pixel coordinates : [0, NumPixels)
uint2 tileCoord; // Screen tile coordinates : [0, NumTiles)
float deviceDepth; // Depth from the depth buffer : [0, 1] (typically reversed)
float linearDepth; // View space Z coordinate : [Near, Far]
// If a compute shader calls this function, unPositionSS is an integer, usually computed as: uint2 unPositionSS = groupId.xy * BLOCK_SIZE + groupThreadId.xy
// If a compute shader calls this function, positionSS is an integer, usually computed as: uint2 positionSS = groupId.xy * BLOCK_SIZE + groupThreadId.xy
PositionInputs GetPositionInput(float2 unPositionSS, float2 invScreenSize, uint2 unTileCoord) // Specify explicit tile coordinates so that we can easily make it lane invariant for compute evaluation.
PositionInputs GetPositionInput(float2 positionSS, float2 invScreenSize, uint2 tileCoord) // Specify explicit tile coordinates so that we can easily make it lane invariant for compute evaluation.
posInput.positionSS = unPositionSS;
posInput.positionNDC = positionSS;
posInput.positionSS.xy += float2(0.5, 0.5);
posInput.positionNDC.xy += float2(0.5, 0.5);
posInput.positionSS *= invScreenSize;
posInput.positionNDC *= invScreenSize;
posInput.unPositionSS = uint2(unPositionSS);
posInput.unTileCoord = unTileCoord;
posInput.positionSS = uint2(positionSS);
posInput.tileCoord = tileCoord;
PositionInputs GetPositionInput(float2 unPositionSS, float2 invScreenSize)
PositionInputs GetPositionInput(float2 positionSS, float2 invScreenSize)
return GetPositionInput(unPositionSS, invScreenSize, uint2(0, 0));
return GetPositionInput(positionSS, invScreenSize, uint2(0, 0));
// depthRaw and depthVS come directly form .zw of SV_Position
void UpdatePositionInput(float depthRaw, float depthVS, float3 positionWS, inout PositionInputs posInput)
// deviceDepth and linearDepth come directly from .zw of SV_Position
void UpdatePositionInput(float deviceDepth, float linearDepth, float3 positionWS, inout PositionInputs posInput)
posInput.depthRaw = depthRaw;
posInput.depthVS = depthVS;
posInput.positionWS = positionWS;
posInput.deviceDepth = deviceDepth;
posInput.linearDepth = linearDepth;
posInput.positionWS = positionWS;
void UpdatePositionInput(float depthRaw, float4x4 invViewProjMatrix, float4x4 viewProjMatrix, inout PositionInputs posInput)
void UpdatePositionInput(float deviceDepth, float4x4 invViewProjMatrix, float4x4 viewProjMatrix, inout PositionInputs posInput)
posInput.depthRaw = depthRaw;
posInput.positionWS = ComputeWorldSpacePosition(posInput.positionSS, depthRaw, invViewProjMatrix);
posInput.deviceDepth = deviceDepth;
posInput.positionWS = ComputeWorldSpacePosition(posInput.positionNDC, deviceDepth, invViewProjMatrix);
posInput.depthVS = mul(viewProjMatrix, float4(posInput.positionWS, 1.0)).w;
posInput.linearDepth = mul(viewProjMatrix, float4(posInput.positionWS, 1.0)).w;
}
// The view direction 'V' points towards the camera.

posInput.positionWS += depthOffsetVS * (-V);
float4 positionCS = mul(viewProjMatrix, float4(posInput.positionWS, 1.0));
posInput.depthVS = positionCS.w;
posInput.depthRaw = positionCS.z / positionCS.w;
float4 positionCS = mul(viewProjMatrix, float4(posInput.positionWS, 1.0));
posInput.linearDepth = positionCS.w;
posInput.deviceDepth = positionCS.z / positionCS.w;
}
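For illustration, a minimal full-screen-pass sketch of the renamed helpers, reusing the _ScreenSize, _MainDepthTexture, LOAD_TEXTURE2D and UNITY_MATRIX_I_VP / UNITY_MATRIX_VP names that appear later in this changeset (input.positionCS is assumed to be SV_Position):

    // Sketch only: reconstruct the world-space position and linear depth of the current pixel.
    PositionInputs posInput = GetPositionInput(input.positionCS.xy, _ScreenSize.zw);
    float deviceDepth = LOAD_TEXTURE2D(_MainDepthTexture, posInput.positionSS).x;
    UpdatePositionInput(deviceDepth, UNITY_MATRIX_I_VP, UNITY_MATRIX_VP, posInput);
    // posInput.positionWS, posInput.linearDepth and posInput.deviceDepth are now valid;
    // posInput.positionNDC still holds the half-pixel-offset normalized screen UVs.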
// ----------------------------------------------------------------------------

// LOD dithering transition helper
// LOD0 must use this function with ditherFactor 1..0
// LOD1 must use this function with ditherFactor 0..1
void LODDitheringTransition(uint2 unPositionSS, float ditherFactor)
void LODDitheringTransition(uint2 positionSS, float ditherFactor)
float p = GenerateHashedRandomFloat(unPositionSS);
float p = GenerateHashedRandomFloat(positionSS);
// We want to have a symmetry between the 0..0.5 and 0.5..1 ditherFactor ranges so that no pixels are transparent during the transition;
// this is handled by the test below, which reverses the pattern.

#endif // UNITY_COMMON_INCLUDED
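For illustration, a hedged usage sketch of the dithered cross-fade, assuming the (truncated) body ends by clip()-ing against the hashed value and that a normalized transition parameter t in [0, 1] is available from the LOD system:

    // Sketch only: LOD0 fades out while LOD1 fades in over the same transition.
    LODDitheringTransition(posInput.positionSS, 1.0 - t);  // in the LOD0 pixel shader
    LODDitheringTransition(posInput.positionSS, t);        // in the LOD1 pixel shader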

15
ScriptableRenderPipeline/Core/ShaderLibrary/CommonLighting.hlsl


// These functions clamp to the 16-bit floating-point maximum and are used to prevent INF in the code in case of extreme values
float ClampToFloat16Max(float value)
{
return min(value, 65504.0);
return min(value, HALF_MAX);
return min(value, 65504.0);
return min(value, HALF_MAX);
return min(value, 65504.0);
return min(value, HALF_MAX);
return min(value, 65504.0);
return min(value, HALF_MAX);
}
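For illustration, a one-line sketch of where such a clamp typically goes, assuming a float3 overload (implied by the four clamped variants above) and hypothetical diffuseLighting / specularLighting accumulators about to be written to an FP16 render target:

    // Sketch only: keep an HDR lighting sum finite before storing it in a 16-bit float target.
    float3 color = ClampToFloat16Max(diffuseLighting + specularLighting);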
// Lighting convention

float3 localX = float3(c * x * a - 1, sz * b, c);
float3 localY = float3(b, y * ya - sz, y);
return float3x3(localX, localY, localZ);
}
float3x3 GetLocalFrame(float3 localZ, float3 localX)
{
float3 localY = cross(localZ, localX);
return float3x3(localX, localY, localZ);
}
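For illustration, a sketch of consuming these frames, assuming the single-argument GetLocalFrame(localZ) overload whose body is shown above and hypothetical normalWS / dirTS inputs:

    // Sketch only: build an orthonormal frame around the normal (rows are localX, localY, localZ)
    // and bring a tangent-space direction into world space with a row-vector multiply.
    float3x3 frame = GetLocalFrame(normalWS);
    float3   dirWS = mul(dirTS, frame);  // dirTS.x * localX + dirTS.y * localY + dirTS.z * localZ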

49
ScriptableRenderPipeline/Core/ShaderLibrary/CommonMaterial.hlsl


#define UNITY_COMMON_MATERIAL_INCLUDED
//-----------------------------------------------------------------------------
// Helper function for anisotropy
//-----------------------------------------------------------------------------
void ConvertAnisotropyToRoughness(float roughness, float anisotropy, out float roughnessT, out float roughnessB)
{
// Use the parametrization of Sony Imageworks.
// Ref: Revisiting Physically Based Shading at Imageworks, p. 15.
roughnessT = roughness * (1 + anisotropy);
roughnessB = roughness * (1 - anisotropy);
}
//-----------------------------------------------------------------------------
// Helper function for perceptual roughness
// Helper functions for roughness
//-----------------------------------------------------------------------------
float PerceptualRoughnessToRoughness(float perceptualRoughness)

float PerceptualSmoothnessToPerceptualRoughness(float perceptualSmoothness)
{
return (1.0 - perceptualSmoothness);
}
// Using roughness values of 0 leads to INFs and NANs. The only sensible place to use the roughness
// value of 0 is IBL, so we do not modify the perceptual roughness which is used to select the MIP map level.
// Note: making the constant too small results in aliasing.
float ClampRoughnessForAnalyticalLights(float roughness)
{
return max(roughness, 1.0/1024.0);
}
// 'bsdfData.roughnessT' and 'bsdfData.roughnessB' are clamped, and are meant to be used with analytical lights.
// 'bsdfData.perceptualRoughness' is not clamped, and is meant to be used for IBL.
// If IBL needs the linear roughness value for some reason, it can be computed as follows:
// float roughness = PerceptualRoughnessToRoughness(bsdfData.perceptualRoughness);
void ConvertAnisotropyToRoughness(float perceptualRoughness, float anisotropy, out float roughnessT, out float roughnessB)
{
float roughness = PerceptualRoughnessToRoughness(perceptualRoughness);
// Use the parametrization of Sony Imageworks.
// Ref: Revisiting Physically Based Shading at Imageworks, p. 15.
roughnessT = roughness * (1 + anisotropy);
roughnessB = roughness * (1 - anisotropy);
roughnessT = ClampRoughnessForAnalyticalLights(roughnessT);
roughnessB = ClampRoughnessForAnalyticalLights(roughnessB);
}
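For illustration, a worked example of the new parametrization, assuming the usual Unity convention that PerceptualRoughnessToRoughness squares its input:

    // Sketch only: perceptualRoughness = 0.5, anisotropy = 0.8
    //   roughness  = 0.5 * 0.5        = 0.25
    //   roughnessT = 0.25 * (1 + 0.8) = 0.45
    //   roughnessB = 0.25 * (1 - 0.8) = 0.05
    // Both results are above the 1.0/1024.0 clamp, so they pass through unchanged.
    float roughnessT, roughnessB;
    ConvertAnisotropyToRoughness(0.5, 0.8, roughnessT, roughnessB);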
// ----------------------------------------------------------------------------

// It can be accomplished by reading the stencil buffer.
// A faster solution (which avoids an extra texture fetch) is to simply make sure that
// all pixels which belong to an SSS material are not black (pixels that don't belong to one always are).
// We choose the blue color channel since it's perceptually the least noticeable.
subsurfaceLighting.r = max(subsurfaceLighting.r, HFLT_MIN);
subsurfaceLighting.b = max(subsurfaceLighting.b, HALF_MIN);
return subsurfaceLighting;
}

return subsurfaceLighting.r > 0;
return subsurfaceLighting.b > 0;
// MACRO from legacy Unity
// Transforms 2D UV by scale/bias property
#define TRANSFORM_TEX(tex, name) ((tex.xy) * name##_ST.xy + name##_ST.zw)
#define GET_TEXELSIZE_NAME(name) (name##_TexelSize)
#endif // UNITY_COMMON_MATERIAL_INCLUDED

6
ScriptableRenderPipeline/Core/ShaderLibrary/ImageBasedLighting.hlsl


float m = PerceptualRoughnessToRoughness(perceptualRoughness);
// Remap to spec power. See eq. 21 in --> https://dl.dropboxusercontent.com/u/55891920/papers/mm_brdf.pdf
float n = (2.0 / max(FLT_EPSILON, m * m)) - 2.0;
float n = (2.0 / max(FLT_EPS, m * m)) - 2.0;
n /= (4.0 * max(NdotR, FLT_EPSILON));
n /= (4.0 * max(NdotR, FLT_EPS));
// remap back to the square root of the real roughness (the 0.25 exponent includes both the square root of the conversion and the square root for going from roughness to perceptualRoughness)
perceptualRoughness = pow(2.0 / (n + 2.0), 0.25);

}
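For illustration, a worked pass through the first remap above (ignoring the NdotR division), again assuming the squaring convention for PerceptualRoughnessToRoughness:

    // Sketch only: perceptualRoughness = 0.5  =>  m = 0.25
    //   n = 2 / (0.25 * 0.25) - 2 = 32 - 2 = 30
    //   mapping back: pow(2 / (30 + 2), 0.25) = pow(0.0625, 0.25) = 0.5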
// Prevent NaNs arising from the division of 0 by 0.
cbsdfInt = max(cbsdfInt, FLT_MIN);
cbsdfInt = max(cbsdfInt, FLT_EPS);
return float4(lightInt / cbsdfInt, 1.0);
}

30
ScriptableRenderPipeline/Core/ShaderLibrary/Macros.hlsl


#define SAMPLE_TEXTURECUBE_ARRAY_LOD_ABSTRACT(textureName, samplerName, coord3, index, lod) SAMPLE_TEXTURECUBE_ARRAY_LOD(textureName, samplerName, coord3, index, lod)
#endif
#define PI 3.14159265358979323846
#define TWO_PI 6.28318530717958647693
#define FOUR_PI 12.5663706143591729538
#define INV_PI 0.31830988618379067154
#define INV_TWO_PI 0.15915494309189533577
#define INV_FOUR_PI 0.07957747154594766788
#define HALF_PI 1.57079632679489661923
#define INV_HALF_PI 0.63661977236758134308
#define LOG2_E 1.44269504088896340736
#define INFINITY asfloat(0x7F800000)
#define MILLIMETERS_PER_METER 1000
#define METERS_PER_MILLIMETER rcp(MILLIMETERS_PER_METER)
#define CENTIMETERS_PER_METER 100
#define METERS_PER_CENTIMETER rcp(CENTIMETERS_PER_METER)
#define FLT_EPS 5.960464478e-8 // 2^-24, machine epsilon: 1 + EPS = 1 (half of the ULP for 1)
#define FLT_MIN 1.175494351e-38 // Minimum representable positive floating-point number
#define FLT_MAX 3.402823466e+38 // Maximum representable floating-point number
#define FLT_NAN asfloat(0xFFFFFFFF)
#define HALF_MIN 6.103515625e-5 // 2^-14, the same value for 10, 11 and 16-bit: https://www.khronos.org/opengl/wiki/Small_Float_Formats
#define HALF_MAX 65504.0
#define UINT_MAX 0xFFFFFFFFu
#define TEMPLATE_1_FLT(FunctionName, Parameter1, FunctionBody) \
float FunctionName(float Parameter1) { FunctionBody; } \
float2 FunctionName(float2 Parameter1) { FunctionBody; } \

void FunctionName(inout bool2 a, inout bool2 b) { bool2 t = a; a = b; b = t; } \
void FunctionName(inout bool3 a, inout bool3 b) { bool3 t = a; a = b; b = t; } \
void FunctionName(inout bool4 a, inout bool4 b) { bool4 t = a; a = b; b = t; }
// MACRO from legacy Unity
// Transforms 2D UV by scale/bias property
#define TRANSFORM_TEX(tex, name) ((tex.xy) * name##_ST.xy + name##_ST.zw)
#define GET_TEXELSIZE_NAME(name) (name##_TexelSize)
#endif // UNITY_MACROS_INCLUDED
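For illustration, the usual way this legacy macro is consumed, assuming a hypothetical _BaseColorMap texture property (with its generated _BaseColorMap_ST scale/bias vector) and an input.texCoord0 interpolator:

    // Sketch only: apply the material's tiling and offset to the mesh UVs.
    // Expands to: input.texCoord0.xy * _BaseColorMap_ST.xy + _BaseColorMap_ST.zw
    float2 uv = TRANSFORM_TEX(input.texCoord0, _BaseColorMap);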

2
ScriptableRenderPipeline/Core/ShaderLibrary/NormalSurfaceGradient.hlsl


float3 SurfaceGradientFromPerturbedNormal(float3 nrmVertexNormal, float3 v)
{
float3 n = nrmVertexNormal;
float s = 1.0 / max(FLT_EPSILON, abs(dot(n, v)));
float s = 1.0 / max(FLT_EPS, abs(dot(n, v)));
return s * (dot(n, v) * n - v);
}

6
ScriptableRenderPipeline/Core/ShaderLibrary/Packing.hlsl


// Packs an integer stored using at most 'numBits' into a [0..1] float.
float PackInt(uint i, uint numBits)
{
uint maxInt = 0xFFFFFFFFu >> (32u - numBits);
uint maxInt = UINT_MAX >> (32u - numBits);
return saturate(i * rcp(maxInt));
}

uint maxInt = 0xFFFFFFFFu >> (32u - numBits);
uint maxInt = UINT_MAX >> (32u - numBits);
return (uint)(f * maxInt + 0.5); // Round instead of truncating
}

float UnpackUIntToFloat(uint src, uint numBits, uint offset)
{
uint maxInt = 0xFFFFFFFFu >> (32u - numBits);
uint maxInt = UINT_MAX >> (32u - numBits);
return float(BitFieldExtract(src, numBits, offset)) * rcp(maxInt);
}
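For illustration, a worked round trip with numBits = 8, so maxInt = UINT_MAX >> 24 = 255 (the unnamed rounding function above is assumed to be the unpack counterpart of PackInt):

    // Sketch only:
    //   PackInt(200u, 8u)              = saturate(200 / 255)           ~= 0.7843
    //   unpack(0.7843, 8u)             = (uint)(0.7843 * 255 + 0.5)    = 200
    //   UnpackUIntToFloat(src, 8u, 0u) = BitFieldExtract(src, 8u, 0u) / 255.0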

8
ScriptableRenderPipeline/Core/ShaderLibrary/Shadow/Shadow.hlsl


// shadow sampling prototypes
float GetPunctualShadowAttenuation( ShadowContext shadowContext, float3 positionWS, float3 normalWS, int shadowDataIndex, float4 L );
float GetPunctualShadowAttenuation( ShadowContext shadowContext, float3 positionWS, float3 normalWS, int shadowDataIndex, float4 L, float2 unPositionSS );
float GetPunctualShadowAttenuation( ShadowContext shadowContext, float3 positionWS, float3 normalWS, int shadowDataIndex, float4 L, float2 positionSS );
float GetDirectionalShadowAttenuation( ShadowContext shadowContext, float3 positionWS, float3 normalWS, int shadowDataIndex, float3 L, float2 unPositionSS );
float GetDirectionalShadowAttenuation( ShadowContext shadowContext, float3 positionWS, float3 normalWS, int shadowDataIndex, float3 L, float2 positionSS );
#include "ShadowSampling.hlsl" // sampling patterns (don't modify)
#include "ShadowAlgorithms.hlsl" // engine default algorithms (don't modify)

return EvalShadow_PunctualDepth(shadowContext, positionWS, normalWS, shadowDataIndex, L);
}
float GetPunctualShadowAttenuation( ShadowContext shadowContext, float3 positionWS, float3 normalWS, int shadowDataIndex, float4 L, float2 unPositionSS )
float GetPunctualShadowAttenuation( ShadowContext shadowContext, float3 positionWS, float3 normalWS, int shadowDataIndex, float4 L, float2 positionSS )
{
return GetPunctualShadowAttenuation( shadowContext, positionWS, normalWS, shadowDataIndex, L );
}

return EvalShadow_CascadedDepth_Blend( shadowContext, positionWS, normalWS, shadowDataIndex, L );
}
float GetDirectionalShadowAttenuation( ShadowContext shadowContext, float3 positionWS, float3 normalWS, int shadowDataIndex, float3 L, float2 unPositionSS )
float GetDirectionalShadowAttenuation( ShadowContext shadowContext, float3 positionWS, float3 normalWS, int shadowDataIndex, float3 L, float2 positionSS )
{
return GetDirectionalShadowAttenuation( shadowContext, positionWS, normalWS, shadowDataIndex, L );
}
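For illustration, a sketch of how a light loop consumes these prototypes; the ShadowContext construction is not shown in this diff, and normalWS / shadowDataIndex / L are hypothetical caller-provided values:

    // Sketch only: attenuate a directional light by its shadow map.
    float EvaluateDirectionalShadow(ShadowContext shadowContext, PositionInputs posInput,
                                    float3 normalWS, int shadowDataIndex, float3 L)
    {
        return GetDirectionalShadowAttenuation(shadowContext, posInput.positionWS, normalWS,
                                               shadowDataIndex, L, (float2)posInput.positionSS);
    }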

6
ScriptableRenderPipeline/Core/ShaderLibrary/Tessellation.hlsl


float3 GetScreenSpaceTessFactor(float3 p0, float3 p1, float3 p2, float4x4 viewProjectionMatrix, float4 screenSize, float triangleSize)
{
// Get screen space adaptive scale factor
float2 edgeScreenPosition0 = ComputeScreenSpacePosition(p0, viewProjectionMatrix) * screenSize.xy;
float2 edgeScreenPosition1 = ComputeScreenSpacePosition(p1, viewProjectionMatrix) * screenSize.xy;
float2 edgeScreenPosition2 = ComputeScreenSpacePosition(p2, viewProjectionMatrix) * screenSize.xy;
float2 edgeScreenPosition0 = ComputeNormalizedDeviceCoordinates(p0, viewProjectionMatrix) * screenSize.xy;
float2 edgeScreenPosition1 = ComputeNormalizedDeviceCoordinates(p1, viewProjectionMatrix) * screenSize.xy;
float2 edgeScreenPosition2 = ComputeNormalizedDeviceCoordinates(p2, viewProjectionMatrix) * screenSize.xy;
float EdgeScale = 1.0 / triangleSize; // Edge size in reality, but name is simpler
float3 tessFactor;

2
ScriptableRenderPipeline/Core/ShaderLibrary/VolumeRendering.hlsl


// Absorption coefficient from Disney: http://blog.selfshadow.com/publications/s2015-shading-course/burley/s2015_pbs_disney_bsdf_notes.pdf
float3 TransmittanceColorAtDistanceToAbsorption(float3 transmittanceColor, float atDistance)
{
return -log(transmittanceColor + FLT_EPSILON) / max(atDistance, FLT_EPSILON);
return -log(transmittanceColor + FLT_EPS) / max(atDistance, FLT_EPS);
}
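For illustration, a worked value from the Disney mapping above:

    // Sketch only: a 50% grey transmittance reached at a distance of 1 unit gives
    //   absorption = -log(0.5 + FLT_EPS) / max(1.0, FLT_EPS) ~= 0.693 per unit distance, per channel.
    float3 absorption = TransmittanceColorAtDistanceToAbsorption(float3(0.5, 0.5, 0.5), 1.0);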

146
ScriptableRenderPipeline/Core/TextureCache.cs


using UnityEngine;
using System.Collections.Generic;
using UnityEngine.Rendering;
#if UNITY_EDITOR
using UnityEditor;
#endif

{
private Texture2DArray m_Cache;
public override void TransferToSlice(int sliceIndex, Texture texture)
public override void TransferToSlice(CommandBuffer cmd, int sliceIndex, Texture texture)
{
var mismatch = (m_Cache.width != texture.width) || (m_Cache.height != texture.height);

if (mismatch)
{
if (!UnityEngine.Graphics.ConvertTexture(texture, 0, m_Cache, sliceIndex))
{
Debug.LogErrorFormat(texture, "Unable to convert texture \"{0}\" to match renderloop settings ({1}x{2} {3})",
texture.name, m_Cache.width, m_Cache.height, m_Cache.format);
}
cmd.ConvertTexture(texture, 0, m_Cache, sliceIndex);
UnityEngine.Graphics.CopyTexture(texture, 0, m_Cache, sliceIndex);
cmd.CopyTexture(texture, 0, m_Cache, sliceIndex);
}
}

private int m_CubeMipLevelPropName;
private int m_cubeSrcTexPropName;
public override void TransferToSlice(int sliceIndex, Texture texture)
public override void TransferToSlice(CommandBuffer cmd, int sliceIndex, Texture texture)
TransferToPanoCache(sliceIndex, texture);
TransferToPanoCache(cmd, sliceIndex, texture);
else
{
var mismatch = (m_Cache.width != texture.width) || (m_Cache.height != texture.height);

if (mismatch)
{
bool failed = false;
if (!UnityEngine.Graphics.ConvertTexture(texture, f, m_Cache, 6 * sliceIndex + f))
{
failed = true;
break;
}
}
if (failed)
{
Debug.LogErrorFormat(texture, "Unable to convert texture \"{0}\" to match renderloop settings ({1}x{2} {3})",
texture.name, m_Cache.width, m_Cache.height, m_Cache.format);
cmd.ConvertTexture(texture, f, m_Cache, 6 * sliceIndex + f);
UnityEngine.Graphics.CopyTexture(texture, f, m_Cache, 6 * sliceIndex + f);
cmd.CopyTexture(texture, f, m_Cache, 6 * sliceIndex + f);
}
}
}

Texture.DestroyImmediate(m_Cache);
}
private void TransferToPanoCache(int sliceIndex, Texture texture)
private void TransferToPanoCache(CommandBuffer cmd, int sliceIndex, Texture texture)
UnityEngine.Graphics.SetRenderTarget(m_StagingRTs[m]);
UnityEngine.Graphics.Blit(null, m_CubeBlitMaterial, 0);
cmd.Blit(null, m_StagingRTs[m], m_CubeBlitMaterial, 0);
UnityEngine.Graphics.CopyTexture(m_StagingRTs[m], 0, 0, m_CacheNoCubeArray, sliceIndex, m);
cmd.CopyTexture(m_StagingRTs[m], 0, 0, m_CacheNoCubeArray, sliceIndex, m);
}
}

}
}
public static TextureFormat GetPreferredHdrCompressedTextureFormat
public static TextureFormat GetPreferredHDRCompressedTextureFormat
{
get
{

// On editor the texture is uncompressed when operating against mobile build targets
// // On editor the texture is uncompressed when operating against mobile build targets
//#if UNITY_2017_2_OR_NEWER
if (SystemInfo.SupportsTextureFormat(probeFormat) && !UnityEngine.Rendering.GraphicsSettings.HasShaderDefine(UnityEngine.Rendering.BuiltinShaderDefine.UNITY_NO_DXT5nm))
format = probeFormat;

{
public uint texId;
public uint countLRU;
#if UNITY_EDITOR
public Hash128 hash;
#endif
public uint sliceEntryHash;
};
private int m_NumTextures;

private static uint g_MaxFrameCount = unchecked((uint)(-1));
private static uint g_InvalidTexID = (uint)0;
public int FetchSlice(Texture texture, bool forceReinject=false)
public uint GetTextureHash(Texture texture)
var sliceIndex = -1;
uint textureHash = texture.updateCount;
// For baked probes in the editor we need to factor in the actual hash of the texture because we can't increment the update count of a texture that's baked on disk.
// This code leaks logic from reflection probe baking into the texture cache, which is not good... TODO: Find a way to do that outside of the texture cache.
#if UNITY_EDITOR
textureHash += (uint)texture.imageContentsHash.GetHashCode();
#endif
return textureHash;
}
public int ReserveSlice(Texture texture, out bool needUpdate)
{
needUpdate = false;
return sliceIndex;
return -1;
#if UNITY_EDITOR
var hash = texture.imageContentsHash;
#endif
//assert(TexID!=g_InvalidTexID);
if (texId == g_InvalidTexID) return 0;
var bSwapSlice = forceReinject;
var bFoundAvailOrExistingSlice = false;
if (texId == g_InvalidTexID)
return -1;
int cachedSlice;
if (m_LocatorInSliceArray.TryGetValue(texId, out cachedSlice))
var sliceIndex = -1;
var foundIndex = -1;
if (m_LocatorInSliceArray.TryGetValue(texId, out foundIndex))
sliceIndex = cachedSlice;
sliceIndex = foundIndex;
var textureHash = GetTextureHash(texture);
needUpdate |= (m_SliceArray[sliceIndex].sliceEntryHash != textureHash);
bFoundAvailOrExistingSlice = true;
#if UNITY_EDITOR
bSwapSlice = bSwapSlice || (m_SliceArray[sliceIndex].hash != hash);
#endif
if (!bFoundAvailOrExistingSlice)
if(sliceIndex == -1)
{
// look for the first non-zero entry. It will be the least recently used entry
// since the array was pre-sorted (in linear time) in NewFrame()

{
idx = m_SortedIdxArray[j];
if (m_SliceArray[idx].countLRU == 0) ++j; // if entry already snagged by a new texture in this frame then ++j
else bFound = true;
if (m_SliceArray[idx].countLRU == 0)
++j; // if entry already snagged by a new texture in this frame then ++j
else
bFound = true;
needUpdate = true;
// if we are replacing an existing entry delete it from m_locatorInSliceArray.
if (m_SliceArray[idx].texId != g_InvalidTexID)
{

m_SliceArray[idx].texId = texId;
sliceIndex = idx;
bFoundAvailOrExistingSlice = true;
bSwapSlice = true;
// wrap up
Debug.Assert(bFoundAvailOrExistingSlice, "The texture cache doesn't have enough space to store all textures. Please either increase the size of the texture cache, or use fewer unique textures.");
if (bFoundAvailOrExistingSlice)
if(sliceIndex != -1)
}
return sliceIndex;
}
if (bSwapSlice) // if this was a miss
// In case the texture content with which we update the cache is not the input texture, we need to provide the right update count.
public void UpdateSlice(CommandBuffer cmd, int sliceIndex, Texture content, uint textureHash)
#if UNITY_EDITOR
m_SliceArray[sliceIndex].hash = hash;
#endif
// transfer new slice to sliceIndex from source texture
SetSliceHash(sliceIndex, textureHash);
TransferToSlice(cmd, sliceIndex, content);
}
// transfer new slice to sliceIndex from source texture
TransferToSlice(sliceIndex, texture);
public void SetSliceHash(int sliceIndex, uint hash)
{
// transfer new slice to sliceIndex from source texture
m_SliceArray[sliceIndex].sliceEntryHash = hash;
}
public void UpdateSlice(CommandBuffer cmd, int sliceIndex, Texture content)
{
UpdateSlice(cmd, sliceIndex, content, GetTextureHash(content));
public int FetchSlice(CommandBuffer cmd, Texture texture, bool forceReinject=false)
{
bool needUpdate = false;
var sliceIndex = ReserveSlice(texture, out needUpdate);
var bSwapSlice = forceReinject || needUpdate;
// wrap up
Debug.Assert(sliceIndex != -1, "The texture cache doesn't have enough space to store all textures. Please either increase the size of the texture cache, or use fewer unique textures.");
if (sliceIndex != -1 && bSwapSlice)
{
UpdateSlice(cmd, sliceIndex, texture);
}
return sliceIndex;

m_NumMipLevels = 0;
}
public virtual void TransferToSlice(int sliceIndex, Texture texture)
public virtual void TransferToSlice(CommandBuffer cmd, int sliceIndex, Texture texture)
{
}

410
ScriptableRenderPipeline/HDRenderPipeline/Debug/DebugDisplay.cs


using System.Collections.Generic;
using System.Collections.Generic;
using UnityEngine;
using System;

public enum DebugLightingMode
public enum FullScreenDebugMode
DiffuseLighting,
SpecularLighting,
VisualizeCascade,
IndirectDiffuseOcclusionFromSsao,
IndirectDiffuseGtaoFromSsao,
IndirectSpecularOcclusionFromSsao,
IndirectSpecularGtaoFromSsao
// Lighting
MinLightingFullScreenDebug,
SSAO,
DeferredShadows,
PreRefractionColorPyramid,
DepthPyramid,
FinalColorPyramid,
MaxLightingFullScreenDebug,
// Rendering
MinRenderingFullScreenDebug,
MotionVectors,
NanTracker,
MaxRenderingFullScreenDebug
}
public class DebugDisplaySettings

public LightingDebugSettings lightingDebugSettings = new LightingDebugSettings();
public RenderingDebugSettings renderingDebugSettings = new RenderingDebugSettings();
private static bool isDebugViewMaterialInit = false;
public static GUIContent[] debugViewMaterialStrings = null;
public static int[] debugViewMaterialValues = null;
public static GUIContent[] debugViewEngineStrings = null;
public static int[] debugViewEngineValues = null;
public static GUIContent[] debugViewMaterialVaryingStrings = null;
public static int[] debugViewMaterialVaryingValues = null;
public static GUIContent[] debugViewMaterialPropertiesStrings = null;
public static int[] debugViewMaterialPropertiesValues = null;
public static GUIContent[] debugViewMaterialGBufferStrings = null;
public static int[] debugViewMaterialGBufferValues = null;
BuildDebugRepresentation();
FillFullScreenDebugEnum(ref lightingFullScreenDebugStrings, ref lightingFullScreenDebugValues, FullScreenDebugMode.MinLightingFullScreenDebug, FullScreenDebugMode.MaxLightingFullScreenDebug);
FillFullScreenDebugEnum(ref renderingFullScreenDebugStrings, ref renderingFullScreenDebugValues, FullScreenDebugMode.MinRenderingFullScreenDebug, FullScreenDebugMode.MaxRenderingFullScreenDebug);
}
public int GetDebugMaterialIndex()

DebugMenuManager.instance.AddDebugItem<float>("Display Stats", "Frame Rate", () => 1.0f / Time.smoothDeltaTime, null, DebugItemFlag.DynamicDisplay);
DebugMenuManager.instance.AddDebugItem<float>("Display Stats", "Frame Time (ms)", () => Time.smoothDeltaTime * 1000.0f, null, DebugItemFlag.DynamicDisplay);
DebugMenuManager.instance.AddDebugItem<int>("Material", "Material",() => materialDebugSettings.debugViewMaterial, (value) => SetDebugViewMaterial((int)value), DebugItemFlag.None, new DebugItemHandlerIntEnum(DebugDisplaySettings.debugViewMaterialStrings, DebugDisplaySettings.debugViewMaterialValues));
DebugMenuManager.instance.AddDebugItem<int>("Material", "Engine",() => materialDebugSettings.debugViewEngine, (value) => SetDebugViewEngine((int)value), DebugItemFlag.None, new DebugItemHandlerIntEnum(DebugDisplaySettings.debugViewEngineStrings, DebugDisplaySettings.debugViewEngineValues));
DebugMenuManager.instance.AddDebugItem<int>("Material", "Material",() => materialDebugSettings.debugViewMaterial, (value) => SetDebugViewMaterial((int)value), DebugItemFlag.None, new DebugItemHandlerIntEnum(MaterialDebugSettings.debugViewMaterialStrings, MaterialDebugSettings.debugViewMaterialValues));
DebugMenuManager.instance.AddDebugItem<int>("Material", "Engine",() => materialDebugSettings.debugViewEngine, (value) => SetDebugViewEngine((int)value), DebugItemFlag.None, new DebugItemHandlerIntEnum(MaterialDebugSettings.debugViewEngineStrings, MaterialDebugSettings.debugViewEngineValues));
DebugMenuManager.instance.AddDebugItem<int>("Material", "GBuffer",() => materialDebugSettings.debugViewGBuffer, (value) => SetDebugViewGBuffer((int)value), DebugItemFlag.None, new DebugItemHandlerIntEnum(DebugDisplaySettings.debugViewMaterialGBufferStrings, DebugDisplaySettings.debugViewMaterialGBufferValues));
DebugMenuManager.instance.AddDebugItem<int>("Material", "GBuffer",() => materialDebugSettings.debugViewGBuffer, (value) => SetDebugViewGBuffer((int)value), DebugItemFlag.None, new DebugItemHandlerIntEnum(MaterialDebugSettings.debugViewMaterialGBufferStrings, MaterialDebugSettings.debugViewMaterialGBufferValues));
DebugMenuManager.instance.AddDebugItem<LightingDebugPanel, bool>(kEnableShadowDebug, () => lightingDebugSettings.enableShadows, (value) => lightingDebugSettings.enableShadows = (bool)value);
DebugMenuManager.instance.AddDebugItem<LightingDebugPanel, ShadowMapDebugMode>(kShadowDebugMode, () => lightingDebugSettings.shadowDebugMode, (value) => lightingDebugSettings.shadowDebugMode = (ShadowMapDebugMode)value);

DebugMenuManager.instance.AddDebugItem<LightingDebugPanel, Color>(kDebugLightingAlbedo, () => lightingDebugSettings.debugLightingAlbedo, (value) => lightingDebugSettings.debugLightingAlbedo = (Color)value);
DebugMenuManager.instance.AddDebugItem<bool>("Lighting", kDisplaySkyReflectionDebug, () => lightingDebugSettings.displaySkyReflection, (value) => lightingDebugSettings.displaySkyReflection = (bool)value);
DebugMenuManager.instance.AddDebugItem<LightingDebugPanel, float>(kSkyReflectionMipmapDebug, () => lightingDebugSettings.skyReflectionMipmap, (value) => lightingDebugSettings.skyReflectionMipmap = (float)value, DebugItemFlag.None, new DebugItemHandlerFloatMinMax(0.0f, 1.0f));
DebugMenuManager.instance.AddDebugItem<LightingDebugPanel, TilePass.TileSettings.TileClusterDebug>(kTileClusterDebug,() => lightingDebugSettings.tileClusterDebug, (value) => lightingDebugSettings.tileClusterDebug = (TilePass.TileSettings.TileClusterDebug)value);
DebugMenuManager.instance.AddDebugItem<LightingDebugPanel, TilePass.TileSettings.TileClusterCategoryDebug>(kTileClusterCategoryDebug,() => lightingDebugSettings.tileClusterDebugByCategory, (value) => lightingDebugSettings.tileClusterDebugByCategory = (TilePass.TileSettings.TileClusterCategoryDebug)value);
DebugMenuManager.instance.AddDebugItem<LightingDebugPanel, LightLoopSettings.TileClusterDebug>(kTileClusterDebug,() => lightingDebugSettings.tileClusterDebug, (value) => lightingDebugSettings.tileClusterDebug = (LightLoopSettings.TileClusterDebug)value);
DebugMenuManager.instance.AddDebugItem<LightingDebugPanel, LightLoopSettings.TileClusterCategoryDebug>(kTileClusterCategoryDebug,() => lightingDebugSettings.tileClusterDebugByCategory, (value) => lightingDebugSettings.tileClusterDebugByCategory = (LightLoopSettings.TileClusterCategoryDebug)value);
DebugMenuManager.instance.AddDebugItem<bool>("Rendering", "Display Opaque",() => renderingDebugSettings.displayOpaqueObjects, (value) => renderingDebugSettings.displayOpaqueObjects = (bool)value);
DebugMenuManager.instance.AddDebugItem<bool>("Rendering", "Display Transparency",() => renderingDebugSettings.displayTransparentObjects, (value) => renderingDebugSettings.displayTransparentObjects = (bool)value);

lightingDebugSettings.OnValidate();
}
// className includes the additional "/"
void FillWithProperties(Type type, GUIContent[] debugViewMaterialStrings, int[] debugViewMaterialValues, string className, ref int index)
{
var attributes = type.GetCustomAttributes(true);
// Get the attribute to retrieve the starting number of the values for the enum
var attr = attributes[0] as GenerateHLSL;
if (!attr.needParamDebug)
{
return;
}
var fields = type.GetFields();
var localIndex = 0;
foreach (var field in fields)
{
var fieldName = field.Name;
// Check if the display name has been overridden by the user
if (Attribute.IsDefined(field, typeof(SurfaceDataAttributes)))
{
var propertyAttr = (SurfaceDataAttributes[])field.GetCustomAttributes(typeof(SurfaceDataAttributes), false);
if (propertyAttr[0].displayName != "")
{
fieldName = propertyAttr[0].displayName;
}
}
fieldName = className + fieldName;
debugViewMaterialStrings[index] = new GUIContent(fieldName);
debugViewMaterialValues[index] = attr.paramDefinesStart + (int)localIndex;
index++;
localIndex++;
}
}
void FillWithPropertiesEnum(Type type, GUIContent[] debugViewMaterialStrings, int[] debugViewMaterialValues, string prefix, ref int index)
{
var names = Enum.GetNames(type);
var localIndex = 0;
foreach (var value in Enum.GetValues(type))
{
var valueName = prefix + names[localIndex];
debugViewMaterialStrings[index] = new GUIContent(valueName);
debugViewMaterialValues[index] = (int)value;
index++;
localIndex++;
}
}
public class MaterialItem
{
public String className;
public Type surfaceDataType;
public Type bsdfDataType;
};
void BuildDebugRepresentation()
{
if (!isDebugViewMaterialInit)
{
List<RenderPipelineMaterial> materialList = HDUtils.GetRenderPipelineMaterialList();
// TODO: Share this code to retrieve deferred material with HDRenderPipeline
// Find the first material that has a non-zero GBuffer count and assign it as deferredMaterial
Type bsdfDataDeferredType = null;
foreach (RenderPipelineMaterial material in materialList)
{
if (material.GetMaterialGBufferCount() > 0)
{
bsdfDataDeferredType = material.GetType().GetNestedType("BSDFData");
}
}
// TODO: Handle the case of no Gbuffer material
Debug.Assert(bsdfDataDeferredType != null);
List<MaterialItem> materialItems = new List<MaterialItem>();
int numSurfaceDataFields = 0;
int numBSDFDataFields = 0;
foreach (RenderPipelineMaterial material in materialList)
{
MaterialItem item = new MaterialItem();
item.className = material.GetType().Name + "/";
item.surfaceDataType = material.GetType().GetNestedType("SurfaceData");
numSurfaceDataFields += item.surfaceDataType.GetFields().Length;
item.bsdfDataType = material.GetType().GetNestedType("BSDFData");
numBSDFDataFields += item.bsdfDataType.GetFields().Length;
materialItems.Add(item);
}
// Material properties debug
var num = typeof(Builtin.BuiltinData).GetFields().Length * materialList.Count // BuiltinData are duplicated for each material
+ numSurfaceDataFields + 1; // +1 for None case
debugViewMaterialStrings = new GUIContent[num];
debugViewMaterialValues = new int[num];
// Special case for None since it cannot be inferred from SurfaceData/BuiltinData
debugViewMaterialStrings[0] = new GUIContent("None");
debugViewMaterialValues[0] = 0;
var index = 1;
// 0 is a reserved number and should not be used (allows error tracking)
foreach (MaterialItem item in materialItems)
{
// BuiltinData are duplicated for each material
FillWithProperties(typeof(Builtin.BuiltinData), debugViewMaterialStrings, debugViewMaterialValues, item.className, ref index);
FillWithProperties(item.surfaceDataType, debugViewMaterialStrings, debugViewMaterialValues, item.className, ref index);
}
// Engine properties debug
num = numBSDFDataFields + 1; // +1 for None case
debugViewEngineStrings = new GUIContent[num];
debugViewEngineValues = new int[num];
// 0 is a reserved number and should not be used (allows error tracking)
debugViewEngineStrings[0] = new GUIContent("None");
debugViewEngineValues[0] = 0;
index = 1;
foreach (MaterialItem item in materialItems)
{
FillWithProperties(item.bsdfDataType, debugViewEngineStrings, debugViewEngineValues, item.className, ref index);
}
// Attributes debug
var varyingNames = Enum.GetNames(typeof(Attributes.DebugViewVarying));
debugViewMaterialVaryingStrings = new GUIContent[varyingNames.Length];
debugViewMaterialVaryingValues = new int[varyingNames.Length];
index = 0;
FillWithPropertiesEnum(typeof(Attributes.DebugViewVarying), debugViewMaterialVaryingStrings, debugViewMaterialVaryingValues, "", ref index);
// Properties debug
var propertiesNames = Enum.GetNames(typeof(Attributes.DebugViewProperties));
debugViewMaterialPropertiesStrings = new GUIContent[propertiesNames.Length];
debugViewMaterialPropertiesValues = new int[propertiesNames.Length];
index = 0;
FillWithPropertiesEnum(typeof(Attributes.DebugViewProperties), debugViewMaterialPropertiesStrings, debugViewMaterialPropertiesValues, "", ref index);
// Gbuffer debug
var gbufferNames = Enum.GetNames(typeof(Attributes.DebugViewGbuffer));
debugViewMaterialGBufferStrings = new GUIContent[gbufferNames.Length + bsdfDataDeferredType.GetFields().Length];
debugViewMaterialGBufferValues = new int[gbufferNames.Length + bsdfDataDeferredType.GetFields().Length];
index = 0;
FillWithPropertiesEnum(typeof(Attributes.DebugViewGbuffer), debugViewMaterialGBufferStrings, debugViewMaterialGBufferValues, "", ref index);
FillWithProperties(typeof(Lit.BSDFData), debugViewMaterialGBufferStrings, debugViewMaterialGBufferValues, "", ref index);
// Lighting Full Screen Debug
FillFullScreenDebugEnum(ref lightingFullScreenDebugStrings, ref lightingFullScreenDebugValues, FullScreenDebugMode.MinLightingFullScreenDebug, FullScreenDebugMode.MaxLightingFullScreenDebug);
FillFullScreenDebugEnum(ref renderingFullScreenDebugStrings, ref renderingFullScreenDebugValues, FullScreenDebugMode.MinRenderingFullScreenDebug, FullScreenDebugMode.MaxRenderingFullScreenDebug);
isDebugViewMaterialInit = true;
}
}
void FillFullScreenDebugEnum(ref GUIContent[] strings, ref int[] values, FullScreenDebugMode min, FullScreenDebugMode max)
{
int count = max - min - 1;

values[index] = i;
index++;
}
}
}
namespace Attributes
{
// 0 is reserved!
[GenerateHLSL]
public enum DebugViewVarying
{
None = 0,
Texcoord0 = 1,
Texcoord1,
Texcoord2,
Texcoord3,
VertexTangentWS,
VertexBitangentWS,
VertexNormalWS,
VertexColor,
VertexColorAlpha,
Last,
};
// Number must be contiguous
[GenerateHLSL]
public enum DebugViewGbuffer
{
None = 0,
Depth = DebugViewVarying.Last,
BakeDiffuseLightingWithAlbedoPlusEmissive,
BakeShadowMask0,
BakeShadowMask1,
BakeShadowMask2,
BakeShadowMask3,
Last,
}
// Number must be contiguous
[GenerateHLSL]
public enum DebugViewProperties
{
None = 0,
Tessellation = DebugViewGbuffer.Last,
PixelDisplacement,
VertexDisplacement,
TessellationDisplacement,
DepthOffset,
Lightmap,
}
}
[Serializable]
public class MaterialDebugSettings
{
public int debugViewMaterial { get { return m_DebugViewMaterial; } }
public int debugViewEngine { get { return m_DebugViewEngine; } }
public Attributes.DebugViewVarying debugViewVarying { get { return m_DebugViewVarying; } }
public Attributes.DebugViewProperties debugViewProperties { get { return m_DebugViewProperties; } }
public int debugViewGBuffer { get { return m_DebugViewGBuffer; } }
int m_DebugViewMaterial = 0; // No enum there because everything is generated from materials.
int m_DebugViewEngine = 0; // No enum there because everything is generated from BSDFData
Attributes.DebugViewVarying m_DebugViewVarying = Attributes.DebugViewVarying.None;
Attributes.DebugViewProperties m_DebugViewProperties = Attributes.DebugViewProperties.None;
int m_DebugViewGBuffer = 0; // Can't use GBuffer enum here because the values are actually split between this enum and values from Lit.BSDFData
public int GetDebugMaterialIndex()
{
// This value is used in the shader for the actual debug display.
// There is only one uniform parameter for that, so we just add all of them.
// They are all mutually exclusive, so returning the sum yields the right index.
return m_DebugViewGBuffer + m_DebugViewMaterial + m_DebugViewEngine + (int)m_DebugViewVarying + (int)m_DebugViewProperties;
}
public void DisableMaterialDebug()
{
m_DebugViewMaterial = 0;
m_DebugViewEngine = 0;
m_DebugViewVarying = Attributes.DebugViewVarying.None;
m_DebugViewProperties = Attributes.DebugViewProperties.None;
m_DebugViewGBuffer = 0;
}
public void SetDebugViewMaterial(int value)
{
if (value != 0)
DisableMaterialDebug();
m_DebugViewMaterial = value;
}
public void SetDebugViewEngine(int value)
{
if (value != 0)
DisableMaterialDebug();
m_DebugViewEngine = value;
}
public void SetDebugViewVarying(Attributes.DebugViewVarying value)
{
if (value != 0)
DisableMaterialDebug();
m_DebugViewVarying = value;
}
public void SetDebugViewProperties(Attributes.DebugViewProperties value)
{
if (value != 0)
DisableMaterialDebug();
m_DebugViewProperties = value;
}
public void SetDebugViewGBuffer(int value)
{
if (value != 0)
DisableMaterialDebug();
m_DebugViewGBuffer = value;
}
public bool IsDebugGBufferEnabled()
{
return m_DebugViewGBuffer != 0;
}
public bool IsDebugDisplayEnabled()
{
return (m_DebugViewEngine != 0 || m_DebugViewMaterial != 0 || m_DebugViewVarying != Attributes.DebugViewVarying.None || m_DebugViewProperties != Attributes.DebugViewProperties.None || m_DebugViewGBuffer != 0);
}
}
[Serializable]
public class RenderingDebugSettings
{
public bool displayOpaqueObjects = true;
public bool displayTransparentObjects = true;
public bool enableDistortion = true;
public bool enableGaussianPyramid = true;
public bool enableSSSAndTransmission = true;
public bool enableAtmosphericScattering = true;
}
public enum ShadowMapDebugMode
{
None,
VisualizeAtlas,
VisualizeShadowMap
}
[GenerateHLSL]
public enum FullScreenDebugMode
{
None,
// Lighting
MinLightingFullScreenDebug,
SSAO,
DeferredShadows,
PreRefractionColorPyramid,
DepthPyramid,
FinalColorPyramid,
MaxLightingFullScreenDebug,
// Rendering
MinRenderingFullScreenDebug,
MotionVectors,
NanTracker,
MaxRenderingFullScreenDebug
}
[Serializable]
public class LightingDebugSettings
{
public bool IsDebugDisplayEnabled()
{
return debugLightingMode != DebugLightingMode.None;
}
public DebugLightingMode debugLightingMode = DebugLightingMode.None;
public bool enableShadows = true;
public ShadowMapDebugMode shadowDebugMode = ShadowMapDebugMode.None;
public bool shadowDebugUseSelection = false;
public uint shadowMapIndex = 0;
public uint shadowAtlasIndex = 0;
public float shadowMinValue = 0.0f;
public float shadowMaxValue = 1.0f;
public bool overrideSmoothness = false;
public float overrideSmoothnessValue = 0.5f;
public Color debugLightingAlbedo = new Color(0.5f, 0.5f, 0.5f);
public bool displaySkyReflection = false;
public float skyReflectionMipmap = 0.0f;
public TilePass.TileSettings.TileClusterDebug tileClusterDebug = TilePass.TileSettings.TileClusterDebug.None;
public TilePass.TileSettings.TileClusterCategoryDebug tileClusterDebugByCategory = TilePass.TileSettings.TileClusterCategoryDebug.Punctual;
public void OnValidate()
{
overrideSmoothnessValue = Mathf.Clamp(overrideSmoothnessValue, 0.0f, 1.0f);
skyReflectionMipmap = Mathf.Clamp(skyReflectionMipmap, 0.0f, 1.0f);
}
}
}

50
ScriptableRenderPipeline/HDRenderPipeline/Debug/DebugDisplay.cs.hlsl


#ifndef DEBUGDISPLAY_CS_HLSL
#define DEBUGDISPLAY_CS_HLSL
//
// UnityEngine.Experimental.Rendering.HDPipeline.DebugLightingMode: static fields
//
#define DEBUGLIGHTINGMODE_NONE (0)
#define DEBUGLIGHTINGMODE_DIFFUSE_LIGHTING (1)
#define DEBUGLIGHTINGMODE_SPECULAR_LIGHTING (2)
#define DEBUGLIGHTINGMODE_VISUALIZE_CASCADE (3)
#define DEBUGLIGHTINGMODE_INDIRECT_DIFFUSE_OCCLUSION_FROM_SSAO (4)
#define DEBUGLIGHTINGMODE_INDIRECT_DIFFUSE_GTAO_FROM_SSAO (5)
#define DEBUGLIGHTINGMODE_INDIRECT_SPECULAR_OCCLUSION_FROM_SSAO (6)
#define DEBUGLIGHTINGMODE_INDIRECT_SPECULAR_GTAO_FROM_SSAO (7)
//
// UnityEngine.Experimental.Rendering.HDPipeline.Attributes.DebugViewVarying: static fields
//
#define DEBUGVIEWVARYING_NONE (0)
#define DEBUGVIEWVARYING_TEXCOORD0 (1)
#define DEBUGVIEWVARYING_TEXCOORD1 (2)
#define DEBUGVIEWVARYING_TEXCOORD2 (3)
#define DEBUGVIEWVARYING_TEXCOORD3 (4)
#define DEBUGVIEWVARYING_VERTEX_TANGENT_WS (5)
#define DEBUGVIEWVARYING_VERTEX_BITANGENT_WS (6)
#define DEBUGVIEWVARYING_VERTEX_NORMAL_WS (7)
#define DEBUGVIEWVARYING_VERTEX_COLOR (8)
#define DEBUGVIEWVARYING_VERTEX_COLOR_ALPHA (9)
#define DEBUGVIEWVARYING_LAST (10)
//
// UnityEngine.Experimental.Rendering.HDPipeline.Attributes.DebugViewGbuffer: static fields
//
#define DEBUGVIEWGBUFFER_NONE (0)
#define DEBUGVIEWGBUFFER_DEPTH (10)
#define DEBUGVIEWGBUFFER_BAKE_DIFFUSE_LIGHTING_WITH_ALBEDO_PLUS_EMISSIVE (11)
#define DEBUGVIEWGBUFFER_BAKE_SHADOW_MASK0 (12)
#define DEBUGVIEWGBUFFER_BAKE_SHADOW_MASK1 (13)
#define DEBUGVIEWGBUFFER_BAKE_SHADOW_MASK2 (14)
#define DEBUGVIEWGBUFFER_BAKE_SHADOW_MASK3 (15)
#define DEBUGVIEWGBUFFER_LAST (16)
//
// UnityEngine.Experimental.Rendering.HDPipeline.Attributes.DebugViewProperties: static fields
//
#define DEBUGVIEWPROPERTIES_NONE (0)
#define DEBUGVIEWPROPERTIES_TESSELLATION (16)
#define DEBUGVIEWPROPERTIES_PIXEL_DISPLACEMENT (17)
#define DEBUGVIEWPROPERTIES_VERTEX_DISPLACEMENT (18)
#define DEBUGVIEWPROPERTIES_TESSELLATION_DISPLACEMENT (19)
#define DEBUGVIEWPROPERTIES_DEPTH_OFFSET (20)
#define DEBUGVIEWPROPERTIES_LIGHTMAP (21)
//
// UnityEngine.Experimental.Rendering.HDPipeline.FullScreenDebugMode: static fields
//
#define FULLSCREENDEBUGMODE_NONE (0)

2
ScriptableRenderPipeline/HDRenderPipeline/Debug/DebugDisplay.hlsl


#define UNITY_DEBUG_DISPLAY_INCLUDED
#include "DebugDisplay.cs.hlsl"
#include "MaterialDebug.cs.hlsl"
#include "LightingDebug.cs.hlsl"
// Set of parameters available when switching to debug shader mode
int _DebugLightingMode; // Match enum DebugLightingMode

11
ScriptableRenderPipeline/HDRenderPipeline/Debug/DebugViewMaterialGBuffer.shader


#pragma multi_compile _ SHADOWS_SHADOWMASK
#include "ShaderLibrary/Common.hlsl"
#include "ShaderLibrary/Color.hlsl"
// CAUTION: If deferred lighting needs to support various lighting models statically, we will need to multi-compile with different defines like UNITY_MATERIAL_LIT
#define UNITY_MATERIAL_LIT // Needs to be defined before including Material.hlsl

#include "../Material/Material.hlsl"
#ifdef SHADOWS_SHADOWMASK
TEXTURE2D(_ShadowMaskTexture);
#endif

{
// input.positionCS is SV_Position
PositionInputs posInput = GetPositionInput(input.positionCS.xy, _ScreenSize.zw);
float depth = LOAD_TEXTURE2D(_MainDepthTexture, posInput.unPositionSS).x;
float depth = LOAD_TEXTURE2D(_MainDepthTexture, posInput.positionSS).x;
DECODE_FROM_GBUFFER(posInput.unPositionSS, 0xFFFFFFFF, bsdfData, bakeLightingData.bakeDiffuseLighting);
DECODE_FROM_GBUFFER(posInput.positionSS, UINT_MAX, bsdfData, bakeLightingData.bakeDiffuseLighting);
DecodeShadowMask(LOAD_TEXTURE2D(_ShadowMaskTexture, posInput.unPositionSS), bakeLightingData.bakeShadowMask);
DecodeShadowMask(LOAD_TEXTURE2D(_ShadowMaskTexture, posInput.positionSS), bakeLightingData.bakeShadowMask);
#endif
// Init to a value that is not expected

if (_DebugViewMaterial == DEBUGVIEWGBUFFER_DEPTH)
{
float linearDepth = frac(posInput.depthVS * 0.1);
float linearDepth = frac(posInput.linearDepth * 0.1);
result = linearDepth.xxx;
}
// Caution: This value is not the same as the builtin data bakeDiffuseLighting. It also includes emissive and is multiplied by the albedo

8
ScriptableRenderPipeline/HDRenderPipeline/Debug/DebugViewTiles.shader


{
// positionCS is SV_Position
PositionInputs posInput = GetPositionInput(input.positionCS.xy, _ScreenSize.zw, uint2(input.positionCS.xy) / GetTileSize());
float depth = LOAD_TEXTURE2D(_MainDepthTexture, posInput.unPositionSS).x;
float depth = LOAD_TEXTURE2D(_MainDepthTexture, posInput.positionSS).x;
int2 pixelCoord = posInput.unPositionSS.xy;
int2 pixelCoord = posInput.positionSS.xy;
int2 tileCoord = (float2)pixelCoord / GetTileSize();
int2 mouseTileCoord = _MousePixelCoord / GetTileSize();
int2 offsetInTile = pixelCoord - tileCoord * GetTileSize();

// Tile overlap counter
if (n >= 0)
{
result = OverlayHeatMap(int2(posInput.unPositionSS.xy) & (GetTileSize() - 1), n);
result = OverlayHeatMap(int2(posInput.positionSS.xy) & (GetTileSize() - 1), n);
}
#ifdef SHOW_LIGHT_CATEGORIES

if (tileCoord.y < LIGHTCATEGORY_COUNT && tileCoord.x < maxLights + 3)
{
PositionInputs mousePosInput = GetPositionInput(_MousePixelCoord, _ScreenSize.zw, mouseTileCoord);
float depthMouse = LOAD_TEXTURE2D(_MainDepthTexture, mousePosInput.unPositionSS).x;
float depthMouse = LOAD_TEXTURE2D(_MainDepthTexture, mousePosInput.positionSS).x;
UpdatePositionInput(depthMouse, UNITY_MATRIX_I_VP, UNITY_MATRIX_VP, mousePosInput);
uint category = (LIGHTCATEGORY_COUNT - 1) - tileCoord.y;

26
ScriptableRenderPipeline/HDRenderPipeline/Editor/HDAssetFactory.cs


newAsset.copyChannelCS = Load<ComputeShader>(CorePath + "Resources/GPUCopy.compute");
newAsset.applyDistortionCS = Load<ComputeShader>(HDRenderPipelinePath + "RenderPipelineResources/ApplyDistorsion.compute");
newAsset.clearDispatchIndirectShader = Load<ComputeShader>(HDRenderPipelinePath + "Lighting/TilePass/cleardispatchindirect.compute");
newAsset.buildDispatchIndirectShader = Load<ComputeShader>(HDRenderPipelinePath + "Lighting/TilePass/builddispatchindirect.compute");
newAsset.buildScreenAABBShader = Load<ComputeShader>(HDRenderPipelinePath + "Lighting/TilePass/scrbound.compute");
newAsset.buildPerTileLightListShader = Load<ComputeShader>(HDRenderPipelinePath + "Lighting/TilePass/lightlistbuild.compute");
newAsset.buildPerBigTileLightListShader = Load<ComputeShader>(HDRenderPipelinePath + "Lighting/TilePass/lightlistbuild-bigtile.compute");
newAsset.buildPerVoxelLightListShader = Load<ComputeShader>(HDRenderPipelinePath + "Lighting/TilePass/lightlistbuild-clustered.compute");
newAsset.buildMaterialFlagsShader = Load<ComputeShader>(HDRenderPipelinePath + "Lighting/TilePass/materialflags.compute");
newAsset.deferredComputeShader = Load<ComputeShader>(HDRenderPipelinePath + "Lighting/TilePass/Deferred.compute");
newAsset.clearDispatchIndirectShader = Load<ComputeShader>(HDRenderPipelinePath + "Lighting/LightLoop/cleardispatchindirect.compute");
newAsset.buildDispatchIndirectShader = Load<ComputeShader>(HDRenderPipelinePath + "Lighting/LightLoop/builddispatchindirect.compute");
newAsset.buildScreenAABBShader = Load<ComputeShader>(HDRenderPipelinePath + "Lighting/LightLoop/scrbound.compute");
newAsset.buildPerTileLightListShader = Load<ComputeShader>(HDRenderPipelinePath + "Lighting/LightLoop/lightlistbuild.compute");
newAsset.buildPerBigTileLightListShader = Load<ComputeShader>(HDRenderPipelinePath + "Lighting/LightLoop/lightlistbuild-bigtile.compute");
newAsset.buildPerVoxelLightListShader = Load<ComputeShader>(HDRenderPipelinePath + "Lighting/LightLoop/lightlistbuild-clustered.compute");
newAsset.buildMaterialFlagsShader = Load<ComputeShader>(HDRenderPipelinePath + "Lighting/LightLoop/materialflags.compute");
newAsset.deferredComputeShader = Load<ComputeShader>(HDRenderPipelinePath + "Lighting/LightLoop/Deferred.compute");
newAsset.deferredDirectionalShadowComputeShader = Load<ComputeShader>(HDRenderPipelinePath + "Lighting/TilePass/DeferredDirectionalShadow.compute");
newAsset.deferredDirectionalShadowComputeShader = Load<ComputeShader>(HDRenderPipelinePath + "Lighting/DeferredDirectionalShadow.compute");
// SceneSettings
// These shaders don't need to be referenced by RenderPipelineResource as they are not used at runtime (only used to draw in the editor)

// Sky
newAsset.blitCubemap = Load<Shader>(HDRenderPipelinePath + "Sky/BlitCubemap.shader");
newAsset.buildProbabilityTables = Load<ComputeShader>(HDRenderPipelinePath + "Sky/BuildProbabilityTables.compute");
newAsset.computeGgxIblSampleData = Load<ComputeShader>(HDRenderPipelinePath + "Sky/ComputeGgxIblSampleData.compute");
newAsset.GGXConvolve = Load<Shader>(HDRenderPipelinePath + "Sky/GGXConvolve.shader");
newAsset.buildProbabilityTables = Load<ComputeShader>(HDRenderPipelinePath + "Material/GGXConvolution/BuildProbabilityTables.compute");
newAsset.computeGgxIblSampleData = Load<ComputeShader>(HDRenderPipelinePath + "Material/GGXConvolution/ComputeGgxIblSampleData.compute");
newAsset.GGXConvolve = Load<Shader>(HDRenderPipelinePath + "Material/GGXConvolution/GGXConvolve.shader");
newAsset.encodeBC6HCS = Load<ComputeShader>(CorePath + "Resources/EncodeBC6H.compute");
// Skybox/Cubemap is a builtin shader, so we must use Shader.Find to access it. This is fine because we are in the editor
newAsset.skyboxCubemap = Shader.Find("Skybox/Cubemap");

1
ScriptableRenderPipeline/HDRenderPipeline/Editor/HDRenderPipelineInspector.Styles.cs


public readonly GUIContent spotCookieSize = new GUIContent("Spot Cookie Size");
public readonly GUIContent pointCookieSize = new GUIContent("Point Cookie Size");
public readonly GUIContent reflectionCubemapSize = new GUIContent("Reflection Cubemap Size");
public readonly GUIContent reflectionCacheCompressed = new GUIContent("Compress Reflection Probe Cache");
public readonly GUIContent sssSettings = new GUIContent("Subsurface Scattering Settings");

19
ScriptableRenderPipeline/HDRenderPipeline/Editor/HDRenderPipelineInspector.cs


SerializedProperty m_DefaultDiffuseMaterial;
SerializedProperty m_DefaultShader;
// TilePass settings
// LightLoop settings
SerializedProperty m_enableTileAndCluster;
SerializedProperty m_enableSplitLightEvaluation;
SerializedProperty m_enableComputeLightEvaluation;

SerializedProperty m_SpotCookieSize;
SerializedProperty m_PointCookieSize;
SerializedProperty m_ReflectionCubemapSize;
SerializedProperty m_ReflectionCacheCompressed;
void InitializeProperties()
{

m_ShadowAtlasHeight = properties.Find(x => x.shadowInitParams.shadowAtlasHeight);
// Texture settings
m_SpotCookieSize = properties.Find(x => x.textureSettings.spotCookieSize);
m_PointCookieSize = properties.Find(x => x.textureSettings.pointCookieSize);
m_ReflectionCubemapSize = properties.Find(x => x.textureSettings.reflectionCubemapSize);
m_SpotCookieSize = properties.Find(x => x.globalTextureSettings.spotCookieSize);
m_PointCookieSize = properties.Find(x => x.globalTextureSettings.pointCookieSize);
m_ReflectionCubemapSize = properties.Find(x => x.globalTextureSettings.reflectionCubemapSize);
m_ReflectionCacheCompressed = properties.Find(x => x.globalTextureSettings.reflectionCacheCompressed);
m_RenderingUseForwardOnly = properties.Find(x => x.renderingSettings.useForwardRenderingOnly);
m_RenderingUseDepthPrepass = properties.Find(x => x.renderingSettings.useDepthPrepassWithDeferredRendering);
m_RenderingUseDepthPrepassAlphaTestOnly = properties.Find(x => x.renderingSettings.renderAlphaTestOnlyInDeferredPrepass);
m_RenderingUseForwardOnly = properties.Find(x => x.globalRenderingSettings.useForwardRenderingOnly);
m_RenderingUseDepthPrepass = properties.Find(x => x.globalRenderingSettings.useDepthPrepassWithDeferredRendering);
m_RenderingUseDepthPrepassAlphaTestOnly = properties.Find(x => x.globalRenderingSettings.renderAlphaTestOnlyInDeferredPrepass);
// Subsurface Scattering Settings
m_SubsurfaceScatteringSettings = properties.Find(x => x.sssSettings);

EditorGUILayout.PropertyField(m_SpotCookieSize, s_Styles.spotCookieSize);
EditorGUILayout.PropertyField(m_PointCookieSize, s_Styles.pointCookieSize);
EditorGUILayout.PropertyField(m_ReflectionCubemapSize, s_Styles.reflectionCubemapSize);
// Commented out until we have proper realtime BC6H compression
//EditorGUILayout.PropertyField(m_ReflectionCacheCompressed, s_Styles.reflectionCacheCompressed);
if (EditorGUI.EndChangeCheck())
{

8
ScriptableRenderPipeline/HDRenderPipeline/Editor/HDRenderPipelineMenuItems.cs


class DoCreateNewAssetCommonSettings : DoCreateNewAsset<CommonSettings> {}
class DoCreateNewAssetHDRISkySettings : DoCreateNewAsset<HDRISkySettings> {}
class DoCreateNewAssetBlacksmithSkySettings : DoCreateNewAsset<BlacksmithSkySettings> {}
class DoCreateNewAssetProceduralSkySettings : DoCreateNewAsset<ProceduralSkySettings> {}
class DoCreateNewAssetSubsurfaceScatteringSettings : DoCreateNewAsset<SubsurfaceScatteringSettings> {}

{
var icon = EditorGUIUtility.FindTexture("ScriptableObject Icon");
ProjectWindowUtil.StartNameEditingIfProjectWindowExists(0, ScriptableObject.CreateInstance<DoCreateNewAssetHDRISkySettings>(), "New HDRISkySettings.asset", icon, null);
}
[MenuItem("Assets/Create/Render Pipeline/High Definition/BlacksmithSky Settings", priority = CoreUtils.assetCreateMenuPriority2)]
static void MenuCreateBlacksmithSkySettings()
{
var icon = EditorGUIUtility.FindTexture("ScriptableObject Icon");
ProjectWindowUtil.StartNameEditingIfProjectWindowExists(0, ScriptableObject.CreateInstance<DoCreateNewAssetBlacksmithSkySettings>(), "New BlacksmithSkySettings.asset", icon, null);
}
[MenuItem("Assets/Create/Render Pipeline/High Definition/ProceduralSky Settings", priority = CoreUtils.assetCreateMenuPriority2)]

3
ScriptableRenderPipeline/HDRenderPipeline/HDCustomSamplerId.cs


TPDisplayShadows,
TPRenderDeferredLighting,
// Misc
VolumeUpdate,
Max
}
}

454
ScriptableRenderPipeline/HDRenderPipeline/HDRenderPipeline.cs


using System.Collections.Generic;
using System.Collections.Generic;
using UnityEngine.Experimental.Rendering.HDPipeline.TilePass;
using UnityEngine.Profiling;
#if UNITY_EDITOR

namespace UnityEngine.Experimental.Rendering.HDPipeline
{
[Serializable]
public class RenderingSettings
{
public bool useForwardRenderingOnly; // TODO: Currently there is no way to strip the extra forward shaders generated by the shaders compiler, so we can switch dynamically.
public bool useDepthPrepassWithDeferredRendering;
public bool renderAlphaTestOnlyInDeferredPrepass;
// We have to fall back to forward-only rendering when the scene view is using wireframe rendering mode --
// rendering everything in wireframe + deferred does not play well together
public bool ShouldUseForwardRenderingOnly()
{
return useForwardRenderingOnly || GL.wireframe;
}
}
public class GBufferManager
{
public const int k_MaxGbuffer = 8;

for (int gbufferIndex = 0; gbufferIndex < gbufferCount; ++gbufferIndex)
{
cmd.ReleaseTemporaryRT(HDShaderIDs._GBufferTexture[gbufferIndex]);
cmd.GetTemporaryRT(HDShaderIDs._GBufferTexture[gbufferIndex], width, height, 0, FilterMode.Point, rtFormat[gbufferIndex], rtReadWrite[gbufferIndex]);
m_RTIDs[gbufferIndex] = new RenderTargetIdentifier(HDShaderIDs._GBufferTexture[gbufferIndex]);
}

cmd.ReleaseTemporaryRT(HDShaderIDs._ShadowMaskTexture);
cmd.GetTemporaryRT(HDShaderIDs._ShadowMaskTexture, width, height, 0, FilterMode.Point, Builtin.GetShadowMaskBufferFormat(), Builtin.GetShadowMaskBufferReadWrite());
m_RTIDs[gbufferCount++] = new RenderTargetIdentifier(HDShaderIDs._ShadowMaskTexture);
}

// If velocity is in GBuffer then it is in the last RT. Assign a different name to it.
cmd.ReleaseTemporaryRT(HDShaderIDs._VelocityTexture);
cmd.GetTemporaryRT(HDShaderIDs._VelocityTexture, width, height, 0, FilterMode.Point, Builtin.GetVelocityBufferFormat(), Builtin.GetVelocityBufferReadWrite());
m_RTIDs[gbufferCount++] = new RenderTargetIdentifier(HDShaderIDs._VelocityTexture);
}

};
static readonly RenderQueueRange k_RenderQueue_PreRefraction = new RenderQueueRange { min = (int)HDRenderQueue.PreRefraction, max = (int)HDRenderQueue.Transparent - 1 };
static readonly RenderQueueRange k_RenderQueue_Transparent = new RenderQueueRange { min = (int)HDRenderQueue.Transparent, max = (int)HDRenderQueue.Overlay };
static readonly RenderQueueRange k_RenderQueue_Transparent = new RenderQueueRange { min = (int)HDRenderQueue.Transparent, max = (int)HDRenderQueue.Overlay - 1};
readonly HDRenderPipelineAsset m_Asset;

// Renderer bake configuration can vary depending on whether the shadow mask is enabled or not
RendererConfiguration m_currentRendererConfigurationBakedLighting = HDUtils.k_RendererConfigurationBakedLighting;
// Various sets of materials used in the render loop
IBLFilterGGX m_IBLFilterGGX = null;
// Various sets of materials used in the render loop
ComputeShader m_SubsurfaceScatteringCS { get { return m_Asset.renderPipelineResources.subsurfaceScatteringCS; } }
int m_SubsurfaceScatteringKernel;
Material m_CombineLightingPass;

// The pass "SRPDefaultUnlit" is a fall back to legacy unlit rendering and is required to support unity 2d + unity UI that render in the scene.
ShaderPassName[] m_ForwardAndForwardOnlyPassNames = { new ShaderPassName(), new ShaderPassName(), HDShaderPassNames.s_SRPDefaultUnlitName};
ShaderPassName[] m_ForwardOnlyPassNames = { new ShaderPassName(), HDShaderPassNames.s_SRPDefaultUnlitName };
ShaderPassName[] m_ForwardOnlyPassNames = { new ShaderPassName(), HDShaderPassNames.s_SRPDefaultUnlitName};
ShaderPassName[] m_AllTransparentPassNames = { HDShaderPassNames.s_TransparentBackfaceName,
HDShaderPassNames.s_ForwardOnlyName,

{
m_Asset = asset;
m_GPUCopy = new GPUCopy(asset.renderPipelineResources.copyChannelCS);
EncodeBC6H.DefaultInstance = EncodeBC6H.DefaultInstance ?? new EncodeBC6H(asset.renderPipelineResources.encodeBC6HCS);
// Scan material list and assign it
m_MaterialList = HDUtils.GetRenderPipelineMaterialList();

// Initialize various compute shader resources
m_applyDistortionKernel = m_applyDistortionCS.FindKernel("KMain");
m_CopyStencilForSplitLighting = CoreUtils.CreateEngineMaterial("Hidden/HDRenderPipeline/CopyStencilBuffer");
m_CopyStencilForSplitLighting.EnableKeyword("EXPORT_HTILE");
m_CopyStencilForSplitLighting.SetInt(HDShaderIDs._StencilRef, (int)StencilLightingUsage.SplitLighting);

m_MaterialList.ForEach(material => material.Build(asset.renderPipelineResources));
m_IBLFilterGGX = new IBLFilterGGX(asset.renderPipelineResources);
m_LightLoop.Build(asset.renderPipelineResources, asset.globalRenderingSettings, asset.tileSettings, asset.globalTextureSettings, asset.shadowInitParams, m_ShadowSettings, m_IBLFilterGGX);
m_SkyManager.Build(asset.renderPipelineResources, m_IBLFilterGGX);
m_SkyManager.skySettings = skySettingsToUse;
m_DebugDisplaySettings.RegisterDebug();

void RegisterDebug()
{
// These need to be Runtime Only because those values are held by the HDRenderPipeline asset; if the user changes them through the editor debug menu, they might change the value in the asset without noticing it.
DebugMenuManager.instance.AddDebugItem<bool>("HDRP", "Forward Only", () => m_Asset.globalRenderingSettings.useForwardRenderingOnly, (value) => m_Asset.globalRenderingSettings.useForwardRenderingOnly = (bool)value, DebugItemFlag.RuntimeOnly);
DebugMenuManager.instance.AddDebugItem<bool>("HDRP", "Deferred Depth Prepass", () => m_Asset.globalRenderingSettings.useDepthPrepassWithDeferredRendering, (value) => m_Asset.globalRenderingSettings.useDepthPrepassWithDeferredRendering = (bool)value, DebugItemFlag.RuntimeOnly);
DebugMenuManager.instance.AddDebugItem<bool>("HDRP", "Deferred Depth Prepass ATest Only", () => m_Asset.globalRenderingSettings.renderAlphaTestOnlyInDeferredPrepass, (value) => m_Asset.globalRenderingSettings.renderAlphaTestOnlyInDeferredPrepass = (bool)value, DebugItemFlag.RuntimeOnly);
DebugMenuManager.instance.AddDebugItem<bool>("HDRP", "Enable Tile/Cluster", () => m_Asset.tileSettings.enableTileAndCluster, (value) => m_Asset.tileSettings.enableTileAndCluster = (bool)value, DebugItemFlag.RuntimeOnly);
DebugMenuManager.instance.AddDebugItem<bool>("HDRP", "Enable Big Tile", () => m_Asset.tileSettings.enableBigTilePrepass, (value) => m_Asset.tileSettings.enableBigTilePrepass = (bool)value, DebugItemFlag.RuntimeOnly);
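Each debug item above is just a label plus a getter/setter pair on an asset-held value; adding another toggle follows the same pattern. A hedged sketch (the item label is made up; enableClustered is one of the serialized tile settings shown later in this commit):

DebugMenuManager.instance.AddDebugItem<bool>("HDRP", "Enable Clustered (example)",
    () => m_Asset.tileSettings.enableClustered,
    (value) => m_Asset.tileSettings.enableClustered = (bool)value,
    DebugItemFlag.RuntimeOnly); // RuntimeOnly so the editor debug menu cannot silently write into the asset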

cmd.SetGlobalVectorArray(HDShaderIDs._ShapeParams, sssParameters.shapeParams);
cmd.SetGlobalVectorArray(HDShaderIDs._HalfRcpVariancesAndWeights, sssParameters.halfRcpVariancesAndWeights);
cmd.SetGlobalVectorArray(HDShaderIDs._TransmissionTints, sssParameters.transmissionTints);
cmd.SetGlobalVectorArray(HDShaderIDs._WorldScales, sssParameters.worldScales);
}
}

CullResults m_CullResults;
public override void Render(ScriptableRenderContext renderContext, Camera[] cameras)
{
base.Render(renderContext, cameras);
#if UNITY_EDITOR
SupportedRenderingFeatures.active = s_NeededFeatures;
#endif
// HD uses specific GraphicsSettings. These are initialized here.
// TODO: This should not be set every frame, but is there another place for this config setup?
GraphicsSettings.lightsUseLinearIntensity = true;
GraphicsSettings.lightsUseColorTemperature = true;
if (m_FrameCount != Time.frameCount)
{
HDCamera.CleanUnused();
m_FrameCount = Time.frameCount;
}
foreach (var camera in cameras)
{
foreach (var material in m_MaterialList)
material.RenderInit(cmd);
// Do anything we need to do upon a new frame.
m_LightLoop.NewFrame();
if (camera == null)
{
renderContext.Submit();
continue;
}
using (new ProfilingSample(cmd, "Volume Update", GetSampler(CustomSamplerId.VolumeUpdate)))
{
// TODO: Transform & layer should be configurable per camera
VolumeManager.instance.Update(camera.transform, -1);
}
// If we render a reflection view or a preview we should not display any debug information
// This needs to be called before ApplyDebugDisplaySettings()
if (camera.cameraType == CameraType.Reflection || camera.cameraType == CameraType.Preview)
{
// Neutral allows disabling all debug settings
m_CurrentDebugDisplaySettings = s_NeutralDebugDisplaySettings;
}
else
{
m_CurrentDebugDisplaySettings = m_DebugDisplaySettings;
}
ApplyDebugDisplaySettings(cmd);
UpdateCommonSettings();
if (!m_IBLFilterGGX.IsInitialized())
m_IBLFilterGGX.Initialize(cmd);
ScriptableCullingParameters cullingParams;
if (!CullResults.GetCullingParameters(camera, out cullingParams))
{
renderContext.Submit();
continue;
}
m_LightLoop.UpdateCullingParameters(ref cullingParams);
// emit scene view UI
if (camera.cameraType == CameraType.SceneView)
{
ScriptableRenderContext.EmitWorldGeometryForSceneView(camera);
}
using (new ProfilingSample(cmd, "CullResults.Cull", GetSampler(CustomSamplerId.CullResultsCull)))
{
CullResults.Cull(ref cullingParams, renderContext, ref m_CullResults);
}
Resize(camera);
renderContext.SetupCameraProperties(camera);
var postProcessLayer = camera.GetComponent<PostProcessLayer>();
var hdCamera = HDCamera.Get(camera, postProcessLayer);
PushGlobalParams(hdCamera, cmd, sssSettings);
// TODO: Find a correct place to bind these material textures
// We have to bind the material specific global parameters in this mode
m_MaterialList.ForEach(material => material.Bind());
var additionalCameraData = camera.GetComponent<HDAdditionalCameraData>();
if (additionalCameraData && additionalCameraData.renderingPath == RenderingPathHDRP.Unlit)
{
// TODO: Add another path dedicated to planar reflection / real time cubemap that implements simpler lighting
// It is up to the users to only send unlit objects for this camera path
using (new ProfilingSample(cmd, "Forward", GetSampler(CustomSamplerId.Forward)))
{
CoreUtils.SetRenderTarget(cmd, m_CameraColorBufferRT, m_CameraDepthStencilBufferRT, ClearFlag.Color | ClearFlag.Depth);
RenderOpaqueRenderList(m_CullResults, camera, renderContext, cmd, HDShaderPassNames.s_ForwardName);
}
renderContext.ExecuteCommandBuffer(cmd);
CommandBufferPool.Release(cmd);
renderContext.Submit();
continue;
}
// Note: Legacy Unity behaves like this for ShadowMask:
// When you select ShadowMask in the Lighting panel it recompiles shaders on the fly with the SHADOW_MASK keyword.
// However there is no C# function we can query to know which mode has been selected in the Lighting panel, and it would be wrong anyway: the Lighting panel only sets what the next bake mode will be, so until the lighting is baked the value is wrong.
// Currently, to know if you need shadow mask you need to go through all visible lights (of CullResults), check the LightBakingOutput struct and look at lightmapBakeType/mixedLightingMode. If one light has the shadow mask bake mode, then you need the shadow mask feature (i.e. an extra GBuffer).
// It means that when we build a standalone player, if we detect a light with a baked shadow mask, we generate all shader variants (with and without shadow mask) and at runtime, when a baked shadow mask light is visible, we dynamically allocate an extra GBuffer and switch the shader.
// So the first thing to do is to go through all the lights: PrepareLightsForGPU
bool enableBakeShadowMask;
using (new ProfilingSample(cmd, "TP_PrepareLightsForGPU", GetSampler(CustomSamplerId.TPPrepareLightsForGPU)))
{
enableBakeShadowMask = m_LightLoop.PrepareLightsForGPU(cmd, m_ShadowSettings, m_CullResults, camera);
}
ConfigureForShadowMask(enableBakeShadowMask, cmd);
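// Illustrative sketch only (not part of this commit): the "go through all visible lights" check described
// in the note above could look roughly like this; PrepareLightsForGPU is what actually performs it.
bool needShadowMask = false;
foreach (var visibleLight in m_CullResults.visibleLights)
{
    LightBakingOutput bakingOutput = visibleLight.light.bakingOutput;
    if (bakingOutput.lightmapBakeType == LightmapBakeType.Mixed &&
        bakingOutput.mixedLightingMode == MixedLightingMode.Shadowmask)
    {
        needShadowMask = true; // at least one visible light is baked with a shadow mask
        break;
    }
}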
InitAndClearBuffer(hdCamera, enableBakeShadowMask, cmd);
RenderDepthPrepass(m_CullResults, camera, renderContext, cmd);
RenderGBuffer(m_CullResults, camera, renderContext, cmd);
// In both forward and deferred, everything opaque should have been rendered at this point so we can safely copy the depth buffer for later processing.
CopyDepthBufferIfNeeded(cmd);
RenderPyramidDepth(camera, cmd, renderContext, FullScreenDebugMode.DepthPyramid);
// Required for the SSS and the shader feature classification pass.
PrepareAndBindStencilTexture(cmd);
if (m_CurrentDebugDisplaySettings.IsDebugMaterialDisplayEnabled())
{
RenderDebugViewMaterial(m_CullResults, hdCamera, renderContext, cmd);
}
else
{
using (new ProfilingSample(cmd, "Render SSAO", GetSampler(CustomSamplerId.RenderSSAO)))
{
// TODO: Everything here (SSAO, shadows, build light list, deferred shadows, material and light classification) can be parallelized with async compute
RenderSSAO(cmd, camera, renderContext, postProcessLayer);
}
using (new ProfilingSample(cmd, "Render shadows", GetSampler(CustomSamplerId.RenderShadows)))
{
m_LightLoop.RenderShadows(renderContext, cmd, m_CullResults);
// TODO: check if the statement below still applies
renderContext.SetupCameraProperties(camera); // Need to recall SetupCameraProperties after RenderShadows as it modifies our view/proj matrices
}
using (new ProfilingSample(cmd, "Deferred directional shadows", GetSampler(CustomSamplerId.RenderDeferredDirectionalShadow)))
{
cmd.ReleaseTemporaryRT(m_DeferredShadowBuffer);
cmd.GetTemporaryRT(m_DeferredShadowBuffer, camera.pixelWidth, camera.pixelHeight, 0, FilterMode.Point, RenderTextureFormat.ARGB32, RenderTextureReadWrite.Linear, 1, true);
m_LightLoop.RenderDeferredDirectionalShadow(hdCamera, m_DeferredShadowBufferRT, GetDepthTexture(), cmd);
PushFullScreenDebugTexture(cmd, m_DeferredShadowBuffer, hdCamera.camera, renderContext, FullScreenDebugMode.DeferredShadows);
}
using (new ProfilingSample(cmd, "Build Light list", GetSampler(CustomSamplerId.BuildLightList)))
{
m_LightLoop.BuildGPULightLists(camera, cmd, m_CameraDepthStencilBufferRT, GetStencilTexture());
}
// Caution: We require the sun light here as some skies use the sun light to render, meaning UpdateSkyEnvironment
// must be called after BuildGPULightLists.
// TODO: Try to arrange code so we can trigger this call earlier and use async compute here to run sky convolution during other passes (once we move the convolution shader to compute).
UpdateSkyEnvironment(hdCamera, cmd);
RenderDeferredLighting(hdCamera, cmd);
// We compute subsurface scattering here. Therefore, no objects rendered afterwards will exhibit SSS.
// Currently, there is no efficient way to switch between SRT and MRT for the forward pass;
// therefore, forward-rendered objects do not output split lighting required for the SSS pass.
SubsurfaceScatteringPass(hdCamera, cmd, sssSettings);
RenderForward(m_CullResults, camera, renderContext, cmd, ForwardPass.Opaque);
RenderForwardError(m_CullResults, camera, renderContext, cmd, ForwardPass.Opaque);
RenderSky(hdCamera, cmd);
// Render pre refraction objects
RenderForward(m_CullResults, camera, renderContext, cmd, ForwardPass.PreRefraction);
RenderForwardError(m_CullResults, camera, renderContext, cmd, ForwardPass.PreRefraction);
RenderGaussianPyramidColor(camera, cmd, renderContext, FullScreenDebugMode.PreRefractionColorPyramid);
// Render all type of transparent forward (unlit, lit, complex (hair...)) to keep the sorting between transparent objects.
RenderForward(m_CullResults, camera, renderContext, cmd, ForwardPass.Transparent);
RenderForwardError(m_CullResults, camera, renderContext, cmd, ForwardPass.Transparent);
PushFullScreenDebugTexture(cmd, m_CameraColorBuffer, camera, renderContext, FullScreenDebugMode.NanTracker);
// Planar and real time cubemaps don't need post processing and render in FP16
if (camera.cameraType == CameraType.Reflection)
{
using (new ProfilingSample(cmd, "Blit to final RT", GetSampler(CustomSamplerId.BlitToFinalRT)))
{
// Simple blit
cmd.Blit(m_CameraColorBufferRT, BuiltinRenderTextureType.CameraTarget);
}
}
else
{
RenderVelocity(m_CullResults, hdCamera, renderContext, cmd); // Note: we may have to render velocity earlier if we do temporal AO, temporal volumetrics, etc. That would mean we do not take forward opaques into account in the deferred case?
RenderGaussianPyramidColor(camera, cmd, renderContext, FullScreenDebugMode.FinalColorPyramid);
// TODO: Check with VFX team.
// Rendering distortion here of course has lots of artifacts.
// But resolving at each object that writes distortion is not possible (we would need to sort transparents, render those that do not distort, then resolve, then repeat...)
// Instead we chose to apply distortion at the end, after we accumulate the distortion vectors and desired blurriness.
AccumulateDistortion(m_CullResults, camera, renderContext, cmd);
RenderDistortion(cmd, m_Asset.renderPipelineResources);
RenderPostProcesses(hdCamera, cmd, postProcessLayer);
}
RenderDebug(hdCamera, cmd);
// Make sure to unbind every render texture here because in the next iteration of the loop we might have to reallocate render textures (if the camera size is different)
cmd.SetRenderTarget(new RenderTargetIdentifier(-1), new RenderTargetIdentifier(-1));
// We still need to bind the default camera target correctly with our depth buffer in case we are currently rendering the scene view. It should be the last camera here.
// bind depth surface for editor grid/gizmo/selection rendering
if (camera.cameraType == CameraType.SceneView)
cmd.SetRenderTarget(BuiltinRenderTextureType.CameraTarget, m_CameraDepthStencilBufferRT);
renderContext.ExecuteCommandBuffer(cmd);
} // For each camera
}
void RenderOpaqueRenderList(CullResults cull,

int w = camera.pixelWidth;
int h = camera.pixelHeight;
cmd.ReleaseTemporaryRT(m_DistortionBuffer);
cmd.GetTemporaryRT(m_DistortionBuffer, w, h, 0, FilterMode.Point, Builtin.GetDistortionBufferFormat(), Builtin.GetDistortionBufferReadWrite());
cmd.SetRenderTarget(m_DistortionBufferRT, m_CameraDepthStencilBufferRT);
cmd.ClearRenderTarget(false, true, Color.clear);

// It must also have a "DepthForwardOnly" and no "DepthOnly" pass, as forward materials (in either deferred or forward-only rendering) always have a depth pass.
// In case of forward-only rendering we have a depth prepass. In case of the deferred renderer, it is optional.
bool addFullDepthPrepass = m_Asset.globalRenderingSettings.ShouldUseForwardRenderingOnly() || m_Asset.globalRenderingSettings.useDepthPrepassWithDeferredRendering;
bool addAlphaTestedOnly = !m_Asset.globalRenderingSettings.ShouldUseForwardRenderingOnly() && m_Asset.globalRenderingSettings.useDepthPrepassWithDeferredRendering && m_Asset.globalRenderingSettings.renderAlphaTestOnlyInDeferredPrepass;
using (new ProfilingSample(cmd, addAlphaTestedOnly ? "Depth Prepass alpha test" : "Depth Prepass", GetSampler(CustomSamplerId.DepthPrepass)))
{

// We render plain opaque objects first, as opaque alpha-tested ones are more costly to render and can be rejected by early-z (but not Hi-Z, as it is disabled by the clip instruction)
// This is handled automatically with the RenderQueue value (OpaqueAlphaTested have a different value and thus are sorted after Opaque)
RenderOpaqueRenderList(cull, camera, renderContext, cmd, m_DepthOnlyAndDepthForwardOnlyPassNames, 0, RenderQueueRange.opaque, m_DepthStateOpaque);
RenderOpaqueRenderList(cull, camera, renderContext, cmd, m_DepthForwardOnlyPassNames, 0, RenderQueueRange.opaque, m_DepthStateOpaque);
RenderOpaqueRenderList(cull, camera, renderContext, cmd, m_DepthOnlyPassNames, 0, renderQueueRange, m_DepthStateOpaque);
}
}

// during Gbuffer pass. This is handled in the shader and the depth test (equal and no depth write) is done here.
void RenderGBuffer(CullResults cull, Camera camera, ScriptableRenderContext renderContext, CommandBuffer cmd)
{
if (m_Asset.globalRenderingSettings.ShouldUseForwardRenderingOnly())
return;
using (new ProfilingSample(cmd, m_CurrentDebugDisplaySettings.IsDebugDisplayEnabled() ? "GBufferDebugDisplay" : "GBuffer", GetSampler(CustomSamplerId.GBuffer)))

}
else
{
if (m_Asset.globalRenderingSettings.useDepthPrepassWithDeferredRendering)
RenderOpaqueRenderList(cull, camera, renderContext, cmd, HDShaderPassNames.s_GBufferName, m_currentRendererConfigurationBakedLighting, rangeOpaqueNoAlphaTest, m_Asset.globalRenderingSettings.renderAlphaTestOnlyInDeferredPrepass ? m_DepthStateOpaque : m_DepthStateOpaqueWithPrepass);
// but for opaque alpha-tested objects we use depth equal and no depth write, and we rely on the GBufferWithDepthPrepass shader pass
RenderOpaqueRenderList(cull, camera, renderContext, cmd, HDShaderPassNames.s_GBufferWithPrepassName, m_currentRendererConfigurationBakedLighting, rangeOpaqueAlphaTest, m_DepthStateOpaqueWithPrepass);
}
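The m_DepthStateOpaqueWithPrepass override referenced above is what enforces the "depth equal, no depth write" test for alpha-tested opaques that already wrote depth in the prepass. Its construction is not part of this hunk; a hedged sketch using the render-state API available to SRP at this time would be:

var depthStateOpaqueWithPrepass = new RenderStateBlock
{
    depthState = new DepthState(false, CompareFunction.Equal), // no depth write, pass only where the prepass depth matches
    mask = RenderStateMask.Depth
};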

{
using (new ProfilingSample(cmd, "DisplayDebug ViewMaterial", GetSampler(CustomSamplerId.DisplayDebugViewMaterial)))
{
if (m_CurrentDebugDisplaySettings.materialDebugSettings.IsDebugGBufferEnabled() && !m_Asset.globalRenderingSettings.ShouldUseForwardRenderingOnly())
{
using (new ProfilingSample(cmd, "DebugViewMaterialGBuffer", GetSampler(CustomSamplerId.DebugViewMaterialGBuffer)))
{

if (settings.IsEnabledAndSupported(null))
{
cmd.ReleaseTemporaryRT(HDShaderIDs._AmbientOcclusionTexture);
cmd.GetTemporaryRT(HDShaderIDs._AmbientOcclusionTexture, new RenderTextureDescriptor(camera.pixelWidth, camera.pixelHeight, RenderTextureFormat.R8, 0)
{
sRGB = false,

void RenderDeferredLighting(HDCamera hdCamera, CommandBuffer cmd)
{
if (m_Asset.globalRenderingSettings.ShouldUseForwardRenderingOnly())
return;
m_MRTCache2[0] = m_CameraColorBufferRT;

void SubsurfaceScatteringPass(HDCamera hdCamera, CommandBuffer cmd, SubsurfaceScatteringSettings sssParameters)
{
// Currently, forward-rendered objects do not output split lighting required for the SSS pass.
if (!m_CurrentDebugDisplaySettings.renderingDebugSettings.enableSSSAndTransmission || m_Asset.globalRenderingSettings.ShouldUseForwardRenderingOnly())
return;
using (new ProfilingSample(cmd, "Subsurface Scattering", GetSampler(CustomSamplerId.SubsurfaceScattering)))

cmd.SetComputeFloatParam(m_SubsurfaceScatteringCS, HDShaderIDs._TexturingModeFlags, *(float*)&texturingModeFlags);
}
cmd.SetComputeVectorArrayParam(m_SubsurfaceScatteringCS, HDShaderIDs._WorldScales, sssParameters.worldScales);
cmd.SetComputeVectorArrayParam(m_SubsurfaceScatteringCS, HDShaderIDs._ShapeParams, sssParameters.shapeParams);
cmd.SetComputeTextureParam(m_SubsurfaceScatteringCS, m_SubsurfaceScatteringKernel, HDShaderIDs._GBufferTexture0, m_GbufferManager.GetGBuffers()[0]);
cmd.SetComputeTextureParam(m_SubsurfaceScatteringCS, m_SubsurfaceScatteringKernel, HDShaderIDs._GBufferTexture1, m_GbufferManager.GetGBuffers()[1]);
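The _TexturingModeFlags line above pushes a uint bit-field through SetComputeFloatParam by reinterpreting its bits rather than converting the value, so the compute shader can recover the raw bits (e.g. with asuint()). The reinterpretation in isolation, as a hedged sketch with a hypothetical helper name:

// Reinterpret the bits of a uint as a float without numeric conversion (requires an unsafe context).
static unsafe float AsFloat(uint bits)
{
    return *(float*)&bits;
}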

else
{
cmd.SetGlobalTexture(HDShaderIDs._IrradianceSource, m_CameraSssDiffuseLightingBufferRT); // Cannot set a RT on a material
m_SssVerticalFilterPass.SetVectorArray(HDShaderIDs._WorldScales, sssParameters.worldScales);
m_SssVerticalFilterPass.SetVectorArray(HDShaderIDs._FilterKernelsBasic, sssParameters.filterKernelsBasic);
m_SssVerticalFilterPass.SetVectorArray(HDShaderIDs._HalfRcpWeightedVariances, sssParameters.halfRcpWeightedVariances);
// Perform the vertical SSS filtering pass which fills 'm_CameraFilteringBufferRT'.

m_SssHorizontalFilterAndCombinePass.SetVectorArray(HDShaderIDs._WorldScales, sssParameters.worldScales);
m_SssHorizontalFilterAndCombinePass.SetVectorArray(HDShaderIDs._FilterKernelsBasic, sssParameters.filterKernelsBasic);
m_SssHorizontalFilterAndCombinePass.SetVectorArray(HDShaderIDs._HalfRcpWeightedVariances, sssParameters.halfRcpWeightedVariances);
// Perform the horizontal SSS filtering pass, and combine diffuse and specular lighting into 'm_CameraColorBufferRT'.

m_ForwardAndForwardOnlyPassNames[1] = HDShaderPassNames.s_ForwardName;
}
var passNames = m_Asset.globalRenderingSettings.ShouldUseForwardRenderingOnly() ? m_ForwardAndForwardOnlyPassNames : m_ForwardOnlyPassNames;
// Forward opaque material always have a prepass (whether or not we use deferred, whether or not there is option like alpha test only) so we pass the right depth state here.
RenderOpaqueRenderList(cullResults, camera, renderContext, cmd, passNames, m_currentRendererConfigurationBakedLighting, null, m_DepthStateOpaqueWithPrepass);
}

m_CameraMotionVectorsMaterial.SetVector(HDShaderIDs._CameraPosDiff, hdcam.prevCameraPos - hdcam.cameraPos);
cmd.ReleaseTemporaryRT(m_VelocityBuffer);
cmd.GetTemporaryRT(m_VelocityBuffer, w, h, 0, FilterMode.Point, Builtin.GetVelocityBufferFormat(), Builtin.GetVelocityBufferReadWrite());
CoreUtils.DrawFullScreen(cmd, m_CameraMotionVectorsMaterial, m_VelocityBufferRT, null, 0);
cmd.SetRenderTarget(m_VelocityBufferRT, m_CameraDepthStencilBufferRT);

{
mipSize >>= 1;
cmd.ReleaseTemporaryRT(HDShaderIDs._GaussianPyramidColorMips[i + 1]);
cmd.GetTemporaryRT(HDShaderIDs._GaussianPyramidColorMips[i + 1], mipSize, mipSize, 0, FilterMode.Bilinear, RenderTextureFormat.ARGBHalf, RenderTextureReadWrite.Linear, 1, true);
cmd.SetComputeTextureParam(m_GaussianPyramidCS, m_GaussianPyramidKernel, "_Source", last);
cmd.SetComputeTextureParam(m_GaussianPyramidCS, m_GaussianPyramidKernel, "_Result", HDShaderIDs._GaussianPyramidColorMips[i + 1]);

cmd.SetGlobalVector(HDShaderIDs._DepthPyramidMipSize, new Vector4(size, size, lodCount, 0));
cmd.ReleaseTemporaryRT(HDShaderIDs._DepthPyramidMips[0]);
cmd.GetTemporaryRT(HDShaderIDs._DepthPyramidMips[0], size, size, 0, FilterMode.Bilinear, RenderTextureFormat.RFloat, RenderTextureReadWrite.Linear, 1, true);
m_GPUCopy.SampleCopyChannel_xyzw2x(cmd, GetDepthTexture(), HDShaderIDs._DepthPyramidMips[0], new Vector2(size, size));
cmd.CopyTexture(HDShaderIDs._DepthPyramidMips[0], 0, 0, m_DepthPyramidBuffer, 0, 0);

{
mipSize >>= 1;
cmd.ReleaseTemporaryRT(HDShaderIDs._DepthPyramidMips[i + 1]);
cmd.GetTemporaryRT(HDShaderIDs._DepthPyramidMips[i + 1], mipSize, mipSize, 0, FilterMode.Bilinear, RenderTextureFormat.RFloat, RenderTextureReadWrite.Linear, 1, true);
cmd.SetComputeTextureParam(m_DepthPyramidCS, m_DepthPyramidKernel, "_Source", HDShaderIDs._DepthPyramidMips[i]);
cmd.SetComputeTextureParam(m_DepthPyramidCS, m_DepthPyramidKernel, "_Result", HDShaderIDs._DepthPyramidMips[i + 1]);

if (debugMode == m_CurrentDebugDisplaySettings.fullScreenDebugMode)
{
m_FullScreenDebugPushed = true; // We need this flag because otherwise if no fullscreen debug is pushed, when we render the result in RenderDebug the temporary RT will not exist.
cb.ReleaseTemporaryRT(m_DebugFullScreenTempRT);
cb.GetTemporaryRT(m_DebugFullScreenTempRT, camera.pixelWidth, camera.pixelHeight, 0, FilterMode.Point, RenderTextureFormat.ARGBHalf, RenderTextureReadWrite.Linear);
cb.Blit(textureID, m_DebugFullScreenTempRT);
}

if (debugMode == m_CurrentDebugDisplaySettings.fullScreenDebugMode)
{
m_FullScreenDebugPushed = true; // We need this flag because otherwise if no fullscreen debug is pushed, when we render the result in RenderDebug the temporary RT will not exist.
cmd.ReleaseTemporaryRT(m_DebugFullScreenTempRT);
cmd.GetTemporaryRT(m_DebugFullScreenTempRT, width >> mipIndex, height >> mipIndex, 0, FilterMode.Point, RenderTextureFormat.ARGBHalf, RenderTextureReadWrite.Linear);
cmd.CopyTexture(textureID, 0, mipIndex, m_DebugFullScreenTempRT, 0, 0);
}

if (debugMode == m_CurrentDebugDisplaySettings.fullScreenDebugMode)
{
m_FullScreenDebugPushed = true; // We need this flag because otherwise if no fullscreen debug is pushed, when we render the result in RenderDebug the temporary RT will not exist.
cmd.ReleaseTemporaryRT(m_DebugFullScreenTempRT);
cmd.GetTemporaryRT(m_DebugFullScreenTempRT, width >> mipIndex, height >> mipIndex, 0, FilterMode.Point, RenderTextureFormat.RFloat, RenderTextureReadWrite.Linear);
cmd.CopyTexture(textureID, 0, mipIndex, m_DebugFullScreenTempRT, 0, 0);
}

int w = camera.camera.pixelWidth;
int h = camera.camera.pixelHeight;
cmd.ReleaseTemporaryRT(m_CameraColorBuffer);
cmd.ReleaseTemporaryRT(m_CameraSssDiffuseLightingBuffer);
cmd.GetTemporaryRT(m_CameraColorBuffer, w, h, 0, FilterMode.Point, RenderTextureFormat.ARGBHalf, RenderTextureReadWrite.Linear, 1, true); // Enable UAV
cmd.GetTemporaryRT(m_CameraSssDiffuseLightingBuffer, w, h, 0, FilterMode.Point, RenderTextureFormat.RGB111110Float, RenderTextureReadWrite.Linear, 1, true); // Enable UAV
if (NeedTemporarySubsurfaceBuffer())

int s = CalculatePyramidSize(w, h);
m_GaussianPyramidColorBufferDesc.width = s;
m_GaussianPyramidColorBufferDesc.height = s;
cmd.ReleaseTemporaryRT(m_GaussianPyramidColorBuffer);
cmd.ReleaseTemporaryRT(m_DepthPyramidBuffer);
if (!m_Asset.globalRenderingSettings.ShouldUseForwardRenderingOnly())
m_GbufferManager.InitGBuffers(w, h, m_DeferredMaterial, enableBakeShadowMask, cmd);
CoreUtils.SetRenderTarget(cmd, m_CameraColorBufferRT, m_CameraDepthStencilBufferRT, ClearFlag.Depth);

// Clear the HDR target
using (new ProfilingSample(cmd, "Clear HDR target", GetSampler(CustomSamplerId.ClearHDRTarget)))
{
Color clearColor = camera.camera.backgroundColor.linear; // Need it in linear because we clear a linear fp16 texture.
CoreUtils.SetRenderTarget(cmd, m_CameraColorBufferRT, m_CameraDepthStencilBufferRT, ClearFlag.Color, clearColor);
if (!m_Asset.globalRenderingSettings.ShouldUseForwardRenderingOnly())
{
using (new ProfilingSample(cmd, "Clear GBuffer", GetSampler(CustomSamplerId.ClearGBuffer)))
{

16
ScriptableRenderPipeline/HDRenderPipeline/HDRenderPipelineAsset.asset


m_EditorClassIdentifier:
m_RenderPipelineResources: {fileID: 11400000, guid: 42086e81f4f0c724f96f7f09cc995354,
type: 2}
globalRenderingSettings:
globalTextureSettings:
spotCookieSize: 128
pointCookieSize: 512
reflectionCubemapSize: 128
reflectionCacheCompressed: 0
sssSettings: {fileID: 11400000, guid: 873499ce7a6f749408981f512a9683f7, type: 2}
tileSettings:
enableTileAndCluster: 1

enableClustered: 1
enableFptlForOpaqueWhenClustered: 1
enableFptlForForwardOpaque: 1
diffuseGlobalDimmer: 1
specularGlobalDimmer: 1
m_DefaultDiffuseMaterial: {fileID: 2100000, guid: 73c176f402d2c2f4d929aa5da7585d17,
type: 2}
m_DefaultShader: {fileID: 4800000, guid: 6e4ae4064600d784cac1e41a9e6f2e59, type: 3}

3
ScriptableRenderPipeline/HDRenderPipeline/HDRenderPipelineAsset.asset.meta


fileFormatVersion: 2
guid: 449281dd2b4fbee49b8397de0541ea3c
timeCreated: 1496931629
licenseType: Pro
externalObjects: {}
mainObjectFileID: 11400000
userData:
assetBundleName:

13
ScriptableRenderPipeline/HDRenderPipeline/HDRenderPipelineAsset.cs


using UnityEngine.Experimental.Rendering.HDPipeline.TilePass;
namespace UnityEngine.Experimental.Rendering.HDPipeline
{
// The HDRenderPipeline assumes linear lighting. Doesn't work with gamma.

// NOTE: All these properties are public because of how HDRenderPipelineInspector retrieves them via serialization/reflection
// Doing it this way allows changing parameter names while still retrieving the correct serialized values
// Global Renderer Settings
public GlobalRenderingSettings globalRenderingSettings = new GlobalRenderingSettings();
public GlobalTextureSettings globalTextureSettings = new GlobalTextureSettings();
public LightLoopSettings tileSettings = new LightLoopSettings();
[SerializeField]
Material m_DefaultDiffuseMaterial;
[SerializeField]

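The note above explains that these fields stay public and serialized so HDRenderPipelineInspector can pick them up through serialization/reflection. A hedged, hypothetical inspector sketch showing that pattern (the class name is made up; the real HDRenderPipelineInspector is not shown in this commit):

using UnityEditor;
using UnityEngine.Experimental.Rendering.HDPipeline;

[CustomEditor(typeof(HDRenderPipelineAsset))]
class ExampleHDRPAssetInspector : Editor // hypothetical illustration only
{
    SerializedProperty m_GlobalRenderingSettings;

    void OnEnable()
    {
        // FindProperty matches the serialized field name, so a rename such as
        // renderingSettings -> globalRenderingSettings must be mirrored here.
        m_GlobalRenderingSettings = serializedObject.FindProperty("globalRenderingSettings");
    }

    public override void OnInspectorGUI()
    {
        serializedObject.Update();
        EditorGUILayout.PropertyField(m_GlobalRenderingSettings, true);
        serializedObject.ApplyModifiedProperties();
    }
}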
40
ScriptableRenderPipeline/HDRenderPipeline/HDUtils.cs


s_OverlayLineHeight = -1.0f;
}
}
public static Matrix4x4 ComputePixelCoordToWorldSpaceViewDirectionMatrix(float verticalFoV, Vector4 screenSize, Matrix4x4 worldToViewMatrix, bool renderToCubemap)
{
// Compose the view space version first.
// V = -(X, Y, Z), s.t. Z = 1,
// X = (2x / resX - 1) * tan(vFoV / 2) * ar = x * [(2 / resX) * tan(vFoV / 2) * ar] + [-tan(vFoV / 2) * ar] = x * [-m00] + [-m20]
// Y = (2y / resY - 1) * tan(vFoV / 2) = y * [(2 / resY) * tan(vFoV / 2)] + [-tan(vFoV / 2)] = y * [-m11] + [-m21]
float tanHalfVertFoV = Mathf.Tan(0.5f * verticalFoV);
float aspectRatio = screenSize.x * screenSize.w;
// Compose the matrix.
float m21 = tanHalfVertFoV;
float m20 = tanHalfVertFoV * aspectRatio;
float m00 = -2.0f * screenSize.z * m20;
float m11 = -2.0f * screenSize.w * m21;
float m33 = -1.0f;
if (renderToCubemap)
{
// Flip Y.
m11 = -m11;
m21 = -m21;
}
var viewSpaceRasterTransform = new Matrix4x4(new Vector4(m00, 0.0f, 0.0f, 0.0f),
new Vector4(0.0f, m11, 0.0f, 0.0f),
new Vector4(m20, m21, m33, 0.0f),
new Vector4(0.0f, 0.0f, 0.0f, 1.0f));
// Remove the translation component.
var homogeneousZero = new Vector4(0, 0, 0, 1);
worldToViewMatrix.SetColumn(3, homogeneousZero);
// Flip the Z to make the coordinate system left-handed.
worldToViewMatrix.SetRow(2, -worldToViewMatrix.GetRow(2));
// Transpose for HLSL.
return Matrix4x4.Transpose(worldToViewMatrix.transpose * viewSpaceRasterTransform);
}
}
}
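A hedged usage sketch for the matrix above: build it on the CPU and push it to the GPU so a shader can turn a pixel coordinate into a world-space view direction. cmd is assumed to be an existing CommandBuffer and the shader property name is made up; screenSize is assumed to be packed as (width, height, 1/width, 1/height), which matches how screenSize.z and screenSize.w are used in the function:

var camera = Camera.main;
float verticalFoV = camera.fieldOfView * Mathf.Deg2Rad; // the function expects radians (Tan(0.5 * verticalFoV))
var screenSize = new Vector4(camera.pixelWidth, camera.pixelHeight,
                             1.0f / camera.pixelWidth, 1.0f / camera.pixelHeight);
Matrix4x4 pixelCoordToViewDirWS = HDUtils.ComputePixelCoordToWorldSpaceViewDirectionMatrix(
    verticalFoV, screenSize, camera.worldToCameraMatrix, false /* renderToCubemap */);
cmd.SetGlobalMatrix("_PixelCoordToViewDirWS", pixelCoordToViewDirWS); // hypothetical property name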

8
ScriptableRenderPipeline/HDRenderPipeline/Lighting/Deferred.shader


// input.positionCS is SV_Position
PositionInputs posInput = GetPositionInput(input.positionCS.xy, _ScreenSize.zw, uint2(input.positionCS.xy) / GetTileSize());
float depth = LOAD_TEXTURE2D(_MainDepthTexture, posInput.positionSS).x;
DECODE_FROM_GBUFFER(posInput.positionSS, MATERIAL_FEATURE_MASK_FLAGS, bsdfData, bakeLightingData.bakeDiffuseLighting);
DecodeShadowMask(LOAD_TEXTURE2D(_ShadowMaskTexture, posInput.positionSS), bakeLightingData.bakeShadowMask);
#endif
PreLightData preLightData = GetPreLightData(V, posInput, bsdfData);

Outputs outputs;
#ifdef OUTPUT_SPLIT_LIGHTING
if (_EnableSSSAndTransmission != 0 && bsdfData.materialId == MATERIALID_LIT_SSS)
{
outputs.specularLighting = float4(specularLighting, 1.0);
outputs.diffuseLighting = TagLightingForSSS(diffuseLighting);

14
ScriptableRenderPipeline/HDRenderPipeline/Lighting/Editor/HDLightEditor.Styles.cs


sealed class Styles
{
// Base
public readonly GUIContent range = new GUIContent("Range", "Controls how far the light is emitted from the center of the object.");
public readonly GUIContent color = new GUIContent("Color", "Controls the color being emitted by the light.");
public readonly GUIContent spotAngle = new GUIContent("Angle", "Controls the angle in degrees at the base of a Spot light's cone.");
public readonly GUIContent useColorTemperature = new GUIContent("Use Color Temperature", "Choose between RGB and temperature mode for light's color.");
public readonly GUIContent colorFilter = new GUIContent("Filter", "A colored gel can be put in front of the light source to tint the light.");
public readonly GUIContent colorTemperature = new GUIContent("Temperature", "Also known as CCT (Correlated color temperature). The color temperature of the electromagnetic radiation emitted from an ideal black body is defined as its surface temperature in Kelvin. White is 6500K");
public readonly GUIContent intensity = new GUIContent("Intensity", "Controls the brightness of the light. Light color is multiplied by this value.");
public readonly GUIContent lightmappingMode = new GUIContent("Mode", "Specifies the light mode used to determine if and how a light will be baked. Possible modes are Baked, Mixed, and Realtime.");
public readonly GUIContent lightBounceIntensity = new GUIContent("Indirect Multiplier", "Controls the intensity of indirect light being contributed to the scene. A value of 0 will cause Realtime lights to be removed from realtime global illumination and Baked and Mixed lights to no longer emit indirect lighting. Has no effect when both Realtime and Baked Global Illumination are disabled.");
public readonly GUIContent cookie = new GUIContent("Cookie", "Specifies the Texture mask to cast shadows, create silhouettes, or patterned illumination for the light.");
public readonly GUIContent cookieSizeX = new GUIContent("Size X", "Controls the size of the cookie mask currently assigned to the light.");
public readonly GUIContent cookieSizeY = new GUIContent("Size Y", "Controls the size of the cookie mask currently assigned to the light.");
public readonly GUIContent shadowBias = new GUIContent("Bias", "Controls the distance at which the shadows will be pushed away from the light. Useful for avoiding false self-shadowing artifacts.");

public readonly GUIContent bakedShadowAngle = new GUIContent("Baked Shadow Angle", "Controls the amount of artificial softening applied to the edges of shadows cast by directional lights.");
public readonly GUIContent bakingWarning = new GUIContent("Light mode is currently overridden to Realtime mode. Enable Baked Global Illumination to use Mixed or Baked light modes.");
public readonly GUIContent indirectBounceShadowWarning = new GUIContent("Realtime indirect bounce shadowing is not supported for Spot and Point lights.");
public readonly GUIContent cookieWarning = new GUIContent("Cookie textures for spot lights should be set to clamp, not repeat, to avoid artifacts.");
// Additional light data
public readonly GUIContent maxSmoothness = new GUIContent("Max Smoothness", "Very low cost way of faking spherical area lighting. This will modify the roughness of the material lit. This is useful when the specular highlight is too small or too sharp.");

319
ScriptableRenderPipeline/HDRenderPipeline/Lighting/Editor/HDLightEditor.cs


using System;
using System.Linq;
using UnityEngine.Rendering;
// TODO: Simplify this editor once we can target 2018.1
sealed class SerializedBaseData
{
public SerializedProperty type;
public SerializedProperty range;
public SerializedProperty spotAngle;
public SerializedProperty cookie;
public SerializedProperty cookieSize;
public SerializedProperty color;
public SerializedProperty intensity;
public SerializedProperty bounceIntensity;
public SerializedProperty colorTemperature;
public SerializedProperty useColorTemperature;
public SerializedProperty shadowsType;
public SerializedProperty shadowsBias;
public SerializedProperty shadowsNormalBias;
public SerializedProperty shadowsNearPlane;
public SerializedProperty lightmapping;
public SerializedProperty areaSizeX;
public SerializedProperty areaSizeY;
public SerializedProperty bakedShadowRadius;
public SerializedProperty bakedShadowAngle;
}
sealed class SerializedLightData
{
public SerializedProperty spotInnerPercent;

SerializedObject m_SerializedAdditionalLightData;
SerializedObject m_SerializedAdditionalShadowData;
SerializedBaseData m_BaseData;
// Copied over from the original LightEditor class. Will go away once we can target 2018.1
bool m_TypeIsSame { get { return !m_BaseData.type.hasMultipleDifferentValues; } }
bool m_LightmappingTypeIsSame { get { return !m_BaseData.lightmapping.hasMultipleDifferentValues; } }
bool m_IsCompletelyBaked { get { return m_BaseData.lightmapping.intValue == 2; } }
bool m_IsRealtime { get { return m_BaseData.lightmapping.intValue == 4; } }
Light light { get { return serializedObject.targetObject as Light; } }
Texture m_Cookie { get { return m_BaseData.cookie.objectReferenceValue as Texture; } }
bool m_BakingWarningValue { get { return !UnityEditor.Lightmapping.bakedGI && m_LightmappingTypeIsSame && !m_IsRealtime; } }
bool m_BounceWarningValue
{
get
{
return m_TypeIsSame && (light.type == LightType.Point || light.type == LightType.Spot) &&
m_LightmappingTypeIsSame && m_IsRealtime && !m_BaseData.bounceIntensity.hasMultipleDifferentValues
&& m_BaseData.bounceIntensity.floatValue > 0.0f;
}
}
public bool cookieWarningValue
{
get
{
return m_TypeIsSame && light.type == LightType.Spot &&
!m_BaseData.cookie.hasMultipleDifferentValues && m_Cookie && m_Cookie.wrapMode != TextureWrapMode.Clamp;
}
}
// LightType + LightTypeExtent combined
enum LightShape
{

// Used for UI only; the processing code must use LightTypeExtent and LightType
LightShape m_LightShape;
protected override void OnEnable()
base.OnEnable();
var lightData = CoreEditorUtils.GetAdditionalData<HDAdditionalLightData>(targets);
var shadowData = CoreEditorUtils.GetAdditionalData<AdditionalShadowData>(targets);
// Grab all the serialized data we need
m_BaseData = new SerializedBaseData
{
type = serializedObject.FindProperty("m_Type"),
range = serializedObject.FindProperty("m_Range"),
spotAngle = serializedObject.FindProperty("m_SpotAngle"),
cookie = serializedObject.FindProperty("m_Cookie"),
cookieSize = serializedObject.FindProperty("m_CookieSize"),
color = serializedObject.FindProperty("m_Color"),
intensity = serializedObject.FindProperty("m_Intensity"),
bounceIntensity = serializedObject.FindProperty("m_BounceIntensity"),
colorTemperature = serializedObject.FindProperty("m_ColorTemperature"),
useColorTemperature = serializedObject.FindProperty("m_UseColorTemperature"),
shadowsType = serializedObject.FindProperty("m_Shadows.m_Type"),
shadowsBias = serializedObject.FindProperty("m_Shadows.m_Bias"),
shadowsNormalBias = serializedObject.FindProperty("m_Shadows.m_NormalBias"),
shadowsNearPlane = serializedObject.FindProperty("m_Shadows.m_NearPlane"),
lightmapping = serializedObject.FindProperty("m_Lightmapping"),
areaSizeX = serializedObject.FindProperty("m_AreaSize.x"),
areaSizeY = serializedObject.FindProperty("m_AreaSize.y"),
bakedShadowRadius = serializedObject.FindProperty("m_ShadowRadius"),
bakedShadowAngle = serializedObject.FindProperty("m_ShadowAngle")
};
using (var o = new PropertyFetcher<HDAdditionalLightData>(m_SerializedAdditionalLightData))
m_AdditionalLightData = new SerializedLightData
{

ApplyAdditionalComponentsVisibility(true);
CheckStyles();
settings.Update();
DrawFoldout(settings.lightType, "Shape", DrawShape);
DrawFoldout(settings.intensity, "Light", DrawLightSettings);
if (settings.shadowsType.enumValueIndex != (int)LightShadows.None)
DrawFoldout(settings.shadowsType, "Shadows", DrawShadows);
CoreEditorUtils.DrawSplitter();
EditorGUILayout.Space();

settings.ApplyModifiedProperties();
}
void DrawFoldout(SerializedProperty foldoutProperty, string title, Action func)

void DrawFeatures()
{
bool disabledScope = settings.isCompletelyBaked
bool shadowsEnabled = EditorGUILayout.Toggle(CoreEditorUtils.GetContent("Enable Shadows"), settings.shadowsType.enumValueIndex != 0);
settings.shadowsType.enumValueIndex = shadowsEnabled ? (int)LightShadows.Hard : (int)LightShadows.None;
EditorGUILayout.PropertyField(m_AdditionalLightData.showAdditionalSettings);
}
void DrawShape()

switch (m_LightShape)
{
case LightShape.Directional:
settings.lightType.enumValueIndex = (int)LightType.Directional;
settings.lightType.enumValueIndex = (int)LightType.Point;
settings.lightType.enumValueIndex = (int)LightType.Spot;
m_AdditionalLightData.lightTypeExtent.enumValueIndex = (int)LightTypeExtent.Punctual;
EditorGUILayout.PropertyField(m_AdditionalLightData.spotLightShape, s_Styles.spotLightShape);
var spotLightShape = (SpotLightShape)m_AdditionalLightData.spotLightShape.enumValueIndex;

settings.DrawSpotAngle();
settings.DrawSpotAngle();
EditorGUILayout.Slider(m_AdditionalLightData.aspectRatio, 0.05f, 20.0f, s_Styles.aspectRatioPyramid);
}
else if (spotLightShape == SpotLightShape.Box)

case LightShape.Rectangle:
// TODO: Currently if we use Area type as it is offline light in legacy, the light will not exist at runtime
//m_BaseData.type.enumValueIndex = (int)LightType.Area;
settings.lightType.enumValueIndex = (int)LightType.Point;
settings.areaSizeX.floatValue = m_AdditionalLightData.shapeLength.floatValue;
settings.areaSizeY.floatValue = m_AdditionalLightData.shapeWidth.floatValue;
settings.shadowsType.enumValueIndex = (int)LightShadows.None;
settings.lightType.enumValueIndex = (int)LightType.Point;
settings.areaSizeX.floatValue = m_AdditionalLightData.shapeLength.floatValue;
settings.areaSizeY.floatValue = 0.01f;
settings.shadowsType.enumValueIndex = (int)LightShadows.None;
break;
case (LightShape)(-1):

void DrawLightSettings()
{
settings.DrawColor();
settings.DrawIntensity();
settings.DrawBounceIntensity();
settings.DrawRange(false);
settings.DrawLightmapping();
settings.DrawCookie();
if (settings.cookie != null && m_LightShape == LightShape.Directional)
EditorGUI.indentLevel++;
EditorGUI.indentLevel--;
EditorGUILayout.Space();
EditorGUILayout.LabelField("Additional Settings", EditorStyles.boldLabel);
EditorGUI.indentLevel++;
EditorGUILayout.PropertyField(m_AdditionalLightData.affectDiffuse, s_Styles.affectDiffuse);

}
}
void DrawBakedShadowParameters()
{
switch ((LightType)settings.lightType.enumValueIndex)
{
case LightType.Directional:
EditorGUILayout.Slider(settings.bakedShadowAngleProp, 0f, 90f, s_Styles.bakedShadowAngle);
break;
case LightType.Spot:
case LightType.Point:
EditorGUILayout.PropertyField(settings.bakedShadowRadiusProp, s_Styles.bakedShadowRadius);
break;
}
}
void DrawShadows()
{
if (settings.isCompletelyBaked)
{
DrawBakedShadowParameters();
return;
}
EditorGUILayout.Slider(settings.shadowsBias, 0.001f, 1f, s_Styles.shadowBias);
EditorGUILayout.Slider(settings.shadowsNormalBias, 0.001f, 1f, s_Styles.shadowNormalBias);
EditorGUILayout.Slider(settings.shadowsNearPlane, 0.01f, 10f, s_Styles.shadowNearPlane);
if (settings.lightType.enumValueIndex == (int)LightType.Directional)
using (var scope = new EditorGUI.ChangeCheckScope())
{
EditorGUILayout.IntSlider(m_AdditionalShadowData.cascadeCount, 1, 4, s_Styles.shadowCascadeCount);
if (scope.changed)
{
int len = m_AdditionalShadowData.cascadeCount.intValue;
m_AdditionalShadowData.cascadeRatios.arraySize = len - 1;
m_AdditionalShadowData.cascadeBorders.arraySize = len;
}
}
EditorGUI.indentLevel++;
using (var scope = new EditorGUI.ChangeCheckScope())
{
// Draw each field first...
int arraySize = m_AdditionalShadowData.cascadeRatios.arraySize;
for (int i = 0; i < arraySize; i++)
EditorGUILayout.Slider(m_AdditionalShadowData.cascadeRatios.GetArrayElementAtIndex(i), 0f, 1f, s_Styles.shadowCascadeRatios[i]);
if (scope.changed)
using (var scope = new EditorGUI.ChangeCheckScope())
// ...then clamp values to avoid out of bounds cascade ratios
// Draw each field first...
int arraySize = m_AdditionalShadowData.cascadeRatios.arraySize;
{
var ratios = m_AdditionalShadowData.cascadeRatios;
var ratioProp = ratios.GetArrayElementAtIndex(i);
float val = ratioProp.floatValue;
EditorGUILayout.Slider(m_AdditionalShadowData.cascadeRatios.GetArrayElementAtIndex(i), 0f, 1f, s_Styles.shadowCascadeRatios[i]);
if (i > 0)
if (scope.changed)
{
// ...then clamp values to avoid out of bounds cascade ratios
for (int i = 0; i < arraySize; i++)
var prevRatioProp = ratios.GetArrayElementAtIndex(i - 1);
float prevVal = prevRatioProp.floatValue;
val = Mathf.Max(val, prevVal);
}
var ratios = m_AdditionalShadowData.cascadeRatios;
var ratioProp = ratios.GetArrayElementAtIndex(i);
float val = ratioProp.floatValue;
if (i > 0)
{
var prevRatioProp = ratios.GetArrayElementAtIndex(i - 1);
float prevVal = prevRatioProp.floatValue;
val = Mathf.Max(val, prevVal);
}
if (i < arraySize - 1)
{
var nextRatioProp = ratios.GetArrayElementAtIndex(i + 1);
float nextVal = nextRatioProp.floatValue;
val = Mathf.Min(val, nextVal);
}
if (i < arraySize - 1)
{
var nextRatioProp = ratios.GetArrayElementAtIndex(i + 1);
float nextVal = nextRatioProp.floatValue;
val = Mathf.Min(val, nextVal);
ratioProp.floatValue = val;
ratioProp.floatValue = val;
EditorGUI.indentLevel--;
EditorGUI.indentLevel--;
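The block above draws each cascade ratio slider first and only afterwards clamps the values so every ratio stays between its neighbours and the array remains monotonic. A minimal C# sketch of that clamping step, using a plain float array as a hypothetical stand-in for the cascadeRatios SerializedProperty:

using UnityEngine;

static class CascadeRatioExample
{
    // Keep cascade ratios in non-decreasing order: each value is clamped
    // between its previous and next neighbour, mirroring the editor code above.
    public static void ClampCascadeRatios(float[] ratios)
    {
        for (int i = 0; i < ratios.Length; i++)
        {
            float val = ratios[i];
            if (i > 0)
                val = Mathf.Max(val, ratios[i - 1]); // not below the previous ratio
            if (i < ratios.Length - 1)
                val = Mathf.Min(val, ratios[i + 1]); // not above the next ratio
            ratios[i] = val;
        }
    }
}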
if (settings.isBakedOrMixed)
DrawBakedShadowParameters();
EditorGUILayout.Space();
EditorGUILayout.PropertyField(m_AdditionalShadowData.fadeDistance, s_Styles.shadowFadeDistance);
if (settings.lightType.enumValueIndex == (int)LightType.Point || settings.lightType.enumValueIndex == (int)LightType.Spot)
EditorGUILayout.PropertyField(m_AdditionalShadowData.fadeDistance, s_Styles.shadowFadeDistance);
EditorGUILayout.PropertyField(m_AdditionalShadowData.dimmer, s_Styles.shadowDimmer);
EditorGUI.indentLevel--;
}

void ResolveLightShape()
{
var type = m_BaseData.type;
var type = settings.lightType;
// Special case for multi-selection: don't resolve light shape or it'll corrupt lights
if (type.hasMultipleDifferentValues)

break;
}
}
}
// TODO: Move this to a generic EditorUtilities class
T[] GetAdditionalData<T>()
where T : Component
{
// Handles multi-selection
var data = targets.Cast<Component>()
.Select(t => t.GetComponent<T>())
.ToArray();
for (int i = 0; i < data.Length; i++)
{
if (data[i] == null)
data[i] = Undo.AddComponent<T>(((Component)targets[i]).gameObject);
}
return data;
}
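GetAdditionalData<T>() above resolves one additional-data component per selected object and, when a component is missing, adds it through Undo so the operation stays undoable. A hedged single-object sketch of the same fetch-or-add pattern (the helper name here is hypothetical):

using UnityEditor;
using UnityEngine;

static class AdditionalDataExample
{
    // Return the T component on 'go', adding one (undoably) if it does not exist yet.
    public static T GetOrAddComponent<T>(GameObject go) where T : Component
    {
        var comp = go.GetComponent<T>();
        if (comp == null)
            comp = Undo.AddComponent<T>(go); // recorded so the user can undo the implicit add
        return comp;
    }
}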
}
}

15
ScriptableRenderPipeline/HDRenderPipeline/Lighting/LightDefinition.cs


public Vector3 forward;
public int cookieIndex; // -1 if unused
public Vector3 right; // Rescaled by (2 / shapeLenght)
public Vector3 right; // Rescaled by (2 / shapeLength)
public bool dynamicShadowCasterOnly; // Use with ShadowMask feature
public bool dynamicShadowCasterOnly; // Use with ShadowMask feature
public Vector4 shadowMaskSelector; // Use with ShadowMask feature
};

public Vector3 forward;
public int cookieIndex; // -1 if unused
public Vector3 right; // If spot: rescaled by cot(outerHalfAngle); if projector: rescaled by (2 / shapeLenght)
public Vector3 right; // If spot: rescaled by cot(outerHalfAngle); if projector: rescaled by (2 / shapeLength)
public Vector3 up; // If spot: rescaled by cot(outerHalfAngle); if projector: rescaled by * (2 / shapeWidth)
public Vector3 up; // If spot: rescaled by cot(outerHalfAngle); if projector: rescaled by (2 / shapeWidth)
public float diffuseScale;
public float angleScale; // Spot light

public Vector2 size; // Used by area, frustum projector and spot lights (x = cot(outerHalfAngle))
public Vector4 shadowMaskSelector; // Use with ShadowMask feature
public Vector2 size; // Used by area and pyramid projector lights
public Vector4 shadowMaskSelector; // Use with ShadowMask feature
[GenerateHLSL]

20
ScriptableRenderPipeline/HDRenderPipeline/Lighting/LightDefinition.cs.hlsl


float specularScale;
float3 up;
float diffuseScale;
bool dynamicShadowCasterOnly;
bool dynamicShadowCasterOnly;
float4 shadowMaskSelector;
};

float angleOffset;
float shadowDimmer;
bool dynamicShadowCasterOnly;
float4 shadowMaskSelector;
float4 shadowMaskSelector;
};
// Generated from UnityEngine.Experimental.Rendering.HDPipeline.EnvLightData

{
return value.diffuseScale;
}
bool GetDynamicShadowCasterOnly(DirectionalLightData value)
{
return value.dynamicShadowCasterOnly;
}
float2 GetFadeDistanceScaleAndBias(DirectionalLightData value)
{
return value.fadeDistanceScaleAndBias;

return value.unused0;
}
bool GetDynamicShadowCasterOnly(DirectionalLightData value)
{
return value.dynamicShadowCasterOnly;
}
float4 GetShadowMaskSelector(DirectionalLightData value)
{
return value.shadowMaskSelector;

{
return value.dynamicShadowCasterOnly;
}
float4 GetShadowMaskSelector(LightData value)
{
return value.shadowMaskSelector;
}
float2 GetSize(LightData value)
{
return value.size;

float GetMinRoughness(LightData value)
{
return value.minRoughness;
}
float4 GetShadowMaskSelector(LightData value)
{
return value.shadowMaskSelector;
}
//

6
ScriptableRenderPipeline/HDRenderPipeline/Lighting/Lighting.hlsl


#include "../Lighting/LightDefinition.cs.hlsl"
#include "../Lighting/LightUtilities.hlsl"
#include "TilePass/Shadow.hlsl"
#include "LightLoop/Shadow.hlsl"
#include "../Lighting/TilePass/TilePass.hlsl"
#include "../Lighting/LightLoop/LightLoopDef.hlsl"
#endif
// Shadow uses the sampling functions defined in the header above and must be included before Material.hlsl

#if defined(LIGHTLOOP_SINGLE_PASS) || defined(LIGHTLOOP_TILE_PASS)
#include "../Lighting/TilePass/TilePassLoop.hlsl"
#include "../Lighting/LightLoop/LightLoop.hlsl"
#endif

4
ScriptableRenderPipeline/HDRenderPipeline/Lighting/LightLoop/LightLoopDef.hlsl


#include "TilePass.cs.hlsl"
#include "LightLoop.cs.hlsl"
#define DWORD_PER_TILE 16 // See dwordsPerTile in TilePass.cs, we have roomm for 31 lights and a number of light value all store on 16 bit (ushort)
#define DWORD_PER_TILE 16 // See dwordsPerTile in LightLoop.cs, we have room for 31 lights and a light count, all stored as 16 bit (ushort)
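As the comment notes, 16 dwords per tile is exactly enough for a per-tile light count plus 31 light indices once everything is stored as 16-bit values: 16 dwords x 32 bits = 512 bits = 32 ushort slots, of which one holds the count and the remaining 31 hold light indices.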
CBUFFER_START(UnityTilePass)
uint _NumTileFtplX;

93
ScriptableRenderPipeline/HDRenderPipeline/Lighting/LightLoop/LightLoop.hlsl


// Calculate the offset in global light index light for current light category
int GetTileOffset(PositionInputs posInput, uint lightCategory)
{
uint2 tileIndex = posInput.unTileCoord;
uint2 tileIndex = posInput.tileCoord;
return (tileIndex.y + lightCategory * _NumTileFtplY) * _NumTileFtplX + tileIndex.x;
}
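GetTileOffset above flattens a (tile, light category) pair into a single offset in the global light index list: categories are stacked along Y, then rows, then columns. A small C# transcription with hypothetical tile counts shows the indexing:

using UnityEngine;

static class TileOffsetExample
{
    // Mirrors the shader: (tileIndex.y + lightCategory * numTileY) * numTileX + tileIndex.x
    public static int GetTileOffset(Vector2Int tileIndex, int lightCategory, int numTileX, int numTileY)
    {
        return (tileIndex.y + lightCategory * numTileY) * numTileX + tileIndex.x;
    }
}

For example, with a 120x68 tile grid (a 1920x1080 target and 16x16 tiles), tile (3, 2) in LIGHTCATEGORY_AREA (= 1) lands at (2 + 1 * 68) * 120 + 3 = 8403.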

void GetCountAndStartCluster(PositionInputs posInput, uint lightCategory, out uint start, out uint lightCount)
{
uint2 tileIndex = posInput.unTileCoord;
uint2 tileIndex = posInput.tileCoord;
float logBase = g_fClustBase;
if (g_isLogBaseBufferEnabled)

int clustIdx = SnapToClusterIdxFlex(posInput.depthVS, logBase, g_isLogBaseBufferEnabled != 0);
int clustIdx = SnapToClusterIdxFlex(posInput.linearDepth, logBase, g_isLogBaseBufferEnabled != 0);
int nrClusters = (1 << g_iLog2NumClusters);
const int idx = ((lightCategory * nrClusters + clustIdx) * _NumTileClusteredY + tileIndex.y) * _NumTileClusteredX + tileIndex.x;

#endif // LIGHTLOOP_TILE_PASS
LightData FetchLight(uint start, uint i)
{
#ifdef LIGHTLOOP_TILE_PASS
int j = FetchIndex(start, i);
#else
int j = start + i;
#endif
return _LightDatas[j];
}
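FetchLight above hides the difference between the tile path, where FetchIndex resolves an index through the per-tile light list, and the flat path, where lights are read sequentially from _LightDatas. A C# analogue of the same abstraction, with a hypothetical index array standing in for the tile list:

static class LightFetchExample
{
    // 'lightIndices' stands in for the per-tile light list; pass null for the flat path.
    public static int FetchLightIndex(int[] lightIndices, int start, int i)
    {
        return lightIndices != null
            ? lightIndices[start + i] // tile path: indirection through the per-tile list
            : start + i;              // flat path: sequential access
    }
}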
// bakeDiffuseLighting is part of the prototype so a user is able to implement a "base pass" with GI and multipass direct light (aka old unity rendering path)
void LightLoop( float3 V, PositionInputs posInput, PreLightData preLightData, BSDFData bsdfData, BakeLightingData bakeLightingData, uint featureFlags,
out float3 diffuseLighting,

if (featureFlags & LIGHTFEATUREFLAGS_PUNCTUAL)
{
#ifdef LIGHTLOOP_TILE_PASS
uint lightCount, lightStart;
// TODO: Convert the for loop below to a while on each type as we know we are sorted and compare performance.
uint punctualLightStart;
uint punctualLightCount;
GetCountAndStart(posInput, LIGHTCATEGORY_PUNCTUAL, punctualLightStart, punctualLightCount);
#ifdef LIGHTLOOP_TILE_PASS
GetCountAndStart(posInput, LIGHTCATEGORY_PUNCTUAL, lightStart, lightCount);
#else
lightCount = _PunctualLightCount;
lightStart = 0;
#endif
for (i = 0; i < punctualLightCount; ++i)
for (i = 0; i < lightCount; i++)
int punctualIndex = FetchIndex(punctualLightStart, i);
DirectLighting lighting = EvaluateBSDF_Punctual(context, V, posInput, preLightData, _LightDatas[punctualIndex], bsdfData, bakeLightingData, _LightDatas[punctualIndex].lightType);
AccumulateDirectLighting(lighting, aggregateLighting);
}
LightData lightData = FetchLight(lightStart, i);
#else
for (i = 0; i < _PunctualLightCount; ++i)
{
DirectLighting lighting = EvaluateBSDF_Punctual(context, V, posInput, preLightData, _LightDatas[i], bsdfData, bakeLightingData, _LightDatas[i].lightType);
DirectLighting lighting = EvaluateBSDF_Punctual(context, V, posInput, preLightData, lightData, bsdfData, bakeLightingData);
#endif
#ifdef LIGHTLOOP_TILE_PASS
uint lightCount, lightStart;
uint areaLightStart;
uint areaLightCount;
GetCountAndStart(posInput, LIGHTCATEGORY_AREA, areaLightStart, areaLightCount);
#ifdef LIGHTLOOP_TILE_PASS
GetCountAndStart(posInput, LIGHTCATEGORY_AREA, lightStart, lightCount);
#else
lightCount = _AreaLightCount;
lightStart = _PunctualLightCount;
#endif
// COMPILER BEHAVIOR WARNING!
// If rectangle lights are before line lights, the compiler will duplicate light matrices in VGPR because they are used differently between the two types of lights.

i = 0;
if (areaLightCount > 0)
if (lightCount > 0)
uint areaIndex = FetchIndex(areaLightStart, 0);
uint lightType = _LightDatas[areaIndex].lightType;
i = 0;
uint last = lightCount - 1;
LightData lightData = FetchLight(lightStart, i);
while (i < areaLightCount && lightType == GPULIGHTTYPE_LINE)
while (i <= last && lightData.lightType == GPULIGHTTYPE_LINE)
DirectLighting lighting = EvaluateBSDF_Area(context, V, posInput, preLightData, _LightDatas[areaIndex], bsdfData, bakeLightingData, GPULIGHTTYPE_LINE);
lightData.lightType = GPULIGHTTYPE_LINE; // Enforce constant propagation
DirectLighting lighting = EvaluateBSDF_Area(context, V, posInput, preLightData, lightData, bsdfData, bakeLightingData);
i++;
areaIndex = i < areaLightCount ? FetchIndex(areaLightStart, i) : 0;
lightType = i < areaLightCount ? _LightDatas[areaIndex].lightType : 0xFF;
lightData = FetchLight(lightStart, min(++i, last));
while (i < areaLightCount && lightType == GPULIGHTTYPE_RECTANGLE)
while (i <= last && lightData.lightType == GPULIGHTTYPE_RECTANGLE)
DirectLighting lighting = EvaluateBSDF_Area(context, V, posInput, preLightData, _LightDatas[areaIndex], bsdfData, bakeLightingData, GPULIGHTTYPE_RECTANGLE);
lightData.lightType = GPULIGHTTYPE_RECTANGLE; // Enforce constant propagation
DirectLighting lighting = EvaluateBSDF_Area(context, V, posInput, preLightData, lightData, bsdfData, bakeLightingData);
i++;
areaIndex = i < areaLightCount ? FetchIndex(areaLightStart, i) : 0;
lightType = i < areaLightCount ? _LightDatas[areaIndex].lightType : 0xFF;
lightData = FetchLight(lightStart, min(++i, last));
#else
for (i = _PunctualLightCount; i < _PunctualLightCount + _AreaLightCount; ++i)
{
DirectLighting lighting = EvaluateBSDF_Area(context, V, posInput, preLightData, _LightDatas[i], bsdfData, bakeLightingData, _LightDatas[i].lightType);
AccumulateDirectLighting(lighting, aggregateLighting);
}
#endif
}
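Because the per-tile list is sorted by light type, the area-light loop above consumes it as two consecutive runs, line lights first and then rectangle lights, and re-assigns lightData.lightType to a literal so the compiler can specialize each run instead of duplicating light matrices in VGPRs. A hedged C# sketch of the same iterate-runs-of-a-sorted-list pattern (types and evaluation calls are placeholders):

enum GpuAreaLightType { Line, Rectangle }

struct AreaLightExample
{
    public GpuAreaLightType type;
}

static class SortedAreaLightLoopExample
{
    // 'lights' is assumed sorted by type, as the per-tile light list is.
    public static void Process(AreaLightExample[] lights)
    {
        int i = 0;
        while (i < lights.Length && lights[i].type == GpuAreaLightType.Line)
        {
            // EvaluateLine(lights[i]);       // specialized path: type is known to be Line
            i++;
        }
        while (i < lights.Length && lights[i].type == GpuAreaLightType.Rectangle)
        {
            // EvaluateRectangle(lights[i]);  // specialized path: type is known to be Rectangle
            i++;
        }
    }
}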
float reflectionHierarchyWeight = 0.0; // Max: 1.0

20
ScriptableRenderPipeline/HDRenderPipeline/Lighting/LightLoop/LightLoop.cs.hlsl


// This file was automatically generated. Please don't edit by hand.
//
#ifndef TILEPASS_CS_HLSL
#define TILEPASS_CS_HLSL
#ifndef LIGHTLOOP_CS_HLSL
#define LIGHTLOOP_CS_HLSL
// UnityEngine.Experimental.Rendering.HDPipeline.TilePass.LightVolumeType: static fields
// UnityEngine.Experimental.Rendering.HDPipeline.LightVolumeType: static fields
//
#define LIGHTVOLUMETYPE_CONE (0)
#define LIGHTVOLUMETYPE_SPHERE (1)

//
// UnityEngine.Experimental.Rendering.HDPipeline.TilePass.LightCategory: static fields
// UnityEngine.Experimental.Rendering.HDPipeline.LightCategory: static fields
//
#define LIGHTCATEGORY_PUNCTUAL (0)
#define LIGHTCATEGORY_AREA (1)

//
// UnityEngine.Experimental.Rendering.HDPipeline.TilePass.LightFeatureFlags: static fields
// UnityEngine.Experimental.Rendering.HDPipeline.LightFeatureFlags: static fields
//
#define LIGHTFEATUREFLAGS_PUNCTUAL (4096)
#define LIGHTFEATUREFLAGS_AREA (8192)

#define LIGHTFEATUREFLAGS_SSREFLECTION (262144)
//
// UnityEngine.Experimental.Rendering.HDPipeline.TilePass.LightDefinitions: static fields
// UnityEngine.Experimental.Rendering.HDPipeline.LightDefinitions: static fields
//
#define MAX_NR_LIGHTS_PER_CAMERA (1024)
#define MAX_NR_BIG_TILE_LIGHTS_PLUS_ONE (512)

#define LIGHT_FEATURE_MASK_FLAGS_TRANSPARENT (16510976)
#define MATERIAL_FEATURE_MASK_FLAGS (4095)
// Generated from UnityEngine.Experimental.Rendering.HDPipeline.TilePass.SFiniteLightBound
// Generated from UnityEngine.Experimental.Rendering.HDPipeline.SFiniteLightBound
// PackingRules = Exact
struct SFiniteLightBound
{

float radius;
};
// Generated from UnityEngine.Experimental.Rendering.HDPipeline.TilePass.LightVolumeData
// Generated from UnityEngine.Experimental.Rendering.HDPipeline.LightVolumeData
// PackingRules = Exact
struct LightVolumeData
{

};
//
// Accessors for UnityEngine.Experimental.Rendering.HDPipeline.TilePass.SFiniteLightBound
// Accessors for UnityEngine.Experimental.Rendering.HDPipeline.SFiniteLightBound
//
float3 GetBoxAxisX(SFiniteLightBound value)
{

}
//
// Accessors for UnityEngine.Experimental.Rendering.HDPipeline.TilePass.LightVolumeData
// Accessors for UnityEngine.Experimental.Rendering.HDPipeline.LightVolumeData
//
float3 GetLightPos(LightVolumeData value)
{

983
ScriptableRenderPipeline/HDRenderPipeline/Lighting/LightLoop/LightLoop.cs
The file diff is too large to display.

4
ScriptableRenderPipeline/HDRenderPipeline/Lighting/LightLoop/scrbound.compute


#pragma kernel ScreenBoundsAABB
#include "ShaderLibrary/common.hlsl"
#include "TilePass.cs.hlsl"
#include "LightLoop.cs.hlsl"
uniform int g_isOrthographic;
uniform int g_iNrVisibLights;

float fW = vPnts[k].w;
float fS = fW<0 ? -1 : 1;
float fWabs = fW<0 ? (-fW) : fW;
fW = fS * (fWabs<FLT_EPSILON ? FLT_EPSILON : fWabs);
fW = fS * (fWabs<FLT_EPS ? FLT_EPS : fWabs);
float3 vP = float3(vPnts[k].x/fW, vPnts[k].y/fW, vPnts[k].z/fW);
if(k==0) { vMin=vP; vMax=vP; }

4
ScriptableRenderPipeline/HDRenderPipeline/Lighting/LightLoop/materialflags.compute


#include "ShaderLibrary/common.hlsl"
#include "ShaderBase.hlsl"
#include "TilePass.cs.hlsl"
#include "LightLoop.cs.hlsl"
#define UNITY_MATERIAL_LIT // Need to be define before including Material.hlsl
#include "../../Material/Material.hlsl" // This includes Material.hlsl

if (UnpackByte(LOAD_TEXTURE2D(_StencilTexture, uCrd).r) != STENCILLIGHTINGUSAGE_NO_LIGHTING) // This tests whether we are the sky/background or not
{
PositionInputs posInput = GetPositionInput(uCrd, invScreenSize);
materialFeatureFlags |= MATERIAL_FEATURE_FLAGS_FROM_GBUFFER(posInput.unPositionSS);
materialFeatureFlags |= MATERIAL_FEATURE_FLAGS_FROM_GBUFFER(posInput.positionSS);
}
}

2
ScriptableRenderPipeline/HDRenderPipeline/Lighting/LightLoop/lightlistbuild.compute


#include "ShaderLibrary/common.hlsl"
#include "ShaderBase.hlsl"
#include "TilePass.cs.hlsl"
#include "LightLoop.cs.hlsl"
#include "LightingConvexHullUtils.hlsl"
#if !defined(SHADER_API_XBOXONE) && !defined(SHADER_API_PSSL)

6
ScriptableRenderPipeline/HDRenderPipeline/Lighting/LightLoop/lightlistbuild-clustered.compute


#include "ShaderLibrary/common.hlsl"
#include "ShaderBase.hlsl"
#include "TilePass.cs.hlsl"
#include "LightLoop.cs.hlsl"
#include "LightingConvexHullUtils.hlsl"
#if !defined(SHADER_API_XBOXONE) && !defined(SHADER_API_PSSL)

SFiniteLightBound lgtDat = g_data[coarseList[l]];
if( !DoesSphereOverlapTile(V, halfTileSizeAtZDistOne, lgtDat.center.xyz, lgtDat.radius, g_isOrthographic!=0) )
coarseList[l]=0xffffffff;
coarseList[l]=UINT_MAX;
}
#if !defined(SHADER_API_XBOXONE) && !defined(SHADER_API_PSSL)

int offs = 0;
for(int l=0; l<iNrCoarseLights; l++)
{
if(coarseList[l]!=0xffffffff)
if(coarseList[l]!=UINT_MAX)
coarseList[offs++] = coarseList[l];
}
lightOffsSph = offs;

6
ScriptableRenderPipeline/HDRenderPipeline/Lighting/LightLoop/lightlistbuild-bigtile.compute


#pragma kernel BigTileLightListGen
#include "ShaderLibrary/common.hlsl"
#include "TilePass.cs.hlsl"
#include "LightLoop.cs.hlsl"
#include "LightingConvexHullUtils.hlsl"
#include "SortingComputeUtils.hlsl"

SFiniteLightBound lgtDat = g_data[lightsListLDS[l]];
if( !DoesSphereOverlapTile(V, halfTileSizeAtZDistOne, lgtDat.center.xyz, lgtDat.radius, g_isOrthographic!=0) )
lightsListLDS[l]=0xffffffff;
lightsListLDS[l]=UINT_MAX;
}
#if !defined(SHADER_API_XBOXONE) && !defined(SHADER_API_PSSL)

int resf = (positive>0 && negative>0) ? 0 : (positive>0 ? 1 : (negative>0 ? (-1) : 0));
bool bFoundSepPlane = (resh*resf)<0;
if(bFoundSepPlane) lightsListLDS[l]=0xffffffff;
if(bFoundSepPlane) lightsListLDS[l]=UINT_MAX;
}
}
}

4
ScriptableRenderPipeline/HDRenderPipeline/Lighting/LightLoop/builddispatchindirect.compute


#pragma kernel BuildDispatchIndirect
#include "TilePass.cs.hlsl"
#include "ShaderLibrary/Common.hlsl"
#include "LightLoop.cs.hlsl"
#define UNITY_MATERIAL_LIT // Need to be define before including Material.hlsl
#include "../../Material/Material.hlsl" // This includes Material.hlsl

4
ScriptableRenderPipeline/HDRenderPipeline/Lighting/LightLoop/SortingComputeUtils.hlsl


// have to make this sort routine a macro unfortunately because hlsl doesn't take
// groupshared memory of unspecified length as an input parameter to a function.
// maxcapacity_in must be a power of two.
// all data from length_in and up to closest power of two will be filled with 0xffffffff
// all data from length_in and up to closest power of two will be filled with UINT_MAX
for(int t=length+localThreadID; t<N; t+=nrthreads) { data[t]=0xffffffff; } \
for(int t=length+localThreadID; t<N; t+=nrthreads) { data[t]=UINT_MAX; } \
GroupMemoryBarrierWithGroupSync(); \
\
for(int k=2; k<=N; k=2*k) \

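The macro above pads the group-shared index list up to the next power of two with UINT_MAX before running its bitonic sort, so the padding entries sort to the back and every real index ends up in the front slots. A small C# sketch of the padding idea, with a plain array standing in for the groupshared buffer and Array.Sort standing in for the GPU bitonic sort:

using System;

static class SortPaddingExample
{
    // Valid entries live in data[0, length); everything above is filled with
    // uint.MaxValue so an ascending sort keeps real indices at the front.
    public static void PadAndSort(uint[] data, int length)
    {
        for (int t = length; t < data.Length; t++)
            data[t] = uint.MaxValue;
        Array.Sort(data);
    }
}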
6
ScriptableRenderPipeline/HDRenderPipeline/Lighting/LightLoop/ShadowContext.hlsl


#ifndef TILEPASS_SHADOW_CONTEXT_HLSL
#define TILEPASS_SHADOW_CONTEXT_HLSL
#ifndef LIGHTLOOP_SHADOW_CONTEXT_HLSL
#define LIGHTLOOP_SHADOW_CONTEXT_HLSL
#define SHADOWCONTEXT_MAX_TEX2DARRAY 4
#define SHADOWCONTEXT_MAX_TEXCUBEARRAY 0

return sc;
}
#endif // TILEPASS_SHADOW_CONTEXT_HLSL
#endif // LIGHTLOOP_SHADOW_CONTEXT_HLSL

10
ScriptableRenderPipeline/HDRenderPipeline/Lighting/LightLoop/Shadow.hlsl


#ifndef TILEPASS_SHADOW_HLSL
#define TILEPASS_SHADOW_HLSL
#ifndef LIGHTLOOP_SHADOW_HLSL
#define LIGHTLOOP_SHADOW_HLSL
#define SHADOW_DISPATCH_USE_CUSTOM_DIRECTIONAL
#define SHADOW_DISPATCH_USE_CUSTOM_PUNCTUAL

return EvalShadow_CascadedDepth_Blend( shadowContext, algo, tex, compSamp, positionWS, normalWS, shadowDataIndex, L );
}
float GetDirectionalShadowAttenuation( ShadowContext shadowContext, float3 positionWS, float3 normalWS, int shadowDataIndex, float3 L, float2 unPositionSS )
float GetDirectionalShadowAttenuation( ShadowContext shadowContext, float3 positionWS, float3 normalWS, int shadowDataIndex, float3 L, float2 positionSS )
{
return GetDirectionalShadowAttenuation( shadowContext, positionWS, normalWS, shadowDataIndex, L );
}

return EvalShadow_PunctualDepth( shadowContext, algo, tex, compSamp, positionWS, normalWS, shadowDataIndex, L );
#endif
}
float GetPunctualShadowAttenuation( ShadowContext shadowContext, float3 positionWS, float3 normalWS, int shadowDataIndex, float4 L, float2 unPositionSS )
float GetPunctualShadowAttenuation( ShadowContext shadowContext, float3 positionWS, float3 normalWS, int shadowDataIndex, float4 L, float2 positionSS )
{
return GetPunctualShadowAttenuation( shadowContext, positionWS, normalWS, shadowDataIndex, L );
}

#endif // TILEPASS_SHADOW_HLSL
#endif // LIGHTLOOP_SHADOW_HLSL

2
ScriptableRenderPipeline/HDRenderPipeline/Lighting/LightLoop/LightingConvexHullUtils.hlsl


#ifndef __LIGHTINGCONVEXHULLUTILS_H__
#define __LIGHTINGCONVEXHULLUTILS_H__
#include "TilePass.cs.hlsl"
#include "LightLoop.cs.hlsl"
float3 GetHullVertex(const float3 boxX, const float3 boxY, const float3 boxZ, const float3 center, const float2 scaleXY, const int p)
{

7
ScriptableRenderPipeline/HDRenderPipeline/Lighting/LightLoop/Deferred.compute


#pragma kernel Deferred_Indirect_ShadowMask_Fptl_Variant25 SHADE_OPAQUE_ENTRY=Deferred_Indirect_ShadowMask_Fptl_Variant25 USE_INDIRECT SHADOWS_SHADOWMASK VARIANT=25
#pragma kernel Deferred_Indirect_ShadowMask_Fptl_Variant26 SHADE_OPAQUE_ENTRY=Deferred_Indirect_ShadowMask_Fptl_Variant26 USE_INDIRECT SHADOWS_SHADOWMASK VARIANT=26
#define LIGHTLOOP_TILE_PASS 1
// deferred opaque always use FPTL
#define USE_FPTL_LIGHTLIST 1

PositionInputs posInput = GetPositionInput(pixelCoord.xy, _ScreenSize.zw, tileCoord);
float depth = LOAD_TEXTURE2D(_MainDepthTexture, posInput.unPositionSS).x;
float depth = LOAD_TEXTURE2D(_MainDepthTexture, posInput.positionSS).x;
// For indirect case: we can still overlap inside a tile with the sky/background, reject it
// Can't rely on stencil as we are in compute shader

BSDFData bsdfData;
BakeLightingData bakeLightingData;
DECODE_FROM_GBUFFER(posInput.unPositionSS, featureFlags, bsdfData, bakeLightingData.bakeDiffuseLighting);
DECODE_FROM_GBUFFER(posInput.positionSS, featureFlags, bsdfData, bakeLightingData.bakeDiffuseLighting);
DecodeShadowMask(LOAD_TEXTURE2D(_ShadowMaskTexture, posInput.unPositionSS), bakeLightingData.bakeShadowMask);
DecodeShadowMask(LOAD_TEXTURE2D(_ShadowMaskTexture, posInput.positionSS), bakeLightingData.bakeShadowMask);
#endif
PreLightData preLightData = GetPreLightData(V, posInput, bsdfData);

4
ScriptableRenderPipeline/HDRenderPipeline/Lighting/LightLoop/ClusteredUtils.hlsl


float SuggestLogBase50(float tileFarPlane)
{
const float C = (float)(1 << g_iLog2NumClusters);
float rangeFittedDistance = clamp((tileFarPlane - g_fNearPlane) / (g_fFarPlane - g_fNearPlane), FLT_EPSILON, 1.0);
float rangeFittedDistance = clamp((tileFarPlane - g_fNearPlane) / (g_fFarPlane - g_fNearPlane), FLT_EPS, 1.0);
float suggested_base = pow((1.0 + sqrt(max(0.0, 1.0 - 4.0 * rangeFittedDistance * (1.0 - rangeFittedDistance)))) / (2.0 * rangeFittedDistance), 2.0 / C); //
return max(g_fClustBase, suggested_base);
}

{
const float C = (float)(1 << g_iLog2NumClusters);
float rangeFittedDistance = clamp((tileFarPlane - g_fNearPlane) / (g_fFarPlane - g_fNearPlane), FLT_EPSILON, 1.0);
float rangeFittedDistance = clamp((tileFarPlane - g_fNearPlane) / (g_fFarPlane - g_fNearPlane), FLT_EPS, 1.0);
float suggested_base = pow((1 / 2.3) * max(0.0, (0.8 / rangeFittedDistance) - 1), 4.0 / (C * 2)); // approximate inverse of d*x^4 + (-x) + (1-d) = 0 - d is normalized distance
return max(g_fClustBase, suggested_base);
}
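SuggestLogBase50 range-fits the tile's far plane into (0, 1] and then solves in closed form for a cluster base that spreads the clusters over that range, never going below the global g_fClustBase. A C# transcription of the first variant, with the shader globals passed in as hypothetical parameters:

using UnityEngine;

static class ClusterBaseExample
{
    public static float SuggestLogBase50(float tileFarPlane, float nearPlane, float farPlane,
                                         int log2NumClusters, float clustBase)
    {
        float C = 1 << log2NumClusters;
        // FLT_EPS in the shader; any small positive epsilon avoids a division by zero.
        float d = Mathf.Clamp((tileFarPlane - nearPlane) / (farPlane - nearPlane), 1e-5f, 1.0f);
        float suggested = Mathf.Pow((1.0f + Mathf.Sqrt(Mathf.Max(0.0f, 1.0f - 4.0f * d * (1.0f - d)))) / (2.0f * d),
                                    2.0f / C);
        return Mathf.Max(clustBase, suggested);
    }
}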

71
ScriptableRenderPipeline/HDRenderPipeline/Material/LayeredLit/Editor/LayeredLitUI.cs


using System;
using UnityEngine;
using UnityEngine.Experimental.Rendering;
using UnityEngine.Experimental.Rendering.HDPipeline;
using System.Linq;

static public void SetupLayersMappingKeywords(Material material)
{
// object scale affect tile
SetKeyword(material, "_LAYER_TILING_COUPLED_WITH_UNIFORM_OBJECT_SCALE", material.GetFloat(kObjectScaleAffectTile) > 0.0f);
CoreUtils.SetKeyword(material, "_LAYER_TILING_COUPLED_WITH_UNIFORM_OBJECT_SCALE", material.GetFloat(kObjectScaleAffectTile) > 0.0f);
SetKeyword(material, "_LAYER_MAPPING_PLANAR_BLENDMASK", UVBlendMaskMapping == UVBaseMapping.Planar);
SetKeyword(material, "_LAYER_MAPPING_TRIPLANAR_BLENDMASK", UVBlendMaskMapping == UVBaseMapping.Triplanar);
CoreUtils.SetKeyword(material, "_LAYER_MAPPING_PLANAR_BLENDMASK", UVBlendMaskMapping == UVBaseMapping.Planar);
CoreUtils.SetKeyword(material, "_LAYER_MAPPING_TRIPLANAR_BLENDMASK", UVBlendMaskMapping == UVBaseMapping.Triplanar);
int numLayer = (int)material.GetFloat(kLayerCount);

SetKeyword(material, "_LAYEREDLIT_4_LAYERS", true);
SetKeyword(material, "_LAYEREDLIT_3_LAYERS", false);
CoreUtils.SetKeyword(material, "_LAYEREDLIT_4_LAYERS", true);
CoreUtils.SetKeyword(material, "_LAYEREDLIT_3_LAYERS", false);
SetKeyword(material, "_LAYEREDLIT_4_LAYERS", false);
SetKeyword(material, "_LAYEREDLIT_3_LAYERS", true);
CoreUtils.SetKeyword(material, "_LAYEREDLIT_4_LAYERS", false);
CoreUtils.SetKeyword(material, "_LAYEREDLIT_3_LAYERS", true);
SetKeyword(material, "_LAYEREDLIT_4_LAYERS", false);
SetKeyword(material, "_LAYEREDLIT_3_LAYERS", false);
CoreUtils.SetKeyword(material, "_LAYEREDLIT_4_LAYERS", false);
CoreUtils.SetKeyword(material, "_LAYEREDLIT_3_LAYERS", false);
}
const string kLayerMappingPlanar = "_LAYER_MAPPING_PLANAR";

string layerUVBaseParam = string.Format("{0}{1}", kUVBase, i);
UVBaseMapping layerUVBaseMapping = (UVBaseMapping)material.GetFloat(layerUVBaseParam);
string currentLayerMappingPlanar = string.Format("{0}{1}", kLayerMappingPlanar, i);
SetKeyword(material, currentLayerMappingPlanar, layerUVBaseMapping == UVBaseMapping.Planar);
CoreUtils.SetKeyword(material, currentLayerMappingPlanar, layerUVBaseMapping == UVBaseMapping.Planar);
SetKeyword(material, currentLayerMappingTriplanar, layerUVBaseMapping == UVBaseMapping.Triplanar);
CoreUtils.SetKeyword(material, currentLayerMappingTriplanar, layerUVBaseMapping == UVBaseMapping.Triplanar);
string uvBase = string.Format("{0}{1}", kUVBase, i);
string uvDetail = string.Format("{0}{1}", kUVDetail, i);

{
NormalMapSpace normalMapSpace = ((NormalMapSpace)material.GetFloat(kNormalMapSpace + i));
SetKeyword(material, "_NORMALMAP_TANGENT_SPACE" + i, normalMapSpace == NormalMapSpace.TangentSpace);
CoreUtils.SetKeyword(material, "_NORMALMAP_TANGENT_SPACE" + i, normalMapSpace == NormalMapSpace.TangentSpace);
SetKeyword(material, "_NORMALMAP" + i, material.GetTexture(kNormalMap + i) || material.GetTexture(kDetailMap + i));
SetKeyword(material, "_BENTNORMALMAP" + i, material.GetTexture(kBentNormalMap + i));
CoreUtils.SetKeyword(material, "_NORMALMAP" + i, material.GetTexture(kNormalMap + i) || material.GetTexture(kDetailMap + i));
CoreUtils.SetKeyword(material, "_BENTNORMALMAP" + i, material.GetTexture(kBentNormalMap + i));
SetKeyword(material, "_NORMALMAP" + i, material.GetTexture(kNormalMapOS + i) || material.GetTexture(kDetailMap + i));
SetKeyword(material, "_BENTNORMALMAP" + i, material.GetTexture(kBentNormalMapOS + i));
CoreUtils.SetKeyword(material, "_NORMALMAP" + i, material.GetTexture(kNormalMapOS + i) || material.GetTexture(kDetailMap + i));
CoreUtils.SetKeyword(material, "_BENTNORMALMAP" + i, material.GetTexture(kBentNormalMapOS + i));
SetKeyword(material, "_MASKMAP" + i, material.GetTexture(kMaskMap + i));
CoreUtils.SetKeyword(material, "_MASKMAP" + i, material.GetTexture(kMaskMap + i));
SetKeyword(material, "_DETAIL_MAP" + i, material.GetTexture(kDetailMap + i));
CoreUtils.SetKeyword(material, "_DETAIL_MAP" + i, material.GetTexture(kDetailMap + i));
SetKeyword(material, "_HEIGHTMAP" + i, material.GetTexture(kHeightMap + i));
CoreUtils.SetKeyword(material, "_HEIGHTMAP" + i, material.GetTexture(kHeightMap + i));
SetKeyword(material, "_SUBSURFACE_RADIUS_MAP" + i, material.GetTexture(kSubsurfaceRadiusMap + i));
SetKeyword(material, "_THICKNESSMAP" + i, material.GetTexture(kThicknessMap + i));
CoreUtils.SetKeyword(material, "_SUBSURFACE_RADIUS_MAP" + i, material.GetTexture(kSubsurfaceRadiusMap + i));
CoreUtils.SetKeyword(material, "_THICKNESSMAP" + i, material.GetTexture(kThicknessMap + i));
SetKeyword(material, "_INFLUENCEMASK_MAP", material.GetTexture(kLayerInfluenceMaskMap) && material.GetFloat(kkUseMainLayerInfluence) != 0.0f);
CoreUtils.SetKeyword(material, "_INFLUENCEMASK_MAP", material.GetTexture(kLayerInfluenceMaskMap) && material.GetFloat(kkUseMainLayerInfluence) != 0.0f);
SetKeyword(material, "_EMISSIVE_COLOR_MAP", material.GetTexture(kEmissiveColorMap));
SetKeyword(material, "_ENABLESPECULAROCCLUSION", material.GetFloat(kEnableSpecularOcclusion) > 0.0f);
CoreUtils.SetKeyword(material, "_EMISSIVE_COLOR_MAP", material.GetTexture(kEmissiveColorMap));
CoreUtils.SetKeyword(material, "_ENABLESPECULAROCCLUSION", material.GetFloat(kEnableSpecularOcclusion) > 0.0f);
SetKeyword(material, "_MAIN_LAYER_INFLUENCE_MODE", material.GetFloat(kkUseMainLayerInfluence) != 0.0f);
CoreUtils.SetKeyword(material, "_MAIN_LAYER_INFLUENCE_MODE", material.GetFloat(kkUseMainLayerInfluence) != 0.0f);
SetKeyword(material, "_LAYER_MASK_VERTEX_COLOR_MUL", true);
SetKeyword(material, "_LAYER_MASK_VERTEX_COLOR_ADD", false);
CoreUtils.SetKeyword(material, "_LAYER_MASK_VERTEX_COLOR_MUL", true);
CoreUtils.SetKeyword(material, "_LAYER_MASK_VERTEX_COLOR_ADD", false);
SetKeyword(material, "_LAYER_MASK_VERTEX_COLOR_MUL", false);
SetKeyword(material, "_LAYER_MASK_VERTEX_COLOR_ADD", true);
CoreUtils.SetKeyword(material, "_LAYER_MASK_VERTEX_COLOR_MUL", false);
CoreUtils.SetKeyword(material, "_LAYER_MASK_VERTEX_COLOR_ADD", true);
SetKeyword(material, "_LAYER_MASK_VERTEX_COLOR_MUL", false);
SetKeyword(material, "_LAYER_MASK_VERTEX_COLOR_ADD", false);
CoreUtils.SetKeyword(material, "_LAYER_MASK_VERTEX_COLOR_MUL", false);
CoreUtils.SetKeyword(material, "_LAYER_MASK_VERTEX_COLOR_ADD", false);
SetKeyword(material, "_HEIGHT_BASED_BLEND", useHeightBasedBlend);
CoreUtils.SetKeyword(material, "_HEIGHT_BASED_BLEND", useHeightBasedBlend);
bool useDensityModeEnable = false;
for (int i = 0; i < material.GetInt(kLayerCount); ++i )

SetKeyword(material, "_DENSITY_MODE", useDensityModeEnable);
CoreUtils.SetKeyword(material, "_DENSITY_MODE", useDensityModeEnable);
SetKeyword(material, "_MATID_SSS", materialId == Lit.MaterialId.LitSSS);
//SetKeyword(material, "_MATID_STANDARD", materialId == Lit.MaterialId.LitStandard); // See comment in Lit.shader, it is the default, we don't define it
CoreUtils.SetKeyword(material, "_MATID_SSS", materialId == Lit.MaterialId.LitSSS);
//CoreUtils.SetKeyword(material, "_MATID_STANDARD", materialId == Lit.MaterialId.LitStandard); // See comment in Lit.shader, it is the default, we don't define it
}
private void DoEmissiveGUI(Material material)
{

2
ScriptableRenderPipeline/HDRenderPipeline/Material/LayeredLit/LayeredLitData.hlsl


void GetSurfaceAndBuiltinData(FragInputs input, float3 V, inout PositionInputs posInput, out SurfaceData surfaceData, out BuiltinData builtinData)
{
#ifdef LOD_FADE_CROSSFADE // enable dithering LOD transition if user select CrossFade transition in LOD group
LODDitheringTransition(posInput.unPositionSS, unity_LODFade.x);
LODDitheringTransition(posInput.positionSS, unity_LODFade.x);
#endif
ApplyDoubleSidedFlipOrMirror(input); // Apply double sided flip on the vertex normal

19
ScriptableRenderPipeline/HDRenderPipeline/Material/Lit/Editor/BaseLitUI.cs


using System;
using UnityEngine;
using UnityEngine.Experimental.Rendering;
using UnityEngine.Experimental.Rendering.HDPipeline;
namespace UnityEditor.Experimental.Rendering.HDPipeline

bool enablePixelDisplacement = (DisplacementMode)material.GetFloat(kDisplacementMode) == DisplacementMode.Pixel;
bool enableTessellationDisplacement = ((DisplacementMode)material.GetFloat(kDisplacementMode) == DisplacementMode.Tessellation) && material.HasProperty(kTessellationMode);
SetKeyword(material, "_VERTEX_DISPLACEMENT", enableVertexDisplacement);
SetKeyword(material, "_PIXEL_DISPLACEMENT", enablePixelDisplacement);
CoreUtils.SetKeyword(material, "_VERTEX_DISPLACEMENT", enableVertexDisplacement);
CoreUtils.SetKeyword(material, "_PIXEL_DISPLACEMENT", enablePixelDisplacement);
SetKeyword(material, "_TESSELLATION_DISPLACEMENT", enableTessellationDisplacement);
CoreUtils.SetKeyword(material, "_TESSELLATION_DISPLACEMENT", enableTessellationDisplacement);
SetKeyword(material, "_VERTEX_DISPLACEMENT_LOCK_OBJECT_SCALE", displacementLockObjectScale && (enableVertexDisplacement || enableTessellationDisplacement));
SetKeyword(material, "_PIXEL_DISPLACEMENT_LOCK_OBJECT_SCALE", displacementLockObjectScale && enablePixelDisplacement);
SetKeyword(material, "_DISPLACEMENT_LOCK_TILING_SCALE", displacementLockTilingScale && enableDisplacement);
CoreUtils.SetKeyword(material, "_VERTEX_DISPLACEMENT_LOCK_OBJECT_SCALE", displacementLockObjectScale && (enableVertexDisplacement || enableTessellationDisplacement));
CoreUtils.SetKeyword(material, "_PIXEL_DISPLACEMENT_LOCK_OBJECT_SCALE", displacementLockObjectScale && enablePixelDisplacement);
CoreUtils.SetKeyword(material, "_DISPLACEMENT_LOCK_TILING_SCALE", displacementLockTilingScale && enableDisplacement);
SetKeyword(material, "_VERTEX_WIND", windEnabled);
CoreUtils.SetKeyword(material, "_VERTEX_WIND", windEnabled);
SetKeyword(material, "_DEPTHOFFSET_ON", depthOffsetEnable);
CoreUtils.SetKeyword(material, "_DEPTHOFFSET_ON", depthOffsetEnable);
SetKeyword(material, "_TESSELLATION_PHONG", tessMode == TessellationMode.Phong);
CoreUtils.SetKeyword(material, "_TESSELLATION_PHONG", tessMode == TessellationMode.Phong);
}
SetupMainTexForAlphaTestGI("_BaseColorMap", "_BaseColor", material);

54
ScriptableRenderPipeline/HDRenderPipeline/Material/Lit/Editor/LitUI.cs


public static GUIContent smoothnessRemappingText = new GUIContent("Smoothness Remapping", "Smoothness remapping");
public static GUIContent aoRemappingText = new GUIContent("AmbientOcclusion Remapping", "AmbientOcclusion remapping");
public static GUIContent maskMapSText = new GUIContent("Mask Map - M(R), AO(G), D(B), S(A)", "Mask map");
public static GUIContent maskMapSpecularText = new GUIContent("Mask Map - AO(G), D(B), S(A)", "Mask map");
public static GUIContent maskMapSpecularText = new GUIContent("Mask Map - AO(G), D(B), S(A)", "Mask map");
public static GUIContent normalMapSpaceText = new GUIContent("Normal Map space", "");
public static GUIContent normalMapText = new GUIContent("Normal Map", "Normal Map (BC7/BC5/DXT5(nm))");

// Note: keywords must be based on Material value not on MaterialProperty due to multi-edit & material animation
// (MaterialProperty value might come from renderer material property block)
SetKeyword(material, "_MAPPING_PLANAR", ((UVBaseMapping)material.GetFloat(kUVBase)) == UVBaseMapping.Planar);
SetKeyword(material, "_MAPPING_TRIPLANAR", ((UVBaseMapping)material.GetFloat(kUVBase)) == UVBaseMapping.Triplanar);
SetKeyword(material, "_NORMALMAP_TANGENT_SPACE", (normalMapSpace == NormalMapSpace.TangentSpace));
CoreUtils.SetKeyword(material, "_MAPPING_PLANAR", ((UVBaseMapping)material.GetFloat(kUVBase)) == UVBaseMapping.Planar);
CoreUtils.SetKeyword(material, "_MAPPING_TRIPLANAR", ((UVBaseMapping)material.GetFloat(kUVBase)) == UVBaseMapping.Triplanar);
CoreUtils.SetKeyword(material, "_NORMALMAP_TANGENT_SPACE", (normalMapSpace == NormalMapSpace.TangentSpace));
SetKeyword(material, "_NORMALMAP", material.GetTexture(kNormalMap) || material.GetTexture(kDetailMap));
SetKeyword(material, "_TANGENTMAP", material.GetTexture(kTangentMap));
SetKeyword(material, "_BENTNORMALMAP", material.GetTexture(kBentNormalMap));
CoreUtils.SetKeyword(material, "_NORMALMAP", material.GetTexture(kNormalMap) || material.GetTexture(kDetailMap));
CoreUtils.SetKeyword(material, "_TANGENTMAP", material.GetTexture(kTangentMap));
CoreUtils.SetKeyword(material, "_BENTNORMALMAP", material.GetTexture(kBentNormalMap));
SetKeyword(material, "_NORMALMAP", material.GetTexture(kNormalMapOS) || material.GetTexture(kDetailMap));
SetKeyword(material, "_TANGENTMAP", material.GetTexture(kTangentMapOS));
SetKeyword(material, "_BENTNORMALMAP", material.GetTexture(kBentNormalMapOS));
CoreUtils.SetKeyword(material, "_NORMALMAP", material.GetTexture(kNormalMapOS) || material.GetTexture(kDetailMap));
CoreUtils.SetKeyword(material, "_TANGENTMAP", material.GetTexture(kTangentMapOS));
CoreUtils.SetKeyword(material, "_BENTNORMALMAP", material.GetTexture(kBentNormalMapOS));
SetKeyword(material, "_MASKMAP", material.GetTexture(kMaskMap));
SetKeyword(material, "_EMISSIVE_COLOR_MAP", material.GetTexture(kEmissiveColorMap));
SetKeyword(material, "_ENABLESPECULAROCCLUSION", material.GetFloat(kEnableSpecularOcclusion) > 0.0f);
SetKeyword(material, "_HEIGHTMAP", material.GetTexture(kHeightMap));
SetKeyword(material, "_ANISOTROPYMAP", material.GetTexture(kAnisotropyMap));
SetKeyword(material, "_DETAIL_MAP", material.GetTexture(kDetailMap));
SetKeyword(material, "_SUBSURFACE_RADIUS_MAP", material.GetTexture(kSubsurfaceRadiusMap));
SetKeyword(material, "_THICKNESSMAP", material.GetTexture(kThicknessMap));
SetKeyword(material, "_SPECULARCOLORMAP", material.GetTexture(kSpecularColorMap));
CoreUtils.SetKeyword(material, "_MASKMAP", material.GetTexture(kMaskMap));
CoreUtils.SetKeyword(material, "_EMISSIVE_COLOR_MAP", material.GetTexture(kEmissiveColorMap));
CoreUtils.SetKeyword(material, "_ENABLESPECULAROCCLUSION", material.GetFloat(kEnableSpecularOcclusion) > 0.0f);
CoreUtils.SetKeyword(material, "_HEIGHTMAP", material.GetTexture(kHeightMap));
CoreUtils.SetKeyword(material, "_ANISOTROPYMAP", material.GetTexture(kAnisotropyMap));
CoreUtils.SetKeyword(material, "_DETAIL_MAP", material.GetTexture(kDetailMap));
CoreUtils.SetKeyword(material, "_SUBSURFACE_RADIUS_MAP", material.GetTexture(kSubsurfaceRadiusMap));
CoreUtils.SetKeyword(material, "_THICKNESSMAP", material.GetTexture(kThicknessMap));
CoreUtils.SetKeyword(material, "_SPECULARCOLORMAP", material.GetTexture(kSpecularColorMap));
bool needUV2 = (UVDetailMapping)material.GetFloat(kUVDetail) == UVDetailMapping.UV2 && (UVBaseMapping)material.GetFloat(kUVBase) == UVBaseMapping.UV0;
bool needUV3 = (UVDetailMapping)material.GetFloat(kUVDetail) == UVDetailMapping.UV3 && (UVBaseMapping)material.GetFloat(kUVBase) == UVBaseMapping.UV0;

Lit.MaterialId materialId = (Lit.MaterialId)material.GetFloat(kMaterialID);
SetKeyword(material, "_MATID_SSS", materialId == Lit.MaterialId.LitSSS);
//SetKeyword(material, "_MATID_STANDARD", materialId == Lit.MaterialId.LitStandard); // See comment in Lit.shader, it is the default, we don't define it
SetKeyword(material, "_MATID_ANISO", materialId == Lit.MaterialId.LitAniso);
SetKeyword(material, "_MATID_SPECULAR", materialId == Lit.MaterialId.LitSpecular);
SetKeyword(material, "_MATID_CLEARCOAT", materialId == Lit.MaterialId.LitClearCoat);
CoreUtils.SetKeyword(material, "_MATID_SSS", materialId == Lit.MaterialId.LitSSS);
//CoreUtils.SetKeyword(material, "_MATID_STANDARD", materialId == Lit.MaterialId.LitStandard); // See comment in Lit.shader, it is the default, we don't define it
CoreUtils.SetKeyword(material, "_MATID_ANISO", materialId == Lit.MaterialId.LitAniso);
CoreUtils.SetKeyword(material, "_MATID_SPECULAR", materialId == Lit.MaterialId.LitSpecular);
CoreUtils.SetKeyword(material, "_MATID_CLEARCOAT", materialId == Lit.MaterialId.LitClearCoat);
SetKeyword(material, "_REFRACTION_PLANE", (refractionModeValue == Lit.RefractionMode.Plane) && canHaveRefraction);
SetKeyword(material, "_REFRACTION_SPHERE", (refractionModeValue == Lit.RefractionMode.Sphere) && canHaveRefraction);
SetKeyword(material, "_TRANSMITTANCECOLORMAP", material.GetTexture(kTransmittanceColorMap) && canHaveRefraction);
CoreUtils.SetKeyword(material, "_REFRACTION_PLANE", (refractionModeValue == Lit.RefractionMode.Plane) && canHaveRefraction);
CoreUtils.SetKeyword(material, "_REFRACTION_SPHERE", (refractionModeValue == Lit.RefractionMode.Sphere) && canHaveRefraction);
CoreUtils.SetKeyword(material, "_TRANSMITTANCECOLORMAP", material.GetTexture(kTransmittanceColorMap) && canHaveRefraction);
}
}
} // namespace UnityEditor
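The edits in these material inspectors route every keyword toggle through CoreUtils.SetKeyword, and the note above stresses reading values from the Material itself rather than the MaterialProperty so multi-edit and animated materials stay consistent. A hedged sketch of what such a keyword helper typically does (the actual CoreUtils implementation may differ in details):

using UnityEngine;

static class KeywordExample
{
    // Enable or disable a shader keyword on a material from a single bool,
    // the pattern behind the CoreUtils.SetKeyword calls above.
    public static void SetKeyword(Material material, string keyword, bool state)
    {
        if (state)
            material.EnableKeyword(keyword);
        else
            material.DisableKeyword(keyword);
    }
}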

21
ScriptableRenderPipeline/HDRenderPipeline/Material/Lit/Lit.cs


[SurfaceDataAttributes("", true)]
public Vector3 normalWS;
public float perceptualRoughness;
public float roughness;
public int materialId;
// MaterialId dependent attribute

public float thickness;
public int subsurfaceProfile;
public bool enableTransmission; // Read from the SSS profile
public bool useThinObjectMode; // Read from the SSS profile
public bool useThickObjectMode; // Read from the SSS profile
public Vector3 transmittance;
// SpecColor

public enum GBufferMaterial
{
// Note: This count doesn't include the velocity buffer. On shader and csharp side the velocity buffer will be added by the framework
Count = (ShaderConfig.k_PackgbufferInU16 == 1) ? 2 : 4
Count = 4
};
//-----------------------------------------------------------------------------

RenderTextureFormat[] m_RTFormat4 = { RenderTextureFormat.ARGB32, RenderTextureFormat.ARGB2101010, RenderTextureFormat.ARGB32, RenderTextureFormat.RGB111110Float };
RenderTextureReadWrite[] m_RTReadWrite4 = { RenderTextureReadWrite.sRGB, RenderTextureReadWrite.Linear, RenderTextureReadWrite.Linear, RenderTextureReadWrite.Linear };
// TODO: Just discovered that Unity doesn't support unsigned 16 RT format.
RenderTextureFormat[] m_RTFormat2 = { RenderTextureFormat.ARGBInt, RenderTextureFormat.ARGBInt };
RenderTextureReadWrite[] m_RTReadWrite2 = { RenderTextureReadWrite.Linear, RenderTextureReadWrite.Linear };
if (ShaderConfig.s_PackgbufferInU16 == 1)
{
RTFormat = m_RTFormat2;
RTReadWrite = m_RTReadWrite2;
}
else
{
RTFormat = m_RTFormat4;
RTReadWrite = m_RTReadWrite4;
}
RTFormat = m_RTFormat4;
RTReadWrite = m_RTReadWrite4;
}
//-----------------------------------------------------------------------------

47
ScriptableRenderPipeline/HDRenderPipeline/Material/Lit/Lit.cs.hlsl


#define DEBUGVIEW_LIT_BSDFDATA_SPECULAR_OCCLUSION (1032)
#define DEBUGVIEW_LIT_BSDFDATA_NORMAL_WS (1033)
#define DEBUGVIEW_LIT_BSDFDATA_PERCEPTUAL_ROUGHNESS (1034)
#define DEBUGVIEW_LIT_BSDFDATA_ROUGHNESS (1035)
#define DEBUGVIEW_LIT_BSDFDATA_MATERIAL_ID (1036)
#define DEBUGVIEW_LIT_BSDFDATA_TANGENT_WS (1037)
#define DEBUGVIEW_LIT_BSDFDATA_BITANGENT_WS (1038)
#define DEBUGVIEW_LIT_BSDFDATA_ROUGHNESS_T (1039)
#define DEBUGVIEW_LIT_BSDFDATA_ROUGHNESS_B (1040)
#define DEBUGVIEW_LIT_BSDFDATA_ANISOTROPY (1041)
#define DEBUGVIEW_LIT_BSDFDATA_SUBSURFACE_RADIUS (1042)
#define DEBUGVIEW_LIT_BSDFDATA_THICKNESS (1043)
#define DEBUGVIEW_LIT_BSDFDATA_SUBSURFACE_PROFILE (1044)
#define DEBUGVIEW_LIT_BSDFDATA_ENABLE_TRANSMISSION (1045)
#define DEBUGVIEW_LIT_BSDFDATA_USE_THIN_OBJECT_MODE (1046)
#define DEBUGVIEW_LIT_BSDFDATA_TRANSMITTANCE (1047)
#define DEBUGVIEW_LIT_BSDFDATA_COAT_NORMAL_WS (1048)
#define DEBUGVIEW_LIT_BSDFDATA_COAT_COVERAGE (1049)
#define DEBUGVIEW_LIT_BSDFDATA_COAT_IOR (1050)
#define DEBUGVIEW_LIT_BSDFDATA_IOR (1051)
#define DEBUGVIEW_LIT_BSDFDATA_ABSORPTION_COEFFICIENT (1052)
#define DEBUGVIEW_LIT_BSDFDATA_TRANSMITTANCE_MASK (1053)
#define DEBUGVIEW_LIT_BSDFDATA_MATERIAL_ID (1035)
#define DEBUGVIEW_LIT_BSDFDATA_TANGENT_WS (1036)
#define DEBUGVIEW_LIT_BSDFDATA_BITANGENT_WS (1037)
#define DEBUGVIEW_LIT_BSDFDATA_ROUGHNESS_T (1038)
#define DEBUGVIEW_LIT_BSDFDATA_ROUGHNESS_B (1039)
#define DEBUGVIEW_LIT_BSDFDATA_ANISOTROPY (1040)
#define DEBUGVIEW_LIT_BSDFDATA_SUBSURFACE_RADIUS (1041)
#define DEBUGVIEW_LIT_BSDFDATA_THICKNESS (1042)
#define DEBUGVIEW_LIT_BSDFDATA_SUBSURFACE_PROFILE (1043)
#define DEBUGVIEW_LIT_BSDFDATA_ENABLE_TRANSMISSION (1044)
#define DEBUGVIEW_LIT_BSDFDATA_USE_THICK_OBJECT_MODE (1045)
#define DEBUGVIEW_LIT_BSDFDATA_TRANSMITTANCE (1046)
#define DEBUGVIEW_LIT_BSDFDATA_COAT_NORMAL_WS (1047)
#define DEBUGVIEW_LIT_BSDFDATA_COAT_COVERAGE (1048)
#define DEBUGVIEW_LIT_BSDFDATA_COAT_IOR (1049)
#define DEBUGVIEW_LIT_BSDFDATA_IOR (1050)
#define DEBUGVIEW_LIT_BSDFDATA_ABSORPTION_COEFFICIENT (1051)
#define DEBUGVIEW_LIT_BSDFDATA_TRANSMITTANCE_MASK (1052)
//
// UnityEngine.Experimental.Rendering.HDPipeline.Lit+GBufferMaterial: static fields

float specularOcclusion;
float3 normalWS;
float perceptualRoughness;
float roughness;
int materialId;
float3 tangentWS;
float3 bitangentWS;

float thickness;
int subsurfaceProfile;
bool enableTransmission;
bool useThinObjectMode;
bool useThickObjectMode;
float3 transmittance;
float3 coatNormalWS;
float coatCoverage;

case DEBUGVIEW_LIT_BSDFDATA_PERCEPTUAL_ROUGHNESS:
result = bsdfdata.perceptualRoughness.xxx;
break;
case DEBUGVIEW_LIT_BSDFDATA_ROUGHNESS:
result = bsdfdata.roughness.xxx;
break;
case DEBUGVIEW_LIT_BSDFDATA_MATERIAL_ID:
result = GetIndexColor(bsdfdata.materialId);
break;

case DEBUGVIEW_LIT_BSDFDATA_ENABLE_TRANSMISSION:
result = (bsdfdata.enableTransmission) ? float3(1.0, 1.0, 1.0) : float3(0.0, 0.0, 0.0);
break;
case DEBUGVIEW_LIT_BSDFDATA_USE_THIN_OBJECT_MODE:
result = (bsdfdata.useThinObjectMode) ? float3(1.0, 1.0, 1.0) : float3(0.0, 0.0, 0.0);
case DEBUGVIEW_LIT_BSDFDATA_USE_THICK_OBJECT_MODE:
result = (bsdfdata.useThickObjectMode) ? float3(1.0, 1.0, 1.0) : float3(0.0, 0.0, 0.0);
break;
case DEBUGVIEW_LIT_BSDFDATA_TRANSMITTANCE:
result = bsdfdata.transmittance;

687
ScriptableRenderPipeline/HDRenderPipeline/Material/Lit/Lit.hlsl


# endif
#endif
// In case we pack data uint16 buffer we need to change the output render target format to uint16
// TODO: Is there a way to automate these output types based on the format declared in Lit.cs?
#if SHADEROPTIONS_PACK_GBUFFER_IN_U16
#define GBufferType0 uint4
#define GBufferType1 uint4
// TODO: How to abstract that ? We would like to avoid this PS4 test here
#ifdef SHADER_API_PSSL
// On PS4 we need to specify manually the format of the output render target, output type is not enough
#pragma PSSL_target_output_format(target 0 FMT_UINT16_ABGR)
#pragma PSSL_target_output_format(target 1 FMT_UINT16_ABGR)
#endif
#else
#endif
// GBuffer texture declaration
TEXTURE2D(_GBufferTexture0);

#endif
// Use Lambert diffuse instead of Disney diffuse
// #define LIT_DIFFUSE_LAMBERT_BRDF
// Use optimization of Precomputing LambdaV
// TODO: Test if this is a win
// #define LIT_USE_BSDF_PRE_LAMBDAV
#define LIT_USE_GGX_ENERGY_COMPENSATION
// Sampler use by area light, gaussian pyramid, ambient occlusion etc...

float4 _ThicknessRemaps[SSS_N_PROFILES]; // R: start, G = end - start, BA unused
float4 _ShapeParams[SSS_N_PROFILES]; // RGB = S = 1 / D, A = filter radius
float4 _TransmissionTints[SSS_N_PROFILES]; // RGB = 1/4 * color, A = unused
float4 _WorldScales[SSS_N_PROFILES]; // X = meters per world unit; Y = world units per meter
CBUFFER_END
//-----------------------------------------------------------------------------

uint2 depthSize = uint2(_PyramidDepthMipSize.xy);
// Get the depth of the approximated back plane
float pyramidDepth = LOAD_TEXTURE2D_LOD(_PyramidDepthTexture, posInputs.positionSS * (depthSize >> 2), 2).r;
float pyramidDepth = LOAD_TEXTURE2D_LOD(_PyramidDepthTexture, posInputs.positionNDC * (depthSize >> 2), 2).r;
float depthFromPositionInput = depth - posInputs.depthVS;
float depthFromPositionInput = depth - posInputs.linearDepth;
float offset = dot(-V, positionWS - posInputs.positionWS);
float depthFromPosition = depthFromPositionInput - offset;

// However, as we use material classification it is hard to be fully separated.
// The dependency is defined in this include, where there are shared defines for material and lighting in the deferred material case.
// If a user does a lighting architecture without material classification, this can be removed.
#include "../../Lighting/TilePass/TilePass.cs.hlsl"
#include "../../Lighting/LightLoop/LightLoop.cs.hlsl"
static int g_FeatureFlags = 0xFFFFFFFF;
static uint g_FeatureFlags = UINT_MAX;
bool HasMaterialFeatureFlag(int flag)
bool HasMaterialFeatureFlag(uint flag)
{
return ((g_FeatureFlags & flag) != 0);
}

bsdfData.fresnel0 = lerp(val.xxx, baseColor, metallic);
}
void FillMaterialIdAnisoData(float roughness, float3 normalWS, float3 tangentWS, float anisotropy, inout BSDFData bsdfData)
void FillMaterialIdSSSData(float3 baseColor, inout BSDFData bsdfData)
bsdfData.tangentWS = tangentWS;
bsdfData.bitangentWS = cross(normalWS, tangentWS);
ConvertAnisotropyToRoughness(roughness, anisotropy, bsdfData.roughnessT, bsdfData.roughnessB);
bsdfData.anisotropy = anisotropy;
bsdfData.diffuseColor = baseColor;
bsdfData.fresnel0 = SKIN_SPECULAR_VALUE; // TODO take from subsurfaceProfile instead
void FillMaterialIdSSSData(float3 baseColor, int subsurfaceProfile, float subsurfaceRadius, float thickness, inout BSDFData bsdfData)
void FillTransmissionData(int subsurfaceProfile, float subsurfaceRadius, float thickness, inout BSDFData bsdfData)
bsdfData.diffuseColor = baseColor;
bsdfData.fresnel0 = SKIN_SPECULAR_VALUE; // TODO take from subsurfaceProfile instead
bsdfData.subsurfaceProfile = subsurfaceProfile;
bsdfData.subsurfaceRadius = subsurfaceRadius;
bsdfData.thickness = _ThicknessRemaps[subsurfaceProfile].x + _ThicknessRemaps[subsurfaceProfile].y * thickness;
bsdfData.enableTransmission = _EnableSSSAndTransmission != 0;
bsdfData.enableTransmission = _EnableSSSAndTransmission != 0 && transmissionMode != SSS_TRSM_MODE_NONE;
if (bsdfData.enableTransmission)
if (bsdfData.enableTransmission && transmissionMode != SSS_TRSM_MODE_NONE)
bsdfData.useThinObjectMode = transmissionMode == SSS_TRSM_MODE_THIN;
bsdfData.subsurfaceProfile = subsurfaceProfile;
bsdfData.subsurfaceRadius = subsurfaceRadius;
bsdfData.useThickObjectMode = transmissionMode != SSS_TRSM_MODE_THIN;
bsdfData.thickness = _ThicknessRemaps[subsurfaceProfile].x + _ThicknessRemaps[subsurfaceProfile].y * thickness;
if (_UseDisneySSS != 0)
{

BSDFData bsdfData;
ZERO_INITIALIZE(BSDFData, bsdfData);
bsdfData.specularOcclusion = surfaceData.specularOcclusion;
bsdfData.normalWS = surfaceData.normalWS;
bsdfData.materialId = surfaceData.materialId;
bsdfData.specularOcclusion = surfaceData.specularOcclusion;
bsdfData.normalWS = surfaceData.normalWS;
bsdfData.anisotropy = surfaceData.anisotropy;
bsdfData.roughness = PerceptualRoughnessToRoughness(bsdfData.perceptualRoughness);
bsdfData.materialId = surfaceData.materialId;
ConvertAnisotropyToRoughness(bsdfData.perceptualRoughness, bsdfData.anisotropy, bsdfData.roughnessT, bsdfData.roughnessB);
if (surfaceData.materialId != MATERIALID_LIT_ANISO)
{
// Notify the material classification system that we should not use the anisotropic GGX for forward rendering.
// Forward rendering implies automatic material classification, so normally we don't use our material classification
// system, and set 'g_FeatureFlags' to UINT_MAX. However, since our rendering pipeline supports both forward and
// deferred rendering, 'g_FeatureFlags' is always available, so we can use it to control GGX evaluation.
g_FeatureFlags &= ~MATERIALFEATUREFLAGS_LIT_ANISO;
}
// IMPORTANT: In case of foward or gbuffer pass we must know what we are statically, so compiler can do compile time optimization
if (bsdfData.materialId == MATERIALID_LIT_STANDARD)

}
else if (bsdfData.materialId == MATERIALID_LIT_SSS)
{
FillMaterialIdSSSData(surfaceData.baseColor, surfaceData.subsurfaceProfile, surfaceData.subsurfaceRadius, surfaceData.thickness, bsdfData);
FillMaterialIdSSSData(surfaceData.baseColor, bsdfData);
FillTransmissionData(surfaceData.subsurfaceProfile, surfaceData.subsurfaceRadius, surfaceData.thickness, bsdfData);
FillMaterialIdAnisoData(bsdfData.roughness, surfaceData.normalWS, surfaceData.tangentWS, surfaceData.anisotropy, bsdfData);
bsdfData.tangentWS = surfaceData.tangentWS;
bsdfData.bitangentWS = cross(bsdfData.normalWS, bsdfData.tangentWS);
}
else if (bsdfData.materialId == MATERIALID_LIT_CLEAR_COAT)
{

// Must be in sync with RT declared in HDRenderPipeline.cs ::Rebuild
void EncodeIntoGBuffer( SurfaceData surfaceData,
float3 bakeDiffuseLighting,
uint2 unPositionSS,
#if SHADEROPTIONS_PACK_GBUFFER_IN_U16
out GBufferType0 outGBufferU0,
out GBufferType1 outGBufferU1
#else
uint2 positionSS,
#endif
#if SHADEROPTIONS_PACK_GBUFFER_IN_U16
float4 outGBuffer0, outGBuffer1, outGBuffer2, outGBuffer3;
#endif
ApplyDebugToSurfaceData(surfaceData);
// RT0 - 8:8:8:8 sRGB

// Lighting
outGBuffer3 = float4(bakeDiffuseLighting, 0.0);
#if SHADEROPTIONS_PACK_GBUFFER_IN_U16
// Now pack all buffer into 2 uint buffer
// We don't have hardware sRGB to store base color in case we pack int u16, so rather than perform full sRGB encoding just use cheap gamma20
// TODO: test alternative like FastLinearToSRGB to better match unpacked gbuffer
outGBuffer0.xyz = LinearToGamma20(outGBuffer0.xyz);
uint packedGBuffer1 = PackR10G10B10A2(outGBuffer1);
outGBufferU0 = uint4( PackFloatToUInt(outGBuffer0.x, 8, 0) | PackFloatToUInt(outGBuffer0.y, 8, 8),
PackFloatToUInt(outGBuffer0.z, 8, 0) | PackFloatToUInt(outGBuffer0.w, 8, 8),
(packedGBuffer1 & 0x0000FFFF),
(packedGBuffer1 & 0xFFFF0000) >> 16);
uint packedGBuffer3 = PackToR11G11B10f(outGBuffer3.xyz);
outGBufferU1 = uint4( PackFloatToUInt(outGBuffer2.x, 8, 0) | PackFloatToUInt(outGBuffer2.y, 8, 8),
PackFloatToUInt(outGBuffer2.z, 8, 0) | PackFloatToUInt(outGBuffer2.w, 8, 8),
(packedGBuffer3 & 0x0000FFFF),
(packedGBuffer3 & 0xFFFF0000) >> 16);
#endif
}
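The SHADEROPTIONS_PACK_GBUFFER_IN_U16 path above squeezes the four 8-bit channels of a GBuffer target into the 16-bit lanes of a uint4, alongside 10:10:10:2 and 11:11:10 packed lanes for the other targets. A small C# sketch of the core two-channel byte packing, assuming inputs already sit in [0, 1]:

using UnityEngine;

static class GBufferPackingExample
{
    // Pack two [0,1] values as 8-bit unorms into one 16-bit lane,
    // mirroring PackFloatToUInt(x, 8, 0) | PackFloatToUInt(y, 8, 8) above.
    public static uint PackTwoUNorm8(float a, float b)
    {
        uint ua = (uint)(Mathf.Clamp01(a) * 255.0f + 0.5f);
        uint ub = (uint)(Mathf.Clamp01(b) * 255.0f + 0.5f);
        return ua | (ub << 8);
    }

    // Inverse, mirroring UnpackUIntToFloat(v, 8, 0) and UnpackUIntToFloat(v, 8, 8).
    public static void UnpackTwoUNorm8(uint packed, out float a, out float b)
    {
        a = (packed & 0xFFu) / 255.0f;
        b = ((packed >> 8) & 0xFFu) / 255.0f;
    }
}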
float4 DecodeGBuffer0(GBufferType0 encodedGBuffer0)
{
float4 decodedGBuffer0;
#if SHADEROPTIONS_PACK_GBUFFER_IN_U16
decodedGBuffer0.x = UnpackUIntToFloat(encodedGBuffer0.x, 8, 0);
decodedGBuffer0.y = UnpackUIntToFloat(encodedGBuffer0.x, 8, 8);
decodedGBuffer0.z = UnpackUIntToFloat(encodedGBuffer0.y, 8, 0);
decodedGBuffer0.w = UnpackUIntToFloat(encodedGBuffer0.y, 8, 8);
decodedGBuffer0.xyz = Gamma20ToLinear(encodedGBuffer0.xyz);
#else
decodedGBuffer0 = encodedGBuffer0;
#endif
return decodedGBuffer0;
uint2 unPositionSS,
uint2 positionSS,
#if SHADEROPTIONS_PACK_GBUFFER_IN_U16
GBufferType0 inGBufferU0 = LOAD_TEXTURE2D(_GBufferTexture0, unPositionSS);
GBufferType1 inGBufferU1 = LOAD_TEXTURE2D(_GBufferTexture1, unPositionSS);
#else
GBufferType0 inGBuffer0 = LOAD_TEXTURE2D(_GBufferTexture0, unPositionSS);
GBufferType1 inGBuffer1 = LOAD_TEXTURE2D(_GBufferTexture1, unPositionSS);
GBufferType2 inGBuffer2 = LOAD_TEXTURE2D(_GBufferTexture2, unPositionSS);
GBufferType3 inGBuffer3 = LOAD_TEXTURE2D(_GBufferTexture3, unPositionSS);
#endif
GBufferType0 inGBuffer0 = LOAD_TEXTURE2D(_GBufferTexture0, positionSS);
GBufferType1 inGBuffer1 = LOAD_TEXTURE2D(_GBufferTexture1, positionSS);
GBufferType2 inGBuffer2 = LOAD_TEXTURE2D(_GBufferTexture2, positionSS);
GBufferType3 inGBuffer3 = LOAD_TEXTURE2D(_GBufferTexture3, positionSS);
#if SHADEROPTIONS_PACK_GBUFFER_IN_U16
float4 inGBuffer0, inGBuffer1, inGBuffer2, inGBuffer3;
inGBuffer0 = DecodeGBuffer0(inGBufferU0);
uint packedGBuffer1 = inGBufferU0.z | inGBufferU0.w << 16;
inGBuffer1 = UnpackR10G10B10A2(packedGBuffer1);
inGBuffer2.x = UnpackUIntToFloat(inGBufferU1.x, 8, 0);
inGBuffer2.y = UnpackUIntToFloat(inGBufferU1.x, 8, 8);
inGBuffer2.z = UnpackUIntToFloat(inGBufferU1.y, 8, 0);
inGBuffer2.w = UnpackUIntToFloat(inGBufferU1.y, 8, 8);
uint packedGBuffer3 = inGBufferU1.z | inGBufferU1.w << 16;
inGBuffer3.xyz = UnpackFromR11G11B10f(packedGBuffer1);
inGBuffer3.w = 0.0;
#endif
float3 baseColor = inGBuffer0.rgb;
bsdfData.specularOcclusion = inGBuffer0.a;

bsdfData.normalWS = UnpackNormalOctEncode(float2(inGBuffer1.r, inGBuffer1.g));
bsdfData.roughness = PerceptualRoughnessToRoughness(bsdfData.perceptualRoughness);
// The code is also call from MaterialFeatureFlagsFromGBuffer, so must work fully dynamic if featureFlags is 0xFFFFFFFF
// The code is also call from MaterialFeatureFlagsFromGBuffer, so must work fully dynamic if featureFlags is UINT_MAX
int supportsStandard = HasMaterialFeatureFlag(MATERIALFEATUREFLAGS_LIT_STANDARD);
int supportsSSS = HasMaterialFeatureFlag(MATERIALFEATUREFLAGS_LIT_SSS);
int supportsAniso = HasMaterialFeatureFlag(MATERIALFEATUREFLAGS_LIT_ANISO);

bsdfData.materialId = MATERIALID_LIT_CLEAR_COAT;
}
// We avoid divergent evaluation of the GGX, as that nearly doubles the cost.
// If the tile has anisotropy, all the pixels within the tile are evaluated as anisotropic.
if (HasMaterialFeatureFlag(MATERIALFEATUREFLAGS_LIT_ANISO))
{
bsdfData.anisotropy = 0;
bsdfData.tangentWS = GetLocalFrame(bsdfData.normalWS)[0];
if (bsdfData.materialId == MATERIALID_LIT_ANISO)
{
float metallic;
int octTangentSign;
UnpackFloatInt8bit(inGBuffer2.a, 4.0, metallic, octTangentSign);
inGBuffer2.r = (octTangentSign & 1) ? -inGBuffer2.r : inGBuffer2.r;
inGBuffer2.g = (octTangentSign & 2) ? -inGBuffer2.g : inGBuffer2.g;
bsdfData.anisotropy = inGBuffer2.b * 2 - 1;
bsdfData.tangentWS = UnpackNormalOctEncode(inGBuffer2.rg);
FillMaterialIdStandardData(baseColor, metallic, bsdfData);
}
bsdfData.bitangentWS = cross(bsdfData.normalWS, bsdfData.tangentWS);
}
ConvertAnisotropyToRoughness(bsdfData.perceptualRoughness, bsdfData.anisotropy, bsdfData.roughnessT, bsdfData.roughnessB);
if (HasMaterialFeatureFlag(MATERIALFEATUREFLAGS_LIT_SSS))
{
float subsurfaceRadius = 0;
float thickness = 0;
int subsurfaceProfile = SSS_NEUTRAL_PROFILE_ID;
if (bsdfData.materialId == MATERIALID_LIT_SSS)
{
subsurfaceRadius = inGBuffer2.x;
thickness = inGBuffer2.y;
subsurfaceProfile = UnpackByte(inGBuffer2.w);
FillMaterialIdSSSData(baseColor, bsdfData);
}
FillTransmissionData(subsurfaceProfile, subsurfaceRadius, thickness, bsdfData);
}
if (bsdfData.materialId == MATERIALID_LIT_STANDARD && HasMaterialFeatureFlag(MATERIALFEATUREFLAGS_LIT_STANDARD))
{
float metallic;

if (materialIdExtent == GBUFFER_LIT_STANDARD_SPECULAR_COLOR_ID)
[flatten] if (materialIdExtent == GBUFFER_LIT_STANDARD_SPECULAR_COLOR_ID)
{
// Note: Specular is not a material id but just a way to parameterize the standard material id, thus we reset materialId to MATERIALID_LIT_STANDARD
// For material classification it will be considered as Standard as well, so there is no need to create a special case

FillMaterialIdStandardData(baseColor, metallic, bsdfData);
}
}
else if (bsdfData.materialId == MATERIALID_LIT_SSS && HasMaterialFeatureFlag(MATERIALFEATUREFLAGS_LIT_SSS))
{
float subsurfaceRadius = inGBuffer2.x;
float thickness = inGBuffer2.y;
int subsurfaceProfile = UnpackByte(inGBuffer2.w);
FillMaterialIdSSSData(baseColor, subsurfaceProfile, subsurfaceRadius, thickness, bsdfData);
}
else if (bsdfData.materialId == MATERIALID_LIT_ANISO && HasMaterialFeatureFlag(MATERIALFEATUREFLAGS_LIT_ANISO))
{
float metallic;
int octTangentSign;
UnpackFloatInt8bit(inGBuffer2.a, 4.0, metallic, octTangentSign);
FillMaterialIdStandardData(baseColor, metallic, bsdfData);
inGBuffer2.r = (octTangentSign & 1) ? -inGBuffer2.r : inGBuffer2.r;
inGBuffer2.g = (octTangentSign & 2) ? -inGBuffer2.g : inGBuffer2.g;
float3 tangentWS = UnpackNormalOctEncode(inGBuffer2.rg);
float anisotropy = inGBuffer2.b * 2 - 1;
FillMaterialIdAnisoData(bsdfData.roughness, bsdfData.normalWS, tangentWS, anisotropy, bsdfData);
}
else if (bsdfData.materialId == MATERIALID_LIT_CLEAR_COAT && HasMaterialFeatureFlag(MATERIALFEATUREFLAGS_LIT_CLEAR_COAT))
{
// We have swapped the encoding of the normal to have more precision for the coat normal, as it is smoother

// Function called from the material classification compute shader
// Note that as we store materialId in two buffers (for the anisotropy case), the code needs to load 2 RGBA8 buffers
uint MaterialFeatureFlagsFromGBuffer(uint2 unPositionSS)
uint MaterialFeatureFlagsFromGBuffer(uint2 positionSS)
unPositionSS,
0xFFFFFFFF,
positionSS,
UINT_MAX,
bsdfData,
unused
);

// GGX
float partLambdaV;
float energyCompensation;
float TdotV;
float BdotV;
float3 coatV;
float3 refractV; // The view vector refracted through clear coat interface
// IBL

float diffuseFGD;
// Area lights (17 VGPRs)
// TODO: 'orthoBasisViewNormal' is just a rotation around the normal and should thus be just 1x VGPR.
float3x3 orthoBasisViewNormal; // Right-handed view-dependent orthogonal basis around the normal (6x VGPRs)
float3x3 ltcTransformDiffuse; // Inverse transformation for Lambertian or Disney Diffuse (4x VGPRs)
float3x3 ltcTransformSpecular; // Inverse transformation for GGX (4x VGPRs)

NdotV = saturate(dot(N, V));
preLightData.NdotV = NdotV;
float3 iblR;
float3 iblN, iblR;
// GGX aniso
if (bsdfData.materialId == MATERIALID_LIT_ANISO && HasMaterialFeatureFlag(MATERIALFEATUREFLAGS_LIT_ANISO))
// We avoid divergent evaluation of the GGX, as that nearly doubles the cost.
// If the tile has anisotropy, all the pixels within the tile are evaluated as anisotropic.
if (HasMaterialFeatureFlag(MATERIALFEATUREFLAGS_LIT_ANISO))
preLightData.TdotV = dot(bsdfData.tangentWS, V);
preLightData.BdotV = dot(bsdfData.bitangentWS, V);
preLightData.partLambdaV = GetSmithJointGGXAnisoPartLambdaV(preLightData.TdotV, preLightData.BdotV, NdotV, bsdfData.roughnessT, bsdfData.roughnessB);
float TdotV = dot(bsdfData.tangentWS, V);
float BdotV = dot(bsdfData.bitangentWS, V);
preLightData.partLambdaV = GetSmithJointGGXAnisoPartLambdaV(TdotV, BdotV, NdotV, bsdfData.roughnessT, bsdfData.roughnessB);
// For GGX aniso and IBL we have done an empirical (eyeballed) approximation compared to the reference.
// We use a single fetch, and we stretch the normal to use based on various criteria.

// NOTE: If we followed the theory we should use the modified normal for the different calculations involving a normal (like NdotV) and pass 'anisoIblNormalWS'
// to functions like GetSpecularDominantDir(). However the modified normal is just a hack; the goal is only to stretch a cubemap, no accuracy here.
// With this in mind, and for performance reasons, we chose to only use the modified normal to calculate R.
float3 anisoIblNormalWS = GetAnisotropicModifiedNormal(grainDirWS, N, V, stretch);
iblR = reflect(-V, anisoIblNormalWS);
iblN = GetAnisotropicModifiedNormal(grainDirWS, N, V, stretch);
else // GGX iso
else
preLightData.TdotV = 0;
preLightData.BdotV = 0;
preLightData.partLambdaV = GetSmithJointGGXPartLambdaV(NdotV, bsdfData.roughness);
iblR = reflect(-V, N);
preLightData.partLambdaV = GetSmithJointGGXPartLambdaV(NdotV, bsdfData.roughnessT);
iblN = N;
iblR = reflect(-V, iblN);
float reflectivity;

else
{
// Note: this is an ad-hoc tweak.
float iblRoughness, iblPerceptualRoughness;
if (bsdfData.materialId == MATERIALID_LIT_ANISO && HasMaterialFeatureFlag(MATERIALFEATUREFLAGS_LIT_ANISO))
{
// Use the min roughness, and bias it for higher values of anisotropy and roughness.
float roughnessBias = 0.075 * bsdfData.anisotropy * bsdfData.roughness;
iblRoughness = saturate(min(bsdfData.roughnessT, bsdfData.roughnessB) + roughnessBias);
iblPerceptualRoughness = RoughnessToPerceptualRoughness(iblRoughness);
}
else
{
iblRoughness = bsdfData.roughness;
iblPerceptualRoughness = bsdfData.perceptualRoughness;
}
preLightData.iblDirWS = GetSpecularDominantDir(N, iblR, iblRoughness, NdotV);
preLightData.iblMipLevel = PerceptualRoughnessToMipmapLevel(iblPerceptualRoughness);
// TODO: we need a better hack.
float iblPerceptualRoughness = bsdfData.perceptualRoughness * saturate(1.2 - bsdfData.anisotropy);
float iblRoughness = PerceptualRoughnessToRoughness(iblPerceptualRoughness);
preLightData.iblDirWS = GetSpecularDominantDir(N, iblR, iblRoughness, NdotV);
preLightData.iblMipLevel = PerceptualRoughnessToMipmapLevel(iblPerceptualRoughness);
}
#ifdef LIT_USE_GGX_ENERGY_COMPENSATION

// Construct a right-handed view-dependent orthogonal basis around the normal
preLightData.orthoBasisViewNormal[0] = normalize(V - N * NdotV);
preLightData.orthoBasisViewNormal[2] = N;
preLightData.orthoBasisViewNormal[1] = normalize(cross(preLightData.orthoBasisViewNormal[2], preLightData.orthoBasisViewNormal[0]));
preLightData.orthoBasisViewNormal[1] = cross(preLightData.orthoBasisViewNormal[2], preLightData.orthoBasisViewNormal[0]);
float3 ltcMagnitude = SAMPLE_TEXTURE2D_ARRAY_LOD(_LtcData, s_linear_clamp_sampler, uv, LTC_MULTI_GGX_FRESNEL_DISNEY_DIFFUSE_INDEX, 0).rgb;
float ltcGGXFresnelMagnitudeDiff = ltcMagnitude.r; // The difference of magnitudes of GGX and Fresnel

// But rough metals (black diffuse) still scatter quite a lot of light around, so
// we want to take some of that into account too.
lightTransportData.diffuseColor = bsdfData.diffuseColor + bsdfData.fresnel0 * bsdfData.roughness * 0.5 * surfaceData.metallic;
float roughness = PerceptualRoughnessToRoughness(bsdfData.perceptualRoughness);
lightTransportData.diffuseColor = bsdfData.diffuseColor + bsdfData.fresnel0 * roughness * 0.5 * surfaceData.metallic;
lightTransportData.emissiveColor = builtinData.emissiveColor;
return lightTransportData;

float NdotL = saturate(dot(bsdfData.coatNormalWS, L));
float NdotV = preLightData.coatNdotV;
float LdotV = dot(L, V);
float invLenLV = rsqrt(max(2 * LdotV + 2, FLT_EPSILON));
float invLenLV = rsqrt(max(2 * LdotV + 2, FLT_EPS));
float NdotH = saturate((NdotL + NdotV) * invLenLV);
float LdotH = saturate(invLenLV * LdotV + invLenLV);

float NdotL = saturate(dot(bsdfData.normalWS, L)); // Must have the same value without the clamp
float NdotV = preLightData.NdotV; // Get the unaltered (geometric) version
float LdotV = dot(L, V);
float invLenLV = rsqrt(max(2 * LdotV + 2, FLT_EPSILON)); // invLenLV = rcp(length(L + V)) - caution about the case where V and L are opposite, it can happen, use max to avoid this
float invLenLV = rsqrt(max(2 * LdotV + 2, FLT_EPS)); // invLenLV = rcp(length(L + V)) - caution about the case where V and L are opposite, it can happen, use max to avoid this
float NdotH = saturate((NdotL + NdotV) * invLenLV);
float LdotH = saturate(invLenLV * LdotV + invLenLV);

if (bsdfData.materialId == MATERIALID_LIT_ANISO && HasMaterialFeatureFlag(MATERIALFEATUREFLAGS_LIT_ANISO))
// We avoid divergent evaluation of the GGX, as that nearly doubles the cost.
// If the tile has anisotropy, all the pixels within the tile are evaluated as anisotropic.
if (HasMaterialFeatureFlag(MATERIALFEATUREFLAGS_LIT_ANISO))
{
float3 H = (L + V) * invLenLV;
// For anisotropy we must not saturate these values

float BdotL = dot(bsdfData.bitangentWS, L);
bsdfData.roughnessT = ClampRoughnessForAnalyticalLights(bsdfData.roughnessT);
bsdfData.roughnessB = ClampRoughnessForAnalyticalLights(bsdfData.roughnessB);
DV = DV_SmithJointGGXAniso(TdotH, BdotH, NdotH,
preLightData.TdotV, preLightData.BdotV, preLightData.NdotV,
TdotL, BdotL, NdotL,
bsdfData.roughnessT, bsdfData.roughnessB
#ifdef LIT_USE_BSDF_PRE_LAMBDAV
, preLightData.partLambdaV);
#else
);
#endif
DV = DV_SmithJointGGXAniso(TdotH, BdotH, NdotH, NdotV, TdotL, BdotL, NdotL,
bsdfData.roughnessT, bsdfData.roughnessB, preLightData.partLambdaV);
bsdfData.roughness = ClampRoughnessForAnalyticalLights(bsdfData.roughness);
DV = DV_SmithJointGGX(NdotH, NdotL, NdotV, bsdfData.roughness
#ifdef LIT_USE_BSDF_PRE_LAMBDAV
, preLightData.partLambdaV);
#else
);
#endif
DV = DV_SmithJointGGX(NdotH, NdotL, NdotV, bsdfData.roughnessT, preLightData.partLambdaV);
}
specularLighting += F * DV;

float3 diffuseTerm = DiffuseGGX(bsdfData.diffuseColor, NdotV, NdotL, NdotH, LdotV, bsdfData.roughness);
float roughness = PerceptualRoughnessToRoughness(bsdfData.perceptualRoughness);
float3 diffuseTerm = DiffuseGGX(bsdfData.diffuseColor, NdotV, NdotL, NdotH, LdotV, roughness);
// A note on subsurface scattering.
// A note on subsurface scattering: [SSS-NOTE-TRSM]
// The correct way to handle SSS is to transmit light inside the surface, perform SSS,
// and then transmit it outside towards the viewer.
// Transmit(X) = F_Transm_Schlick(F0, F90, NdotX), where F0 = 0, F90 = 1.

diffuseLighting = diffuseTerm;
}
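To make the [SSS-NOTE-TRSM] note above concrete: assuming F_Transm_Schlick(F0, F90, u) is the transmissive complement (1 - F) of the usual Schlick Fresnel F0 + (F90 - F0)(1 - u)^5, then with F0 = 0 and F90 = 1 each interface contributes

$$\mathrm{Transmit}(X) \;=\; 1 - (1 - N\!\cdot\!X)^{5},$$

applied once for light entering the surface and once for it exiting towards the viewer.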
// In the "thin object" mode (for cards), we assume that the geometry is very thin.
// We apply wrapped lighting to compensate for that, and do not modify the shading position.
// Otherwise, in the "thick object" mode, we can have EITHER reflected (front) lighting
// OR transmitted (back) lighting, never both at the same time. For transmitted lighting,
// we need to push the shading position back to avoid self-shadowing problems.
float3 ComputeThicknessDisplacement(BSDFData bsdfData, float3 L, float NdotL)
{
// Compute the thickness in world units along the normal.
float thicknessInMeters = bsdfData.thickness * METERS_PER_MILLIMETER;
float thicknessInUnits = thicknessInMeters * _WorldScales[bsdfData.subsurfaceProfile].y;
// Compute the thickness in world units along the light vector.
float unprojectedThickness = thicknessInUnits / -NdotL;
return unprojectedThickness * L;
}
// We assume that the back side of the object is a uniformly illuminated infinite plane
// with the reversed normal (and the view vector) of the current sample.
float3 EvaluateTransmission(BSDFData bsdfData, float intensity, float shadow)
// Transmitted lighting is computed as follows:
// - we assume that the object is a thick plane (slab);
// - we reverse the front-facing normal for the back of the object;
// - we assume that the incoming radiance is constant along the entire back surface;
// - we apply BSDF-specific diffuse transmission to transmit the light subsurface and back;
// - we integrate the diffuse reflectance profile w.r.t. the radius (while also accounting
// for the thickness) to compute the transmittance;
// - we multiply the transmitted radiance by the transmittance.
float3 EvaluateTransmission(BSDFData bsdfData, float NdotL, float NdotV, float attenuation)
// For low thickness, we can reuse the shadowing status for the back of the object.
shadow = bsdfData.useThinObjectMode ? shadow : 1;
float wrappedNdotL = ComputeWrappedDiffuseLighting(-NdotL, SSS_WRAP_LIGHT);
float negatedNdotL = saturate(-NdotL);
// Apply wrapped lighting to better handle thin objects (cards) at grazing angles.
float backNdotL = bsdfData.useThickObjectMode ? negatedNdotL : wrappedNdotL;
// Apply BSDF-specific diffuse transmission to attenuation. See also: [SSS-NOTE-TRSM]
// We don't multiply by 'bsdfData.diffuseColor' here. It's done only once in PostEvaluateBSDF().
#ifdef LIT_DIFFUSE_LAMBERT_BRDF
attenuation *= Lambert();
#else
attenuation *= INV_PI * F_Transm_Schlick(0, 0.5, NdotV) * F_Transm_Schlick(0, 0.5, backNdotL);
#endif
float backLight = intensity * shadow;
float intensity = attenuation * backNdotL;
return backLight * bsdfData.transmittance; // Premultiplied with the diffuse color
return intensity * bsdfData.transmittance;
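Because the old and new versions of EvaluateTransmission are interleaved above, here is a consolidated reading of the new signature and body; it only reassembles the lines shown and is not an authoritative copy of the final file:

// Consolidated sketch of the new EvaluateTransmission path (reassembled from the diff above).
float3 EvaluateTransmission(BSDFData bsdfData, float NdotL, float NdotV, float attenuation)
{
    // Wrapped lighting for thin objects (cards) at grazing angles; plain negated NdotL for thick objects.
    float wrappedNdotL = ComputeWrappedDiffuseLighting(-NdotL, SSS_WRAP_LIGHT);
    float negatedNdotL = saturate(-NdotL);
    float backNdotL    = bsdfData.useThickObjectMode ? negatedNdotL : wrappedNdotL;

    // Apply BSDF-specific diffuse transmission to the attenuation. See also: [SSS-NOTE-TRSM]
    // 'bsdfData.diffuseColor' is applied once in PostEvaluateBSDF(), not here.
#ifdef LIT_DIFFUSE_LAMBERT_BRDF
    attenuation *= Lambert();
#else
    attenuation *= INV_PI * F_Transm_Schlick(0, 0.5, NdotV) * F_Transm_Schlick(0, 0.5, backNdotL);
#endif

    float intensity = attenuation * backNdotL;
    return intensity * bsdfData.transmittance; // 'transmittance' is premultiplied (see the transmission tint setup)
}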
// EvaluateBSDF_Directional (supports directional and box projector lights)
// EvaluateBSDF_Directional
// Compute the NDC position (in [-1, 1]^2) by projecting 'positionWS' onto the near plane.
// Compute the CS position (in [-1, 1]^2) by projecting 'positionWS' onto the near plane.
float2 positionNDC = positionLS.xy;
float2 positionCS = positionLS.xy;
bool isInBounds;
// Tile the texture if the 'repeat' wrap mode is enabled.
bool isInBounds = lightData.tileCookie || max(abs(positionCS.x), abs(positionCS.y)) <= 1.0;
float2 coord = positionNDC * 0.5 + 0.5;
float2 positionNDC = frac(positionCS * 0.5 + 0.5);
if (lightData.tileCookie)
{
// Tile the texture if the 'repeat' wrap mode is enabled.
coord = frac(coord);
isInBounds = true;
}
else
{
isInBounds = Max3(abs(positionNDC.x), abs(positionNDC.y), 1.0 - positionLS.z) <= 1.0;
}
// We let the sampler handle clamping to border.
float4 cookie = SampleCookie2D(lightLoopContext, positionNDC, lightData.cookieIndex);
// We let the sampler handle tiling or clamping to border.
// Note: tiling (the repeat mode) is not currently supported.
float4 cookie = SampleCookie2D(lightLoopContext, coord, lightData.cookieIndex);
cookie.a = isInBounds ? cookie.a : 0.0;
cookie.a = isInBounds ? cookie.a : 0;
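Since the directional-cookie change is also interleaved, the new sampling path reads roughly as follows ('positionLS', 'lightData', and 'lightLoopContext' come from the enclosing cookie-evaluation function; this is a reassembly of the lines above, not authoritative):

// Consolidated sketch of the new directional-cookie path.
float2 positionCS  = positionLS.xy;                   // projection onto the near plane, in [-1, 1]^2

// Tile the cookie if the 'repeat' wrap mode is enabled; otherwise test the bounds.
bool   isInBounds  = lightData.tileCookie || max(abs(positionCS.x), abs(positionCS.y)) <= 1.0;
float2 positionNDC = frac(positionCS * 0.5 + 0.5);    // remap to [0, 1] and wrap

// We let the sampler handle clamping to the border.
float4 cookie = SampleCookie2D(lightLoopContext, positionNDC, lightData.cookieIndex);
cookie.a = isInBounds ? cookie.a : 0.0;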
DirectLighting EvaluateBSDF_Directional( LightLoopContext lightLoopContext,
float3 V, PositionInputs posInput, PreLightData preLightData,
DirectionalLightData lightData, BSDFData bsdfData, BakeLightingData bakeLightingData)
// None of the outputs are premultiplied.
void EvaluateLight_Directional(LightLoopContext lightLoopContext, PositionInputs posInput,
DirectionalLightData lightData, BakeLightingData bakeLightingData,
float3 N, float3 L,
out float3 color, out float attenuation)
DirectLighting lighting;
ZERO_INITIALIZE(DirectLighting, lighting);
float shadow = 1.0;
float shadowMask = 1.0;
float3 L = -lightData.forward; // Lights are pointing backward in Unity
float NdotL = dot(bsdfData.normalWS, L);
float illuminance = saturate(NdotL);
color = lightData.color;
attenuation = 1.0;
float shadow = 1.0;
float shadowMask = 1.0;
#ifdef SHADOWS_SHADOWMASK
// shadowMaskSelector.x is -1 if there is no shadow mask
// Note that we override the shadow value (in case we don't have any dynamic shadow)

[branch] if (lightData.shadowIndex >= 0)
{
#ifdef _SURFACE_TYPE_TRANSPARENT
shadow = GetDirectionalShadowAttenuation(lightLoopContext.shadowContext, positionWS, bsdfData.normalWS, lightData.shadowIndex, L, posInput.unPositionSS);
shadow = GetDirectionalShadowAttenuation(lightLoopContext.shadowContext, positionWS, N, lightData.shadowIndex, L, posInput.positionSS);
shadow = LOAD_TEXTURE2D(_DeferredShadowTexture, posInput.unPositionSS).x;
shadow = LOAD_TEXTURE2D(_DeferredShadowTexture, posInput.positionSS).x;
float fade = saturate(posInput.depthVS * lightData.fadeDistanceScaleAndBias.x + lightData.fadeDistanceScaleAndBias.y);
float fade = saturate(posInput.linearDepth * lightData.fadeDistanceScaleAndBias.x + lightData.fadeDistanceScaleAndBias.y);
// See comment in EvaluateBSDF_Punctual
shadow = lightData.dynamicShadowCasterOnly ? min(shadowMask, shadow) : shadow;

#endif
}
illuminance *= shadow;
attenuation *= shadow;
float3 lightToSurface = positionWS - lightData.positionWS;
float4 cookie = EvaluateCookie_Directional(lightLoopContext, lightData, lightToSurface);
float3 lightToSample = positionWS - lightData.positionWS;
float4 cookie = EvaluateCookie_Directional(lightLoopContext, lightData, lightToSample);
// Premultiply.
lightData.color *= cookie.rgb;
lightData.diffuseScale *= cookie.a;
lightData.specularScale *= cookie.a;
color *= cookie.rgb;
attenuation *= cookie.a;
}
[branch] if (illuminance > 0.0)
DirectLighting EvaluateBSDF_Directional(LightLoopContext lightLoopContext,
float3 V, PositionInputs posInput, PreLightData preLightData,
DirectionalLightData lightData, BSDFData bsdfData,
BakeLightingData bakeLightingData)
{
DirectLighting lighting;
ZERO_INITIALIZE(DirectLighting, lighting);
float3 N = bsdfData.normalWS;
float3 L = -lightData.forward; // Lights point backward in Unity
float NdotL = dot(N, L);
[flatten] if (bsdfData.useThickObjectMode && NdotL < 0)
BSDF(V, L, positionWS, preLightData, bsdfData, lighting.diffuse, lighting.specular);
lighting.diffuse *= illuminance * lightData.diffuseScale;
lighting.specular *= illuminance * lightData.specularScale;
posInput.positionWS += ComputeThicknessDisplacement(bsdfData, L, NdotL);
[branch] if (bsdfData.enableTransmission)
float3 color; float attenuation;
EvaluateLight_Directional(lightLoopContext, posInput, lightData, bakeLightingData, N, L,
color, attenuation);
float intensity = attenuation * saturate(NdotL);
[branch] if (intensity > 0.0)
// Apply wrapped lighting to better handle thin objects (cards) at grazing angles.
float wrappedNdotL = ComputeWrappedDiffuseLighting(-NdotL, SSS_WRAP_LIGHT);
BSDF(V, L, posInput.positionWS, preLightData, bsdfData, lighting.diffuse, lighting.specular);
#ifdef LIT_DIFFUSE_LAMBERT_BRDF
illuminance = Lambert() * wrappedNdotL;
#else
float tNdotL = saturate(-NdotL);
float NdotV = preLightData.NdotV;
illuminance = INV_PI * F_Transm_Schlick(0, 0.5, NdotV) * F_Transm_Schlick(0, 0.5, tNdotL) * wrappedNdotL;
#endif
lighting.diffuse *= intensity * lightData.diffuseScale;
lighting.specular *= intensity * lightData.specularScale;
}
[branch] if (bsdfData.enableTransmission)
{
// We don't multiply by 'bsdfData.diffuseColor' here. It's done only once in PostEvaluateBSDF().
lighting.diffuse += EvaluateTransmission(bsdfData, illuminance * lightData.diffuseScale, shadow);
lighting.diffuse += EvaluateTransmission(bsdfData, NdotL, preLightData.NdotV, attenuation * lightData.diffuseScale);
// Save ALU by applying 'lightData.color' only once.
lighting.diffuse *= lightData.color;
lighting.specular *= lightData.color;
// Save ALU by applying light and cookie colors only once.
lighting.diffuse *= color;
lighting.specular *= color;
return lighting;
}
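The refactor splits the directional path into EvaluateLight_Directional (returning unpremultiplied color and attenuation) and EvaluateBSDF_Directional. Reassembled from the interleaved lines above, and only as an illustrative reading, the new BSDF-side function is roughly:

// Consolidated sketch of the refactored EvaluateBSDF_Directional (reassembled from the diff above).
DirectLighting EvaluateBSDF_Directional(LightLoopContext lightLoopContext,
                                        float3 V, PositionInputs posInput, PreLightData preLightData,
                                        DirectionalLightData lightData, BSDFData bsdfData,
                                        BakeLightingData bakeLightingData)
{
    DirectLighting lighting;
    ZERO_INITIALIZE(DirectLighting, lighting);

    float3 N     = bsdfData.normalWS;
    float3 L     = -lightData.forward; // Lights point backward in Unity
    float  NdotL = dot(N, L);

    // Thick-object transmission: push the shading position back to avoid self-shadowing.
    [flatten] if (bsdfData.useThickObjectMode && NdotL < 0)
        posInput.positionWS += ComputeThicknessDisplacement(bsdfData, L, NdotL);

    float3 color; float attenuation;
    EvaluateLight_Directional(lightLoopContext, posInput, lightData, bakeLightingData, N, L,
                              color, attenuation);

    float intensity = attenuation * saturate(NdotL);
    [branch] if (intensity > 0.0)
    {
        BSDF(V, L, posInput.positionWS, preLightData, bsdfData, lighting.diffuse, lighting.specular);
        lighting.diffuse  *= intensity * lightData.diffuseScale;
        lighting.specular *= intensity * lightData.specularScale;
    }

    [branch] if (bsdfData.enableTransmission)
    {
        // 'bsdfData.diffuseColor' is applied once in PostEvaluateBSDF(), not here.
        lighting.diffuse += EvaluateTransmission(bsdfData, NdotL, preLightData.NdotV, attenuation * lightData.diffuseScale);
    }

    // Save ALU by applying light and cookie colors only once.
    lighting.diffuse  *= color;
    lighting.specular *= color;
    return lighting;
}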

{
// Compute the NDC position (in [-1, 1]^2) by projecting 'positionWS' onto the plane at 1m distance.
// Box projector lights require no perspective division.
float perspectiveZ = (lightType != GPULIGHTTYPE_PROJECTOR_BOX) ? positionLS.z : 1;
float2 positionNDC = positionLS.xy / perspectiveZ;
bool isInBounds = Max3(abs(positionNDC.x), abs(positionNDC.y), 1 - positionLS.z) <= 1;
float perspectiveZ = (lightType != GPULIGHTTYPE_PROJECTOR_BOX) ? positionLS.z : 1.0;
float2 positionCS = positionLS.xy / perspectiveZ;
bool isInBounds = Max3(abs(positionCS.x), abs(positionCS.y), 1.0 - positionLS.z) <= 1.0;
float2 coord = positionNDC * 0.5 + 0.5;
float2 positionNDC = positionCS * 0.5 + 0.5;
cookie = SampleCookie2D(lightLoopContext, coord, lightData.cookieIndex);
cookie = SampleCookie2D(lightLoopContext, positionNDC, lightData.cookieIndex);
cookie.a = isInBounds ? cookie.a : 0;
}

return attenuation * GetAngleAttenuation(L, -lightData.forward, lightData.angleScale, lightData.angleOffset);
}
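For context, GetAngleAttenuation(L, -lightData.forward, lightData.angleScale, lightData.angleOffset) is the spot-cone falloff; a common formulation for such precomputed scale/offset terms (e.g. Frostbite's) is

$$\mathrm{att}_{\angle} = \operatorname{saturate}\!\big(\langle L, -\mathbf{f}\rangle \cdot \mathrm{angleScale} + \mathrm{angleOffset}\big)^{2}, \qquad \mathrm{angleScale} = \frac{1}{\cos\theta_{\mathrm{inner}} - \cos\theta_{\mathrm{outer}}}, \quad \mathrm{angleOffset} = -\cos\theta_{\mathrm{outer}} \cdot \mathrm{angleScale},$$

though the exact body of GetAngleAttenuation is not shown in this diff and may differ.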
DirectLighting EvaluateBSDF_Punctual( LightLoopContext lightLoopContext,
float3 V, PositionInputs posInput,
PreLightData preLightData, LightData lightData, BSDFData bsdfData, BakeLightingData bakeLightingData, int GPULightType)
// None of the outputs are premultiplied.
void EvaluateLight_Punctual(LightLoopContext lightLoopContext, PositionInputs posInput,
LightData lightData, BakeLightingData bakeLightingData,
float3 N, float3 L, float distSq,
out float3 color, out float attenuation)
DirectLighting lighting;
ZERO_INITIALIZE(DirectLighting, lighting);
int lightType = GPULightType;
// All punctual light types use the same formula; the attenuation terms that don't apply to a given light type are neutral.
// light.positionWS is the normalized light direction in the case of a directional light, and invSqrAttenuationRadius is 0,
// which means dot(unL, unL) = 1 and GetDistanceAttenuation() will return 1.
// For point and directional lights, GetAngleAttenuation() returns 1.
float shadow = 1.0;
float shadowMask = 1.0;
float3 lightToSurface = positionWS - lightData.positionWS;
float3 unL = -lightToSurface;
float distSq = dot(unL, unL);
float dist = sqrt(distSq);
float3 L = (lightType != GPULIGHTTYPE_PROJECTOR_BOX) ? unL * rsqrt(distSq) : -lightData.forward;
float NdotL = dot(bsdfData.normalWS, L);
float illuminance = saturate(NdotL);
color = lightData.color;
attenuation = GetPunctualShapeAttenuation(lightData, L, distSq);
float attenuation = GetPunctualShapeAttenuation(lightData, L, distSq);
// Premultiply.
lightData.diffuseScale *= attenuation;
lightData.specularScale *= attenuation;
float shadow = 1.0;
float shadowMask = 1.0;
#ifdef SHADOWS_SHADOWMASK
// shadowMaskSelector.x is -1 if there is no shadow mask
// Note that we override the shadow value (in case we don't have any dynamic shadow)

{
// TODO: make projector lights cast shadows.
float3 offset = float3(0.0, 0.0, 0.0); // GetShadowPosOffset(nDotL, normal);
float4 L_dist = { L, dist };
shadow = GetPunctualShadowAttenuation(lightLoopContext.shadowContext, positionWS + offset, bsdfData.normalWS, lightData.shadowIndex, L_dist, posInput.unPositionSS);
float4 L_dist = float4(L, sqrt(distSq));
shadow = GetPunctualShadowAttenuation(lightLoopContext.shadowContext, positionWS + offset, N, lightData.shadowIndex, L_dist, posInput.positionSS);
#ifdef SHADOWS_SHADOWMASK
// Note: Legacy Unity has two shadow mask modes: ShadowMask (the ShadowMask contains the shadows of static objects and the ShadowMap contains only the shadows of dynamic objects; the final result is the minimum of both values)
// and ShadowMask_Distance (the ShadowMask contains the shadows of static objects and the ShadowMap contains everything and is blended with the ShadowMask based on distance (global distance set up in QualitySettings)).

#endif
}
illuminance *= shadow;
attenuation *= shadow;
// Projector lights always have a cookies, so we can perform clipping inside the if().
// Projector lights always have cookies, so we can perform clipping inside the if().
float4 cookie = EvaluateCookie_Punctual(lightLoopContext, lightData, lightToSurface);
float3 lightToSample = positionWS - lightData.positionWS;
float4 cookie = EvaluateCookie_Punctual(lightLoopContext, lightData, lightToSample);
// Premultiply.
lightData.color *= cookie.rgb;
lightData.diffuseScale *= cookie.a;
lightData.specularScale *= cookie.a;
color *= cookie.rgb;
attenuation *= cookie.a;
}
}
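For the punctual side of the same refactor, the new light-evaluation helper, reassembled from the interleaved lines above, reads roughly as follows; the two branch conditions are assumptions (the diff elides them) and the shadow-mask path is omitted:

// Consolidated sketch of the new EvaluateLight_Punctual (reassembled from the diff above; illustrative only).
void EvaluateLight_Punctual(LightLoopContext lightLoopContext, PositionInputs posInput,
                            LightData lightData, BakeLightingData bakeLightingData,
                            float3 N, float3 L, float distSq,
                            out float3 color, out float attenuation)
{
    float3 positionWS = posInput.positionWS;
    float  shadow     = 1.0;

    color       = lightData.color;
    attenuation = GetPunctualShapeAttenuation(lightData, L, distSq);

    [branch] if (lightData.shadowIndex >= 0) // assumed condition: the light casts dynamic shadows
    {
        // TODO: make projector lights cast shadows.
        float3 offset = float3(0.0, 0.0, 0.0); // GetShadowPosOffset(nDotL, normal);
        float4 L_dist = float4(L, sqrt(distSq));
        shadow = GetPunctualShadowAttenuation(lightLoopContext.shadowContext, positionWS + offset, N, lightData.shadowIndex, L_dist, posInput.positionSS);
    }
    attenuation *= shadow;

    [branch] if (lightData.cookieIndex >= 0) // assumed condition: projector lights always have cookies
    {
        float3 lightToSample = positionWS - lightData.positionWS;
        float4 cookie        = EvaluateCookie_Punctual(lightLoopContext, lightData, lightToSample);

        color       *= cookie.rgb;
        attenuation *= cookie.a;
    }
}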
DirectLighting EvaluateBSDF_Punctual(LightLoopContext lightLoopContext,
float3 V, PositionInputs posInput,
PreLightData preLightData, LightData lightData, BSDFData bsdfData, BakeLightingData bakeLightingData)
{
DirectLighting lighting;
ZERO_INITIALIZE(DirectLighting, lighting);
float3 lightToSample = posInput.positionWS - lightData.positionWS;
int lightType = lightData.lightType;
float3 unL = (lightType != GPULIGHTTYPE_PROJECTOR_BOX) ? -lightToSample : -lightData.forward;
float distSq = dot(unL, unL);
float3 N = bsdfData.normalWS;
float3 L = unL * rsqrt(distSq);
float NdotL = dot(N, L);
[flatten] if (bsdfData.useThickObjectMode && NdotL < 0)
{
posInput.positionWS += ComputeThicknessDisplacement(bsdfData, L, NdotL);
[branch] if (illuminance > 0.0)
float3 color; float attenuation;
EvaluateLight_Punctual(lightLoopContext, posInput, lightData, bakeLightingData, N, L, distSq,
color, attenuation);
float intensity = attenuation * saturate(NdotL);
[branch] if (intensity > 0.0)
bsdfData.roughness = max(bsdfData.roughness, lightData.minRoughness); // Simulate that a punctual light has a radius with this hack
BSDF(V, L, positionWS, preLightData, bsdfData, lighting.diffuse, lighting.specular);
// Simulate a sphere light with this hack.
bsdfData.roughnessT = max(bsdfData.roughnessT, lightData.minRoughness);
bsdfData.roughnessB = max(bsdfData.roughnessB, lightData.minRoughness);
lighting.diffuse *= illuminance * lightData.diffuseScale;
lighting.specular *= illuminance * lightData.specularScale;
BSDF(V, L, posInput.positionWS, preLightData, bsdfData, lighting.diffuse, lighting.specular);
lighting.diffuse *= intensity * lightData.diffuseScale;
lighting.specular *= intensity * lightData.specularScale;
// Apply wrapped lighting to better handle thin objects (cards) at grazing angles.
float wrappedNdotL = ComputeWrappedDiffuseLighting(-NdotL, SSS_WRAP_LIGHT);
#ifdef LIT_DIFFUSE_LAMBERT_BRDF
illuminance = Lambert() * wrappedNdotL;
#else
float tNdotL = saturate(-NdotL);
float NdotV = preLightData.NdotV;
illuminance = INV_PI * F_Transm_Schlick(0, 0.5, NdotV) * F_Transm_Schlick(0, 0.5, tNdotL) * wrappedNdotL;
#endif
// We don't multiply by 'bsdfData.diffuseColor' here. It's done only once in PostEvaluateBSDF().
lighting.diffuse += EvaluateTransmission(bsdfData, illuminance * lightData.diffuseScale, shadow);
lighting.diffuse += EvaluateTransmission(bsdfData, NdotL, preLightData.NdotV, attenuation * lightData.diffuseScale);
// Save ALU by applying 'lightData.color' only once.
lighting.diffuse *= lightData.color;
lighting.specular *= lightData.color;
// Save ALU by applying light and cookie colors only once.
lighting.diffuse *= color;
lighting.specular *= color;
return lighting;
}

// Use the Lambertian approximation for performance reasons.
// The matrix multiplication should not generate any extra ALU on GCN.
// TODO: double evaluation is very inefficient! This is a temporary solution.
lighting.diffuse += EvaluateTransmission(bsdfData, ltcValue, 1);
lighting.diffuse += bsdfData.transmittance * ltcValue;
}
// Evaluate the coat part

float3x3 ltcTransform = mul(flipMatrix, k_identity3x3);
// Polygon irradiance in the transformed configuration.
// TODO: double evaluation is very inefficient! This is a temporary solution.
lighting.diffuse += EvaluateTransmission(bsdfData, ltcValue, 1);
lighting.diffuse += bsdfData.transmittance * ltcValue;
}
// Evaluate the coat part

return lighting;
}
DirectLighting EvaluateBSDF_Area( LightLoopContext lightLoopContext,
float3 V, PositionInputs posInput,
PreLightData preLightData, LightData lightData, BSDFData bsdfData, BakeLightingData bakeLightingData, int GPULightType)
{
if (GPULightType == GPULIGHTTYPE_LINE)
{
return EvaluateBSDF_Line(lightLoopContext, V, posInput, preLightData, lightData, bsdfData, bakeLightingData);
}
else
{
return EvaluateBSDF_Rect(lightLoopContext, V, posInput, preLightData, lightData, bsdfData, bakeLightingData);
}
}
//-----------------------------------------------------------------------------
// EvaluateBSDF_SSLighting for screen space lighting
// ----------------------------------------------------------------------------

float3 refractedBackPointWS = EstimateRaycast(V, posInput, preLightData.transmissionPositionWS, preLightData.transmissionRefractV);
// Calculate screen space coordinates of refracted point in back plane
float4 refractedBackPointCS = mul(UNITY_MATRIX_VP, float4(refractedBackPointWS, 1.0));
float2 refractedBackPointSS = ComputeScreenSpacePosition(refractedBackPointCS);
float2 refractedBackPointNDC = ComputeNormalizedDeviceCoordinates(refractedBackPointWS, UNITY_MATRIX_VP);
float refractedBackPointDepth = LinearEyeDepth(LOAD_TEXTURE2D_LOD(_PyramidDepthTexture, refractedBackPointSS * depthSize, 0).r, _ZBufferParams);
float refractedBackPointDepth = LinearEyeDepth(LOAD_TEXTURE2D_LOD(_PyramidDepthTexture, refractedBackPointNDC * depthSize, 0).r, _ZBufferParams);
if (refractedBackPointDepth < posInput.depthVS
|| any(refractedBackPointSS < 0.0)
|| any(refractedBackPointSS > 1.0))
if (refractedBackPointDepth < posInput.linearDepth
|| any(refractedBackPointNDC < 0.0)
|| any(refractedBackPointNDC > 1.0))
{
// Do nothing and don't update the hierarchy weight so we can fall back on the refraction probe
return lighting;

lighting.specularTransmitted = SAMPLE_TEXTURE2D_LOD(_GaussianPyramidColorTexture, s_trilinear_clamp_sampler, refractedBackPointSS, preLightData.transmissionSSMipLevel).rgb;
lighting.specularTransmitted = SAMPLE_TEXTURE2D_LOD(_GaussianPyramidColorTexture, s_trilinear_clamp_sampler, refractedBackPointNDC, preLightData.transmissionSSMipLevel).rgb;
// Beer-Lambert law for absorption
lighting.specularTransmitted *= preLightData.transmissionTransmittance;

return lighting;
}
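The Beer-Lambert absorption referenced above attenuates the refracted color exponentially with the optical path length through the medium,

$$T(d) = e^{-\sigma_a\, d},$$

where $\sigma_a$ is the absorption coefficient and $d$ the distance travelled inside the object; 'preLightData.transmissionTransmittance' presumably carries this factor, precomputed elsewhere.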
DirectLighting EvaluateBSDF_Area(LightLoopContext lightLoopContext,
float3 V, PositionInputs posInput,
PreLightData preLightData, LightData lightData,
BSDFData bsdfData, BakeLightingData bakeLightingData)
{
if (lightData.lightType == GPULIGHTTYPE_LINE)
{
return EvaluateBSDF_Line(lightLoopContext, V, posInput, preLightData, lightData, bsdfData, bakeLightingData);
}
else
{
return EvaluateBSDF_Rect(lightLoopContext, V, posInput, preLightData, lightData, bsdfData, bakeLightingData);
}
}
//-----------------------------------------------------------------------------
// EvaluateBSDF_Env
// ----------------------------------------------------------------------------

// TODO: factor this code into common code, so other material authoring doesn't require rewriting everything,
// TODO: test the stretch from Tomasz
// float shrinkedRoughness = AnisotropicStrechAtGrazingAngle(bsdfData.roughness, bsdfData.perceptualRoughness, NdotV);
// float roughness = PerceptualRoughnessToRoughness(bsdfData.perceptualRoughness);
// float shrunkRoughness = AnisotropicStrechAtGrazingAngle(roughness, roughness, NdotV);
// Guideline for reflection volumes: in HDRenderPipeline we separate the projection volume (the proxy of the scene) from the influence volume (which pixels on the screen are affected)
// However we add the constraint that the shape of the projection and influence volumes is the same (i.e. if we have a sphere-shaped projection volume, we have a sphere-shaped influence volume).

// We store inverse AO so neutral is black. Whether we sample inside or outside the texture, it returns 0 in the neutral case.
// Ambient occlusion is used for indirect lighting (reflection probes, baked diffuse lighting)
float indirectAmbientOcclusion = 1.0 - LOAD_TEXTURE2D(_AmbientOcclusionTexture, posInput.unPositionSS).x;
#ifndef _SURFACE_TYPE_TRANSPARENT
float indirectAmbientOcclusion = 1.0 - LOAD_TEXTURE2D(_AmbientOcclusionTexture, posInput.positionSS).x;
#else
float indirectAmbientOcclusion = 1.0;
float directAmbientOcclusion = 1.0;
#endif
// Add indirect diffuse + emissive (if any) - Ambient occlusion is multiplied by emissive, which is wrong but not a big deal
#if GTAO_MULTIBOUNCE_APPROX

#endif
float specularOcclusion = GetSpecularOcclusionFromAmbientOcclusion(preLightData.NdotV, indirectAmbientOcclusion, bsdfData.roughness);
float roughness = PerceptualRoughnessToRoughness(bsdfData.perceptualRoughness);
float specularOcclusion = GetSpecularOcclusionFromAmbientOcclusion(preLightData.NdotV, indirectAmbientOcclusion, roughness);
// Try to mimic multibounce with specular color. Not the intent of the original formula, but it gives an OK result.
// Take the min of screenspace specular occlusion and visibility cone specular occlusion
#if GTAO_MULTIBOUNCE_APPROX

}
else if (_DebugLightingMode == DEBUGLIGHTINGMODE_INDIRECT_SPECULAR_OCCLUSION_FROM_SSAO)
{
diffuseLighting = GetSpecularOcclusionFromAmbientOcclusion(preLightData.NdotV, indirectAmbientOcclusion, bsdfData.roughness);
diffuseLighting = specularOcclusion;
specularLighting = float3(0.0, 0.0, 0.0); // Disable specular lighting
}
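GetSpecularOcclusionFromAmbientOcclusion(NdotV, AO, roughness) derives a specular occlusion term from the cosine-weighted ambient occlusion; a widely used approximation with this exact signature is Lagarde's (from "Moving Frostbite to PBR"),

$$SO = \operatorname{saturate}\!\Big((N\!\cdot\!V + AO)^{\exp_2(-16\,r - 1)} - 1 + AO\Big),$$

with $r$ the linear roughness, though whether this is the exact formula implemented here is an assumption.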
#if GTAO_MULTIBOUNCE_APPROX

30
ScriptableRenderPipeline/HDRenderPipeline/Material/Lit/Lit.shader


[ToggleOff] _AlphaCutoffEnable("Alpha Cutoff Enable", Float) = 0.0
_AlphaCutoff("Alpha Cutoff", Range(0.0, 1.0)) = 0.5
[ToggleOff] _TransparentBackfaceEnable("_TransparentBackfaceEnable", Float) = 0.0
// Transparency
[Enum(None, 0, Plane, 1, Sphere, 2)]_RefractionMode("Refraction Mode", Int) = 0

#include "ShaderPass/LitDistortionPass.hlsl"
#include "LitData.hlsl"
#include "../../ShaderPass/ShaderPassDistortion.hlsl"
ENDHLSL
}
Pass
{
Name "TransparentBackface"
Tags { "LightMode" = "TransparentBackface" }
Blend [_SrcBlend] [_DstBlend]
ZWrite [_ZWrite]
Cull Front
HLSLPROGRAM
#pragma multi_compile LIGHTMAP_OFF LIGHTMAP_ON
#pragma multi_compile DIRLIGHTMAP_OFF DIRLIGHTMAP_COMBINED
#pragma multi_compile DYNAMICLIGHTMAP_OFF DYNAMICLIGHTMAP_ON
#pragma multi_compile _ SHADOWS_SHADOWMASK
// #include "../../Lighting/Forward.hlsl"
#pragma multi_compile LIGHTLOOP_SINGLE_PASS LIGHTLOOP_TILE_PASS
#pragma multi_compile USE_FPTL_LIGHTLIST USE_CLUSTERED_LIGHTLIST
#define SHADERPASS SHADERPASS_FORWARD
#include "../../ShaderVariables.hlsl"
#include "../../Lighting/Lighting.hlsl"
#include "ShaderPass/LitSharePass.hlsl"
#include "LitData.hlsl"
#include "../../ShaderPass/ShaderPassForward.hlsl"
ENDHLSL
}

2
ScriptableRenderPipeline/HDRenderPipeline/Material/Lit/LitData.hlsl


void GetSurfaceAndBuiltinData(FragInputs input, float3 V, inout PositionInputs posInput, out SurfaceData surfaceData, out BuiltinData builtinData)
{
#ifdef LOD_FADE_CROSSFADE // enable dithering LOD transition if user select CrossFade transition in LOD group
LODDitheringTransition(posInput.unPositionSS, unity_LODFade.x);
LODDitheringTransition(posInput.positionSS, unity_LODFade.x);
#endif
ApplyDoubleSidedFlipOrMirror(input); // Apply double sided flip on the vertex normal

2
ScriptableRenderPipeline/HDRenderPipeline/Material/Lit/LitReference.hlsl


}
else
{
ImportanceSampleGGX(u, V, localToWorld, bsdfData.roughness, NdotV, L, VdotH, NdotL, weightOverPdf);
ImportanceSampleGGX(u, V, localToWorld, bsdfData.roughnessT, NdotV, L, VdotH, NdotL, weightOverPdf);
}
if (NdotL > 0.0)

33
ScriptableRenderPipeline/HDRenderPipeline/Material/Lit/LitTessellation.shader


[ToggleOff] _AlphaCutoffEnable("Alpha Cutoff Enable", Float) = 0.0
_AlphaCutoff("Alpha Cutoff", Range(0.0, 1.0)) = 0.5
[ToggleOff] _TransparentBackfaceEnable("_TransparentBackfaceEnable", Float) = 0.0
// Transparency
[Enum(None, 0, Plane, 1, Sphere, 2)]_RefractionMode("Refraction Mode", Int) = 0

#include "ShaderPass/LitDistortionPass.hlsl"
#include "LitData.hlsl"
#include "../../ShaderPass/ShaderPassDistortion.hlsl"
ENDHLSL
}
Pass
{
Name "TransparentBackface"
Tags { "LightMode" = "TransparentBackface" }
Blend [_SrcBlend] [_DstBlend]
ZWrite [_ZWrite]
Cull Front
HLSLPROGRAM
#pragma hull Hull
#pragma domain Domain
#pragma multi_compile LIGHTMAP_OFF LIGHTMAP_ON
#pragma multi_compile DIRLIGHTMAP_OFF DIRLIGHTMAP_COMBINED
#pragma multi_compile DYNAMICLIGHTMAP_OFF DYNAMICLIGHTMAP_ON
#pragma multi_compile _ SHADOWS_SHADOWMASK
// #include "../../Lighting/Forward.hlsl"
#pragma multi_compile LIGHTLOOP_SINGLE_PASS LIGHTLOOP_TILE_PASS
#pragma multi_compile USE_FPTL_LIGHTLIST USE_CLUSTERED_LIGHTLIST
#define SHADERPASS SHADERPASS_FORWARD
#include "../../ShaderVariables.hlsl"
#include "../../Lighting/Lighting.hlsl"
#include "ShaderPass/LitSharePass.hlsl"
#include "LitData.hlsl"
#include "../../ShaderPass/ShaderPassForward.hlsl"
ENDHLSL
}

4
ScriptableRenderPipeline/HDRenderPipeline/Material/Lit/Resources/CopyStencilBuffer.shader


[earlydepthstencil]
float4 Frag(Varyings input) : SV_Target // use SV_StencilRef in D3D 11.3+
{
uint2 positionSS = (uint2)input.positionCS.xy;
uint2 positionNDC = (uint2)input.positionCS.xy;
_HTile[positionSS / 8] = _StencilRef;
_HTile[positionNDC / 8] = _StencilRef;
#endif
return PackByte(_StencilRef);

104
ScriptableRenderPipeline/HDRenderPipeline/Material/Lit/Resources/SubsurfaceScattering.compute


// Do not modify these.
#include "../../../ShaderPass/ShaderPass.cs.hlsl"
#define SHADERPASS SHADERPASS_SUBSURFACE_SCATTERING
#define MILLIMETERS_PER_METER 1000
#define CENTIMETERS_PER_METER 100
#define GROUP_SIZE_1D 16
#define GROUP_SIZE_2D (GROUP_SIZE_1D * GROUP_SIZE_1D)
#define TEXTURE_CACHE_BORDER 2

// Inputs & outputs
//--------------------------------------------------------------------------------------------------
float4 _WorldScales[SSS_N_PROFILES]; // Size of the world unit in meters (only the X component is used)
float4 _FilterKernels[SSS_N_PROFILES][SSS_N_SAMPLES_NEAR_FIELD]; // XY = near field, ZW = far field; 0 = radius, 1 = reciprocal of the PDF
TEXTURE2D(_DepthTexture); // Z-buffer

#endif
groupshared bool processGroup;
bool StencilTest(int2 pixelCoord, float stencilRef)
{
bool passedStencilTest;
#if SSS_SAMPLE_TEST_HTILE
int2 tileCoord = pixelCoord / 8;
// Perform the stencil test (reject at the tile rate).
passedStencilTest = stencilRef == LOAD_TEXTURE2D(_HTile, tileCoord).r;
[branch] if (passedStencilTest)
#else
// It is extremely uncommon for individual samples to fail the HTile test.
// Unfortunately, our copy of HTile does not allow accepting at the tile rate.
// Therefore, we choose not to perform the HiS test here.
#endif
{
// Unfortunately, our copy of HTile does not allow accepting at the tile rate.
// Therefore, we have to additionally perform the stencil test at the pixel rate.
// We check the tagged irradiance buffer to avoid an extra stencil texture fetch.
passedStencilTest = TestLightingForSSS(LOAD_TEXTURE2D(_IrradianceSource, pixelCoord).rgb);
}
return passedStencilTest;
}
#if SSS_USE_LDS_CACHE
float4 LoadSampleFromCacheMemory(int2 cacheCoord)
{

// Returns {irradiance, linearDepth}.
float4 LoadSample(int2 pixelCoord, int2 cacheAnchor)
{
#if SSS_USE_LDS_CACHE
#if SSS_USE_LDS_CACHE
[branch] if (isInCache)
{
return LoadSampleFromCacheMemory(cacheCoord);

{
float stencilRef = STENCILLIGHTINGUSAGE_SPLIT_LIGHTING;
[branch] if (StencilTest(pixelCoord, stencilRef))
{
return LoadSampleFromVideoMemory(pixelCoord);
}
else
{
return float4(0, 0, 0, 0);
}
// Always load both irradiance and depth.
// Avoid dependent texture reads at the cost of extra bandwidth.
return LoadSampleFromVideoMemory(pixelCoord);
}
}

#if SSS_USE_TANGENT_PLANE
float3 relPosVS = vec.x * tangentX + vec.y * tangentY;
float3 positionVS = centerPosVS + relPosVS;
float2 positionSS = ComputeScreenSpacePosition(positionCS, projMatrix);
float2 positionNDC = ComputeNormalizedDeviceCoordinates(positionCS, projMatrix);
position = (int2)(positionSS * _ScreenSize.xy);
position = (int2)(positionNDC * _ScreenSize.xy);
xy2 = dot(relPosVS.xy, relPosVS.xy);
#else
position = (int2)(centerCoord + vec * pixelsPerMm);

if (TestLightingForSSS(irradiance))
{
// Apply bilateral weighting.
float linearDepth = textureSample.a;
float z = linearDepth - centerPosVS.z;
float p = _FilterKernels[profileID][i][iP];
float3 w = ComputeBilateralWeight(xy2, z, mmPerUnit, shapeParam, p);
float viewZ = textureSample.a;
float relZ = viewZ - centerPosVS.z;
float rcpPdf = _FilterKernels[profileID][i][iP];
float3 weight = ComputeBilateralWeight(xy2, relZ, mmPerUnit, shapeParam, rcpPdf);
totalIrradiance += w * irradiance;
totalWeight += w;
totalIrradiance += weight * irradiance;
totalWeight += weight;
}
else
{

[branch] if (!processGroup) { return; }
float3 centerIrradiance = 0;
float centerDepth = 0;
float4 cachedValue = 0;
bool passedStencilTest = StencilTest((int2)pixelCoord, stencilRef);
float3 centerIrradiance = LOAD_TEXTURE2D(_IrradianceSource, pixelCoord).rgb;
float centerDepth = 0;
float centerViewZ = 0;
bool passedStencilTest = TestLightingForSSS(centerIrradiance);
// Save some bandwidth by only loading depth values for SSS pixels.
centerIrradiance = LOAD_TEXTURE2D(_IrradianceSource, pixelCoord).rgb;
centerDepth = LOAD_TEXTURE2D(_DepthTexture, pixelCoord).r;
cachedValue = float4(centerIrradiance, LinearEyeDepth(centerDepth, _ZBufferParams));
centerDepth = LOAD_TEXTURE2D(_DepthTexture, pixelCoord).r;
centerViewZ = LinearEyeDepth(centerDepth, _ZBufferParams);
textureCache[Mad24(TEXTURE_CACHE_SIZE_1D, cacheCoord.y, cacheCoord.x)] = cachedValue;
textureCache[Mad24(TEXTURE_CACHE_SIZE_1D, cacheCoord.y, cacheCoord.x)] = float4(centerIrradiance, centerViewZ);
uint numBorderQuadsPerWave = TEXTURE_CACHE_SIZE_1D / 2 - 1;
uint halfCacheWidthInQuads = TEXTURE_CACHE_SIZE_1D / 4;

uint2 quadCoord;
// The traversal order is such that the quad's X coordinate is monotonically increasing.
// The corner is always near the block of the corresponding wavefront.
case 0:
case 0: // Bottom left
case 1:
case 1: // Bottom right
case 2:
case 2: // Top left
default: // 3
default: // Top right
uint2 cacheCoord2 = 2 * (startQuad + quadCoord) + uint2(laneIndex & 1, (laneIndex >> 1) & 1);
int2 pixelCoord2 = (int2)(tileAnchor + cacheCoord2) - TEXTURE_CACHE_BORDER;
float4 cachedValue2 = 0;
uint2 cacheCoord2 = 2 * (startQuad + quadCoord) + uint2(laneIndex & 1, (laneIndex >> 1) & 1);
int2 pixelCoord2 = (int2)(tileAnchor + cacheCoord2) - TEXTURE_CACHE_BORDER;
float3 irradiance2 = LOAD_TEXTURE2D(_IrradianceSource, pixelCoord2).rgb;
float viewZ2 = 0;
[branch] if (StencilTest(pixelCoord2, stencilRef))
// Save some bandwidth by only loading depth values for SSS pixels.
[branch] if (TestLightingForSSS(irradiance2))
cachedValue2 = LoadSampleFromVideoMemory(pixelCoord2);
viewZ2 = LinearEyeDepth(LOAD_TEXTURE2D(_DepthTexture, pixelCoord2).r, _ZBufferParams);
textureCache[Mad24(TEXTURE_CACHE_SIZE_1D, cacheCoord2.y, cacheCoord2.x)] = cachedValue2;
textureCache[Mad24(TEXTURE_CACHE_SIZE_1D, cacheCoord2.y, cacheCoord2.x)] = float4(irradiance2, viewZ2);
}
// Wait for the LDS.

float maxDistance = _ShapeParams[profileID].a;
// Reconstruct the view-space position corresponding to the central sample.
float2 centerPosSS = posInput.positionSS;
float2 centerPosSS = posInput.positionNDC;
float2 cornerPosSS = centerPosSS + 0.5 * _ScreenSize.zw;
float3 centerPosVS = ComputeViewSpacePosition(centerPosSS, centerDepth, UNITY_MATRIX_I_P);
float3 cornerPosVS = ComputeViewSpacePosition(cornerPosSS, centerDepth, UNITY_MATRIX_I_P);

bool useNearFieldKernel = SSS_ENABLE_NEAR_FIELD && maxDistInPixels > SSS_LOD_THRESHOLD;
#if SSS_DEBUG_LOD
StoreResult(pixelCoord, useNearFieldKernel ? float3(1, 0, 0) : float3(0.5, 0.5, 0);
StoreResult(pixelCoord, useNearFieldKernel ? float3(1, 0, 0) : float3(0.5, 0.5, 0));
return;
#endif

15
ScriptableRenderPipeline/HDRenderPipeline/Material/Lit/Resources/SubsurfaceScattering.shader


// Do not modify these.
#include "../../../ShaderPass/ShaderPass.cs.hlsl"
#define SHADERPASS SHADERPASS_SUBSURFACE_SCATTERING
#define MILLIMETERS_PER_METER 1000
#define CENTIMETERS_PER_METER 100
#define SHADERPASS SHADERPASS_SUBSURFACE_SCATTERING
//-------------------------------------------------------------------------------------
// Include

// Inputs & outputs
//-------------------------------------------------------------------------------------
float4 _WorldScales[SSS_N_PROFILES]; // Size of the world unit in meters (only the X component is used)
float4 _FilterKernelsBasic[SSS_N_PROFILES][SSS_BASIC_N_SAMPLES]; // RGB = weights, A = radial distance
float4 _HalfRcpWeightedVariances[SSS_BASIC_N_SAMPLES]; // RGB for chromatic, A for achromatic

BSDFData bsdfData;
float3 unused;
DECODE_FROM_GBUFFER(posInput.unPositionSS, featureFlags, bsdfData, unused);
DECODE_FROM_GBUFFER(posInput.positionSS, featureFlags, bsdfData, unused);
int profileID = bsdfData.subsurfaceProfile;
float distScale = bsdfData.subsurfaceRadius;

// TODO: copy its neighborhood into LDS.
float2 centerPosition = posInput.unPositionSS;
float2 centerPosition = posInput.positionSS;
float2 centerPosSS = posInput.positionSS;
float2 centerPosSS = posInput.positionNDC;
float2 cornerPosSS = centerPosSS + 0.5 * _ScreenSize.zw;
float centerDepth = LOAD_TEXTURE2D(_MainDepthTexture, centerPosition).r;
float3 centerPosVS = ComputeViewSpacePosition(centerPosSS, centerDepth, UNITY_MATRIX_I_P);

#endif
// Take the first (central) sample.
float2 samplePosition = posInput.unPositionSS;
float2 samplePosition = posInput.positionSS;
float3 sampleWeight = _FilterKernelsBasic[profileID][0].rgb;
float3 sampleIrradiance = LOAD_TEXTURE2D(_IrradianceSource, samplePosition).rgb;

[unroll]
for (int i = 1; i < SSS_BASIC_N_SAMPLES; i++)
{
samplePosition = posInput.unPositionSS + rotatedDirection * _FilterKernelsBasic[profileID][i].a;
samplePosition = posInput.positionSS + rotatedDirection * _FilterKernelsBasic[profileID][i].a;
sampleWeight = _FilterKernelsBasic[profileID][i].rgb;
sampleIrradiance = LOAD_TEXTURE2D(_IrradianceSource, samplePosition).rgb;

4
ScriptableRenderPipeline/HDRenderPipeline/Material/Lit/SubsurfaceScatteringSettings.cs


[NonSerialized] public uint texturingModeFlags; // 1 bit/profile; 0 = PreAndPostScatter, 1 = PostScatter
[NonSerialized] public uint transmissionFlags; // 2 bit/profile; 0 = inf. thick, 1 = thin, 2 = regular
[NonSerialized] public Vector4[] thicknessRemaps; // Remap: 0 = start, 1 = end - start
[NonSerialized] public Vector4[] worldScales; // Size of the world unit in meters (only the X component is used)
[NonSerialized] public Vector4[] worldScales; // X = meters per world unit; Y = world units per meter
[NonSerialized] public Vector4[] shapeParams; // RGB = S = 1 / D, A = filter radius
[NonSerialized] public Vector4[] transmissionTints; // RGB = color, A = unused
[NonSerialized] public Vector4[] filterKernels; // XY = near field, ZW = far field; 0 = radius, 1 = reciprocal of the PDF

transmissionFlags |= (uint)profiles[p].transmissionMode << i * 2;
thicknessRemaps[i] = new Vector4(profiles[p].thicknessRemap.x, profiles[p].thicknessRemap.y - profiles[p].thicknessRemap.x, 0f, 0f);
worldScales[i] = new Vector4(profiles[p].worldScale, 0f, 0f, 0f);
worldScales[i] = new Vector4(profiles[p].worldScale, 1.0f / profiles[p].worldScale, 0f, 0f);
shapeParams[i] = profiles[p].shapeParam;
shapeParams[i].w = profiles[p].maxRadius;
transmissionTints[i] = profiles[p].transmissionTint * 0.25f; // Premultiplied

32
ScriptableRenderPipeline/HDRenderPipeline/Material/Unlit/Editor/BaseUnlitUI.cs


using System;
using UnityEngine;
using UnityEngine.Experimental.Rendering;
using UnityEngine.Rendering;
namespace UnityEditor.Experimental.Rendering.HDPipeline
{

}
}
static public void SetKeyword(Material m, string keyword, bool state)
{
if (state)
m.EnableKeyword(keyword);
else
m.DisableKeyword(keyword);
}
SetKeyword(material, "_ALPHATEST_ON", alphaTestEnable);
CoreUtils.SetKeyword(material, "_ALPHATEST_ON", alphaTestEnable);
SetKeyword(material, "_SURFACE_TYPE_TRANSPARENT", surfaceType == SurfaceType.Transparent);
CoreUtils.SetKeyword(material, "_SURFACE_TYPE_TRANSPARENT", surfaceType == SurfaceType.Transparent);
SetKeyword(material, "_BLENDMODE_PRESERVE_SPECULAR_LIGHTING", enableBlendModePreserveSpecularLighting);
CoreUtils.SetKeyword(material, "_BLENDMODE_PRESERVE_SPECULAR_LIGHTING", enableBlendModePreserveSpecularLighting);
SetKeyword(material, "_BLENDMODE_ALPHA", false);
SetKeyword(material, "_BLENDMODE_ADD", false);
SetKeyword(material, "_BLENDMODE_PRE_MULTIPLY", false);
CoreUtils.SetKeyword(material, "_BLENDMODE_ALPHA", false);
CoreUtils.SetKeyword(material, "_BLENDMODE_ADD", false);
CoreUtils.SetKeyword(material, "_BLENDMODE_PRE_MULTIPLY", false);
if (surfaceType == SurfaceType.Opaque)
{

{
BlendMode blendMode = (BlendMode)material.GetFloat(kBlendMode);
SetKeyword(material, "_BLENDMODE_ALPHA", BlendMode.Alpha == blendMode);
SetKeyword(material, "_BLENDMODE_ADD", BlendMode.Additive == blendMode);
SetKeyword(material, "_BLENDMODE_PRE_MULTIPLY", BlendMode.PremultipliedAlpha == blendMode);
CoreUtils.SetKeyword(material, "_BLENDMODE_ALPHA", BlendMode.Alpha == blendMode);
CoreUtils.SetKeyword(material, "_BLENDMODE_ADD", BlendMode.Additive == blendMode);
CoreUtils.SetKeyword(material, "_BLENDMODE_PRE_MULTIPLY", BlendMode.PremultipliedAlpha == blendMode);
switch (blendMode)
{

}
bool fogEnabled = material.HasProperty(kEnableFogOnTransparent) && material.GetFloat(kEnableFogOnTransparent) > 0.0f && surfaceType == SurfaceType.Transparent;
SetKeyword(material, "_ENABLE_FOG_ON_TRANSPARENT", fogEnabled);
CoreUtils.SetKeyword(material, "_ENABLE_FOG_ON_TRANSPARENT", fogEnabled);
if (material.HasProperty(kDistortionEnable))
{

{
material.SetInt("_CullMode", (int)UnityEngine.Rendering.CullMode.Back);
}
SetKeyword(material, "_DOUBLESIDED_ON", doubleSidedEnable);
CoreUtils.SetKeyword(material, "_DOUBLESIDED_ON", doubleSidedEnable);
// A material's GI flag internally keeps track of whether emission is enabled at all, enabled but with no effect, or enabled and possibly modified at runtime.
// This state depends on the values of the current flag and the emissive color.

3
ScriptableRenderPipeline/HDRenderPipeline/Material/Unlit/Editor/UnlitUI.cs


using System;
using UnityEngine;
using UnityEngine.Experimental.Rendering;
namespace UnityEditor.Experimental.Rendering.HDPipeline
{

SetupBaseUnlitKeywords(material);
SetupBaseUnlitMaterialPass(material);
SetKeyword(material, "_EMISSIVE_COLOR_MAP", material.GetTexture(kEmissiveColorMap));
CoreUtils.SetKeyword(material, "_EMISSIVE_COLOR_MAP", material.GetTexture(kEmissiveColorMap));
}
}
} // namespace UnityEditor

2
ScriptableRenderPipeline/HDRenderPipeline/RenderPipelineResources/CameraMotionVectors.shader


float4 Frag(Varyings input) : SV_Target
{
PositionInputs posInput = GetPositionInput(input.positionCS.xy, _ScreenSize.zw);
float depth = LOAD_TEXTURE2D(_MainDepthTexture, posInput.unPositionSS).x;
float depth = LOAD_TEXTURE2D(_MainDepthTexture, posInput.positionSS).x;
UpdatePositionInput(depth, UNITY_MATRIX_I_VP, UNITY_MATRIX_VP, posInput);
float4 worldPos = float4(posInput.positionWS, 1.0);
float4 prevPos = worldPos;

26
ScriptableRenderPipeline/HDRenderPipeline/RenderPipelineResources/DefaultHDMaterial.mat


m_PrefabInternal: {fileID: 0}
m_Name: DefaultHDMaterial
m_Shader: {fileID: 4800000, guid: 6e4ae4064600d784cac1e41a9e6f2e59, type: 3}
m_ShaderKeywords: _BLENDMODE_PRESERVE_SPECULAR_LIGHTING _NORMALMAP_TANGENT_SPACE
m_ShaderKeywords: _ALBEDOAFFECTEMISSIVE_OFF _ALPHACUTOFFENABLE_OFF _BLENDMODE_PRESERVE_SPECULAR_LIGHTING
_DEPTHOFFSETENABLE_OFF _DISTORTIONDEPTHTEST_OFF _DISTORTIONENABLE_OFF _DISTORTIONONLY_OFF
_DOUBLESIDEDENABLE_OFF _ENABLESPECULAROCCLUSION_OFF _ENABLEWIND_OFF _NORMALMAP_TANGENT_SPACE
_PREREFRACTIONPASS_OFF
m_LightmapFlags: 4
m_EnableInstancingVariants: 0
m_DoubleSidedGI: 0

m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _BumpMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _ColorMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}

m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _ThicknessMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _TransmittanceColorMap:
- _AORemapMax: 1
- _AORemapMin: 0
- _ATDistance: 1
- _AlbedoAffectEmissive: 0
- _AlphaCutoff: 0.5

- _DistortionOnly: 0
- _DistortionScale: 1
- _DistortionSrcBlend: 1
- _DistortionVectorBias: -1
- _DistortionVectorScale: 2
- _DoubleSidedEnable: 0
- _DoubleSidedMirrorEnable: 1
- _DoubleSidedNormalMode: 1

- _HeightMax: 1
- _HeightMin: -1
- _HorizonFade: 1
- _IOR: 1
- _IOR: 1.097
- _InitialBend: 1
- _InvTilingScale: 1
- _LinkDetailsWithBase: 1

- _NormalMapSpace: 0
- _NormalScale: 1
- _NormalScale: 1.088
- _OcclusionStrength: 1
- _PPDLodThreshold: 5
- _PPDMaxSamples: 15

- _RefractionMode: 0
- _ShiverDirectionality: 0.5
- _ShiverDrag: 0.2
- _Smoothness: 0.5
- _Smoothness: 0.712
- _SmoothnessRemapMax: 1
- _SmoothnessRemapMin: 0
- _SmoothnessTextureChannel: 0

- _ZWrite: 1
m_Colors:
- _BaseColor: {r: 0.72794116, g: 0.72794116, b: 0.72794116, a: 1}
- _Color: {r: 1, g: 1, b: 1, a: 1}
- _Color: {r: 0.72794116, g: 0.72794116, b: 0.72794116, a: 1}
- _ThicknessRemap: {r: 0, g: 1, b: 0, a: 0}
- _TransmittanceColor: {r: 1, g: 1, b: 1, a: 1}
- _UVDetailsMappingMask: {r: 1, g: 0, b: 0, a: 0}
- _UVMappingMask: {r: 1, g: 0, b: 0, a: 0}

1
ScriptableRenderPipeline/HDRenderPipeline/RenderPipelineResources/HDRenderPipelineResources.asset


opaqueAtmosphericScattering: {fileID: 4800000, guid: 326059e48e5735e46a98047eff4f0295,
type: 3}
skyboxCubemap: {fileID: 103, guid: 0000000000000000f000000000000000, type: 0}
encodeBC6HCS: {fileID: 7200000, guid: aa922d239de60304f964e24488559eeb, type: 3}

3
ScriptableRenderPipeline/HDRenderPipeline/RenderPipelineResources/RenderPipelineResources.cs


public Shader opaqueAtmosphericScattering;
public Shader skyboxCubemap;
// Utilities
public ComputeShader encodeBC6HCS;
}
}

7
ScriptableRenderPipeline/HDRenderPipeline/SceneSettings/SceneSettings.cs


void OnEnable()
{
SceneSettingsManager.instance.AddSceneSettings(this);
HDRenderPipeline hdPipeline = RenderPipelineManager.currentPipeline as HDRenderPipeline;
if (hdPipeline != null)
{
hdPipeline.OnSceneLoad();
}
}
void OnDisable()

5
ScriptableRenderPipeline/HDRenderPipeline/ShaderConfig.cs


// 3) When this is solved (i.e. move previousPositionCS to a free attribute semantic), Unity only supports one pSecondaryFormat. This means that if we have a vertex color instanced stream and motion vectors, the motion vectors will overwrite the vertex color stream. See MeshRenderingData.cpp
// All this could be fixed with a new Mesh API, which is not ready yet. Note that this feature only affects animated meshes (vertex or skin) as others use depth reprojection.
VelocityInGBuffer = 0, // Change to 1 to enable the feature, then regenerate hlsl headers.
// TODO: not working yet, waiting for UINT16 RT format support
PackGBufferInU16 = 0,
CameraRelativeRendering = 1 // Rendering sets the origin of the world to the position of the primary (scene view) camera
};

// but it is usually small, so we are fine with it (until someone at Microsoft fixes the debugger).
public const int k_VelocityInGbuffer = (int)ShaderOptions.VelocityInGBuffer;
public static int s_VelocityInGbuffer = (int)ShaderOptions.VelocityInGBuffer;
public const int k_PackgbufferInU16 = (int)ShaderOptions.PackGBufferInU16;
public static int s_PackgbufferInU16 = (int)ShaderOptions.PackGBufferInU16;
public const int k_CameraRelativeRendering = (int)ShaderOptions.CameraRelativeRendering;
public static int s_CameraRelativeRendering = (int)ShaderOptions.CameraRelativeRendering;

1
ScriptableRenderPipeline/HDRenderPipeline/ShaderConfig.cs.hlsl


// UnityEngine.Experimental.Rendering.HDPipeline.ShaderOptions: static fields
//
#define SHADEROPTIONS_VELOCITY_IN_GBUFFER (0)
#define SHADEROPTIONS_PACK_GBUFFER_IN_U16 (0)
#define SHADEROPTIONS_CAMERA_RELATIVE_RENDERING (1)

Some files were not shown because too many files changed in this diff
