
Merge branch 'HDRP/staging' into HDRP/decals/v2_1/normal_buffer

# Conflicts:
#	com.unity.render-pipelines.high-definition/CHANGELOG.md
#	com.unity.render-pipelines.high-definition/HDRP/RenderPipelineResources/RenderPipelineResources.cs
/main
Paul Melamed 6 years ago
Current commit
9dbd8998
1134 changed files with 1235 additions and 1322 deletions
  1. README.md (6)
  2. TestProjects/CoreRP_Tests/Packages/manifest.json (34)
  3. TestProjects/HDRP_Tests/ProjectSettings/EditorBuildSettings.asset (3)
  4. TestProjects/LWGraphicsTest/Packages/manifest.json (2)
  5. TestProjects/LWGraphicsTest/ProjectSettings/EditorBuildSettings.asset (119)
  6. TestProjects/LWGraphicsTest/ProjectSettings/GraphicsSettings.asset (8)
  7. TestProjects/LWGraphicsTest/ProjectSettings/ProjectSettings.asset (97)
  8. TestProjects/LWGraphicsTest/ProjectSettings/TagManager.asset (6)
  9. TestProjects/LWGraphicsTest/ProjectSettings/UnityConnectSettings.asset (15)
  10. TestProjects/PostProcessing/Packages/manifest.json (2)
  11. build.py (8)
  12. com.unity.render-pipelines.core/CHANGELOG.md (4)
  13. com.unity.render-pipelines.core/CoreRP/Common/XRGraphicsConfig.cs (46)
  14. com.unity.render-pipelines.core/CoreRP/Debugging/Prefabs/Resources/DebugUI Canvas.prefab (225)
  15. com.unity.render-pipelines.core/CoreRP/Editor/CameraEditorUtils.cs (2)
  16. com.unity.render-pipelines.core/CoreRP/Editor/TextureCombiner/TextureCombiner.cs (6)
  17. com.unity.render-pipelines.core/CoreRP/ShaderLibrary/CommonLighting.hlsl (34)
  18. com.unity.render-pipelines.core/CoreRP/ShaderLibrary/CommonMaterial.hlsl (18)
  19. com.unity.render-pipelines.core/CoreRP/Shadow/ShadowBase.cs (1)
  20. com.unity.render-pipelines.core/CoreRP/Textures/TextureCache.cs (6)
  21. com.unity.render-pipelines.core/CoreRP/Utilities/CoreUtils.cs (9)
  22. com.unity.render-pipelines.core/CoreRP/Utilities/GeometryUtils.cs (17)
  23. com.unity.render-pipelines.core/package.json (2)
  24. com.unity.render-pipelines.high-definition/CHANGELOG.md (46)
  25. com.unity.render-pipelines.high-definition/HDRP/Camera/HDCamera.cs (9)
  26. com.unity.render-pipelines.high-definition/HDRP/Debug/DebugColorPicker.shader (5)
  27. com.unity.render-pipelines.high-definition/HDRP/Debug/DebugDisplay.cs (2)
  28. com.unity.render-pipelines.high-definition/HDRP/Debug/LightingDebug.cs (1)
  29. com.unity.render-pipelines.high-definition/HDRP/Debug/LightingDebug.cs.hlsl (13)
  30. com.unity.render-pipelines.high-definition/HDRP/Editor/AssetProcessors/NormalMapFilteringTexturePostprocessor.cs.meta (2)
  31. com.unity.render-pipelines.high-definition/HDRP/Editor/Lighting/HDLightEditor.cs (30)
  32. com.unity.render-pipelines.high-definition/HDRP/Editor/Lighting/Reflection/HDReflectionProbeEditor.Gizmos.cs (4)
  33. com.unity.render-pipelines.high-definition/HDRP/Editor/Lighting/Reflection/HDReflectionProbeEditor.Handles.cs (2)
  34. com.unity.render-pipelines.high-definition/HDRP/Editor/Lighting/Reflection/HDReflectionProbeEditor.cs (3)
  35. com.unity.render-pipelines.high-definition/HDRP/Editor/Lighting/Reflection/PlanarReflectionProbeEditor.cs (3)
  36. com.unity.render-pipelines.high-definition/HDRP/Editor/Lighting/Reflection/PlanarReflectionProbeUI.Handles.cs (4)
  37. com.unity.render-pipelines.high-definition/HDRP/Editor/Lighting/Reflection/Volume/ProxyVolumeUI.cs (8)
  38. com.unity.render-pipelines.high-definition/HDRP/Editor/Lighting/Volumetric/DensityVolumeTextureTool.cs.meta (2)
  39. com.unity.render-pipelines.high-definition/HDRP/Editor/RenderPipeline/HDAssetFactory.cs (76)
  40. com.unity.render-pipelines.high-definition/HDRP/Editor/RenderPipeline/HDEditorUtils.cs (18)
  41. com.unity.render-pipelines.high-definition/HDRP/Editor/RenderPipeline/Settings/FrameSettingsUI.cs (3)
  42. com.unity.render-pipelines.high-definition/HDRP/Editor/RenderPipeline/Settings/SerializedShadowInitParameters.cs (2)
  43. com.unity.render-pipelines.high-definition/HDRP/Editor/RenderPipeline/Settings/ShadowInitParametersUI.cs (1)
  44. com.unity.render-pipelines.high-definition/HDRP/Editor/ShaderGraph/HDPBRPass.template (161)
  45. com.unity.render-pipelines.high-definition/HDRP/Editor/ShaderGraph/HDSubShaderUtilities.cs (105)
  46. com.unity.render-pipelines.high-definition/HDRP/Editor/ShaderGraph/HDUnlitPassForward.template (162)
  47. com.unity.render-pipelines.high-definition/HDRP/Lighting/GlobalIlluminationUtils.cs (2)
  48. com.unity.render-pipelines.high-definition/HDRP/Lighting/Light/HDAdditionalLightData.cs (4)
  49. com.unity.render-pipelines.high-definition/HDRP/Lighting/LightEvaluation.hlsl (3)
  50. com.unity.render-pipelines.high-definition/HDRP/Lighting/LightLoop/Deferred.compute (2)
  51. com.unity.render-pipelines.high-definition/HDRP/Lighting/LightLoop/GlobalLightLoopSettings.cs (4)
  52. com.unity.render-pipelines.high-definition/HDRP/Lighting/LightLoop/LightLoop.cs (88)
  53. com.unity.render-pipelines.high-definition/HDRP/Lighting/LightLoop/lightlistbuild-bigtile.compute (20)
  54. com.unity.render-pipelines.high-definition/HDRP/Lighting/LightLoop/lightlistbuild-clustered.compute (68)
  55. com.unity.render-pipelines.high-definition/HDRP/Lighting/LightLoop/lightlistbuild.compute (38)
  56. com.unity.render-pipelines.high-definition/HDRP/Lighting/LightLoop/scrbound.compute (67)
  57. com.unity.render-pipelines.high-definition/HDRP/Lighting/Reflection/HDAdditionalReflectionData.cs (18)
  58. com.unity.render-pipelines.high-definition/HDRP/Lighting/Reflection/HDProbe.cs (4)
  59. com.unity.render-pipelines.high-definition/HDRP/Lighting/Reflection/PlanarReflectionProbe.cs (2)
  60. com.unity.render-pipelines.high-definition/HDRP/Lighting/Reflection/ProbeWrapper.cs (2)
  61. com.unity.render-pipelines.high-definition/HDRP/Lighting/ScreenSpaceShadow.compute (4)
  62. com.unity.render-pipelines.high-definition/HDRP/Lighting/Volumetrics/DensityVolume.cs (3)
  63. com.unity.render-pipelines.high-definition/HDRP/Lighting/Volumetrics/DensityVolumeManager.cs (2)
  64. com.unity.render-pipelines.high-definition/HDRP/Lighting/Volumetrics/VBuffer.hlsl (1)
  65. com.unity.render-pipelines.high-definition/HDRP/Material/LayeredLit/LayeredLitData.hlsl (6)
  66. com.unity.render-pipelines.high-definition/HDRP/Material/Lit/Lit.hlsl (8)
  67. com.unity.render-pipelines.high-definition/HDRP/Material/Lit/Lit.shader (1)
  68. com.unity.render-pipelines.high-definition/HDRP/Material/Lit/LitBuiltinData.hlsl (2)
  69. com.unity.render-pipelines.high-definition/HDRP/Material/Lit/LitData.hlsl (20)
  70. com.unity.render-pipelines.high-definition/HDRP/Material/Lit/LitDataIndividualLayer.hlsl (16)
  71. com.unity.render-pipelines.high-definition/HDRP/Material/MaterialEvaluation.hlsl (40)
  72. com.unity.render-pipelines.high-definition/HDRP/Material/MaterialUtilities.hlsl (178)
  73. com.unity.render-pipelines.high-definition/HDRP/Material/NormalBuffer.hlsl (11)
  74. com.unity.render-pipelines.high-definition/HDRP/Material/StackLit/StackLitData.hlsl (1)
  75. com.unity.render-pipelines.high-definition/HDRP/RenderPipeline/HDCustomSamplerId.cs (4)
  76. com.unity.render-pipelines.high-definition/HDRP/RenderPipeline/HDRenderPipeline.cs (81)
  77. com.unity.render-pipelines.high-definition/HDRP/RenderPipeline/HDUtils.cs (19)
  78. com.unity.render-pipelines.high-definition/HDRP/RenderPipeline/Settings/FrameSettings.cs (7)
  79. com.unity.render-pipelines.high-definition/HDRP/RenderPipelineResources/RenderPipelineResources.cs (107)
  80. com.unity.render-pipelines.high-definition/HDRP/Sky/SkyRenderingContext.cs (9)
  81. com.unity.render-pipelines.high-definition/package.json (8)
  82. com.unity.render-pipelines.lightweight/CHANGELOG.md (18)
  83. com.unity.render-pipelines.lightweight/LWRP/DefaultRendererSetup.cs (49)
  84. com.unity.render-pipelines.lightweight/LWRP/Editor/LightweightLightEditor.cs (4)
  85. com.unity.render-pipelines.lightweight/LWRP/Editor/LightweightPipelineAssetEditor.cs (130)
  86. com.unity.render-pipelines.lightweight/LWRP/Editor/ShaderGUI/LightweightShaderGUI.cs (8)
  87. com.unity.render-pipelines.lightweight/LWRP/Editor/ShaderGUI/LightweightStandardGUI.cs (2)
  88. com.unity.render-pipelines.lightweight/LWRP/Editor/ShaderGUI/LightweightStandardSimpleLightingGUI.cs (2)
  89. com.unity.render-pipelines.lightweight/LWRP/Editor/ShaderGraph/LightWeightPBRSubShader.cs (1)
  90. com.unity.render-pipelines.lightweight/LWRP/Editor/ShaderGraph/LightWeightUnlitSubShader.cs (1)
  91. com.unity.render-pipelines.lightweight/LWRP/IRendererSetup.cs (2)
  92. com.unity.render-pipelines.lightweight/LWRP/LightweightForwardRenderer.cs (40)
  93. com.unity.render-pipelines.lightweight/LWRP/LightweightPipeline.cs (6)
  94. com.unity.render-pipelines.lightweight/LWRP/LightweightShadowUtils.cs (7)
  95. com.unity.render-pipelines.lightweight/LWRP/Passes/BeginXRRenderingPass.cs (7)
  96. com.unity.render-pipelines.lightweight/LWRP/Passes/CopyColorPass.cs (21)
  97. com.unity.render-pipelines.lightweight/LWRP/Passes/CopyDepthPass.cs (23)
  98. com.unity.render-pipelines.lightweight/LWRP/Passes/CreateLightweightRenderTexturesPass.cs (9)
  99. com.unity.render-pipelines.lightweight/LWRP/Passes/DepthOnlyPass.cs (17)
  100. com.unity.render-pipelines.lightweight/LWRP/Passes/DirectionalShadowsPass.cs (28)

6
README.md


## NOTE: We are migrating reported issues to fogbugz. Please log issues there.
## NOTE: We have migrated reported issues to fogbugz. Please log issues via the Unity bug tracker.
# Unity Scriptable Render Pipeline
The Scriptable Render Pipeline (SRP) is a new Unity feature in active development. SRP has been designed to give artists and developers the tools they need to create modern, high-fidelity graphics in Unity. It includes a built-in Lightweight Render Pipeline for use on all platforms, and a High Definition Render Pipeline (HDRP) for use on compute-shader-compatible platforms. These features are available in Unity 2018.1+.

For a more detailed overview of the planned features and philosophy behind SRP, refer to the following Gdoc: [ScriptableRenderPipeline](https://docs.google.com/document/d/1e2jkr_-v5iaZRuHdnMrSv978LuJKYZhsIYnrDkNAuvQ/edit?usp=sharing)
Detailed documentation is being added here: [Wiki](https://github.com/Unity-Technologies/ScriptableRenderPipeline/wiki)
This feature is currently in preview. Some features may change or be removed before we move to a full release.

This repository consists of a folder that should be cloned outside the Assets\ folder of your Unity project. We recommend creating a new project to test SRP. Do not clone this repo into an existing project unless you want to break it, or unless you are updating to a newer version of the SRP repo.
After cloning you will need to edit your project's `packages.json` file (in either `UnityPackageManager/` or `Packages/`) to point to the SRP submodules you wish to use. See: https://github.com/Unity-Technologies/ScriptableRenderPipeline/blob/build/automation/TestProjects/GraphicsTests/Packages/manifest.json
After cloning you will need to edit your project's `packages.json` file (in either `UnityPackageManager/` or `Packages/`) to point to the SRP submodules you wish to use. See: https://github.com/Unity-Technologies/ScriptableRenderPipeline/blob/master/TestProjects/HDRP_Tests/Packages/manifest.json
This will link your project to the specific version of SRP you have cloned.
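Concretely, "pointing to the SRP submodules" means adding local file: references in the manifest's dependencies block. A minimal sketch, mirroring the TestProjects manifests shown further down this page (the ../../../ depth assumes your project sits next to the cloned repo the same way those test projects do; adjust the relative path to your own layout):
{
  "dependencies": {
    "com.unity.render-pipelines.core": "file:../../../com.unity.render-pipelines.core",
    "com.unity.render-pipelines.lightweight": "file:../../../com.unity.render-pipelines.lightweight",
    "com.unity.render-pipelines.high-definition": "file:../../../com.unity.render-pipelines.high-definition",
    "com.unity.shadergraph": "file:../../../com.unity.shadergraph"
  }
}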

34
TestProjects/CoreRP_Tests/Packages/manifest.json


{
"dependencies": {
"com.unity.render-pipelines.core": "file:../../../com.unity.render-pipelines.core",
"com.unity.modules.ai": "1.0.0",
"com.unity.modules.animation": "1.0.0",
"com.unity.modules.assetbundle": "1.0.0",
"com.unity.modules.audio": "1.0.0",
"com.unity.modules.cloth": "1.0.0",
"com.unity.modules.director": "1.0.0",
"com.unity.modules.imageconversion": "1.0.0",
"com.unity.modules.imgui": "1.0.0",
"com.unity.modules.jsonserialize": "1.0.0",
"com.unity.modules.particlesystem": "1.0.0",
"com.unity.modules.physics": "1.0.0",
"com.unity.modules.physics2d": "1.0.0",
"com.unity.modules.screencapture": "1.0.0",
"com.unity.modules.terrain": "1.0.0",
"com.unity.modules.terrainphysics": "1.0.0",
"com.unity.modules.tilemap": "1.0.0",
"com.unity.modules.ui": "1.0.0",
"com.unity.modules.uielements": "1.0.0",
"com.unity.modules.umbra": "1.0.0",
"com.unity.modules.unityanalytics": "1.0.0",
"com.unity.modules.unitywebrequest": "1.0.0",
"com.unity.modules.unitywebrequestassetbundle": "1.0.0",
"com.unity.modules.unitywebrequestaudio": "1.0.0",
"com.unity.modules.unitywebrequesttexture": "1.0.0",
"com.unity.modules.unitywebrequestwww": "1.0.0",
"com.unity.modules.vehicles": "1.0.0",
"com.unity.modules.video": "1.0.0",
"com.unity.modules.vr": "1.0.0",
"com.unity.modules.wind": "1.0.0",
"com.unity.modules.xr": "1.0.0"
"com.unity.testing.srp.core": "file:../../../com.unity.testing.srp.core"
"com.unity.render-pipelines.core"
"com.unity.render-pipelines.core",
"com.unity.testing.srp.core"
]
}

3
TestProjects/HDRP_Tests/ProjectSettings/EditorBuildSettings.asset


path: Assets/GraphicTests/Scenes/2x_Lighting/2005_Area_Light_Meshes.unity
guid: 359ded33a047fd540b5e19a98547f5e2
- enabled: 1
path: Assets/GraphicTests/Scenes/2x_Lighting/2102_GI_Emission.unity
guid: b8f49ca9a12abb4468116e9422139e63
- enabled: 1
path: Assets/GraphicTests/Scenes/2x_Lighting/2201_ReflectionProbes_Priority.unity
guid: d485acf0535eb4b42a9a3847f4a8274a
- enabled: 1

2
TestProjects/LWGraphicsTest/Packages/manifest.json


"com.unity.render-pipelines.lightweight": "file:../../../com.unity.render-pipelines.lightweight",
"com.unity.shadergraph": "file:../../../com.unity.shadergraph",
"com.unity.testframework.graphics": "file:../../../com.unity.testframework.graphics",
"com.unity.testing.srp.lightweight": "file:../../../com.unity.testing.srp.lightweight",
"com.unity.testing.srp.lightweight",
"com.unity.testframework.graphics"
]
}

119
TestProjects/LWGraphicsTest/ProjectSettings/EditorBuildSettings.asset


serializedVersion: 2
m_Scenes:
- enabled: 1
path: Assets/Scenes/001_SimpleCube.unity
path: Packages/com.unity.testing.srp.lightweight/Tests/Scenes/001_SimpleCube.unity
path: Assets/Scenes/005_LitBakedEmission.unity
path: Packages/com.unity.testing.srp.lightweight/Tests/Scenes/002_Camera_Clip.unity
guid: 4be459bb4af62441893ba57ce669de7f
- enabled: 1
path: Packages/com.unity.testing.srp.lightweight/Tests/Scenes/003_Camera_Ortho.unity
guid: f8c4973533e7b4ca58d3b2255e7384d6
- enabled: 1
path: Packages/com.unity.testing.srp.lightweight/Tests/Scenes/004_Camera_TargetTexture.unity
guid: c678e78e9c06d44789d69ac8df08d375
- enabled: 1
path: Packages/com.unity.testing.srp.lightweight/Tests/Scenes/005_LitBakedEmission.unity
path: Assets/Scenes/006_LitShaderLightProbes.unity
path: Packages/com.unity.testing.srp.lightweight/Tests/Scenes/006_LitShaderLightProbes.unity
path: Assets/Scenes/007_LitShaderMaps.unity
path: Packages/com.unity.testing.srp.lightweight/Tests/Scenes/007_LitShaderMaps.unity
path: Assets/Scenes/011_UnlitSprites.unity
path: Packages/com.unity.testing.srp.lightweight/Tests/Scenes/011_UnlitSprites.unity
path: Assets/Scenes/012_PBS_EnvironmentBRDF_Spheres.unity
path: Packages/com.unity.testing.srp.lightweight/Tests/Scenes/012_PBS_EnvironmentBRDF_Spheres.unity
path: Assets/Scenes/016_Lighting_Scene_Directional.unity
path: Packages/com.unity.testing.srp.lightweight/Tests/Scenes/013_CameraMulti_Splitscreen.unity
guid: 061267518b02f411c9c75f6b52f6d333
- enabled: 1
path: Packages/com.unity.testing.srp.lightweight/Tests/Scenes/014_CameraMulti_MiniMap.unity
guid: 9225c2f5b8cf84f6bbe7cd83a8c22426
- enabled: 1
path: Packages/com.unity.testing.srp.lightweight/Tests/Scenes/015_CameraMulti_FPSCam.unity
guid: 9d2d9ca35103040838283b3c356c0250
- enabled: 1
path: Packages/com.unity.testing.srp.lightweight/Tests/Scenes/016_Lighting_Scene_Directional.unity
path: Assets/Scenes/017_Lighting_Scene_DirectionalBaked.unity
path: Packages/com.unity.testing.srp.lightweight/Tests/Scenes/017_Lighting_Scene_DirectionalBaked.unity
- enabled: 0
path: Packages/com.unity.testing.srp.lightweight/Tests/Scenes/019_Lighting_Scene_PointLights.unity
guid: c44973f9bcb9e469ea540fc8b0f0324a
path: Assets/Scenes/018_Lighting_Scene_DirectionalBakedIndirect.unity
guid: 24ef026ffe44a4b21a6bf820a8488974
- enabled: 1
path: Assets/Scenes/020_Lighting_BasicDirectional.unity
path: Packages/com.unity.testing.srp.lightweight/Tests/Scenes/020_Lighting_BasicDirectional.unity
path: Assets/Scenes/021_Lighting_BasicPoint.unity
path: Packages/com.unity.testing.srp.lightweight/Tests/Scenes/021_Lighting_BasicPoint.unity
path: Assets/Scenes/022_Lighting_BasicSpot.unity
path: Packages/com.unity.testing.srp.lightweight/Tests/Scenes/022_Lighting_BasicSpot.unity
- enabled: 1
path: Assets/Scenes/023_Lighting_Mixed.unity
- enabled: 0
path: Packages/com.unity.testing.srp.lightweight/Tests/Scenes/023_Lighting_Mixed_Indirect.unity
path: Assets/Scenes/026_Shader_PBRscene.unity
path: Packages/com.unity.testing.srp.lightweight/Tests/Scenes/026_Shader_PBRscene.unity
path: Assets/Scenes/035_Shader_TerrainShaders.unity
path: Packages/com.unity.testing.srp.lightweight/Tests/Scenes/024_Shader_PBRvalidation_Specular.unity
guid: 2b6b74b7929ae4191b3b936c12236e0a
- enabled: 1
path: Packages/com.unity.testing.srp.lightweight/Tests/Scenes/025_Shader_PBRvalidation_Metallic.unity
guid: a770620fcd2f84f248d06edf3744b2c0
- enabled: 1
path: Packages/com.unity.testing.srp.lightweight/Tests/Scenes/027_PostProcessing.unity
guid: 3ca3376bc2cd549fa9a8b28d5f8acc97
- enabled: 1
path: Packages/com.unity.testing.srp.lightweight/Tests/Scenes/028_PostProcessing_Custom.unity
guid: e563e287471b641a88f8cfdb13746b1a
- enabled: 1
path: Packages/com.unity.testing.srp.lightweight/Tests/Scenes/029_Particles.unity
guid: 65d99b408a8134214906126ccdea532f
- enabled: 1
path: Packages/com.unity.testing.srp.lightweight/Tests/Scenes/031_Shader_GlossyEnvironmentSky.unity
guid: 2d287b701f8e1bf429c14805ac505eb9
- enabled: 1
path: Packages/com.unity.testing.srp.lightweight/Tests/Scenes/032_Shader_GlossyEnvironmentColor.unity
guid: 375d3d0d24cdfe14aa69d3d595acb1c3
- enabled: 1
path: Packages/com.unity.testing.srp.lightweight/Tests/Scenes/033_Shader_HighlightsEnvironmentGradientSH.unity
guid: ff592545ad5dd744893d54a73f85b2cc
- enabled: 1
path: Packages/com.unity.testing.srp.lightweight/Tests/Scenes/034_Shader_HighlightsEnvironmentGradientBaked.unity
guid: e346f8573410f414ea2a8a4740526c88
- enabled: 1
path: Packages/com.unity.testing.srp.lightweight/Tests/Scenes/035_Shader_TerrainShaders.unity
path: Assets/Scenes/045_CustomLWPipe.unity
path: Packages/com.unity.testing.srp.lightweight/Tests/Scenes/036_Lighting_Scene_DirectionalBakedDirectional.unity
guid: 7a8ef7ce062f0465b9e493dd5730cecb
- enabled: 1
path: Packages/com.unity.testing.srp.lightweight/Tests/Scenes/037_Particles_Standard.unity
guid: 67bc76c89479b264aa4a5c176433ec3a
- enabled: 1
path: Packages/com.unity.testing.srp.lightweight/Tests/Scenes/040_UpgradeScene.unity
guid: 5b50bbd7a4d4f4f49b5a50ebed1ba42c
- enabled: 1
path: Packages/com.unity.testing.srp.lightweight/Tests/Scenes/041_Lighting_BasicArea.unity
guid: cd37c7e9d54e74dd7add9b40a2698ada
- enabled: 0
path: Packages/com.unity.testing.srp.lightweight/Tests/Scenes/042_Lighting_Scene_VertexLighting.unity
guid: 700788280c77e4c63a4d1919a8bf97e3
- enabled: 0
path: Packages/com.unity.testing.srp.lightweight/Tests/Scenes/043_Lighting_Mixed_ShadowMask.unity
guid: a27d5eebd041b42dab7ad89ce8249e81
- enabled: 1
path: Packages/com.unity.testing.srp.lightweight/Tests/Scenes/044_Lighting_ReflectionProbe.unity
guid: 0feef353efdf045989e9c2b501774338
- enabled: 1
path: Packages/com.unity.testing.srp.lightweight/Tests/Scenes/045_CustomLWPipe.unity
- enabled: 1
path: Packages/com.unity.testing.srp.lightweight/Tests/Scenes/046_Camera_OpaqueTexture.unity
guid: 2614aae0a36c9456b9e20ac481fbda4c
- enabled: 1
path: Packages/com.unity.testing.srp.lightweight/Tests/Scenes/047_Lighting_DirectionalCascades.unity
guid: 4e49334e530744e0eaeaa5e3a7e874ca
- enabled: 0
path: Packages/com.unity.testing.srp.lightweight/Tests/Scenes/048_Lighting_LocalShadows.unity
guid: 099c78bd4d7e8453b9dbb217cf06cac2
- enabled: 0
path: Packages/com.unity.testing.srp.lightweight/Tests/Scenes/049_Lighting_Mixed_Subtractive.unity
guid: ffaad23a2c7fc41b48e4e7ab7e9273e4
- enabled: 1
path: Packages/com.unity.testing.srp.lightweight/Tests/Scenes/050_Shader_Graphs.unity
guid: 6e7a83866a1e446b3becf7972371c5d0
m_configObjects: {}

8
TestProjects/LWGraphicsTest/ProjectSettings/GraphicsSettings.asset


- {fileID: 15106, guid: 0000000000000000f000000000000000, type: 0}
- {fileID: 10753, guid: 0000000000000000f000000000000000, type: 0}
- {fileID: 10770, guid: 0000000000000000f000000000000000, type: 0}
- {fileID: 16000, guid: 0000000000000000f000000000000000, type: 0}
- {fileID: 17000, guid: 0000000000000000f000000000000000, type: 0}
m_SpritesDefaultMaterial: {fileID: 10754, guid: 0000000000000000f000000000000000,
type: 0}
m_CustomRenderPipeline: {fileID: 11400000, guid: e6987eea1dd29074597d54ed91a54a26,
type: 2}
m_SpritesDefaultMaterial: {fileID: 10754, guid: 0000000000000000f000000000000000, type: 0}
m_CustomRenderPipeline: {fileID: 11400000, guid: e6987eea1dd29074597d54ed91a54a26, type: 2}
m_TransparencySortMode: 0
m_TransparencySortAxis: {x: 0, y: 0, z: 1}
m_DefaultRenderingPath: 1

97
TestProjects/LWGraphicsTest/ProjectSettings/ProjectSettings.asset


use32BitDisplayBuffer: 1
preserveFramebufferAlpha: 0
disableDepthAndStencilBuffers: 0
androidStartInFullscreen: 1
androidBlitType: 0
defaultIsNativeResolution: 1
macRetinaSupport: 1

xboxEnableGuest: 0
xboxEnablePIXSampling: 0
metalFramebufferOnly: 0
n3dsDisableStereoscopicView: 0
n3dsEnableSharedListOpt: 1
n3dsEnableVSync: 0
xboxOneResolution: 0
xboxOneSResolution: 0
xboxOneXResolution: 3

xboxOnePresentImmediateThreshold: 0
switchQueueCommandMemory: 0
videoMemoryForVertexBuffers: 0
psp2PowerMode: 0
psp2AcquireBGM: 1
vulkanUseSWCommandBuffers: 1
m_SupportedAspectRatios:
4:3: 1
5:4: 1

dashSupport: 0
enable360StereoCapture: 0
protectGraphicsMemory: 0
enableFrameTimingStats: 0
useHDRDisplay: 0
m_ColorGamuts: 00000000
targetPixelDensity: 30

appleEnableAutomaticSigning: 0
iOSRequireARKit: 0
appleEnableProMotion: 0
vulkanEditorSupport: 0
clonedFromGUID: 00000000000000000000000000000000
templatePackageId:
templateDefaultScene:

ps4pnGameCustomData: 1
playerPrefsSupport: 0
enableApplicationExit: 0
resetTempFolder: 1
restrictedAudioUsageRights: 0
ps4UseResolutionFallback: 0
ps4ReprojectionSupport: 0

ps4attribEyeToEyeDistanceSettingVR: 0
ps4IncludedModules: []
monoEnv:
psp2Splashimage: {fileID: 0}
psp2NPTrophyPackPath:
psp2NPSupportGBMorGJP: 0
psp2NPAgeRating: 12
psp2NPTitleDatPath:
psp2NPCommsID:
psp2NPCommunicationsID:
psp2NPCommsPassphrase:
psp2NPCommsSig:
psp2ParamSfxPath:
psp2ManualPath:
psp2LiveAreaGatePath:
psp2LiveAreaBackroundPath:
psp2LiveAreaPath:
psp2LiveAreaTrialPath:
psp2PatchChangeInfoPath:
psp2PatchOriginalPackage:
psp2PackagePassword: F69AzBlax3CF3EDNhm3soLBPh71Yexui
psp2KeystoneFile:
psp2MemoryExpansionMode: 0
psp2DRMType: 0
psp2StorageType: 0
psp2MediaCapacity: 0
psp2DLCConfigPath:
psp2ThumbnailPath:
psp2BackgroundPath:
psp2SoundPath:
psp2TrophyCommId:
psp2TrophyPackagePath:
psp2PackagedResourcesPath:
psp2SaveDataQuota: 10240
psp2ParentalLevel: 1
psp2ShortTitle: Not Set
psp2ContentID: IV0000-ABCD12345_00-0123456789ABCDEF
psp2Category: 0
psp2MasterVersion: 01.00
psp2AppVersion: 01.00
psp2TVBootMode: 0
psp2EnterButtonAssignment: 2
psp2TVDisableEmu: 0
psp2AllowTwitterDialog: 1
psp2Upgradable: 0
psp2HealthWarning: 0
psp2UseLibLocation: 0
psp2InfoBarOnStartup: 0
psp2InfoBarColor: 0
psp2ScriptOptimizationLevel: 2
splashScreenBackgroundSourceLandscape: {fileID: 0}
splashScreenBackgroundSourcePortrait: {fileID: 0}
spritePackerPolicy:

webGLUseEmbeddedResources: 0
webGLCompressionFormat: 1
webGLLinkerTarget: 0
webGLThreadsSupport: 0
14: UNITY_POST_PROCESSING_STACK_V2
18: UNITY_POST_PROCESSING_STACK_V2
19: UNITY_POST_PROCESSING_STACK_V2
21: UNITY_POST_PROCESSING_STACK_V2

27: UNITY_POST_PROCESSING_STACK_V2
28: UNITY_POST_PROCESSING_STACK_V2
managedStrippingLevel: {}
incrementalIl2cppBuild: {}
allowUnsafeCode: 0
additionalIl2CppArgs:

metroMediumTileShowName: 0
metroLargeTileShowName: 0
metroWideTileShowName: 0
metroSupportStreamingInstall: 0
metroLastRequiredScene: 0
metroSplashScreenBackgroundColor: {r: 0.12941177, g: 0.17254902, b: 0.21568628,
a: 1}
metroSplashScreenBackgroundColor: {r: 0.12941177, g: 0.17254902, b: 0.21568628, a: 1}
metroSplashScreenUseBackgroundColor: 0
platformCapabilities: {}
metroFTAName:

n3dsUseExtSaveData: 0
n3dsCompressStaticMem: 1
n3dsExtSaveDataNumber: 0x12345
n3dsStackSize: 131072
n3dsTargetPlatform: 2
n3dsRegion: 7
n3dsMediaSize: 0
n3dsLogoStyle: 3
n3dsTitle: GameName
n3dsProductCode:
n3dsApplicationId: 0xFF3FF
XboxOneProductId:
XboxOneUpdateKey:
XboxOneSandboxId:

daydreamIconForeground: {fileID: 0}
daydreamIconBackground: {fileID: 0}
cloudServicesEnabled: {}
luminIcon:
m_Name:
m_ModelFolderPath:
m_PortalFolderPath:
luminCert:
m_CertPath:
m_PrivateKeyPath:
luminIsChannelApp: 0
luminVersion:
m_VersionCode: 1
m_VersionName:
facebookAppId:
facebookCookies: 1
facebookLogging: 1
facebookStatus: 1
facebookXfbml: 0
facebookFrictionlessRequests: 1
framebufferDepthMemorylessMode: 0
legacyClampBlendShapeWeights: 1

6
TestProjects/LWGraphicsTest/ProjectSettings/TagManager.asset


-
-
m_SortingLayers:
- name: BG
uniqueID: 3756855275
locked: 0
- name: FG
uniqueID: 116359401
locked: 0

15
TestProjects/LWGraphicsTest/ProjectSettings/UnityConnectSettings.asset


--- !u!310 &1
UnityConnectSettings:
m_ObjectHideFlags: 0
m_Enabled: 0
serializedVersion: 1
m_Enabled: 1
m_TestEventUrl:
m_TestConfigUrl:
m_EventOldUrl: https://api.uca.cloud.unity3d.com/v1/events
m_EventUrl: https://cdp.cloud.unity3d.com/v1/events
m_ConfigUrl: https://config.uca.cloud.unity3d.com
m_EventUrl: https://perf-events.cloud.unity3d.com/api/events/crashes
m_NativeEventUrl: https://perf-events.cloud.unity3d.com/symbolicate
m_EventUrl: https://perf-events.cloud.unity3d.com
m_LogBufferSize: 10
m_CaptureEditorExceptions: 1
UnityPurchasingSettings:
m_Enabled: 0

m_InitializeOnStartup: 1
m_TestEventUrl:
m_TestConfigUrl:
UnityAdsSettings:
m_Enabled: 0
m_InitializeOnStartup: 1

2
TestProjects/PostProcessing/Packages/manifest.json


{
"dependencies": {
"com.unity.postprocessing": "2.0.9-preview",
"com.unity.postprocessing": "2.0.10-preview",
"com.unity.testframework.graphics": "file:../../../com.unity.testframework.graphics",
"com.unity.textmeshpro": "1.2.4"
},

8
build.py


("com.unity.render-pipelines.core", os.path.join("com.unity.render-pipelines.core")),
("com.unity.render-pipelines.lightweight", os.path.join("com.unity.render-pipelines.lightweight")),
("com.unity.render-pipelines.high-definition", os.path.join("com.unity.render-pipelines.high-definition")),
("com.unity.testframework.graphics", os.path.join("com.unity.testframework.graphics"))
("com.unity.testframework.graphics", os.path.join("com.unity.testframework.graphics")),
("com.unity.testing.srp.core", os.path.join("com.unity.testing.srp.core")),
("com.unity.testing.srp.lightweight", os.path.join("com.unity.testing.srp.lightweight"))
"com.unity.render-pipelines.core",
"com.unity.render-pipelines.lightweight",
"com.unity.render-pipelines.high-definition",
"com.unity.shadergraph"
]
if __name__ == "__main__":

4
com.unity.render-pipelines.core/CHANGELOG.md


The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/)
and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html).
## [3.3.0]
## [3.2.0]
## [3.1.0]
### Added

46
com.unity.render-pipelines.core/CoreRP/Common/XRGraphicsConfig.cs


using System;
using UnityEditor;
using UnityEngine.XR;
#if UNITY_2017_2_OR_NEWER
using UnityEngine.XR;
using XRSettings = UnityEngine.XR.XRSettings;
#elif UNITY_5_6_OR_NEWER
using UnityEngine.VR;
using XRSettings = UnityEngine.VR.VRSettings;
#endif
namespace UnityEngine.Experimental.Rendering
{

public float occlusionMaskScale;
public bool showDeviceView;
public GameViewRenderMode gameViewRenderMode;
public void SetConfig()
{ // If XR is enabled, sets XRSettings from our saved config
if (!enabled)

return false;
#endif
}
}
}
#if UNITY_EDITOR
// FIXME: We should probably have StereoRenderingPath defined in UnityEngine.XR, not UnityEditor...
public static StereoRenderingPath stereoRenderingMode
{
get
{
if (!enabled)
Assert.IsFalse(enabled);
#if UNITY_2018_3_OR_NEWER
return (StereoRenderingPath)XRSettings.stereoRenderingMode;
#else
if (eyeTextureDesc.vrUsage == VRTextureUsage.TwoEyes)
return StereoRenderingPath.SinglePass;
else if (eyeTextureDesc.dimension == UnityEngine.Rendering.TextureDimension.Tex2DArray)
return StereoRenderingPath.Instancing;
else
return StereoRenderingPath.MultiPass;
#endif
}
}
#endif
public static RenderTextureDescriptor eyeTextureDesc
{
get

}
}
public static string[] supportedDevices
public static int eyeTextureWidth
{
get
{
if (!enabled)
Assert.IsFalse(enabled);
return XRSettings.eyeTextureWidth;
}
}
public static int eyeTextureHeight
return XRSettings.supportedDevices;
return XRSettings.eyeTextureHeight;
}
}
}

225
com.unity.render-pipelines.core/CoreRP/Debugging/Prefabs/Resources/DebugUI Canvas.prefab


%YAML 1.1
%TAG !u! tag:unity3d.com,2011:
--- !u!1001 &100100000
Prefab:
m_ObjectHideFlags: 1
serializedVersion: 2
m_Modification:
m_TransformParent: {fileID: 0}
m_Modifications: []
m_RemovedComponents: []
m_ParentPrefab: {fileID: 0}
m_RootGameObject: {fileID: 1153602445894428}
m_IsPrefabParent: 1
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
serializedVersion: 5
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
serializedVersion: 6
m_Component:
- component: {fileID: 224711363741255626}
- component: {fileID: 223912878945851142}

m_NavMeshLayer: 0
m_StaticEditorFlags: 0
m_IsActive: 1
--- !u!114 &114530362809716058
--- !u!224 &224711363741255626
RectTransform:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 1153602445894428}
m_LocalRotation: {x: 0, y: 0, z: 0, w: 1}
m_LocalPosition: {x: 0, y: 0, z: 0}
m_LocalScale: {x: 0, y: 0, z: 0}
m_Children: []
m_Father: {fileID: 0}
m_RootOrder: 0
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
m_AnchorMin: {x: 0, y: 0}
m_AnchorMax: {x: 0, y: 0}
m_AnchoredPosition: {x: 0, y: 0}
m_SizeDelta: {x: 0, y: 0}
m_Pivot: {x: 0, y: 0}
--- !u!223 &223912878945851142
Canvas:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 1153602445894428}
m_Enabled: 1
serializedVersion: 3
m_RenderMode: 0
m_Camera: {fileID: 0}
m_PlaneDistance: 100
m_PixelPerfect: 1
m_ReceivesEvents: 1
m_OverrideSorting: 0
m_OverridePixelPerfect: 0
m_SortingBucketNormalizedSize: 0
m_AdditionalShaderChannelsFlag: 0
m_SortingLayerID: 0
m_SortingOrder: 0
m_TargetDisplay: 0
--- !u!114 &114908889885781782
m_ObjectHideFlags: 1
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_Script: {fileID: 11500000, guid: 76db615e524a19c4990482d75a475543, type: 3}
m_Script: {fileID: 1980459831, guid: f70555f144d8491a825f0804e09c671c, type: 3}
panelPrefab: {fileID: 224481716535368988, guid: daa46a58178a6ad41ae1ddc2dc7f856d,
type: 2}
prefabs:
- type: UnityEngine.Experimental.Rendering.DebugUI+Value, com.unity.render-pipelines.core.Runtime,
Version=0.0.0.0, Culture=neutral, PublicKeyToken=null
prefab: {fileID: 224720214277421396, guid: dc0f88987826e6e48b1fe9c7c2b53a53, type: 2}
- type: UnityEngine.Experimental.Rendering.DebugUI+BoolField, com.unity.render-pipelines.core.Runtime,
Version=0.0.0.0, Culture=neutral, PublicKeyToken=null
prefab: {fileID: 224131888606727344, guid: ce347ad101f41ee4ab5c3fbc0ea447db, type: 2}
- type: UnityEngine.Experimental.Rendering.DebugUI+IntField, com.unity.render-pipelines.core.Runtime,
Version=0.0.0.0, Culture=neutral, PublicKeyToken=null
prefab: {fileID: 224720214277421396, guid: ae00bb75e0cd5b04b8fe7fb4ab662629, type: 2}
- type: UnityEngine.Experimental.Rendering.DebugUI+UIntField, com.unity.render-pipelines.core.Runtime,
Version=0.0.0.0, Culture=neutral, PublicKeyToken=null
prefab: {fileID: 224720214277421396, guid: f22bcc84a5f4a1944b075a2c4ac71493, type: 2}
- type: UnityEngine.Experimental.Rendering.DebugUI+FloatField, com.unity.render-pipelines.core.Runtime,
Version=0.0.0.0, Culture=neutral, PublicKeyToken=null
prefab: {fileID: 224720214277421396, guid: 7d4fd3415ea7dd64bbcfe13fb48a730b, type: 2}
- type: UnityEngine.Experimental.Rendering.DebugUI+EnumField, com.unity.render-pipelines.core.Runtime,
Version=0.0.0.0, Culture=neutral, PublicKeyToken=null
prefab: {fileID: 224224135738715566, guid: 988db55689193434fb0b3b89538f978f, type: 2}
- type: UnityEngine.Experimental.Rendering.DebugUI+Button, com.unity.render-pipelines.core.Runtime,
Version=0.0.0.0, Culture=neutral, PublicKeyToken=null
prefab: {fileID: 224438017010656346, guid: f6ce33b91f6ffe54cadacbf4bb112440, type: 2}
- type: UnityEngine.Experimental.Rendering.DebugUI+Foldout, com.unity.render-pipelines.core.Runtime,
Version=0.0.0.0, Culture=neutral, PublicKeyToken=null
prefab: {fileID: 224053494956566916, guid: 1c87ab2ce8b8b304d98fbe9a734b1f74, type: 2}
- type: UnityEngine.Experimental.Rendering.DebugUI+ColorField, com.unity.render-pipelines.core.Runtime,
Version=0.0.0.0, Culture=neutral, PublicKeyToken=null
prefab: {fileID: 224636372931965878, guid: 77c185820dd1a464eac89cae3abccddf, type: 2}
- type: UnityEngine.Experimental.Rendering.DebugUI+Vector2Field, com.unity.render-pipelines.core.Runtime,
Version=0.0.0.0, Culture=neutral, PublicKeyToken=null
prefab: {fileID: 224169904409585018, guid: 326f7c58aed965d41bf7805a782d1e44, type: 2}
- type: UnityEngine.Experimental.Rendering.DebugUI+Vector3Field, com.unity.render-pipelines.core.Runtime,
Version=0.0.0.0, Culture=neutral, PublicKeyToken=null
prefab: {fileID: 224119945032119512, guid: 94afea5f242d72547979595ba963f335, type: 2}
- type: UnityEngine.Experimental.Rendering.DebugUI+Vector4Field, com.unity.render-pipelines.core.Runtime,
Version=0.0.0.0, Culture=neutral, PublicKeyToken=null
prefab: {fileID: 224325631027038092, guid: d47f009476100f545971a81ede14c750, type: 2}
- type: UnityEngine.Experimental.Rendering.DebugUI+VBox, com.unity.render-pipelines.core.Runtime,
Version=0.0.0.0, Culture=neutral, PublicKeyToken=null
prefab: {fileID: 224489511352681190, guid: ca3e294656861a64b8aeeb9f916da0a9, type: 2}
- type: UnityEngine.Experimental.Rendering.DebugUI+HBox, com.unity.render-pipelines.core.Runtime,
Version=0.0.0.0, Culture=neutral, PublicKeyToken=null
prefab: {fileID: 224719784157228276, guid: f7f5e36797cf0c1408561665c67b179b, type: 2}
- type: UnityEngine.Experimental.Rendering.DebugUI+Container, com.unity.render-pipelines.core.Runtime,
Version=0.0.0.0, Culture=neutral, PublicKeyToken=null
prefab: {fileID: 224284813447651300, guid: 38a07789c9e87004dad98c2909f58369, type: 2}
m_UiScaleMode: 0
m_ReferencePixelsPerUnit: 100
m_ScaleFactor: 1
m_ReferenceResolution: {x: 800, y: 600}
m_ScreenMatchMode: 0
m_MatchWidthOrHeight: 0
m_PhysicalUnit: 3
m_FallbackScreenDPI: 96
m_DefaultSpriteDPI: 96
m_DynamicPixelsPerUnit: 1
m_ObjectHideFlags: 1
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 1153602445894428}
m_Enabled: 1
m_EditorHideFlags: 0

m_BlockingMask:
serializedVersion: 2
m_Bits: 4294967295
--- !u!114 &114908889885781782
--- !u!114 &114530362809716058
m_ObjectHideFlags: 1
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_Script: {fileID: 1980459831, guid: f70555f144d8491a825f0804e09c671c, type: 3}
m_Script: {fileID: 11500000, guid: 76db615e524a19c4990482d75a475543, type: 3}
m_UiScaleMode: 0
m_ReferencePixelsPerUnit: 100
m_ScaleFactor: 1
m_ReferenceResolution: {x: 800, y: 600}
m_ScreenMatchMode: 0
m_MatchWidthOrHeight: 0
m_PhysicalUnit: 3
m_FallbackScreenDPI: 96
m_DefaultSpriteDPI: 96
m_DynamicPixelsPerUnit: 1
--- !u!223 &223912878945851142
Canvas:
m_ObjectHideFlags: 1
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
m_GameObject: {fileID: 1153602445894428}
m_Enabled: 1
serializedVersion: 3
m_RenderMode: 0
m_Camera: {fileID: 0}
m_PlaneDistance: 100
m_PixelPerfect: 1
m_ReceivesEvents: 1
m_OverrideSorting: 0
m_OverridePixelPerfect: 0
m_SortingBucketNormalizedSize: 0
m_AdditionalShaderChannelsFlag: 0
m_SortingLayerID: 0
m_SortingOrder: 0
m_TargetDisplay: 0
--- !u!224 &224711363741255626
RectTransform:
m_ObjectHideFlags: 1
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 100100000}
m_GameObject: {fileID: 1153602445894428}
m_LocalRotation: {x: 0, y: 0, z: 0, w: 1}
m_LocalPosition: {x: 0, y: 0, z: 0}
m_LocalScale: {x: 0, y: 0, z: 0}
m_Children: []
m_Father: {fileID: 0}
m_RootOrder: 0
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
m_AnchorMin: {x: 0, y: 0}
m_AnchorMax: {x: 0, y: 0}
m_AnchoredPosition: {x: 0, y: 0}
m_SizeDelta: {x: 0, y: 0}
m_Pivot: {x: 0, y: 0}
panelPrefab: {fileID: 224481716535368988, guid: daa46a58178a6ad41ae1ddc2dc7f856d, type: 2}
prefabs:
- type: UnityEngine.Experimental.Rendering.DebugUI+Value, Unity.RenderPipelines.Core.Runtime, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null
prefab: {fileID: 224720214277421396, guid: dc0f88987826e6e48b1fe9c7c2b53a53, type: 2}
- type: UnityEngine.Experimental.Rendering.DebugUI+BoolField, Unity.RenderPipelines.Core.Runtime, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null
prefab: {fileID: 224131888606727344, guid: ce347ad101f41ee4ab5c3fbc0ea447db, type: 2}
- type: UnityEngine.Experimental.Rendering.DebugUI+IntField, Unity.RenderPipelines.Core.Runtime, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null
prefab: {fileID: 224720214277421396, guid: ae00bb75e0cd5b04b8fe7fb4ab662629, type: 2}
- type: UnityEngine.Experimental.Rendering.DebugUI+UIntField, Unity.RenderPipelines.Core.Runtime, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null
prefab: {fileID: 224720214277421396, guid: f22bcc84a5f4a1944b075a2c4ac71493, type: 2}
- type: UnityEngine.Experimental.Rendering.DebugUI+FloatField, Unity.RenderPipelines.Core.Runtime, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null
prefab: {fileID: 224720214277421396, guid: 7d4fd3415ea7dd64bbcfe13fb48a730b, type: 2}
- type: UnityEngine.Experimental.Rendering.DebugUI+EnumField, Unity.RenderPipelines.Core.Runtime, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null
prefab: {fileID: 224224135738715566, guid: 988db55689193434fb0b3b89538f978f, type: 2}
- type: UnityEngine.Experimental.Rendering.DebugUI+Button, Unity.RenderPipelines.Core.Runtime, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null
prefab: {fileID: 224438017010656346, guid: f6ce33b91f6ffe54cadacbf4bb112440, type: 2}
- type: UnityEngine.Experimental.Rendering.DebugUI+Foldout, Unity.RenderPipelines.Core.Runtime, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null
prefab: {fileID: 224053494956566916, guid: 1c87ab2ce8b8b304d98fbe9a734b1f74, type: 2}
- type: UnityEngine.Experimental.Rendering.DebugUI+ColorField, Unity.RenderPipelines.Core.Runtime, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null
prefab: {fileID: 224636372931965878, guid: 77c185820dd1a464eac89cae3abccddf, type: 2}
- type: UnityEngine.Experimental.Rendering.DebugUI+Vector2Field, Unity.RenderPipelines.Core.Runtime, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null
prefab: {fileID: 224169904409585018, guid: 326f7c58aed965d41bf7805a782d1e44, type: 2}
- type: UnityEngine.Experimental.Rendering.DebugUI+Vector3Field, Unity.RenderPipelines.Core.Runtime, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null
prefab: {fileID: 224119945032119512, guid: 94afea5f242d72547979595ba963f335, type: 2}
- type: UnityEngine.Experimental.Rendering.DebugUI+Vector4Field, Unity.RenderPipelines.Core.Runtime, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null
prefab: {fileID: 224325631027038092, guid: d47f009476100f545971a81ede14c750, type: 2}
- type: UnityEngine.Experimental.Rendering.DebugUI+VBox, Unity.RenderPipelines.Core.Runtime, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null
prefab: {fileID: 224489511352681190, guid: ca3e294656861a64b8aeeb9f916da0a9, type: 2}
- type: UnityEngine.Experimental.Rendering.DebugUI+HBox, Unity.RenderPipelines.Core.Runtime, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null
prefab: {fileID: 224719784157228276, guid: f7f5e36797cf0c1408561665c67b179b, type: 2}
- type: UnityEngine.Experimental.Rendering.DebugUI+Container, Unity.RenderPipelines.Core.Runtime, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null
prefab: {fileID: 224284813447651300, guid: 38a07789c9e87004dad98c2909f58369, type: 2}

2
com.unity.render-pipelines.core/CoreRP/Editor/CameraEditorUtils.cs


return;
}
GL.sRGBWrite = QualitySettings.activeColorSpace == ColorSpace.Linear;
GL.sRGBWrite = false;
Graphics.DrawTexture(cameraRect, previewCamera.targetTexture, new Rect(0, 0, 1, 1), 0, 0, 0, 0, GUI.color, GUITextureBlit2SRGBMaterial);
}
}

6
com.unity.render-pipelines.core/CoreRP/Editor/TextureCombiner/TextureCombiner.cs


return tex;
}
public static TextureFormat[] TextureFormatsWithouthAlpha = new TextureFormat[] {
private static TextureFormat[] TextureFormatsWithouthAlpha = {
TextureFormat.ASTC_RGB_10x10 ,
TextureFormat.ASTC_RGB_12x12 ,
TextureFormat.ASTC_RGB_4x4 ,

TextureFormat.EAC_RG_SIGNED ,
TextureFormat.ETC2_RGB ,
TextureFormat.ETC_RGB4 ,
#if !UNITY_2018_3_OR_NEWER
#endif
TextureFormat.ETC_RGB4Crunched ,
TextureFormat.PVRTC_RGB2 ,
TextureFormat.PVRTC_RGB4 ,

private int m_aChanel;
// Channels remapping
private Vector4[] m_remapings = new Vector4[] {
private Vector4[] m_remapings = {
new Vector4(0f, 1f, 0f, 0f),
new Vector4(0f, 1f, 0f, 0f),
new Vector4(0f, 1f, 0f, 0f),

34
com.unity.render-pipelines.core/CoreRP/ShaderLibrary/CommonLighting.hlsl


return area;
}
// ref: Practical Realtime Strategies for Accurate Indirect Occlusion
// http://blog.selfshadow.com/publications/s2016-shading-course/#course_content
// Original Cone-Cone method with cosine weighted assumption (p129 s2016_pbs_activision_occlusion)
real GetSpecularOcclusionFromBentAO(real3 V, real3 bentNormalWS, real3 normalWS, real ambientOcclusion, real roughness)
{
// Retrieve cone angle
// Ambient occlusion is cosine weighted, thus use following equation. See slide 129
real cosAv = sqrt(1.0 - ambientOcclusion);
roughness = max(roughness, 0.01); // Clamp to 0.01 to avoid edge cases
real cosAs = exp2((-log(10.0) / log(2.0)) * Sq(roughness));
real cosB = dot(bentNormalWS, reflect(-V, normalWS));
return SphericalCapIntersectionSolidArea(cosAv, cosAs, cosB) / (TWO_PI * (1.0 - cosAs));
}
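A quick sketch of where the cone aperture above comes from, assuming ambientOcclusion follows the usual convention of 1 = fully visible: for cosine-weighted visibility over a spherical cap of half-angle \(\theta_v\) around the bent normal,
\[
\text{ambientOcclusion} = \frac{1}{\pi}\int_0^{2\pi}\!\!\int_0^{\theta_v}\cos\theta\,\sin\theta\;d\theta\,d\phi = \sin^2\theta_v = 1-\cos^2\theta_v
\quad\Rightarrow\quad \cos\theta_v = \sqrt{1-\text{ambientOcclusion}},
\]
which is the cosAv line. cosAs appears to be the empirical roughness-to-cone-aperture fit from the referenced Activision talk, and the final division by \(2\pi(1-\cos\theta_s)\), the solid angle of the specular cone, turns the cap-intersection area into the visible fraction of that cone.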
// Ref: Steve McAuley - Energy-Conserving Wrapped Diffuse
real ComputeWrappedDiffuseLighting(real NdotL, real w)
{

real3 localY = cross(localZ, localX);
return real3x3(localX, localY, localZ);
}
// Construct a right-handed view-dependent orthogonal basis around the normal:
// b0-b2 is the view-normal aka reflection plane.
real3x3 GetOrthoBasisViewNormal(real3 V, real3 N, real unclampedNdotV, bool testSingularity = false)
{
real3x3 orthoBasisViewNormal;
if (testSingularity && (abs(1.0 - unclampedNdotV) <= FLT_EPS))
{
// In this case N == V, and azimuth orientation around N shouldn't matter for the caller,
// we can use any quaternion-based method, like Frisvad or Reynold's (Pixar):
orthoBasisViewNormal = GetLocalFrame(N);
}
else
{
orthoBasisViewNormal[0] = normalize(V - N * unclampedNdotV);
orthoBasisViewNormal[2] = N;
orthoBasisViewNormal[1] = cross(orthoBasisViewNormal[2], orthoBasisViewNormal[0]);
}
return orthoBasisViewNormal;
}
#endif // UNITY_COMMON_LIGHTING_INCLUDED
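For reference, the non-degenerate branch of GetOrthoBasisViewNormal above is a Gram-Schmidt step around N:
\[
b_0 = \frac{V - (N\cdot V)\,N}{\lVert V - (N\cdot V)\,N\rVert},\qquad b_2 = N,\qquad b_1 = b_2\times b_0,
\]
so \(b_0\) and \(b_2\) span the view/normal (reflection) plane. When \(|1 - N\cdot V|\le\epsilon\) the numerator of \(b_0\) vanishes (V is essentially N), which is why the testSingularity path falls back to GetLocalFrame(N), where any azimuth around N is acceptable.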

18
com.unity.render-pipelines.core/CoreRP/ShaderLibrary/CommonMaterial.hlsl


void ConvertRoughnessToAnisotropy(real roughnessT, real roughnessB, out real anisotropy)
{
anisotropy = ((roughnessT - roughnessB) / (roughnessT + roughnessB + 0.0001));
anisotropy = ((roughnessT - roughnessB) / max(roughnessT + roughnessB, 0.0001));
}
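The reason for the max() form can be seen with a small worked example (illustrative values only): with roughnessT = 0.01 and roughnessB = 0.005, the old expression gives \(0.005/0.0151 \approx 0.331\) while the new one gives \(0.005/\max(0.015,\,0.0001) \approx 0.333\). Both guard against a zero denominator, but the additive epsilon biases the ratio whenever the roughness values are small, whereas max() leaves the anisotropy
\[
A = \frac{\alpha_T - \alpha_B}{\alpha_T + \alpha_B}
\]
exact as long as \(\alpha_T + \alpha_B \ge 10^{-4}\).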
// Same as ConvertAnisotropyToRoughness but

return sqrt(2.0 / (variance + 2.0));
}
// Normal Map Filtering - This must match HDRP\Editor\AssetProcessors\NormalMapFilteringTexturePostprocessor.cs - highestVarianceAllowed (TODO: Move in core)
#define NORMALMAP_HIGHEST_VARIANCE 0.03125
float DecodeVariance(float gradientW)
{
return gradientW * NORMALMAP_HIGHEST_VARIANCE;
}
// Return modified perceptualSmoothness based on provided variance (get from GeometricNormalVariance + TextureNormalVariance)
float NormalFiltering(float perceptualSmoothness, float variance, float threshold)
{

return 1.0 - RoughnessToPerceptualRoughness(sqrt(squaredRoughness));
return RoughnessToPerceptualSmoothness(sqrt(squaredRoughness));
}
// Reference: Error Reduction and Simplification for Shading Anti-Aliasing

// like Toksvig.
float TextureNormalVariance(float avgNormalLength)
{
float variance = 0.0;
if (avgNormalLength < 1.0)
{
float avgNormLen2 = avgNormalLength * avgNormalLength;

// Relationship between gaussian lobe and vMF lobe is 2 * variance = 1 / (2 * kappa) = roughness^2
// (Equation 36 of Normal map filtering based on The Order : 1886 SIGGRAPH course notes implementation).
// So to get variance we must use variance = 1 / (4 * kappa)
return 0.25 * kappa;
variance = 0.25 / kappa;
return 0.0;
return variance;
}
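Restating the comments above as formulas: with the Gaussian/vMF relation \(2\sigma^2 = 1/(2\kappa) = \alpha^2\), the variance returned is
\[
\sigma^2 = \frac{1}{4\kappa}.
\]
The estimate of \(\kappa\) from the average normal length \(\bar r\) is outside this hunk; the usual approximation from the cited Order: 1886 notes is \(\kappa \approx \bar r(3-\bar r^2)/(1-\bar r^2)\) for \(\bar r < 1\), though whether that exact expression is used here is not visible in this diff.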
float TextureNormalFiltering(float perceptualSmoothness, float avgNormalLength, float threshold)

1
com.unity.render-pipelines.core/CoreRP/Shadow/ShadowBase.cs


public int shadowAtlasWidth = kDefaultShadowAtlasSize;
public int shadowAtlasHeight = kDefaultShadowAtlasSize;
public bool shadowMap16Bit;
public int maxPointLightShadows = kDefaultMaxPointLightShadows;
public int maxSpotLightShadows = kDefaultMaxSpotLightShadows;

6
com.unity.render-pipelines.core/CoreRP/Textures/TextureCache.cs


bool TextureHasMipmaps(Texture texture)
{
var crt = texture as CustomRenderTexture;
else if (crt is CustomRenderTexture)
return ((CustomRenderTexture)texture).useMipMap;
else if (texture is RenderTexture)
return ((RenderTexture)texture).useMipMap;
return false;
}

9
com.unity.render-pipelines.core/CoreRP/Utilities/CoreUtils.cs


public static void DisplayUnsupportedAPIMessage()
{
string msg = "Platform " + SystemInfo.operatingSystem + " with device " + SystemInfo.graphicsDeviceType.ToString() + " is not supported, no rendering will occur";
// If we are in the editor, there are many possible build targets that do not match the current OS, so we use the active build target instead
#if UNITY_EDITOR
string currentPlatform = UnityEditor.EditorUserBuildSettings.activeBuildTarget.ToString();
#else
string currentPlatform = SystemInfo.operatingSystem;
#endif
string msg = "Platform " + currentPlatform + " with device " + SystemInfo.graphicsDeviceType.ToString() + " is not supported, no rendering will occur";
DisplayUnsupportedMessage(msg);
}

17
com.unity.render-pipelines.core/CoreRP/Utilities/GeometryUtils.cs


1.0f,
1.0f);
var q = inversion * cps;
var c = clipPlane * (2.0f / Vector4.Dot(clipPlane, q));
Vector4 M4 = new Vector4(projection[3], projection[7], projection[11], projection[15]);
projection[2] = c.x - projection[3];
projection[6] = c.y - projection[7];
projection[10] = c.z - projection[11];
projection[14] = c.w - projection[15];
var c = clipPlane * ((2.0f*Vector4.Dot(M4, q)) / Vector4.Dot(clipPlane, q));
projection[2] = c.x - M4.x;
projection[6] = c.y - M4.y;
projection[10] = c.z - M4.z;
projection[14] = c.w - M4.w;
return projection;
}
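The new code is the general form of oblique near-plane clipping (Lengyel): writing C for the view-space clip plane, Q for the inverse-projected corner point q, and \(M_3\), \(M_4\) for the third and fourth rows of the projection, the third row is replaced by
\[
M_3' = \frac{2\,(M_4\cdot Q)}{C\cdot Q}\,C - M_4
\]
so that the near plane of the modified matrix coincides with C. The old code implicitly assumed \(M_4\cdot Q = 1\), which only holds when q really is the inverse projection of an NDC corner with \(w = 1\); scaling by \(M_4\cdot Q\) removes that assumption.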

public static Matrix4x4 GetProjectionMatrixLHS(this Camera camera)
{
return camera.projectionMatrix * FlipMatrixLHSRHS;
}
public static bool IsProjectionMatrixOblique(Matrix4x4 projectionMatrix)
{
return projectionMatrix[2] != 0 || projectionMatrix[6] != 0;
}
public static Matrix4x4 CalculateProjectionMatrix(Camera camera)

2
com.unity.render-pipelines.core/package.json


{
"name": "com.unity.render-pipelines.core",
"description": "Core library for Unity render pipelines.",
"version": "3.1.0-preview",
"version": "3.3.0-preview",
"unity": "2018.3",
"displayName": "Render Pipeline Core Library"
}

46
com.unity.render-pipelines.high-definition/CHANGELOG.md


The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/)
and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html).
## [3.3.0-preview]
### Added
- Added an error message telling the user to use Metal or Vulkan when trying to use the OpenGL API
### Fixed
- Fix an issue where the screen was darkened when rendering the camera preview
- Fix: display the correct target platform when showing the message informing the user that a platform is not supported
- Remove workaround for metal and vulkan in normal buffer encoding/decoding
- Fixed an issue with color picker not working in forward
### Changed
- Changed default reflection probe to be 256x256x6 and array size to be 64
- Removed dependence on the NdotL for thickness evaluation for translucency (based on artist's input)
### Added
- Added a luminance meter in the debug menu
- Added support for Light, reflection probe, emissive material, and lighting-related volume settings in the Lighting explorer
- Added support for 16bit shadows
- Fix issue with package upgrading (the HDRP resources asset is now versioned to work around a package manager limitation)
- Fix lux meter mode - The lux meter isn't affected by the sky anymore
- Fix area light size reset when multi-selected
- Fix filter pass number in HDUtils.BlitQuad
- Fix Lux meter mode that was applying SSS
- Fix planar reflections that were not working with tile/cluster (oblique matrix)
- Fix debug menu at runtime not working after the nested prefab PR landed in trunk
- Fix scrolling issue in density volume
### Changed
- Shader code refactor: Split MaterialUtilities file in two parts BuiltinUtilities (independent of FragInputs) and MaterialUtilities (Dependent of FragInputs)
- Change screen space shadow rendertarget format from ARGB32 to RG16
## [3.1.0-preview]

- Split EmissiveColor and bakeDiffuseLighting in forward, avoiding the emissiveColor being affected by SSAO
- Added a volume to control indirect light intensity
- Added EV 100 intensity unit for area lights
- Added support for RendererPriority on Renderer. This allows controlling the order of transparent rendering manually. HDRP now has two stages of sorting for transparents in addition to back-to-front: materials have a priority, then renderers have a priority.
- Add coupling of (HD)Camera and HDAdditionalCameraData for reset and remove in the inspector contextual menu of Camera
- Add coupling of (HD)ReflectionProbe and HDAdditionalReflectionData for reset and remove in the inspector contextual menu of ReflectionProbe
- Add a macro to forbid use of unity_ObjectToWorld/unity_WorldToObject, as they don't handle camera-relative rendering
- Add opacity control on contact shadow
### Fixed
- Fixed an issue with PreIntegratedFGD texture being sometimes destroyed and not regenerated causing rendering to break

- Replace the sampler used for density volumes for correct wrap mode handling
### Changed
- Movde Render Pipeline Debug "Windows from Windows->General-> Render Pipeline debug windows" to "Windows from Windows->Analysis-> Render Pipeline debug windows"
- Move Render Pipeline Debug "Windows from Windows->General-> Render Pipeline debug windows" to "Windows from Windows->Analysis-> Render Pipeline debug windows"
- Update detail map formula for smoothness and albedo; the goal is to brighten and darken perceptually, and the scale factor is used to control gradient speed
- Refactor the material upgrade system. Now a material can be updated from an older version at any time. Call Edit/Render Pipeline/Upgrade all Materials to newer version
- Change name EnableDBuffer to EnableDecals in several places (shader, hdrp asset...); this requires a call to Edit/Render Pipeline/Upgrade all Materials to newer version to have up-to-date materials.

- Refactor shader code: GetBakedDiffuseLighting is not called anymore in the GBuffer or forward pass, including the ConvertSurfaceDataToBSDFData and GetPreLightData; this is done in ModifyBakedDiffuseLighting now
- Refactor shader code: Added a backBakeDiffuseLighting to BuiltinData to handle lighting for transmission
- Refactor shader code: Material must now call InitBuiltinData (Init all to zero + init bakeDiffuseLighting and backBakeDiffuseLighting ) and PostInitBuiltinData
### Added
- Added support for RendererPriority on Renderer. This allows controlling the order of transparent rendering manually. HDRP now has two stages of sorting for transparents in addition to back-to-front: materials have a priority, then renderers have a priority.
- Add coupling of (HD)Camera and HDAdditionalCameraData for reset and remove in the inspector contextual menu of Camera
- Add coupling of (HD)ReflectionProbe and HDAdditionalReflectionData for reset and remove in the inspector contextual menu of ReflectionProbe
- Add a macro to forbid use of unity_ObjectToWorld/unity_WorldToObject, as they don't handle camera-relative rendering
- Add opacity control on contact shadow
## [3.0.0-preview]

9
com.unity.render-pipelines.high-definition/HDRP/Camera/HDCamera.cs


using System.Collections.Generic;
using UnityEngine.Rendering;
using UnityEngine.Rendering.PostProcessing;
using UnityEngine.XR;
namespace UnityEngine.Experimental.Rendering.HDPipeline
{

m_ActualHeight = camera.pixelHeight;
var screenWidth = m_ActualWidth;
var screenHeight = m_ActualHeight;
#if !UNITY_SWITCH
screenWidth = XRSettings.eyeTextureWidth;
screenHeight = XRSettings.eyeTextureHeight;
screenWidth = XRGraphicsConfig.eyeTextureWidth;
screenHeight = XRGraphicsConfig.eyeTextureHeight;
var xrDesc = XRSettings.eyeTextureDesc;
var xrDesc = XRGraphicsConfig.eyeTextureDesc;
#endif
// Unfortunately sometime (like in the HDCameraEditor) HDUtils.hdrpSettings can be null because of scripts that change the current pipeline...
m_msaaSamples = HDUtils.hdrpSettings != null ? HDUtils.hdrpSettings.msaaSampleCount : MSAASamples.None;

5
com.unity.render-pipelines.high-definition/HDRP/Debug/DebugColorPicker.shader


//Decompress value if luxMeter is active
if (_DebugLightingMode == DEBUGLIGHTINGMODE_LUX_METER && _ColorPickerMode != COLORPICKERDEBUGMODE_NONE)
result.rgb = result.rgb * LUXMETER_COMPRESSION_RATIO;
if (_DebugLightingMode == DEBUGLIGHTINGMODE_LUMINANCE_METER)
{
result = Luminance(result.rgb);
}
if (_FalseColor)
result.rgb = FasleColorRemap(Luminance(result.rgb), _FalseColorThresholds);

2
com.unity.render-pipelines.high-definition/HDRP/Debug/DebugDisplay.cs


// Lighting
MinLightingFullScreenDebug,
SSAO,
DeferredShadows,
ScreenSpaceShadows,
PreRefractionColorPyramid,
DepthPyramid,
FinalColorPyramid,

1
com.unity.render-pipelines.high-definition/HDRP/Debug/LightingDebug.cs


DiffuseLighting,
SpecularLighting,
LuxMeter,
LuminanceMeter,
VisualizeCascade,
VisualizeShadowMasks,
IndirectDiffuseOcclusion,

13
com.unity.render-pipelines.high-definition/HDRP/Debug/LightingDebug.cs.hlsl


#define DEBUGLIGHTINGMODE_DIFFUSE_LIGHTING (1)
#define DEBUGLIGHTINGMODE_SPECULAR_LIGHTING (2)
#define DEBUGLIGHTINGMODE_LUX_METER (3)
#define DEBUGLIGHTINGMODE_VISUALIZE_CASCADE (4)
#define DEBUGLIGHTINGMODE_VISUALIZE_SHADOW_MASKS (5)
#define DEBUGLIGHTINGMODE_INDIRECT_DIFFUSE_OCCLUSION (6)
#define DEBUGLIGHTINGMODE_INDIRECT_SPECULAR_OCCLUSION (7)
#define DEBUGLIGHTINGMODE_SCREEN_SPACE_TRACING_REFRACTION (8)
#define DEBUGLIGHTINGMODE_SCREEN_SPACE_TRACING_REFLECTION (9)
#define DEBUGLIGHTINGMODE_LUMINANCE_METER (4)
#define DEBUGLIGHTINGMODE_VISUALIZE_CASCADE (5)
#define DEBUGLIGHTINGMODE_VISUALIZE_SHADOW_MASKS (6)
#define DEBUGLIGHTINGMODE_INDIRECT_DIFFUSE_OCCLUSION (7)
#define DEBUGLIGHTINGMODE_INDIRECT_SPECULAR_OCCLUSION (8)
#define DEBUGLIGHTINGMODE_SCREEN_SPACE_TRACING_REFRACTION (9)
#define DEBUGLIGHTINGMODE_SCREEN_SPACE_TRACING_REFLECTION (10)
//
// UnityEngine.Experimental.Rendering.HDPipeline.DebugScreenSpaceTracing: static fields

2
com.unity.render-pipelines.high-definition/HDRP/Editor/AssetProcessors/NormalMapFilteringTexturePostprocessor.cs.meta


fileFormatVersion: 2
guid: 3ceae5765b6bbee4fb4f76acdaae9130
guid: db58d0243609fae48a1a8f3460bdbd4c
MonoImporter:
externalObjects: {}
serializedVersion: 2

30
com.unity.render-pipelines.high-definition/HDRP/Editor/Lighting/HDLightEditor.cs


case LightShape.Rectangle:
// TODO: Currently if we use Area type as it is offline light in legacy, the light will not exist at runtime
//m_BaseData.type.enumValueIndex = (int)LightType.Area;
//m_BaseData.type.enumValueIndex = (int)LightType.Rectangle;
EditorGUI.BeginChangeCheck();
m_AdditionalLightData.shapeWidth.floatValue = Mathf.Max(m_AdditionalLightData.shapeWidth.floatValue, k_MinAreaWidth);
m_AdditionalLightData.shapeHeight.floatValue = Mathf.Max(m_AdditionalLightData.shapeHeight.floatValue, k_MinAreaWidth);
settings.areaSizeX.floatValue = m_AdditionalLightData.shapeWidth.floatValue;
settings.areaSizeY.floatValue = m_AdditionalLightData.shapeHeight.floatValue;
if (EditorGUI.EndChangeCheck())
{
m_AdditionalLightData.shapeWidth.floatValue = Mathf.Max(m_AdditionalLightData.shapeWidth.floatValue, k_MinAreaWidth);
m_AdditionalLightData.shapeHeight.floatValue = Mathf.Max(m_AdditionalLightData.shapeHeight.floatValue, k_MinAreaWidth);
settings.areaSizeX.floatValue = m_AdditionalLightData.shapeWidth.floatValue;
settings.areaSizeY.floatValue = m_AdditionalLightData.shapeHeight.floatValue;
}
if (settings.isRealtime)
settings.shadowsType.enumValueIndex = (int)LightShadows.None;
break;

//m_BaseData.type.enumValueIndex = (int)LightType.Area;
//m_BaseData.type.enumValueIndex = (int)LightType.Rectangle;
EditorGUI.BeginChangeCheck();
m_AdditionalLightData.shapeWidth.floatValue = Mathf.Max(m_AdditionalLightData.shapeWidth.floatValue, k_MinAreaWidth);
m_AdditionalLightData.shapeHeight.floatValue = Mathf.Max(m_AdditionalLightData.shapeHeight.floatValue, k_MinAreaWidth);
// Fake a line light with a small rectangle in vanilla Unity for GI
settings.areaSizeX.floatValue = m_AdditionalLightData.shapeWidth.floatValue;
settings.areaSizeY.floatValue = k_MinAreaWidth;
if (EditorGUI.EndChangeCheck())
{
m_AdditionalLightData.shapeWidth.floatValue = Mathf.Max(m_AdditionalLightData.shapeWidth.floatValue, k_MinAreaWidth);
m_AdditionalLightData.shapeHeight.floatValue = Mathf.Max(m_AdditionalLightData.shapeHeight.floatValue, k_MinAreaWidth);
// Fake a line light with a small rectangle in vanilla Unity for GI
settings.areaSizeX.floatValue = m_AdditionalLightData.shapeWidth.floatValue;
settings.areaSizeY.floatValue = k_MinAreaWidth;
}
settings.shadowsType.enumValueIndex = (int)LightShadows.None;
break;
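The two rectangle/line blocks above move the width/height clamping and the built-in areaSize sync inside the EditorGUI change check, so the serialized light is only rewritten when the user actually edits a field. A generic sketch of that pattern, with placeholder property names standing in for the editor's own serialized data:

// Sketch only: clamp serialized area-light dimensions inside the change check.
using UnityEditor;
using UnityEngine;

static class AreaSizeDrawer
{
    static void DrawAreaSize(SerializedProperty shapeWidth, SerializedProperty shapeHeight,
                             SerializedProperty areaSizeX, SerializedProperty areaSizeY,
                             float minAreaWidth)
    {
        EditorGUI.BeginChangeCheck();
        EditorGUILayout.PropertyField(shapeWidth);
        EditorGUILayout.PropertyField(shapeHeight);
        if (EditorGUI.EndChangeCheck())
        {
            shapeWidth.floatValue  = Mathf.Max(shapeWidth.floatValue,  minAreaWidth);
            shapeHeight.floatValue = Mathf.Max(shapeHeight.floatValue, minAreaWidth);
            areaSizeX.floatValue = shapeWidth.floatValue;   // keep the built-in GI size in sync
            areaSizeY.floatValue = shapeHeight.floatValue;
        }
    }
}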

4
com.unity.render-pipelines.high-definition/HDRP/Editor/Lighting/Reflection/HDReflectionProbeEditor.Gizmos.cs


return;
var reflectionData = reflectionProbe.GetComponent<HDAdditionalReflectionData>();
var mat = reflectionProbe.transform.localToWorldMatrix;
var mat = Matrix4x4.TRS(reflectionProbe.transform.position, reflectionProbe.transform.rotation, Vector3.one);
switch (EditMode.editMode)
{

var reflectionData = reflectionProbe.GetComponent<HDAdditionalReflectionData>();
Gizmos_CapturePoint(reflectionProbe, reflectionData, e);
var mat = Matrix4x4.TRS(reflectionProbe.transform.position, reflectionProbe.transform.rotation, Vector3.one);
InfluenceVolumeUI.DrawGizmos(e.m_UIState.influenceVolume, reflectionData.influenceVolume, mat, InfluenceVolumeUI.HandleType.None, InfluenceVolumeUI.HandleType.Base);
if (!e.sceneViewEditing)
return;

2
com.unity.render-pipelines.high-definition/HDRP/Editor/Lighting/Reflection/HDReflectionProbeEditor.Handles.cs


if (!s.sceneViewEditing)
return;
var mat = p.target.transform.localToWorldMatrix;
var mat = Matrix4x4.TRS(p.target.transform.position, p.target.transform.rotation, Vector3.one);
EditorGUI.BeginChangeCheck();

3
com.unity.render-pipelines.high-definition/HDRP/Editor/Lighting/Reflection/HDReflectionProbeEditor.cs


}
InitializeAllTargetProbes();
HDAdditionalReflectionData probe = (HDAdditionalReflectionData)m_AdditionalDataSerializedObject.targetObject;
probe.influenceVolume.Init(probe);
}

3
com.unity.render-pipelines.high-definition/HDRP/Editor/Lighting/Reflection/PlanarReflectionProbeEditor.cs


s_StateMap[m_TypedTargets[i]] = m_UIHandleState[i];
}
PlanarReflectionProbe probe = (PlanarReflectionProbe)target;
probe.influenceVolume.Init(probe);
}
void OnDisable()

4
com.unity.render-pipelines.high-definition/HDRP/Editor/Lighting/Reflection/PlanarReflectionProbeUI.Handles.cs


public static void DrawHandles(PlanarReflectionProbeUI s, PlanarReflectionProbe d, Editor o)
{
var mat = d.transform.localToWorldMatrix;
var mat = Matrix4x4.TRS(d.transform.position, d.transform.rotation, Vector3.one);
switch (EditMode.editMode)
{

if (!PlanarReflectionProbeEditor.TryGetUIStateFor(d, out s))
return;
var mat = d.transform.localToWorldMatrix;
var mat = Matrix4x4.TRS(d.transform.position, d.transform.rotation, Vector3.one);
switch (EditMode.editMode)
{

8
com.unity.render-pipelines.high-definition/HDRP/Editor/Lighting/Reflection/Volume/ProxyVolumeUI.cs


s.sphereProjectionHandle.radius = proxyVolume.sphereRadius;
var mat = Handles.matrix;
Handles.matrix = transform.localToWorldMatrix;
Handles.matrix = Matrix4x4.TRS(transform.position, transform.rotation, Vector3.one);
Handles.color = k_GizmoThemeColorProjection;
EditorGUI.BeginChangeCheck();
s.sphereProjectionHandle.DrawHandle();

s.boxProjectionHandle.size = proxyVolume.boxSize;
var mat = Handles.matrix;
Handles.matrix = transform.localToWorldMatrix;
Handles.matrix = Matrix4x4.TRS(transform.position, transform.rotation, Vector3.one);
Handles.color = k_GizmoThemeColorProjection;
EditorGUI.BeginChangeCheck();

static void Gizmos_EditNone_Sphere(Transform t, ProxyVolume d, ProxyVolumeUI s, Object o)
{
var mat = Gizmos.matrix;
Gizmos.matrix = t.localToWorldMatrix;
Gizmos.matrix = Matrix4x4.TRS(t.position, t.rotation, Vector3.one);
Gizmos.color = k_GizmoThemeColorProjection;
Gizmos.DrawWireSphere(Vector3.zero, d.sphereRadius);

static void Gizmos_EditNone_Box(Transform t, ProxyVolume d, ProxyVolumeUI s, Object o)
{
var mat = Gizmos.matrix;
Gizmos.matrix = t.localToWorldMatrix;
Gizmos.matrix = Matrix4x4.TRS(t.position, t.rotation, Vector3.one);
Gizmos.color = k_GizmoThemeColorProjection;
Gizmos.DrawWireCube(Vector3.zero, d.boxSize);
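Several of the editor hunks above (HDReflectionProbeEditor, PlanarReflectionProbeUI, ProxyVolumeUI) replace transform.localToWorldMatrix with Matrix4x4.TRS(position, rotation, Vector3.one), so gizmos and handles follow the object's position and rotation but ignore its scale. A small self-contained sketch of the same pattern, with a hypothetical radius parameter:

// Sketch: draw a wire sphere gizmo that ignores the Transform's scale.
using UnityEngine;

static class ScaleFreeGizmo
{
    public static void DrawWireSphere(Transform t, float radius)
    {
        var previous = Gizmos.matrix;
        // Position + rotation only; Vector3.one discards any (possibly non-uniform) scale.
        Gizmos.matrix = Matrix4x4.TRS(t.position, t.rotation, Vector3.one);
        Gizmos.DrawWireSphere(Vector3.zero, radius);
        Gizmos.matrix = previous;
    }
}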

2
com.unity.render-pipelines.high-definition/HDRP/Editor/Lighting/Volumetric/DensityVolumeTextureTool.cs.meta


fileFormatVersion: 2
guid: 9723a65f374db464a8a4f39c94ca7f54
guid: 6fac8c32212b2374ba031d6c1cc3d128
MonoImporter:
externalObjects: {}
serializedVersion: 2

76
com.unity.render-pipelines.high-definition/HDRP/Editor/RenderPipeline/HDAssetFactory.cs


{
static string s_RenderPipelineResourcesPath
{
get { return HDEditorUtils.GetHDRenderPipelinePath() + "RenderPipelineResources/HDRenderPipelineResources.asset"; }
get { return HDUtils.GetHDRenderPipelinePath() + "RenderPipelineResources/HDRenderPipelineResources.asset"; }
}
class DoCreateNewAssetHDRenderPipeline : ProjectWindowCallback.EndNameEditAction

var newAsset = CreateInstance<RenderPipelineResources>();
newAsset.name = Path.GetFileName(pathName);
// Load default renderPipelineResources / Material / Shader
string HDRenderPipelinePath = HDEditorUtils.GetHDRenderPipelinePath();
string CorePath = HDEditorUtils.GetCorePath();
newAsset.defaultDiffuseMaterial = Load<Material>(HDRenderPipelinePath + "RenderPipelineResources/DefaultHDMaterial.mat");
newAsset.defaultMirrorMaterial = Load<Material>(HDRenderPipelinePath + "RenderPipelineResources/DefaultHDMirrorMaterial.mat");
newAsset.defaultDecalMaterial = Load<Material>(HDRenderPipelinePath + "RenderPipelineResources/DefaultHDDecalMaterial.mat");
newAsset.defaultShader = Load<Shader>(HDRenderPipelinePath + "Material/Lit/Lit.shader");
newAsset.debugFontTexture = Load<Texture2D>(HDRenderPipelinePath + "RenderPipelineResources/DebugFont.tga");
newAsset.debugDisplayLatlongShader = Load<Shader>(HDRenderPipelinePath + "Debug/DebugDisplayLatlong.Shader");
newAsset.debugViewMaterialGBufferShader = Load<Shader>(HDRenderPipelinePath + "Debug/DebugViewMaterialGBuffer.Shader");
newAsset.debugViewTilesShader = Load<Shader>(HDRenderPipelinePath + "Debug/DebugViewTiles.Shader");
newAsset.debugFullScreenShader = Load<Shader>(HDRenderPipelinePath + "Debug/DebugFullScreen.Shader");
newAsset.debugColorPickerShader = Load<Shader>(HDRenderPipelinePath + "Debug/DebugColorPicker.Shader");
newAsset.deferredShader = Load<Shader>(HDRenderPipelinePath + "Lighting/Deferred.Shader");
newAsset.colorPyramidCS = Load<ComputeShader>(HDRenderPipelinePath + "RenderPipelineResources/ColorPyramid.compute");
newAsset.depthPyramidCS = Load<ComputeShader>(HDRenderPipelinePath + "RenderPipelineResources/DepthPyramid.compute");
newAsset.copyChannelCS = Load<ComputeShader>(CorePath + "CoreResources/GPUCopy.compute");
newAsset.texturePaddingCS = Load<ComputeShader>(CorePath + "CoreResources/TexturePadding.compute");
newAsset.applyDistortionCS = Load<ComputeShader>(HDRenderPipelinePath + "RenderPipelineResources/ApplyDistorsion.compute");
newAsset.clearDispatchIndirectShader = Load<ComputeShader>(HDRenderPipelinePath + "Lighting/LightLoop/cleardispatchindirect.compute");
newAsset.buildDispatchIndirectShader = Load<ComputeShader>(HDRenderPipelinePath + "Lighting/LightLoop/builddispatchindirect.compute");
newAsset.buildScreenAABBShader = Load<ComputeShader>(HDRenderPipelinePath + "Lighting/LightLoop/scrbound.compute");
newAsset.buildPerTileLightListShader = Load<ComputeShader>(HDRenderPipelinePath + "Lighting/LightLoop/lightlistbuild.compute");
newAsset.buildPerBigTileLightListShader = Load<ComputeShader>(HDRenderPipelinePath + "Lighting/LightLoop/lightlistbuild-bigtile.compute");
newAsset.buildPerVoxelLightListShader = Load<ComputeShader>(HDRenderPipelinePath + "Lighting/LightLoop/lightlistbuild-clustered.compute");
newAsset.buildMaterialFlagsShader = Load<ComputeShader>(HDRenderPipelinePath + "Lighting/LightLoop/materialflags.compute");
newAsset.deferredComputeShader = Load<ComputeShader>(HDRenderPipelinePath + "Lighting/LightLoop/Deferred.compute");
newAsset.screenSpaceShadowComputeShader = Load<ComputeShader>(HDRenderPipelinePath + "Lighting/ScreenSpaceShadow.compute");
newAsset.volumeVoxelizationCS = Load<ComputeShader>(HDRenderPipelinePath + "Lighting/Volumetrics/VolumeVoxelization.compute");
newAsset.volumetricLightingCS = Load<ComputeShader>(HDRenderPipelinePath + "Lighting/Volumetrics/VolumetricLighting.compute");
newAsset.subsurfaceScatteringCS = Load<ComputeShader>(HDRenderPipelinePath + "Material/SubsurfaceScattering/SubsurfaceScattering.compute");
newAsset.subsurfaceScattering = Load<Shader>(HDRenderPipelinePath + "Material/SubsurfaceScattering/SubsurfaceScattering.shader");
newAsset.combineLighting = Load<Shader>(HDRenderPipelinePath + "Material/SubsurfaceScattering/CombineLighting.shader");
// General
newAsset.cameraMotionVectors = Load<Shader>(HDRenderPipelinePath + "RenderPipelineResources/CameraMotionVectors.shader");
newAsset.copyStencilBuffer = Load<Shader>(HDRenderPipelinePath + "RenderPipelineResources/CopyStencilBuffer.shader");
newAsset.copyDepthBuffer = Load<Shader>(HDRenderPipelinePath + "RenderPipelineResources/CopyDepthBuffer.shader");
newAsset.blit = Load<Shader>(HDRenderPipelinePath + "RenderPipelineResources/Blit.shader");
// Sky
newAsset.blitCubemap = Load<Shader>(HDRenderPipelinePath + "Sky/BlitCubemap.shader");
newAsset.buildProbabilityTables = Load<ComputeShader>(HDRenderPipelinePath + "Material/GGXConvolution/BuildProbabilityTables.compute");
newAsset.computeGgxIblSampleData = Load<ComputeShader>(HDRenderPipelinePath + "Material/GGXConvolution/ComputeGgxIblSampleData.compute");
newAsset.GGXConvolve = Load<Shader>(HDRenderPipelinePath + "Material/GGXConvolution/GGXConvolve.shader");
newAsset.opaqueAtmosphericScattering = Load<Shader>(HDRenderPipelinePath + "Lighting/AtmosphericScattering/OpaqueAtmosphericScattering.shader");
newAsset.hdriSky = Load<Shader>(HDRenderPipelinePath + "Sky/HDRISky/HDRISky.shader");
newAsset.integrateHdriSky = Load<Shader>(HDRenderPipelinePath + "Sky/HDRISky/IntegrateHDRISky.shader");
newAsset.proceduralSky = Load<Shader>(HDRenderPipelinePath + "Sky/ProceduralSky/ProceduralSky.shader");
newAsset.gradientSky = Load<Shader>(HDRenderPipelinePath + "Sky/GradientSky/GradientSky.shader");
// Skybox/Cubemap is a builtin shader, must use Shader.Find to access it. It is fine because we are in the editor
newAsset.skyboxCubemap = Shader.Find("Skybox/Cubemap");
// Material
newAsset.preIntegratedFGD_GGXDisneyDiffuse = Load<Shader>(HDRenderPipelinePath + "Material/PreIntegratedFGD/PreIntegratedFGD_GGXDisneyDiffuse.shader");
newAsset.preIntegratedFGD_CharlieClothLambert = Load<Shader>(HDRenderPipelinePath + "Material/PreIntegratedFGD/PreIntegratedFGD_CharlieClothLambert.shader");
// Utilities / Core
newAsset.encodeBC6HCS = Load<ComputeShader>(CorePath + "CoreResources/EncodeBC6H.compute");
newAsset.cubeToPanoShader = Load<Shader>(CorePath + "CoreResources/CubeToPano.shader");
newAsset.blitCubeTextureFace = Load<Shader>(CorePath + "CoreResources/BlitCubeTextureFace.shader");
// Shadow
newAsset.shadowClearShader = Load<Shader>(CorePath + "Shadow/ShadowClear.shader");
newAsset.shadowBlurMoments = Load<ComputeShader>(CorePath + "Shadow/ShadowBlurMoments.compute");
newAsset.debugShadowMapShader = Load<Shader>(CorePath + "Shadow/DebugDisplayShadowMap.shader");
newAsset.Init();
// decal
newAsset.decalNormalBuffer = Load<Shader>(HDRenderPipelinePath + "Material/Decal/DecalNormalBuffer.shader");
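The factory above fills every RenderPipelineResources slot by loading assets from package-relative paths ("Packages/com.unity.render-pipelines.high-definition/HDRP/", see the HDEditorUtils hunk below) through a Load<T> helper that this hunk does not show. A plausible sketch of that helper, assuming it simply forwards to AssetDatabase:

// Assumed shape of the Load<T> helper used above; the real one lives elsewhere
// in the editor assembly.
using UnityEditor;
using UnityEngine;

static class AssetLoading
{
    public static T Load<T>(string path) where T : Object
    {
        return AssetDatabase.LoadAssetAtPath<T>(path);
    }
}

// Usage, mirroring the hunk (illustrative only):
// newAsset.defaultShader = AssetLoading.Load<Shader>(hdrpPath + "Material/Lit/Lit.shader");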

18
com.unity.render-pipelines.high-definition/HDRP/Editor/RenderPipeline/HDEditorUtils.cs


using System.IO;
using System.Linq;
using UnityEngine;
using UnityEngine.Experimental.Rendering.HDPipeline;
namespace UnityEditor.Experimental.Rendering.HDPipeline
{

{ "HDRenderPipeline/Decal", DecalUI.SetupMaterialKeywordsAndPass }
};
public static string GetHDRenderPipelinePath()
{
return "Packages/com.unity.render-pipelines.high-definition/HDRP/";
}
public static string GetPostProcessingPath()
{
return "Packages/com.unity.postprocessing/";
}
public static string GetCorePath()
{
return "Packages/com.unity.render-pipelines.core/CoreRP/";
}
return AssetDatabase.LoadAssetAtPath<T>(GetHDRenderPipelinePath() + relativePath);
return AssetDatabase.LoadAssetAtPath<T>(HDUtils.GetHDRenderPipelinePath() + relativePath);
}
public static bool ResetMaterialKeywords(Material material)

3
com.unity.render-pipelines.high-definition/HDRP/Editor/RenderPipeline/Settings/FrameSettingsUI.cs


EditorGUILayout.PropertyField(p.enableOpaqueObjects, _.GetContent("Enable Opaque Objects"));
EditorGUILayout.PropertyField(p.enableTransparentObjects, _.GetContent("Enable Transparent Objects"));
EditorGUILayout.PropertyField(p.enableMSAA, _.GetContent("Enable MSAA"));
// Hide for now as not supported
//EditorGUILayout.PropertyField(p.enableMSAA, _.GetContent("Enable MSAA"));
}
static void Drawer_FieldStereoEnabled(FrameSettingsUI s, SerializedFrameSettings p, Editor owner)

2
com.unity.render-pipelines.high-definition/HDRP/Editor/RenderPipeline/Settings/SerializedShadowInitParameters.cs


public SerializedProperty shadowAtlasWidth;
public SerializedProperty shadowAtlasHeight;
public SerializedProperty shadowMap16Bit;
public SerializedProperty maxPointLightShadows;
public SerializedProperty maxSpotLightShadows;

shadowAtlasWidth = root.Find((ShadowInitParameters s) => s.shadowAtlasWidth);
shadowAtlasHeight = root.Find((ShadowInitParameters s) => s.shadowAtlasHeight);
shadowMap16Bit = root.Find((ShadowInitParameters s) => s.shadowMap16Bit);
maxPointLightShadows = root.Find((ShadowInitParameters s) => s.maxPointLightShadows);
maxSpotLightShadows = root.Find((ShadowInitParameters s) => s.maxSpotLightShadows);
maxDirectionalLightShadows = root.Find((ShadowInitParameters s) => s.maxDirectionalLightShadows);

1
com.unity.render-pipelines.high-definition/HDRP/Editor/RenderPipeline/Settings/ShadowInitParametersUI.cs


++EditorGUI.indentLevel;
EditorGUILayout.PropertyField(d.shadowAtlasWidth, _.GetContent("Atlas Width"));
EditorGUILayout.PropertyField(d.shadowAtlasHeight, _.GetContent("Atlas Height"));
EditorGUILayout.PropertyField(d.shadowMap16Bit, _.GetContent("16-bit Shadow Maps"));
--EditorGUI.indentLevel;
EditorGUILayout.Space();

161
com.unity.render-pipelines.high-definition/HDRP/Editor/ShaderGraph/HDPBRPass.template


Pass
{
// based on HDPBRPass.template
Name "${PassName}"
Tags { "LightMode" = "${LightMode}" }
Name "$splice(PassName)"
Tags { "LightMode" = "$splice(LightMode)" }
${Blending}
${Culling}
${ZTest}
${ZWrite}
${Stencil}
${ColorMask}
$splice(Blending)
$splice(Culling)
$splice(ZTest)
$splice(ZWrite)
$splice(Stencil)
$splice(ColorMask)
//-------------------------------------------------------------------------------------
// End Render Modes
//-------------------------------------------------------------------------------------

#pragma target 4.5
#pragma only_renderers d3d11 ps4 xboxone vulkan metal switch
//#pragma enable_d3d11_debug_symbols
#pragma target 4.5
#pragma only_renderers d3d11 ps4 xboxone vulkan metal switch
//#pragma enable_d3d11_debug_symbols
//-------------------------------------------------------------------------------------
// Variant Definitions (active field translations to HDRP defines)

//-------------------------------------------------------------------------------------
// Defines
//-------------------------------------------------------------------------------------
${Defines}
$splice(Defines)
// this translates the new dependency tracker into the old preprocessor definitions for the existing HDRP shader code
$AttributesMesh.normalOS: #define ATTRIBUTES_NEED_NORMAL

// used for shaders that don't need lighting
#include "HDRP/Material/Material.hlsl"
#endif
#include "HDRP/Material/BuiltinUtilities.hlsl"
#include "HDRP/Material/MaterialUtilities.hlsl"
// this function assumes the bitangent flip is encoded in tangentWS.w

//-------------------------------------------------------------------------------------
// Interpolator Packing And Struct Declarations
//-------------------------------------------------------------------------------------
${InterpolatorPacking}
$buildType(AttributesMesh)
$buildType(VaryingsMeshToPS)
$buildType(VaryingsMeshToDS)
//-------------------------------------------------------------------------------------
// End Interpolator Packing And Struct Declarations
//-------------------------------------------------------------------------------------

//-------------------------------------------------------------------------------------
${Graph}
$splice(Graph)
#ifdef HAVE_MESH_MODIFICATION
// TODO: we should share this between template files somehow
VertexDescriptionInputs AttributesMeshToVertexDescriptionInputs(AttributesMesh input)
{
VertexDescriptionInputs output;
ZERO_INITIALIZE(VertexDescriptionInputs, output);
$features.modifyMesh: $include("VertexAnimation.template.hlsl")
$VertexDescriptionInputs.ObjectSpaceNormal: output.ObjectSpaceNormal = input.normalOS;
$VertexDescriptionInputs.WorldSpaceNormal: output.WorldSpaceNormal = TransformObjectToWorldNormal(input.normalOS);
$VertexDescriptionInputs.ViewSpaceNormal: output.ViewSpaceNormal = TransformWorldToViewDir(output.WorldSpaceNormal);
$VertexDescriptionInputs.TangentSpaceNormal: output.TangentSpaceNormal = float3(0.0f, 0.0f, 1.0f);
$VertexDescriptionInputs.ObjectSpaceTangent: output.ObjectSpaceTangent = input.tangentOS;
$VertexDescriptionInputs.WorldSpaceTangent: output.WorldSpaceTangent = TransformObjectToWorldDir(input.tangentOS.xyz);
$VertexDescriptionInputs.ViewSpaceTangent: output.ViewSpaceTangent = TransformWorldToViewDir(output.WorldSpaceTangent);
$VertexDescriptionInputs.TangentSpaceTangent: output.TangentSpaceTangent = float3(1.0f, 0.0f, 0.0f);
$VertexDescriptionInputs.ObjectSpaceBiTangent: output.ObjectSpaceBiTangent = normalize(cross(input.normalOS, input.tangentOS) * (input.tangentOS.w > 0.0f ? 1.0f : -1.0f) * GetOddNegativeScale());
$VertexDescriptionInputs.WorldSpaceBiTangent: output.WorldSpaceBiTangent = TransformObjectToWorldDir(output.ObjectSpaceBiTangent);
$VertexDescriptionInputs.ViewSpaceBiTangent: output.ViewSpaceBiTangent = TransformWorldToViewDir(output.WorldSpaceBiTangent);
$VertexDescriptionInputs.TangentSpaceBiTangent: output.TangentSpaceBiTangent = float3(0.0f, 1.0f, 0.0f);
$VertexDescriptionInputs.ObjectSpacePosition: output.ObjectSpacePosition = input.positionOS;
$VertexDescriptionInputs.WorldSpacePosition: output.WorldSpacePosition = GetAbsolutePositionWS(TransformObjectToWorld(input.positionOS));
$VertexDescriptionInputs.ViewSpacePosition: output.ViewSpacePosition = TransformWorldToView(output.WorldSpacePosition);
$VertexDescriptionInputs.TangentSpacePosition: output.TangentSpacePosition = float3(0.0f, 0.0f, 0.0f);
$VertexDescriptionInputs.WorldSpaceViewDirection: output.WorldSpaceViewDirection = GetWorldSpaceNormalizeViewDir(output.WorldSpacePosition);
$VertexDescriptionInputs.ObjectSpaceViewDirection: output.ObjectSpaceViewDirection = TransformWorldToObjectDir(output.WorldSpaceViewDirection);
$VertexDescriptionInputs.ViewSpaceViewDirection: output.ViewSpaceViewDirection = TransformWorldToViewDir(output.WorldSpaceViewDirection);
$VertexDescriptionInputs.TangentSpaceViewDirection: float3x3 tangentSpaceTransform = float3x3(output.WorldSpaceTangent,output.WorldSpaceBiTangent,output.WorldSpaceNormal);
$VertexDescriptionInputs.TangentSpaceViewDirection: output.TangentSpaceViewDirection = mul(tangentSpaceTransform, output.WorldSpaceViewDirection);
$VertexDescriptionInputs.ScreenPosition: output.ScreenPosition = ComputeScreenPos(TransformWorldToHClip(output.WorldSpacePosition), _ProjectionParams.x);
$VertexDescriptionInputs.uv0: output.uv0 = float4(input.uv0, 0.0f, 0.0f);
$VertexDescriptionInputs.uv1: output.uv1 = float4(input.uv1, 0.0f, 0.0f);
$VertexDescriptionInputs.uv2: output.uv2 = float4(input.uv2, 0.0f, 0.0f);
$VertexDescriptionInputs.uv3: output.uv3 = float4(input.uv3, 0.0f, 0.0f);
$VertexDescriptionInputs.VertexColor: output.VertexColor = input.color;
$include("SharedCode.template.hlsl")
return output;
}
AttributesMesh ApplyMeshModification(AttributesMesh input)
{
// build graph inputs
VertexDescriptionInputs vertexDescriptionInputs = AttributesMeshToVertexDescriptionInputs(input);
// evaluate vertex graph
VertexDescription vertexDescription = VertexDescriptionFunction(vertexDescriptionInputs);
// copy graph output to the results
$VertexDescription.Position: input.positionOS = vertexDescription.Position;
return input;
}
#endif // HAVE_MESH_MODIFICATION
// TODO: Do we want to build include functionality for sharing these preprocessed functions across templates?
FragInputs BuildFragInputs(VaryingsMeshToPS input)
{
FragInputs output;
ZERO_INITIALIZE(FragInputs, output);
// Init to some default value to make the compiler quiet (else it outputs a 'divide by zero' warning even if the value is not used).
// TODO: this is a really poor workaround, but the variable is used in a bunch of places
// to compute normals which are then passed on elsewhere to compute other values...
output.worldToTangent = k_identity3x3;
output.positionSS = input.positionCS; // input.positionCS is SV_Position
$FragInputs.positionRWS: output.positionRWS = input.positionRWS;
$FragInputs.worldToTangent: output.worldToTangent = BuildWorldToTangent(input.tangentWS, input.normalWS);
$FragInputs.texCoord0: output.texCoord0 = input.texCoord0;
$FragInputs.texCoord1: output.texCoord1 = input.texCoord1;
$FragInputs.texCoord2: output.texCoord2 = input.texCoord2;
$FragInputs.texCoord3: output.texCoord3 = input.texCoord3;
$FragInputs.color: output.color = input.color;
#if SHADER_STAGE_FRAGMENT
$FragInputs.isFrontFace: output.isFrontFace = IS_FRONT_VFACE(input.cullFace, true, false); // TODO: SHADER_STAGE_FRAGMENT only
$FragInputs.isFrontFace: // Handle handedness of the view matrix (in Unity the view matrix defaults to a determinant of -1)
$FragInputs.isFrontFace: // when we render a cubemap the view matrix handedness is flipped (due to the convention used for cubemaps), so we have a determinant of +1
$FragInputs.isFrontFace: output.isFrontFace = _DetViewMatrix < 0.0 ? output.isFrontFace : !output.isFrontFace;
#endif // SHADER_STAGE_FRAGMENT
return output;
}
SurfaceDescriptionInputs FragInputsToSurfaceDescriptionInputs(FragInputs input, float3 viewWS)
{
SurfaceDescriptionInputs output;
ZERO_INITIALIZE(SurfaceDescriptionInputs, output);
$SurfaceDescriptionInputs.WorldSpaceNormal: output.WorldSpaceNormal = normalize(input.worldToTangent[2].xyz);
$SurfaceDescriptionInputs.ObjectSpaceNormal: output.ObjectSpaceNormal = mul(output.WorldSpaceNormal, (float3x3) UNITY_MATRIX_M); // transposed multiplication by inverse matrix to handle normal scale
$SurfaceDescriptionInputs.ViewSpaceNormal: output.ViewSpaceNormal = mul(output.WorldSpaceNormal, (float3x3) UNITY_MATRIX_I_V); // transposed multiplication by inverse matrix to handle normal scale
$SurfaceDescriptionInputs.TangentSpaceNormal: output.TangentSpaceNormal = float3(0.0f, 0.0f, 1.0f);
$SurfaceDescriptionInputs.WorldSpaceTangent: output.WorldSpaceTangent = input.worldToTangent[0].xyz;
$SurfaceDescriptionInputs.ObjectSpaceTangent: output.ObjectSpaceTangent = TransformWorldToObjectDir(output.WorldSpaceTangent);
$SurfaceDescriptionInputs.ViewSpaceTangent: output.ViewSpaceTangent = TransformWorldToViewDir(output.WorldSpaceTangent);
$SurfaceDescriptionInputs.TangentSpaceTangent: output.TangentSpaceTangent = float3(1.0f, 0.0f, 0.0f);
$SurfaceDescriptionInputs.WorldSpaceBiTangent: output.WorldSpaceBiTangent = input.worldToTangent[1].xyz;
$SurfaceDescriptionInputs.ObjectSpaceBiTangent: output.ObjectSpaceBiTangent = TransformWorldToObjectDir(output.WorldSpaceBiTangent);
$SurfaceDescriptionInputs.ViewSpaceBiTangent: output.ViewSpaceBiTangent = TransformWorldToViewDir(output.WorldSpaceBiTangent);
$SurfaceDescriptionInputs.TangentSpaceBiTangent: output.TangentSpaceBiTangent = float3(0.0f, 1.0f, 0.0f);
$SurfaceDescriptionInputs.WorldSpaceViewDirection: output.WorldSpaceViewDirection = normalize(viewWS);
$SurfaceDescriptionInputs.ObjectSpaceViewDirection: output.ObjectSpaceViewDirection = TransformWorldToObjectDir(output.WorldSpaceViewDirection);
$SurfaceDescriptionInputs.ViewSpaceViewDirection: output.ViewSpaceViewDirection = TransformWorldToViewDir(output.WorldSpaceViewDirection);
$SurfaceDescriptionInputs.TangentSpaceViewDirection: float3x3 tangentSpaceTransform = float3x3(output.WorldSpaceTangent,output.WorldSpaceBiTangent,output.WorldSpaceNormal);
$SurfaceDescriptionInputs.TangentSpaceViewDirection: output.TangentSpaceViewDirection = mul(tangentSpaceTransform, output.WorldSpaceViewDirection);
$SurfaceDescriptionInputs.WorldSpacePosition: output.WorldSpacePosition = GetAbsolutePositionWS(input.positionRWS);
$SurfaceDescriptionInputs.ObjectSpacePosition: output.ObjectSpacePosition = TransformWorldToObject(input.positionRWS);
$SurfaceDescriptionInputs.ViewSpacePosition: float4 posViewSpace = TransformWorldToView(input.positionRWS);
$SurfaceDescriptionInputs.ViewSpacePosition: output.ViewSpacePosition = posViewSpace.xyz / posViewSpace.w;
$SurfaceDescriptionInputs.TangentSpacePosition: output.TangentSpacePosition = float3(0.0f, 0.0f, 0.0f);
$SurfaceDescriptionInputs.ScreenPosition: output.ScreenPosition = ComputeScreenPos(TransformWorldToHClip(input.positionRWS), _ProjectionParams.x);
$SurfaceDescriptionInputs.uv0: output.uv0 = float4(input.texCoord0, 0.0f, 0.0f);
$SurfaceDescriptionInputs.uv1: output.uv1 = float4(input.texCoord1, 0.0f, 0.0f);
$SurfaceDescriptionInputs.uv2: output.uv2 = float4(input.texCoord2, 0.0f, 0.0f);
$SurfaceDescriptionInputs.uv3: output.uv3 = float4(input.texCoord3, 0.0f, 0.0f);
$SurfaceDescriptionInputs.VertexColor: output.VertexColor = input.color;
$SurfaceDescriptionInputs.FaceSign: output.FaceSign = input.isFrontFace;
return output;
}
// existing HDRP code uses the combined function to go directly from packed to frag inputs
FragInputs UnpackVaryingsMeshToFragInputs(PackedVaryingsMeshToPS input)
{
VaryingsMeshToPS unpacked= UnpackVaryingsMeshToPS(input);
return BuildFragInputs(unpacked);
}
void BuildSurfaceData(FragInputs fragInputs, SurfaceDescription surfaceDescription, float3 V, out SurfaceData surfaceData)
{

//-------------------------------------------------------------------------------------
// Pass Includes
//-------------------------------------------------------------------------------------
${Includes}
$splice(Includes)
//-------------------------------------------------------------------------------------
// End Pass Includes
//-------------------------------------------------------------------------------------

105
com.unity.render-pipelines.high-definition/HDRP/Editor/ShaderGraph/HDSubShaderUtilities.cs


using UnityEditor.Graphing;
using UnityEngine; // Vector3,4
using UnityEditor.ShaderGraph;
using UnityEngine.Experimental.Rendering.HDPipeline;
public static class HDRPShaderStructs
internal static class HDRPShaderStructs
struct AttributesMesh
internal struct AttributesMesh
{
[Semantic("POSITION")] Vector3 positionOS;
[Semantic("NORMAL")][Optional] Vector3 normalOS;

[Semantic("COLOR")][Optional] Vector4 color;
};
struct VaryingsMeshToPS
[InterpolatorPack]
internal struct VaryingsMeshToPS
{
[Semantic("SV_Position")] Vector4 positionCS;
[Optional] Vector3 positionRWS;

};
};
struct VaryingsMeshToDS
[InterpolatorPack]
internal struct VaryingsMeshToDS
{
Vector3 positionRWS;
Vector3 normalWS;

};
};
struct FragInputs
internal struct FragInputs
{
public static Dependency[] dependencies = new Dependency[]
{

};
// this describes the input to the pixel shader graph eval
public struct SurfaceDescriptionInputs
internal struct SurfaceDescriptionInputs
{
[Optional] Vector3 ObjectSpaceNormal;
[Optional] Vector3 ViewSpaceNormal;

};
// this describes the input to the pixel shader graph eval
public struct VertexDescriptionInputs
internal struct VertexDescriptionInputs
{
[Optional] Vector3 ObjectSpaceNormal;
[Optional] Vector3 ViewSpaceNormal;

}
}
}
public static void Generate(
ShaderGenerator codeResult,
HashSet<string> activeFields)
{
// propagate requirements using dependencies
{
ShaderSpliceUtil.ApplyDependencies(
activeFields,
new List<Dependency[]>()
{
FragInputs.dependencies,
VaryingsMeshToPS.standardDependencies,
SurfaceDescriptionInputs.dependencies,
VertexDescriptionInputs.dependencies
});
}
// generate code based on requirements
ShaderSpliceUtil.BuildType(typeof(AttributesMesh), activeFields, codeResult);
ShaderSpliceUtil.BuildType(typeof(VaryingsMeshToPS), activeFields, codeResult);
ShaderSpliceUtil.BuildType(typeof(VaryingsMeshToDS), activeFields, codeResult);
ShaderSpliceUtil.BuildPackedType(typeof(VaryingsMeshToPS), activeFields, codeResult);
ShaderSpliceUtil.BuildPackedType(typeof(VaryingsMeshToDS), activeFields, codeResult);
}
};
public struct Pass

{
public static bool GenerateShaderPass(AbstractMaterialNode masterNode, Pass pass, GenerationMode mode, SurfaceMaterialOptions materialOptions, HashSet<string> activeFields, ShaderGenerator result, List<string> sourceAssetDependencyPaths)
{
var templateLocation = Path.Combine(Path.Combine(Path.Combine(HDEditorUtils.GetHDRenderPipelinePath(), "Editor"), "ShaderGraph"), pass.TemplateName);
string templatePath = Path.Combine(Path.Combine(HDUtils.GetHDRenderPipelinePath(), "Editor"), "ShaderGraph");
string templateLocation = Path.Combine(templatePath, pass.TemplateName);
if (!File.Exists(templateLocation))
{
// TODO: produce error here

bool debugOutput = false;
if (sourceAssetDependencyPaths != null)
sourceAssetDependencyPaths.Add(templateLocation);
// grab all of the active nodes (for pixel and vertex graphs)
var vertexNodes = ListPool<INode>.Get();

HDRPShaderStructs.AddRequiredFields(pass.RequiredFields, activeFields);
// apply dependencies to the active fields, and build interpolators (TODO: split this function)
var packedInterpolatorCode = new ShaderGenerator();
HDRPShaderStructs.Generate(
packedInterpolatorCode,
activeFields);
// propagate active field requirements using dependencies
ShaderSpliceUtil.ApplyDependencies(
activeFields,
new List<Dependency[]>()
{
HDRPShaderStructs.FragInputs.dependencies,
HDRPShaderStructs.VaryingsMeshToPS.standardDependencies,
HDRPShaderStructs.SurfaceDescriptionInputs.dependencies,
HDRPShaderStructs.VertexDescriptionInputs.dependencies
});
// debug output all active fields
var interpolatorDefines = new ShaderGenerator();

// build the hash table of all named fragments TODO: could make this Dictionary<string, ShaderGenerator / string> ?
Dictionary<string, string> namedFragments = new Dictionary<string, string>();
namedFragments.Add("${Defines}", defines.GetShaderString(2, false));
namedFragments.Add("${Graph}", graph.GetShaderString(2, false));
namedFragments.Add("${LightMode}", pass.LightMode);
namedFragments.Add("${PassName}", pass.Name);
namedFragments.Add("${Includes}", shaderPassIncludes.GetShaderString(2, false));
namedFragments.Add("${InterpolatorPacking}", packedInterpolatorCode.GetShaderString(2, false));
namedFragments.Add("${Blending}", blendCode.ToString());
namedFragments.Add("${Culling}", cullCode.ToString());
namedFragments.Add("${ZTest}", zTestCode.ToString());
namedFragments.Add("${ZWrite}", zWriteCode.ToString());
namedFragments.Add("${Stencil}", stencilCode.ToString());
namedFragments.Add("${ColorMask}", colorMaskCode.ToString());
namedFragments.Add("${LOD}", materialOptions.lod.ToString());
namedFragments.Add("Defines", defines.GetShaderString(2, false));
namedFragments.Add("Graph", graph.GetShaderString(2, false));
namedFragments.Add("LightMode", pass.LightMode);
namedFragments.Add("PassName", pass.Name);
namedFragments.Add("Includes", shaderPassIncludes.GetShaderString(2, false));
namedFragments.Add("Blending", blendCode.ToString());
namedFragments.Add("Culling", cullCode.ToString());
namedFragments.Add("ZTest", zTestCode.ToString());
namedFragments.Add("ZWrite", zWriteCode.ToString());
namedFragments.Add("Stencil", stencilCode.ToString());
namedFragments.Add("ColorMask", colorMaskCode.ToString());
namedFragments.Add("LOD", materialOptions.lod.ToString());
// this is the format string for building the 'C# qualified assembly type names' for $buildType() commands
string buildTypeAssemblyNameFormat = "UnityEditor.Experimental.Rendering.HDPipeline.HDRPShaderStructs+{0}, " + typeof(HDSubShaderUtilities).Assembly.FullName.ToString();
// process the template to generate the shader code for this pass
ShaderSpliceUtil.TemplatePreprocessor templatePreprocessor =
new ShaderSpliceUtil.TemplatePreprocessor(activeFields, namedFragments, debugOutput, templatePath, sourceAssetDependencyPaths, buildTypeAssemblyNameFormat);
// process the template to generate the shader code for this pass TODO: could make this a shared function
string[] templateLines = File.ReadAllLines(templateLocation);
System.Text.StringBuilder builder = new System.Text.StringBuilder();
foreach (string line in templateLines)
{
ShaderSpliceUtil.PreprocessShaderCode(line, activeFields, namedFragments, builder, debugOutput);
}
templatePreprocessor.ProcessTemplateFile(templateLocation);
result.AddShaderChunk(builder.ToString(), false);
result.AddShaderChunk(templatePreprocessor.GetShaderCode().ToString(), false);
return true;
}
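The hunk above replaces the old line-by-line ${Name} string substitution with a dictionary of named fragments handed to ShaderSpliceUtil.TemplatePreprocessor, which fills the $splice(Name) sites seen in the template files. A toy sketch of that substitution step, assuming a plain regex replace (the real preprocessor also handles $include, predicated lines and $buildType):

// Toy $splice(...) substitution; not the HDRP preprocessor, just the idea.
using System.Collections.Generic;
using System.Text.RegularExpressions;

static class SpliceDemo
{
    public static string Splice(string templateLine, Dictionary<string, string> fragments)
    {
        return Regex.Replace(templateLine, @"\$splice\((\w+)\)",
            m => fragments.TryGetValue(m.Groups[1].Value, out var code) ? code : string.Empty);
    }
}

// SpliceDemo.Splice("Tags { \"LightMode\" = \"$splice(LightMode)\" }",
//                   new Dictionary<string, string> { { "LightMode", "ForwardOnly" } })
//   -> Tags { "LightMode" = "ForwardOnly" }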

162
com.unity.render-pipelines.high-definition/HDRP/Editor/ShaderGraph/HDUnlitPassForward.template


Pass
{
// based on HDUnlitPassForward.template
Name "${PassName}"
Tags { "LightMode" = "${LightMode}" }
Name "$splice(PassName)"
Tags { "LightMode" = "$splice(LightMode)" }
${Blending}
${Culling}
${ZTest}
${ZWrite}
${Stencil}
${ColorMask}
$splice(Blending)
$splice(Culling)
$splice(ZTest)
$splice(ZWrite)
$splice(Stencil)
$splice(ColorMask)
//-------------------------------------------------------------------------------------
// End Render Modes
//-------------------------------------------------------------------------------------

#pragma target 4.5
#pragma only_renderers d3d11 ps4 xboxone vulkan metal switch
//#pragma enable_d3d11_debug_symbols
#pragma target 4.5
#pragma only_renderers d3d11 ps4 xboxone vulkan metal switch
//#pragma enable_d3d11_debug_symbols
//-------------------------------------------------------------------------------------
// Variant Definitions (active field translations to HDRP defines)

//-------------------------------------------------------------------------------------
// Defines
//-------------------------------------------------------------------------------------
${Defines}
$splice(Defines)
// this translates the new dependency tracker into the old preprocessor definitions for the existing HDRP shader code
$AttributesMesh.normalOS: #define ATTRIBUTES_NEED_NORMAL

// used for shaders that don't need lighting
#include "HDRP/Material/Material.hlsl"
#endif
#include "HDRP/Material/BuiltinUtilities.hlsl"
#include "HDRP/Material/MaterialUtilities.hlsl"
// this function assumes the bitangent flip is encoded in tangentWS.w

//-------------------------------------------------------------------------------------
// Interpolator Packing And Struct Declarations
//-------------------------------------------------------------------------------------
${InterpolatorPacking}
$buildType(AttributesMesh)
$buildType(VaryingsMeshToPS)
$buildType(VaryingsMeshToDS)
//-------------------------------------------------------------------------------------
// End Interpolator Packing And Struct Declarations
//-------------------------------------------------------------------------------------

//-------------------------------------------------------------------------------------
${Graph}
$splice(Graph)
#ifdef HAVE_MESH_MODIFICATION
// TODO: we should share this between template files somehow
VertexDescriptionInputs AttributesMeshToVertexDescriptionInputs(AttributesMesh input)
{
VertexDescriptionInputs output;
ZERO_INITIALIZE(VertexDescriptionInputs, output);
$VertexDescriptionInputs.ObjectSpaceNormal: output.ObjectSpaceNormal = input.normalOS;
$VertexDescriptionInputs.WorldSpaceNormal: output.WorldSpaceNormal = TransformObjectToWorldNormal(input.normalOS);
$VertexDescriptionInputs.ViewSpaceNormal: output.ViewSpaceNormal = TransformWorldToViewDir(output.WorldSpaceNormal);
$VertexDescriptionInputs.TangentSpaceNormal: output.TangentSpaceNormal = float3(0.0f, 0.0f, 1.0f);
$VertexDescriptionInputs.ObjectSpaceTangent: output.ObjectSpaceTangent = input.tangentOS;
$VertexDescriptionInputs.WorldSpaceTangent: output.WorldSpaceTangent = TransformObjectToWorldDir(input.tangentOS.xyz);
$VertexDescriptionInputs.ViewSpaceTangent: output.ViewSpaceTangent = TransformWorldToViewDir(output.WorldSpaceTangent);
$VertexDescriptionInputs.TangentSpaceTangent: output.TangentSpaceTangent = float3(1.0f, 0.0f, 0.0f);
$VertexDescriptionInputs.ObjectSpaceBiTangent: output.ObjectSpaceBiTangent = normalize(cross(input.normalOS, input.tangentOS) * (input.tangentOS.w > 0.0f ? 1.0f : -1.0f) * GetOddNegativeScale());
$VertexDescriptionInputs.WorldSpaceBiTangent: output.WorldSpaceBiTangent = TransformObjectToWorldDir(output.ObjectSpaceBiTangent);
$VertexDescriptionInputs.ViewSpaceBiTangent: output.ViewSpaceBiTangent = TransformWorldToViewDir(output.WorldSpaceBiTangent);
$VertexDescriptionInputs.TangentSpaceBiTangent: output.TangentSpaceBiTangent = float3(0.0f, 1.0f, 0.0f);
$VertexDescriptionInputs.ObjectSpacePosition: output.ObjectSpacePosition = input.positionOS;
$VertexDescriptionInputs.WorldSpacePosition: output.WorldSpacePosition = GetAbsolutePositionWS(TransformObjectToWorld(input.positionOS));
$VertexDescriptionInputs.ViewSpacePosition: output.ViewSpacePosition = TransformWorldToView(output.WorldSpacePosition);
$VertexDescriptionInputs.TangentSpacePosition: output.TangentSpacePosition = float3(0.0f, 0.0f, 0.0f);
$VertexDescriptionInputs.WorldSpaceViewDirection: output.WorldSpaceViewDirection = GetWorldSpaceNormalizeViewDir(output.WorldSpacePosition);
$VertexDescriptionInputs.ObjectSpaceViewDirection: output.ObjectSpaceViewDirection = TransformWorldToObjectDir(output.WorldSpaceViewDirection);
$VertexDescriptionInputs.ViewSpaceViewDirection: output.ViewSpaceViewDirection = TransformWorldToViewDir(output.WorldSpaceViewDirection);
$VertexDescriptionInputs.TangentSpaceViewDirection: float3x3 tangentSpaceTransform = float3x3(output.WorldSpaceTangent,output.WorldSpaceBiTangent,output.WorldSpaceNormal);
$VertexDescriptionInputs.TangentSpaceViewDirection: output.TangentSpaceViewDirection = mul(tangentSpaceTransform, output.WorldSpaceViewDirection);
$VertexDescriptionInputs.ScreenPosition: output.ScreenPosition = ComputeScreenPos(TransformWorldToHClip(output.WorldSpacePosition), _ProjectionParams.x);
$VertexDescriptionInputs.uv0: output.uv0 = float4(input.uv0, 0.0f, 0.0f);
$VertexDescriptionInputs.uv1: output.uv1 = float4(input.uv1, 0.0f, 0.0f);
$VertexDescriptionInputs.uv2: output.uv2 = float4(input.uv2, 0.0f, 0.0f);
$VertexDescriptionInputs.uv3: output.uv3 = float4(input.uv3, 0.0f, 0.0f);
$VertexDescriptionInputs.VertexColor: output.VertexColor = input.color;
return output;
}
AttributesMesh ApplyMeshModification(AttributesMesh input)
{
// build graph inputs
VertexDescriptionInputs vertexDescriptionInputs = AttributesMeshToVertexDescriptionInputs(input);
// evaluate vertex graph
VertexDescription vertexDescription = VertexDescriptionFunction(vertexDescriptionInputs);
// copy graph output to the results
$VertexDescription.Position: input.positionOS = vertexDescription.Position;
return input;
}
#endif // HAVE_MESH_MODIFICATION
$features.modifyMesh: $include("VertexAnimation.template.hlsl")
// TODO: Do we want to build include functionality for sharing these preprocessed functions across templates?
FragInputs BuildFragInputs(VaryingsMeshToPS input)
{
FragInputs output;
ZERO_INITIALIZE(FragInputs, output);
$include("SharedCode.template.hlsl")
// Init to some default value to make the compiler quiet (else it outputs a 'divide by zero' warning even if the value is not used).
// TODO: this is a really poor workaround, but the variable is used in a bunch of places
// to compute normals which are then passed on elsewhere to compute other values...
output.worldToTangent = k_identity3x3;
output.positionSS = input.positionCS; // input.positionCS is SV_Position
$FragInputs.positionRWS: output.positionRWS = input.positionRWS;
$FragInputs.worldToTangent: output.worldToTangent = BuildWorldToTangent(input.tangentWS, input.normalWS);
$FragInputs.texCoord0: output.texCoord0 = input.texCoord0;
$FragInputs.texCoord1: output.texCoord1 = input.texCoord1;
$FragInputs.texCoord2: output.texCoord2 = input.texCoord2;
$FragInputs.texCoord3: output.texCoord3 = input.texCoord3;
$FragInputs.color: output.color = input.color;
#if SHADER_STAGE_FRAGMENT
$FragInputs.isFrontFace: output.isFrontFace = IS_FRONT_VFACE(input.cullFace, true, false); // TODO: SHADER_STAGE_FRAGMENT only
$FragInputs.isFrontFace: // Handle handedness of the view matrix (in Unity the view matrix defaults to a determinant of -1)
$FragInputs.isFrontFace: // when we render a cubemap the view matrix handedness is flipped (due to the convention used for cubemaps), so we have a determinant of +1
$FragInputs.isFrontFace: output.isFrontFace = _DetViewMatrix < 0.0 ? output.isFrontFace : !output.isFrontFace;
#endif // SHADER_STAGE_FRAGMENT
return output;
}
SurfaceDescriptionInputs FragInputsToSurfaceDescriptionInputs(FragInputs input, float3 viewWS)
{
SurfaceDescriptionInputs output;
ZERO_INITIALIZE(SurfaceDescriptionInputs, output);
$SurfaceDescriptionInputs.WorldSpaceNormal: output.WorldSpaceNormal = normalize(input.worldToTangent[2].xyz);
$SurfaceDescriptionInputs.ObjectSpaceNormal: output.ObjectSpaceNormal = mul(output.WorldSpaceNormal, (float3x3) UNITY_MATRIX_M); // transposed multiplication by inverse matrix to handle normal scale
$SurfaceDescriptionInputs.ViewSpaceNormal: output.ViewSpaceNormal = mul(output.WorldSpaceNormal, (float3x3) UNITY_MATRIX_I_V); // transposed multiplication by inverse matrix to handle normal scale
$SurfaceDescriptionInputs.TangentSpaceNormal: output.TangentSpaceNormal = float3(0.0f, 0.0f, 1.0f);
$SurfaceDescriptionInputs.WorldSpaceTangent: output.WorldSpaceTangent = input.worldToTangent[0].xyz;
$SurfaceDescriptionInputs.ObjectSpaceTangent: output.ObjectSpaceTangent = TransformWorldToObjectDir(output.WorldSpaceTangent);
$SurfaceDescriptionInputs.ViewSpaceTangent: output.ViewSpaceTangent = TransformWorldToViewDir(output.WorldSpaceTangent);
$SurfaceDescriptionInputs.TangentSpaceTangent: output.TangentSpaceTangent = float3(1.0f, 0.0f, 0.0f);
$SurfaceDescriptionInputs.WorldSpaceBiTangent: output.WorldSpaceBiTangent = input.worldToTangent[1].xyz;
$SurfaceDescriptionInputs.ObjectSpaceBiTangent: output.ObjectSpaceBiTangent = TransformWorldToObjectDir(output.WorldSpaceBiTangent);
$SurfaceDescriptionInputs.ViewSpaceBiTangent: output.ViewSpaceBiTangent = TransformWorldToViewDir(output.WorldSpaceBiTangent);
$SurfaceDescriptionInputs.TangentSpaceBiTangent: output.TangentSpaceBiTangent = float3(0.0f, 1.0f, 0.0f);
$SurfaceDescriptionInputs.WorldSpaceViewDirection: output.WorldSpaceViewDirection = normalize(viewWS);
$SurfaceDescriptionInputs.ObjectSpaceViewDirection: output.ObjectSpaceViewDirection = TransformWorldToObjectDir(output.WorldSpaceViewDirection);
$SurfaceDescriptionInputs.ViewSpaceViewDirection: output.ViewSpaceViewDirection = TransformWorldToViewDir(output.WorldSpaceViewDirection);
$SurfaceDescriptionInputs.TangentSpaceViewDirection: float3x3 tangentSpaceTransform = float3x3(output.WorldSpaceTangent,output.WorldSpaceBiTangent,output.WorldSpaceNormal);
$SurfaceDescriptionInputs.TangentSpaceViewDirection: output.TangentSpaceViewDirection = mul(tangentSpaceTransform, output.WorldSpaceViewDirection);
$SurfaceDescriptionInputs.WorldSpacePosition: output.WorldSpacePosition = GetAbsolutePositionWS(input.positionRWS);
$SurfaceDescriptionInputs.ObjectSpacePosition: output.ObjectSpacePosition = TransformWorldToObject(input.positionRWS);
$SurfaceDescriptionInputs.ViewSpacePosition: float4 posViewSpace = TransformWorldToView(input.positionRWS);
$SurfaceDescriptionInputs.ViewSpacePosition: output.ViewSpacePosition = posViewSpace.xyz / posViewSpace.w;
$SurfaceDescriptionInputs.TangentSpacePosition: output.TangentSpacePosition = float3(0.0f, 0.0f, 0.0f);
$SurfaceDescriptionInputs.ScreenPosition: output.ScreenPosition = ComputeScreenPos(TransformWorldToHClip(input.positionRWS), _ProjectionParams.x);
$SurfaceDescriptionInputs.uv0: output.uv0 = float4(input.texCoord0, 0.0f, 0.0f);
$SurfaceDescriptionInputs.uv1: output.uv1 = float4(input.texCoord1, 0.0f, 0.0f);
$SurfaceDescriptionInputs.uv2: output.uv2 = float4(input.texCoord2, 0.0f, 0.0f);
$SurfaceDescriptionInputs.uv3: output.uv3 = float4(input.texCoord3, 0.0f, 0.0f);
$SurfaceDescriptionInputs.VertexColor: output.VertexColor = input.color;
$SurfaceDescriptionInputs.FaceSign: output.FaceSign = input.isFrontFace;
return output;
}
// existing HDRP code uses the combined function to go directly from packed to frag inputs
FragInputs UnpackVaryingsMeshToFragInputs(PackedVaryingsMeshToPS input)
{
VaryingsMeshToPS unpacked= UnpackVaryingsMeshToPS(input);
return BuildFragInputs(unpacked);
}
void BuildSurfaceData(FragInputs fragInputs, SurfaceDescription surfaceDescription, float3 V, out SurfaceData surfaceData)
{

//-------------------------------------------------------------------------------------
// Pass Includes
//-------------------------------------------------------------------------------------
${Includes}
$splice(Includes)
//-------------------------------------------------------------------------------------
// End Pass Includes
//-------------------------------------------------------------------------------------

2
com.unity.render-pipelines.high-definition/HDRP/Lighting/GlobalIlluminationUtils.cs


break;
// Note: We don't support this type in HDRP, but initialize it just in case
case LightType.Area:
case LightType.Rectangle:
ld.orientation = l.transform.rotation;
ld.position = l.transform.position;
ld.range = l.range;

4
com.unity.render-pipelines.high-definition/HDRP/Lighting/Light/HDAdditionalLightData.cs


lightData.lightUnit = LightUnit.Lux;
lightData.intensity = k_DefaultDirectionalLightIntensity;
break;
case LightType.Area: // Rectangle by default when light is created
case LightType.Rectangle: // Rectangle by default when light is created
lightData.lightUnit = LightUnit.Lumen;
lightData.intensity = k_DefaultAreaLightIntensity;
break;

}
// Sanity check: lightData.lightTypeExtent is init to LightTypeExtent.Punctual (in case, for unknown reasons, we recreate additional data on an existing light)
if (light.type == LightType.Area && lightData.lightTypeExtent == LightTypeExtent.Punctual)
if (light.type == LightType.Rectangle && lightData.lightTypeExtent == LightTypeExtent.Punctual)
{
lightData.lightTypeExtent = LightTypeExtent.Rectangle;
light.type = LightType.Point; // Same as in HDLightEditor

3
com.unity.render-pipelines.high-definition/HDRP/Lighting/LightEvaluation.hlsl


// Our subsurface scattering models use the semi-infinite planar slab assumption.
// Therefore, we need to find the thickness along the normal.
float thicknessInUnits = (distFrontFaceToLight - distBackFaceToLight) * -NdotL;
// Warning: based on the artist's input, dependence on the NdotL has been disabled.
float thicknessInUnits = (distFrontFaceToLight - distBackFaceToLight) /* * -NdotL */;
float thicknessInMeters = thicknessInUnits * _WorldScales[bsdfData.diffusionProfile].x;
float thicknessInMillimeters = thicknessInMeters * MILLIMETERS_PER_METER;

2
com.unity.render-pipelines.high-definition/HDRP/Lighting/LightLoop/Deferred.compute


BSDFData bsdfData;
BuiltinData builtinData;
DECODE_FROM_GBUFFER(posInput.positionSS, UINT_MAX, bsdfData, builtinData);
DECODE_FROM_GBUFFER(posInput.positionSS, featureFlags, bsdfData, builtinData);
PreLightData preLightData = GetPreLightData(V, posInput, bsdfData);

4
com.unity.render-pipelines.high-definition/HDRP/Lighting/LightLoop/GlobalLightLoopSettings.cs


public int planarReflectionProbeCacheSize = 2;
public PlanarReflectionResolution planarReflectionTextureSize = PlanarReflectionResolution.PlanarReflectionResolution1024;
public int reflectionProbeCacheSize = 128;
public CubeReflectionResolution reflectionCubemapSize = CubeReflectionResolution.CubeReflectionResolution128;
public int reflectionProbeCacheSize = 64;
public CubeReflectionResolution reflectionCubemapSize = CubeReflectionResolution.CubeReflectionResolution256;
public bool reflectionCacheCompressed = false;
public bool planarReflectionCacheCompressed = false;
public SkyResolution skyReflectionSize = SkyResolution.SkyResolution256;

88
com.unity.render-pipelines.high-definition/HDRP/Lighting/LightLoop/LightLoop.cs


atlasInit.baseInit.width = (uint)shadowInit.shadowAtlasWidth;
atlasInit.baseInit.height = (uint)shadowInit.shadowAtlasHeight;
atlasInit.baseInit.slices = 1;
atlasInit.baseInit.shadowmapBits = 32;
atlasInit.baseInit.shadowmapBits = shadowInit.shadowMap16Bit ? 16u : 32u;
atlasInit.baseInit.shadowmapFormat = RenderTextureFormat.Shadowmap;
atlasInit.baseInit.samplerState = SamplerState.Default();
atlasInit.baseInit.comparisonSamplerState = ComparisonSamplerState.Default();

m_ShadowMgr = new ShadowManager(shadowSettings, ref scInit, ref budgets, m_Shadowmaps);
// set global overrides - these need to match the override specified in LightLoop/Shadow.hlsl
bool useGlobalOverrides = true;
m_ShadowMgr.SetGlobalShadowOverride(GPUShadowType.Point , ShadowAlgorithm.PCF, ShadowVariant.V2, ShadowPrecision.High, useGlobalOverrides);
m_ShadowMgr.SetGlobalShadowOverride(GPUShadowType.Spot , ShadowAlgorithm.PCF, ShadowVariant.V2, ShadowPrecision.High, useGlobalOverrides);
m_ShadowMgr.SetGlobalShadowOverride(GPUShadowType.Directional , ShadowAlgorithm.PCF, ShadowVariant.V3, ShadowPrecision.High, useGlobalOverrides);
m_ShadowMgr.SetGlobalShadowOverride(GPUShadowType.Point , ShadowAlgorithm.PCF, ShadowVariant.V2, shadowInit.shadowMap16Bit ? ShadowPrecision.Low : ShadowPrecision.High, useGlobalOverrides);
m_ShadowMgr.SetGlobalShadowOverride(GPUShadowType.Spot , ShadowAlgorithm.PCF, ShadowVariant.V2, shadowInit.shadowMap16Bit ? ShadowPrecision.Low : ShadowPrecision.High, useGlobalOverrides);
m_ShadowMgr.SetGlobalShadowOverride(GPUShadowType.Directional , ShadowAlgorithm.PCF, ShadowVariant.V3, shadowInit.shadowMap16Bit ? ShadowPrecision.Low : ShadowPrecision.High, useGlobalOverrides);
m_ShadowMgr.SetShadowLightTypeDelegate(HDShadowLightType);
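Together with the serialized-parameter and UI hunks earlier, the block above wires the new shadowMap16Bit setting into the shadow system: a 16-bit atlas halves the depth format and the global PCF overrides drop to low precision. Distilled as a sketch (field and type names are the ones appearing in the diff; the surrounding initialization code is omitted):

// Sketch: one toggle drives both atlas depth and shadow filter precision.
atlasInit.baseInit.shadowmapBits = shadowInit.shadowMap16Bit ? 16u : 32u;
var precision = shadowInit.shadowMap16Bit ? ShadowPrecision.Low : ShadowPrecision.High;
m_ShadowMgr.SetGlobalShadowOverride(GPUShadowType.Point,       ShadowAlgorithm.PCF, ShadowVariant.V2, precision, useGlobalOverrides);
m_ShadowMgr.SetGlobalShadowOverride(GPUShadowType.Spot,        ShadowAlgorithm.PCF, ShadowVariant.V2, precision, useGlobalOverrides);
m_ShadowMgr.SetGlobalShadowOverride(GPUShadowType.Directional, ShadowAlgorithm.PCF, ShadowVariant.V3, precision, useGlobalOverrides);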

static int s_GenAABBKernel;
static int s_GenAABBKernel_Oblique;
static int s_GenListPerTileKernel_Oblique;
static int s_GenListPerVoxelKernelOblique;
static int s_ClearVoxelAtomicKernel;
static int s_ClearDispatchIndirectKernel;
static int s_BuildDispatchIndirectKernel;

{ "TileLightListGen_NoDepthRT", "TileLightListGen_DepthRT", "TileLightListGen_DepthRT_MSAA" },
{ "TileLightListGen_NoDepthRT_SrcBigTile", "TileLightListGen_DepthRT_SrcBigTile", "TileLightListGen_DepthRT_MSAA_SrcBigTile" }
};
static string[,] s_ClusterObliqueKernelNames = new string[(int)ClusterPrepassSource.Count, (int)ClusterDepthSource.Count]
{
{ "TileLightListGen_NoDepthRT ", "TileLightListGen_DepthRT_Oblique", "TileLightListGen_DepthRT_MSAA_Oblique" },
{ "TileLightListGen_NoDepthRT_SrcBigTile", "TileLightListGen_DepthRT_SrcBigTile_Oblique", "TileLightListGen_DepthRT_MSAA_SrcBigTile_Oblique" }
};
// clustered light list specific buffers and data end
static int[] s_TempScreenDimArray = new int[2]; // Used to avoid GC stress when calling SetComputeIntParams

m_ReflectionPlanarProbeCache = new PlanarReflectionProbeCache(hdAsset, iblFilterGGX, gLightLoopSettings.planarReflectionProbeCacheSize, (int)gLightLoopSettings.planarReflectionTextureSize, planarProbeCacheFormat, true);
s_GenAABBKernel = buildScreenAABBShader.FindKernel("ScreenBoundsAABB");
s_GenAABBKernel_Oblique = buildScreenAABBShader.FindKernel("ScreenBoundsAABB_Oblique");
// The bounds and light volumes are view-dependent, and AABB is additionally projection dependent.
// The view and proj matrices are per eye in stereo. This means we have to double the size of these buffers.

s_AABBBoundsBuffer = new ComputeBuffer(k_MaxStereoEyes * 2 * k_MaxLightsOnScreen, 3 * sizeof(float));
s_AABBBoundsBuffer = new ComputeBuffer(k_MaxStereoEyes * 2 * k_MaxLightsOnScreen, 4 * sizeof(float));
s_ConvexBoundsBuffer = new ComputeBuffer(k_MaxStereoEyes * k_MaxLightsOnScreen, System.Runtime.InteropServices.Marshal.SizeOf(typeof(SFiniteLightBound)));
s_LightVolumeDataBuffer = new ComputeBuffer(k_MaxStereoEyes * k_MaxLightsOnScreen, System.Runtime.InteropServices.Marshal.SizeOf(typeof(LightVolumeData)));
s_DispatchIndirectBuffer = new ComputeBuffer(LightDefinitions.s_NumFeatureVariants * 3, sizeof(uint), ComputeBufferType.IndirectArguments);

clustDepthSourceIdx = ClusterDepthSource.Depth;
}
var kernelName = s_ClusterKernelNames[(int)clustPrepassSourceIdx, (int)clustDepthSourceIdx];
var kernelObliqueName = s_ClusterObliqueKernelNames[(int)clustPrepassSourceIdx, (int)clustDepthSourceIdx];
s_GenListPerVoxelKernelOblique = buildPerVoxelLightListShader.FindKernel(kernelObliqueName);
s_GenListPerTileKernel_Oblique = buildPerTileLightListShader.FindKernel(m_FrameSettings.lightLoopSettings.enableBigTilePrepass ? "TileLightListGen_SrcBigTile_FeatureFlags_Oblique" : "TileLightListGen_FeatureFlags_Oblique");
s_GenListPerTileKernel_Oblique = buildPerTileLightListShader.FindKernel(m_FrameSettings.lightLoopSettings.enableBigTilePrepass ? "TileLightListGen_SrcBigTile_Oblique" : "TileLightListGen_Oblique");
}
m_CookieTexArray.NewFrame();

void VoxelLightListGeneration(CommandBuffer cmd, HDCamera hdCamera, Matrix4x4[] projscrArr, Matrix4x4[] invProjscrArr, RenderTargetIdentifier cameraDepthBufferRT)
{
Camera camera = hdCamera.camera;
var isProjectionOblique = GeometryUtils.IsProjectionMatrixOblique(camera.projectionMatrix);
// clear atomic offset index
cmd.SetComputeBufferParam(buildPerVoxelLightListShader, s_ClearVoxelAtomicKernel, HDShaderIDs.g_LayeredSingleIdxBuffer, s_GlobalLightListAtomic);
cmd.DispatchCompute(buildPerVoxelLightListShader, s_ClearVoxelAtomicKernel, 1, 1, 1);

cmd.SetComputeFloatParam(buildPerVoxelLightListShader, HDShaderIDs.g_fClustScale, m_ClustScale);
cmd.SetComputeFloatParam(buildPerVoxelLightListShader, HDShaderIDs.g_fClustBase, k_ClustLogBase);
cmd.SetComputeTextureParam(buildPerVoxelLightListShader, s_GenListPerVoxelKernel, HDShaderIDs.g_depth_tex, cameraDepthBufferRT);
cmd.SetComputeBufferParam(buildPerVoxelLightListShader, s_GenListPerVoxelKernel, HDShaderIDs.g_vLayeredLightList, s_PerVoxelLightLists);
cmd.SetComputeBufferParam(buildPerVoxelLightListShader, s_GenListPerVoxelKernel, HDShaderIDs.g_LayeredOffset, s_PerVoxelOffset);
cmd.SetComputeBufferParam(buildPerVoxelLightListShader, s_GenListPerVoxelKernel, HDShaderIDs.g_LayeredSingleIdxBuffer, s_GlobalLightListAtomic);
var genListPerVoxelKernel = isProjectionOblique ? s_GenListPerVoxelKernelOblique : s_GenListPerVoxelKernel;
cmd.SetComputeTextureParam(buildPerVoxelLightListShader, genListPerVoxelKernel, HDShaderIDs.g_depth_tex, cameraDepthBufferRT);
cmd.SetComputeBufferParam(buildPerVoxelLightListShader, genListPerVoxelKernel, HDShaderIDs.g_vLayeredLightList, s_PerVoxelLightLists);
cmd.SetComputeBufferParam(buildPerVoxelLightListShader, genListPerVoxelKernel, HDShaderIDs.g_LayeredOffset, s_PerVoxelOffset);
cmd.SetComputeBufferParam(buildPerVoxelLightListShader, genListPerVoxelKernel, HDShaderIDs.g_LayeredSingleIdxBuffer, s_GlobalLightListAtomic);
cmd.SetComputeBufferParam(buildPerVoxelLightListShader, s_GenListPerVoxelKernel, HDShaderIDs.g_vBigTileLightList, s_BigTileLightList);
cmd.SetComputeBufferParam(buildPerVoxelLightListShader, genListPerVoxelKernel, HDShaderIDs.g_vBigTileLightList, s_BigTileLightList);
cmd.SetComputeBufferParam(buildPerVoxelLightListShader, s_GenListPerVoxelKernel, HDShaderIDs.g_logBaseBuffer, s_PerTileLogBaseTweak);
cmd.SetComputeBufferParam(buildPerVoxelLightListShader, genListPerVoxelKernel, HDShaderIDs.g_logBaseBuffer, s_PerTileLogBaseTweak);
cmd.SetComputeBufferParam(buildPerVoxelLightListShader, s_GenListPerVoxelKernel, HDShaderIDs.g_vBoundsBuffer, s_AABBBoundsBuffer);
cmd.SetComputeBufferParam(buildPerVoxelLightListShader, s_GenListPerVoxelKernel, HDShaderIDs._LightVolumeData, s_LightVolumeDataBuffer);
cmd.SetComputeBufferParam(buildPerVoxelLightListShader, s_GenListPerVoxelKernel, HDShaderIDs.g_data, s_ConvexBoundsBuffer);
cmd.SetComputeBufferParam(buildPerVoxelLightListShader, genListPerVoxelKernel, HDShaderIDs.g_vBoundsBuffer, s_AABBBoundsBuffer);
cmd.SetComputeBufferParam(buildPerVoxelLightListShader, genListPerVoxelKernel, HDShaderIDs._LightVolumeData, s_LightVolumeDataBuffer);
cmd.SetComputeBufferParam(buildPerVoxelLightListShader, genListPerVoxelKernel, HDShaderIDs.g_data, s_ConvexBoundsBuffer);
//cmd.DispatchCompute(buildPerVoxelLightListShader, s_GenListPerVoxelKernel, numTilesX, numTilesY, 1);
cmd.DispatchCompute(buildPerVoxelLightListShader, s_GenListPerVoxelKernel, numTilesX, numTilesY, numEyes);
//cmd.DispatchCompute(buildPerVoxelLightListShader, genListPerVoxelKernel, numTilesX, numTilesY, 1);
cmd.DispatchCompute(buildPerVoxelLightListShader, genListPerVoxelKernel, numTilesX, numTilesY, numEyes);
}
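The dispatch now uses the eye count as the Z dimension of the thread-group grid, so a single DispatchCompute fills the voxel light lists for both stereo eyes; each group is expected to derive its eyeIndex from the Z component of its group ID. A sketch of the group-count math (illustrative; the tile size constant is an assumption):

    using UnityEngine;
    using UnityEngine.Rendering;

    static class ClusteredDispatch
    {
        // The real tile size comes from the LightLoop constants; 32 is an assumed value here.
        const int k_TileSizeClustered = 32;

        public static void Dispatch(CommandBuffer cmd, ComputeShader shader, int kernel,
                                    int screenWidth, int screenHeight, int numEyes)
        {
            // Round up so partially covered tiles still get a thread group.
            int numTilesX = (screenWidth  + k_TileSizeClustered - 1) / k_TileSizeClustered;
            int numTilesY = (screenHeight + k_TileSizeClustered - 1) / k_TileSizeClustered;

            // Z dimension = eye count: one dispatch covers both stereo eyes.
            cmd.DispatchCompute(shader, kernel, numTilesX, numTilesY, numEyes);
        }
    }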
public void BuildGPULightListsCommon(HDCamera hdCamera, CommandBuffer cmd, RenderTargetIdentifier cameraDepthBufferRT, RenderTargetIdentifier stencilTextureRT, bool skyEnabled)

projArr[eyeIndex] = CameraProjectionStereoLHS(hdCamera.camera, (Camera.StereoscopicEye)eyeIndex);
projscrArr[eyeIndex] = temp * projArr[eyeIndex];
invProjscrArr[eyeIndex] = projscrArr[eyeIndex].inverse;
projArr[0] = CameraProjectionNonObliqueLHS(hdCamera);
projArr[0] = GeometryUtils.GetProjectionMatrixLHS(hdCamera.camera);
var isProjectionOblique = GeometryUtils.IsProjectionMatrixOblique(projArr[0]);
// generate screen-space AABBs (used for both fptl and clustered).
if (m_lightCount != 0)

invProjhArr[0] = projhArr[0].inverse;
}
var genAABBKernel = isProjectionOblique ? s_GenAABBKernel_Oblique : s_GenAABBKernel;
cmd.SetComputeBufferParam(buildScreenAABBShader, s_GenAABBKernel, HDShaderIDs.g_data, s_ConvexBoundsBuffer);
cmd.SetComputeBufferParam(buildScreenAABBShader, genAABBKernel, HDShaderIDs.g_data, s_ConvexBoundsBuffer);
cmd.SetComputeBufferParam(buildScreenAABBShader, s_GenAABBKernel, HDShaderIDs.g_vBoundsBuffer, s_AABBBoundsBuffer);
cmd.SetComputeBufferParam(buildScreenAABBShader, genAABBKernel, HDShaderIDs.g_vBoundsBuffer, s_AABBBoundsBuffer);
cmd.DispatchCompute(buildScreenAABBShader, s_GenAABBKernel, (m_lightCount + 7) / 8, tgY, 1);
cmd.DispatchCompute(buildScreenAABBShader, genAABBKernel, (m_lightCount + 7) / 8, tgY, 1);
}
// enable coarse 2D pass on 64x64 tiles (used for both fptl and clustered).

// optimized for opaques only
if (m_FrameSettings.lightLoopSettings.isFptlEnabled)
{
var genListPerTileKernel = isProjectionOblique ? s_GenListPerTileKernel_Oblique : s_GenListPerTileKernel;
cmd.SetComputeIntParam(buildPerTileLightListShader, HDShaderIDs.g_isOrthographic, isOrthographic ? 1 : 0);
cmd.SetComputeIntParams(buildPerTileLightListShader, HDShaderIDs.g_viDimensions, s_TempScreenDimArray);
cmd.SetComputeIntParam(buildPerTileLightListShader, HDShaderIDs._EnvLightIndexShift, m_lightList.lights.Count);

cmd.SetComputeBufferParam(buildPerTileLightListShader, s_GenListPerTileKernel, HDShaderIDs.g_vBoundsBuffer, s_AABBBoundsBuffer);
cmd.SetComputeBufferParam(buildPerTileLightListShader, s_GenListPerTileKernel, HDShaderIDs._LightVolumeData, s_LightVolumeDataBuffer);
cmd.SetComputeBufferParam(buildPerTileLightListShader, s_GenListPerTileKernel, HDShaderIDs.g_data, s_ConvexBoundsBuffer);
cmd.SetComputeBufferParam(buildPerTileLightListShader, genListPerTileKernel, HDShaderIDs.g_vBoundsBuffer, s_AABBBoundsBuffer);
cmd.SetComputeBufferParam(buildPerTileLightListShader, genListPerTileKernel, HDShaderIDs._LightVolumeData, s_LightVolumeDataBuffer);
cmd.SetComputeBufferParam(buildPerTileLightListShader, genListPerTileKernel, HDShaderIDs.g_data, s_ConvexBoundsBuffer);
cmd.SetComputeTextureParam(buildPerTileLightListShader, s_GenListPerTileKernel, HDShaderIDs.g_depth_tex, cameraDepthBufferRT);
cmd.SetComputeBufferParam(buildPerTileLightListShader, s_GenListPerTileKernel, HDShaderIDs.g_vLightList, s_LightList);
cmd.SetComputeTextureParam(buildPerTileLightListShader, genListPerTileKernel, HDShaderIDs.g_depth_tex, cameraDepthBufferRT);
cmd.SetComputeBufferParam(buildPerTileLightListShader, genListPerTileKernel, HDShaderIDs.g_vLightList, s_LightList);
cmd.SetComputeBufferParam(buildPerTileLightListShader, s_GenListPerTileKernel, HDShaderIDs.g_vBigTileLightList, s_BigTileLightList);
cmd.SetComputeBufferParam(buildPerTileLightListShader, genListPerTileKernel, HDShaderIDs.g_vBigTileLightList, s_BigTileLightList);
if (enableFeatureVariants)
{

baseFeatureFlags |= LightDefinitions.s_MaterialFeatureMaskFlags;
}
cmd.SetComputeIntParam(buildPerTileLightListShader, HDShaderIDs.g_BaseFeatureFlags, (int)baseFeatureFlags);
cmd.SetComputeBufferParam(buildPerTileLightListShader, s_GenListPerTileKernel, HDShaderIDs.g_TileFeatureFlags, s_TileFeatureFlags);
cmd.SetComputeBufferParam(buildPerTileLightListShader, genListPerTileKernel, HDShaderIDs.g_TileFeatureFlags, s_TileFeatureFlags);
cmd.DispatchCompute(buildPerTileLightListShader, s_GenListPerTileKernel, numTilesX, numTilesY, 1);
cmd.DispatchCompute(buildPerTileLightListShader, genListPerTileKernel, numTilesX, numTilesY, 1);
}
// Cluster

public bool outputSplitLighting;
}
public void RenderDeferredDirectionalShadow(HDCamera hdCamera, RTHandleSystem.RTHandle deferredShadowRT, RenderTargetIdentifier depthTexture, CommandBuffer cmd)
public void RenderScreenSpaceShadows(HDCamera hdCamera, RTHandleSystem.RTHandle deferredShadowRT, RenderTargetIdentifier depthTexture, CommandBuffer cmd)
{
bool sunLightShadow = m_CurrentSunLight != null && m_CurrentSunLight.GetComponent<AdditionalShadowData>() != null && m_CurrentSunLightShadowIndex >= 0;

return;
}
using (new ProfilingSample(cmd, "Deferred Directional Shadow", CustomSamplerId.TPDeferredDirectionalShadow.GetSampler()))
using (new ProfilingSample(cmd, "Deferred Directional Shadow", CustomSamplerId.TPScreenSpaceShadows.GetSampler()))
{
Vector4 lightDirection = Vector4.zero;
Vector4 lightPosition = Vector4.zero;

// - if there is a sun light casting shadow, we need to use comput directional light shadows
// - if there is a sun light casting shadow, we need to use compute directional light shadows
// and contact shadows of the dominant light (or the directional if contact shadows are enabled on it)
// - if there is no sun or it's not casting shadows, we don't need to compute its costly directional
// shadows so we only compute contact shadows for the dominant light
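A minimal sketch of the branching described in the comment above (illustrative; the parameter names are hypothetical stand-ins, not the actual LightLoop members):

    using UnityEngine;

    static class ScreenSpaceShadowSelection
    {
        public static bool NeedFullDirectionalShadow(Light sun, int sunShadowIndex)
        {
            // Only compute the costly deferred directional shadow when a shadow-casting sun exists.
            return sun != null && sunShadowIndex >= 0;
        }

        public static Light ContactShadowLight(Light sun, bool sunHasContactShadows, Light dominantLight)
        {
            // Contact shadows follow the sun when enabled on it, otherwise the dominant light.
            return (sun != null && sunHasContactShadows) ? sun : dominantLight;
        }
    }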

20
com.unity.render-pipelines.high-definition/HDRP/Lighting/LightLoop/lightlistbuild-bigtile.compute


#define EXACT_EDGE_TESTS
#define PERFORM_SPHERICAL_INTERSECTION_TESTS
// is not actually used for anything in this kernel
#define USE_OBLIQUE_MODE
#define MAX_NR_BIGTILE_LIGHTS (MAX_NR_BIG_TILE_LIGHTS_PLUS_ONE-1)

uniform float g_fFarPlane;
uniform uint g_isOrthographic;
StructuredBuffer<float3> g_vBoundsBuffer : register( t1 );
StructuredBuffer<float4> g_vBoundsBuffer : register( t1 );
StructuredBuffer<LightVolumeData> _LightVolumeData : register(t2);
StructuredBuffer<SFiniteLightBound> g_data : register( t3 );

// TODO: Remove this function and g_mInvScrProjectionArr from constants.
// Only usage of that constant.
float GetLinearDepth(float zDptBufSpace) // 0 is near 1 is far
float GetLinearDepth(float2 pixXY, float zDptBufSpace) // 0 is near 1 is far
float4x4 g_mInvScrProjection = g_mInvScrProjectionArr[0];
float4x4 g_mInvScrProjection = g_mInvScrProjectionArr[0];
// for perspective projection m22 is zero and m23 is +1/-1 (depends on left/right hand proj)
#ifdef USE_OBLIQUE_MODE
float2 res2 = mul(g_mInvScrProjection, float4(pixXY, zDptBufSpace, 1.0)).zw;
return res2.x / res2.y;
#else
// for perspective projection m22 is zero and m23 is +1/-1 (depends on left/right hand proj)
//float3 vP = float3(0.0f,0.0f,zDptBufSpace);
//float4 v4Pres = mul(g_mInvScrProjection, float4(vP,1.0));
//return v4Pres.z / v4Pres.w;
#endif
}
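With an oblique projection the depth buffer's z depends on screen-space x and y, so converting a depth sample to linear view depth needs the pixel position and the full inverse screen-projection transform rather than just the m22/m23 terms. The same math in C# (illustrative; invScrProj stands for the matrix bound as g_mInvScrProjection):

    using UnityEngine;

    static class ObliqueDepth
    {
        // Mirrors the HLSL above: reconstruct linear view-space depth from a
        // depth-buffer value when the projection may be oblique.
        public static float GetLinearDepth(Matrix4x4 invScrProj, Vector2 pixXY, float zBufferDepth)
        {
            Vector4 v = invScrProj * new Vector4(pixXY.x, pixXY.y, zBufferDepth, 1f);
            return v.z / v.w; // perspective divide; valid for oblique and regular projections
        }
    }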
float3 GetViewPosFromLinDepth(float2 v2ScrPos, float fLinDepth, uint eyeIndex)

68
com.unity.render-pipelines.high-definition/HDRP/Lighting/LightLoop/lightlistbuild-clustered.compute


#pragma kernel TileLightListGen_NoDepthRT_SrcBigTile LIGHTLISTGEN=TileLightListGen_NoDepthRT_SrcBigTile USE_TWO_PASS_TILED_LIGHTING
#pragma kernel TileLightListGen_DepthRT_SrcBigTile LIGHTLISTGEN=TileLightListGen_DepthRT_SrcBigTile ENABLE_DEPTH_TEXTURE_BACKPLANE USE_TWO_PASS_TILED_LIGHTING
#pragma kernel TileLightListGen_DepthRT_MSAA_SrcBigTile LIGHTLISTGEN=TileLightListGen_DepthRT_MSAA_SrcBigTile ENABLE_DEPTH_TEXTURE_BACKPLANE MSAA_ENABLED USE_TWO_PASS_TILED_LIGHTING
#pragma kernel TileLightListGen_DepthRT_Oblique LIGHTLISTGEN=TileLightListGen_DepthRT_Oblique ENABLE_DEPTH_TEXTURE_BACKPLANE USE_OBLIQUE_MODE
#pragma kernel TileLightListGen_DepthRT_MSAA_Oblique LIGHTLISTGEN=TileLightListGen_DepthRT_MSAA_Oblique ENABLE_DEPTH_TEXTURE_BACKPLANE MSAA_ENABLED USE_OBLIQUE_MODE
#pragma kernel TileLightListGen_DepthRT_SrcBigTile_Oblique LIGHTLISTGEN=TileLightListGen_DepthRT_SrcBigTile_Oblique ENABLE_DEPTH_TEXTURE_BACKPLANE USE_TWO_PASS_TILED_LIGHTING USE_OBLIQUE_MODE
#pragma kernel TileLightListGen_DepthRT_MSAA_SrcBigTile_Oblique LIGHTLISTGEN=TileLightListGen_DepthRT_MSAA_SrcBigTile_Oblique ENABLE_DEPTH_TEXTURE_BACKPLANE MSAA_ENABLED USE_TWO_PASS_TILED_LIGHTING USE_OBLIQUE_MODE
#include "CoreRP/ShaderLibrary/Common.hlsl"
#include "ShaderBase.hlsl"

#else
Texture2D g_depth_tex : register( t0 );
#endif
StructuredBuffer<float3> g_vBoundsBuffer : register( t1 );
StructuredBuffer<float4> g_vBoundsBuffer : register( t1 );
StructuredBuffer<LightVolumeData> _LightVolumeData : register(t2);
StructuredBuffer<SFiniteLightBound> g_data : register( t3 );

groupshared uint lightOffsSph;
#endif
float GetLinearDepth(float zDptBufSpace, uint eyeIndex) // 0 is near 1 is far
float GetLinearDepth(float2 pixXY, float zDptBufSpace, uint eyeIndex) // 0 is near 1 is far
#ifdef USE_OBLIQUE_MODE
float2 res2 = mul(g_mInvScrProjection, float4(pixXY, zDptBufSpace, 1.0)).zw;
return res2.x / res2.y;
#else
// for perspective projection m22 is zero and m23 is +1/-1 (depends on left/right hand proj)
// however this function must also work for orthographic projection so we keep it like this.
float m22 = g_mInvScrProjection[2].z, m23 = g_mInvScrProjection[2].w;

//float3 vP = float3(0.0f,0.0f,zDptBufSpace);
//float4 v4Pres = mul(g_mInvScrProjection, float4(vP,1.0));
//return v4Pres.z / v4Pres.w;
#endif
}
float3 GetViewPosFromLinDepth(float2 v2ScrPos, float fLinDepth, uint eyeIndex)

bool CheckIntersection(int l, int k, uint2 viTilLL, uint2 viTilUR, float suggestedBase, uint eyeIndex)
{
// If this light's screen space depth bounds intersect this cluster...simple cluster test
// TODO: Unify this code with the code in CheckIntersectionBasic...
unsigned int val = (clusterIdxs[l>>1]>>(16*(l&1)))&0xffff;
bool bIsHit = ((val>>0)&0xff)<=((uint) k) && ((uint) k)<=((val>>8)&0xff);
if(bIsHit)

GroupMemoryBarrierWithGroupSync();
#endif
float dpt_ma=1.0;
//float linMaDist=g_fFarPlane;
// establish min and max depth first
dpt_ma=0.0;
// establish max depth first
float linMaDist=0.0;
for(int idx=t; idx<(TILE_SIZE_CLUSTERED*TILE_SIZE_CLUSTERED); idx+=NR_THREADS)
{

for(int i=0; i<g_iNumSamplesMSAA; i++)
{
const float fDpth = FetchDepthMSAA(g_depth_tex, uPixCrd, i);
const float2 fracSampleCoord = g_depth_tex.GetSamplePosition(i).xy; // this is optimized away when USE_OBLIQUE_MODE is NOT set.
const float2 fracSampleCoord = float2(0.5,0.5);
dpt_ma = max(fDpth, dpt_ma);
// Unclear whether stereoDWOffset is already taken into account in the g_mInvScrProjectionArr[eyeIndex] used by GetLinearDepth();
// if it is not, it should not be included in uPixCrd when querying GetLinearDepth().
float linZ = GetLinearDepth(uPixCrd+fracSampleCoord, fDpth, eyeIndex);
#if USE_LEFT_HAND_CAMERA_SPACE
float linDistZ = linZ;
#else
float linDistZ = -linZ;
#endif
linMaDist = max(linDistZ, linMaDist);
}
#ifdef MSAA_ENABLED
}

// Why is this a uint? Doesn't InterlockedMax support shared mem floats?
InterlockedMax(ldsZMax, asuint(dpt_ma) );
linMaDist = max(linMaDist, 0.0);
InterlockedMax(ldsZMax, asuint(linMaDist) );
dpt_ma = asfloat(ldsZMax);
if(dpt_ma<=0.0) dpt_ma = VIEWPORT_SCALE_Z; // assume sky pixel
linMaDist = asfloat(ldsZMax);
if(linMaDist<=0.0) linMaDist = g_fFarPlane; // assume sky pixel
#endif
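On the "Why is this a uint?" question above: HLSL's InterlockedMax only takes integers, but for non-negative IEEE-754 floats the raw bit pattern, read as an unsigned integer, is ordered the same way as the float values, so max-reducing asuint(depth) and reading the result back with asfloat() is safe. A tiny C# check of that property (illustrative):

    using System;

    static class FloatBitsOrdering
    {
        // For non-negative IEEE-754 floats, comparing raw bit patterns as unsigned
        // integers gives the same ordering as comparing the floats themselves.
        static uint Bits(float f) => BitConverter.ToUInt32(BitConverter.GetBytes(f), 0);

        static void Main()
        {
            float a = 0.25f, b = 137.5f;
            Console.WriteLine(a < b);             // True
            Console.WriteLine(Bits(a) < Bits(b)); // True: same ordering, so
                                                  // InterlockedMax on asuint(x) is a valid max-reduce
        }
    }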
// 'Normalized' coordinates of tile, for use with AABB bounds in g_vBoundsBuffer

for(int l=(int) t; l<(int) g_iNrVisibLights; l += NR_THREADS)
{
#endif
// TODO: Seems kinda funny that we repeat this exact code here, bigtile, and FPTL...
const ScreenSpaceBoundsIndices boundsIndices = GenerateScreenSpaceBoundsIndices(l, g_iNrVisibLights, eyeIndex);
const float2 vMi = g_vBoundsBuffer[boundsIndices.min].xy;
const float2 vMa = g_vBoundsBuffer[boundsIndices.max].xy;

#endif
#ifdef ENABLE_DEPTH_TEXTURE_BACKPLANE
#if USE_LEFT_HAND_CAMERA_SPACE
float fTileFarPlane = GetLinearDepth(dpt_ma, eyeIndex);
#else // USE_LEFT_HAND_CAMERA_SPACE
float fTileFarPlane = -GetLinearDepth(dpt_ma, eyeIndex);
#endif
float fTileFarPlane = linMaDist;
float suggestedBase = SuggestLogBase50(fTileFarPlane);
#else // ENABLE_DEPTH_TEXTURE_BACKPLANE
float fTileFarPlane = g_fFarPlane;

#endif
// sort lights (gives a more efficient execution in both deferred and tiled forward lighting).
// NOTE: Why not sort on console?
#if !defined(SHADER_API_XBOXONE) && !defined(SHADER_API_PSSL)
SORTLIST(coarseList, iNrCoarseLights, MAX_NR_COARSE_ENTRIES, t, NR_THREADS);
#endif

const ScreenSpaceBoundsIndices l0Bounds = GenerateScreenSpaceBoundsIndices(l0, g_iNrVisibLights, eyeIndex);
const ScreenSpaceBoundsIndices l1Bounds = GenerateScreenSpaceBoundsIndices(l1, g_iNrVisibLights, eyeIndex);
const unsigned int clustIdxMi0 = (const unsigned int)min(255, SnapToClusterIdx(GetLinearDepth(g_vBoundsBuffer[l0Bounds.min].z, eyeIndex), suggestedBase));
const unsigned int clustIdxMa0 = (const unsigned int)min(255, SnapToClusterIdx(GetLinearDepth(g_vBoundsBuffer[l0Bounds.max].z, eyeIndex), suggestedBase));
const unsigned int clustIdxMi1 = (const unsigned int)min(255, SnapToClusterIdx(GetLinearDepth(g_vBoundsBuffer[l1Bounds.min].z, eyeIndex), suggestedBase));
const unsigned int clustIdxMa1 = (const unsigned int)min(255, SnapToClusterIdx(GetLinearDepth(g_vBoundsBuffer[l1Bounds.max].z, eyeIndex), suggestedBase));
const unsigned int clustIdxMi0 = (const unsigned int)min(255, SnapToClusterIdx(g_vBoundsBuffer[l0Bounds.min].w, suggestedBase));
const unsigned int clustIdxMa0 = (const unsigned int)min(255, SnapToClusterIdx(g_vBoundsBuffer[l0Bounds.max].w, suggestedBase));
const unsigned int clustIdxMi1 = (const unsigned int)min(255, SnapToClusterIdx(g_vBoundsBuffer[l1Bounds.min].w, suggestedBase));
const unsigned int clustIdxMa1 = (const unsigned int)min(255, SnapToClusterIdx(g_vBoundsBuffer[l1Bounds.max].w, suggestedBase));
clusterIdxs[l] = (clustIdxMa1<<24) | (clustIdxMi1<<16) | (clustIdxMa0<<8) | (clustIdxMi0<<0);
}
}
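For reference, the packed layout used above stores two lights per uint, each light as a (minCluster, maxCluster) byte pair; the intersection test unpacks 16 bits for the queried light and range-checks the cluster index k. The same packing and test in C# (illustrative):

    static class ClusterRangePacking
    {
        // Two lights per uint: light 2i in the low 16 bits, light 2i+1 in the high 16 bits;
        // within each half, the low byte is the min cluster and the high byte the max cluster.
        public static uint Pack(byte mi0, byte ma0, byte mi1, byte ma1)
            => ((uint)ma1 << 24) | ((uint)mi1 << 16) | ((uint)ma0 << 8) | mi0;

        public static bool IntersectsCluster(uint[] clusterIdxs, int lightIndex, uint k)
        {
            uint val = (clusterIdxs[lightIndex >> 1] >> (16 * (lightIndex & 1))) & 0xffff;
            uint mi = val & 0xff;
            uint ma = (val >> 8) & 0xff;
            return mi <= k && k <= ma; // cluster k overlaps this light's depth range
        }
    }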

// to make it work correctly
int shiftIndex[LIGHTCATEGORY_COUNT];
ZERO_INITIALIZE_ARRAY(int, shiftIndex, LIGHTCATEGORY_COUNT);
// NOTE: Why is this indexed like this?
shiftIndex[LIGHTCATEGORY_COUNT - 3] = _EnvLightIndexShift;
shiftIndex[LIGHTCATEGORY_COUNT - 2] = _DecalIndexShift;
shiftIndex[LIGHTCATEGORY_COUNT - 1] = _DensityVolumeIndexShift;

38
com.unity.render-pipelines.high-definition/HDRP/Lighting/LightLoop/lightlistbuild.compute


// https://github.com/wolfgangfengel/GPU-Pro-7
#pragma kernel TileLightListGen LIGHTLISTGEN=TileLightListGen
#pragma kernel TileLightListGen_SrcBigTile LIGHTLISTGEN=TileLightListGen_SrcBigTile USE_TWO_PASS_TILED_LIGHTING
#pragma kernel TileLightListGen_FeatureFlags LIGHTLISTGEN=TileLightListGen_FeatureFlags USE_FEATURE_FLAGS
#pragma kernel TileLightListGen_SrcBigTile_FeatureFlags LIGHTLISTGEN=TileLightListGen_SrcBigTile_FeatureFlags USE_TWO_PASS_TILED_LIGHTING USE_FEATURE_FLAGS
#pragma kernel TileLightListGen_SrcBigTile LIGHTLISTGEN=TileLightListGen_SrcBigTile USE_TWO_PASS_TILED_LIGHTING
#pragma kernel TileLightListGen_FeatureFlags LIGHTLISTGEN=TileLightListGen_FeatureFlags USE_FEATURE_FLAGS
#pragma kernel TileLightListGen_SrcBigTile_FeatureFlags LIGHTLISTGEN=TileLightListGen_SrcBigTile_FeatureFlags USE_TWO_PASS_TILED_LIGHTING USE_FEATURE_FLAGS
#pragma kernel TileLightListGen_Oblique LIGHTLISTGEN=TileLightListGen_Oblique USE_OBLIQUE_MODE
#pragma kernel TileLightListGen_SrcBigTile_Oblique LIGHTLISTGEN=TileLightListGen_SrcBigTile_Oblique USE_TWO_PASS_TILED_LIGHTING USE_OBLIQUE_MODE
#pragma kernel TileLightListGen_FeatureFlags_Oblique LIGHTLISTGEN=TileLightListGen_FeatureFlags_Oblique USE_FEATURE_FLAGS USE_OBLIQUE_MODE
#pragma kernel TileLightListGen_SrcBigTile_FeatureFlags_Oblique LIGHTLISTGEN=TileLightListGen_SrcBigTile_FeatureFlags_Oblique USE_TWO_PASS_TILED_LIGHTING USE_FEATURE_FLAGS USE_OBLIQUE_MODE
//#pragma enable_d3d11_debug_symbols

#define FINE_PRUNING_ENABLED
#define PERFORM_SPHERICAL_INTERSECTION_TESTS
uniform int g_iNrVisibLights;
uniform uint2 g_viDimensions;
uniform float4x4 g_mInvScrProjection;

uniform uint g_BaseFeatureFlags;
Texture2D g_depth_tex : register( t0 );
StructuredBuffer<float3> g_vBoundsBuffer : register( t1 );
StructuredBuffer<float4> g_vBoundsBuffer : register( t1 );
StructuredBuffer<LightVolumeData> _LightVolumeData : register(t2);
StructuredBuffer<SFiniteLightBound> g_data : register( t3 );

RWStructuredBuffer<uint> g_TileFeatureFlags;
#endif
//float GetLinearDepth(float3 vP)
//{
// float4 v4Pres = mul(g_mInvScrProjection, float4(vP,1.0));
// return v4Pres.z / v4Pres.w;
//}
float GetLinearDepth(float zDptBufSpace) // 0 is near 1 is far
float GetLinearDepth(float2 pixXY, float zDptBufSpace) // 0 is near 1 is far
// for perspective projection m22 is zero and m23 is +1/-1 (depends on left/right hand proj)
#ifdef USE_OBLIQUE_MODE
float2 res2 = mul(g_mInvScrProjection, float4(pixXY, zDptBufSpace, 1.0)).zw;
return res2.x / res2.y;
#else
// for perspective projection m22 is zero and m23 is +1/-1 (depends on left/right hand proj)
//float3 vP = float3(0.0f,0.0f,zDptBufSpace);
//float4 v4Pres = mul(g_mInvScrProjection, float4(vP,1.0));
//return v4Pres.z / v4Pres.w;
#endif
}
float3 GetViewPosFromLinDepth(float2 v2ScrPos, float fLinDepth)

int idx = i * NR_THREADS + t;
uint2 uCrd = min( uint2(viTilLL.x+(idx&0xf), viTilLL.y+(idx>>4)), uint2(iWidth-1, iHeight-1) );
const float fDepth = FetchDepth(g_depth_tex, uCrd);
vLinDepths[i] = GetLinearDepth(fDepth);
vLinDepths[i] = GetLinearDepth(uCrd+float2(0.5,0.5), fDepth);
if(fDepth<VIEWPORT_SCALE_Z) // if not skydome
{
dpt_mi = min(fDepth, dpt_mi);

// Skip density volumes (lights are sorted by category). TODO: improve data locality
if (_LightVolumeData[l].lightCategory == LIGHTCATEGORY_DENSITY_VOLUME) { break; }
const float3 vMi = g_vBoundsBuffer[l];
const float3 vMa = g_vBoundsBuffer[l+g_iNrVisibLights];
const float3 vMi = g_vBoundsBuffer[l].xyz;
const float3 vMa = g_vBoundsBuffer[l+g_iNrVisibLights].xyz;
if( all(vMa>vTileLL) && all(vMi<vTileUR))
{

67
com.unity.render-pipelines.high-definition/HDRP/Lighting/LightLoop/scrbound.compute


// The implementation is based on the demo on "fine pruned tiled lighting" published in GPU Pro 7.
// https://github.com/wolfgangfengel/GPU-Pro-7
#pragma kernel ScreenBoundsAABB
#pragma kernel ScreenBoundsAABB SCRAABBGEN=ScreenBoundsAABB
#pragma kernel ScreenBoundsAABB_Oblique SCRAABBGEN=ScreenBoundsAABB_Oblique USE_OBLIQUE_MODE
#include "CoreRP/ShaderLibrary/Common.hlsl"
#include "LightLoop.cs.hlsl"

#define NR_THREADS 64
// output buffer
RWStructuredBuffer<float3> g_vBoundsBuffer : register( u0 );
RWStructuredBuffer<float4> g_vBoundsBuffer : register( u0 );
#define MAX_PNTS 9 // strictly this should be 10=6+4 but we get more wavefronts and 10 seems to never hit (fingers crossed)
// However, in the worst case the plane that would be skipped, if such an extreme case ever happened, would be the backplane

[numthreads(NR_THREADS, 1, 1)]
void ScreenBoundsAABB(uint threadID : SV_GroupIndex, uint3 u3GroupID : SV_GroupID)
void SCRAABBGEN(uint threadID : SV_GroupIndex, uint3 u3GroupID : SV_GroupID)
{
uint groupID = u3GroupID.x;
uint eyeIndex = u3GroupID.y; // currently, can only be 0 or 1

}
////////////////////// look for camera frustum verts that need to be included. That is frustum vertices inside the convex hull for the light
#ifdef USE_OBLIQUE_MODE
bool bIsObliqueClipPlane = true;
#else
bool bIsObliqueClipPlane = false;
#endif
const int nrFrustVertsToTest = bIsObliqueClipPlane ? 4 : 8;
for(i=0; i<8; i++) // establish 8 camera frustum vertices
for(i=0; i<nrFrustVertsToTest; i++) // establish the camera frustum vertices to test (4 with an oblique clip plane, 8 otherwise)
{
float3 vVertPSpace = float3((i&1)!=0 ? 1 : (-1), (i&2)!=0 ? 1 : (-1), (i&4)!=0 ? 1 : 0);

float3 vP0, vN;
GetHullPlane(vP0, vN, boxX, boxY, boxZ, center, scaleXY, f);
for(i=0; i<8; i++)
for(i=0; i<nrFrustVertsToTest; i++)
{
float3 vViewSpace = float3(posX[subLigt*MAX_PNTS*2 + i], posY[subLigt*MAX_PNTS*2 + i], posZ[subLigt*MAX_PNTS*2 + i]);
uVisibFl &= ( dot(vViewSpace-vP0, vN)<0 ? 0xff : (~(1<<i)) );

// apply camera frustum vertices inside the convex hull to the AABB
for(i=0; i<8; i++)
for(i=0; i<nrFrustVertsToTest; i++)
{
if((uVisibFl&(1<<i))!=0)
{

vMin.xy = max(vMin.xy, vMi);
vMax.xy = min(vMax.xy, vMa);
}
#ifndef USE_OBLIQUE_MODE
#if USE_LEFT_HAND_CAMERA_SPACE
if((center.z-radius)>0.0)
{

vMin = float3(-3,-3,-3);
vMax = float3(-2,-2,-2);
}
#endif
}

// a set of maxs, and each set is equal to g_iNrVisibLights.
const ScreenSpaceBoundsIndices boundsIndices = GenerateScreenSpaceBoundsIndices(lgtIndex, g_iNrVisibLights, eyeIndex);
g_vBoundsBuffer[boundsIndices.min] = float3(0.5*vMin.x + 0.5, 0.5*vMin.y + 0.5, vMin.z*VIEWPORT_SCALE_Z);
g_vBoundsBuffer[boundsIndices.max] = float3(0.5*vMax.x + 0.5, 0.5*vMax.y + 0.5, vMax.z*VIEWPORT_SCALE_Z);
// build a linear (in camera space) min/max Z for the aabb. This is needed for clustered when oblique is active
float linMiZ, linMaZ;
#ifndef USE_OBLIQUE_MODE
float2 vMiZW = mul(g_mInvProjection, float4(vMin,1)).zw;
float2 vMaZW = mul(g_mInvProjection, float4(vMax,1)).zw;
linMiZ = vMiZW.x/vMiZW.y; linMaZ = vMaZW.x/vMaZW.y;
#else
for(int i=0; i<8; i++) // establish 8 aabb points in camera space.
{
float3 vP = float3((i&1)!=0 ? vMax.x : vMin.x, (i&2)!=0 ? vMax.y : vMin.y, (i&4)!=0 ? vMax.z : vMin.z);
float2 v2Pc = mul(g_mInvProjection, float4(vP,1)).zw;
float linZ = v2Pc.x/v2Pc.y;
if(i==0) { linMiZ=linZ; linMaZ=linZ; }
#if USE_LEFT_HAND_CAMERA_SPACE
linMiZ = min(linMiZ, linZ); linMaZ = max(linMaZ, linZ);
#else
linMiZ = max(linMiZ, linZ); linMaZ = min(linMaZ, linZ);
#endif
}
float z0 = center.z-radius, z1 = center.z+radius;
#if USE_LEFT_HAND_CAMERA_SPACE
linMiZ = max(linMiZ, z0); linMaZ = min(linMaZ, z1);
#else
linMiZ = min(linMiZ, z1); linMaZ = max(linMaZ, z0);
#endif
#endif
g_vBoundsBuffer[boundsIndices.min] = float4(0.5*vMin.x + 0.5, 0.5*vMin.y + 0.5, vMin.z*VIEWPORT_SCALE_Z, linMiZ);
g_vBoundsBuffer[boundsIndices.max] = float4(0.5*vMax.x + 0.5, 0.5*vMax.y + 0.5, vMax.z*VIEWPORT_SCALE_Z, linMaZ);
}
}
}
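The new .w component is what keeps clustering working with oblique matrices: the oblique path transforms all eight corners of the clip-space AABB through the inverse projection, takes the min/max of z/w, and clamps against the bounding sphere's camera-space z range. The same corner loop in C# (illustrative; left-handed camera space assumed, matching the USE_LEFT_HAND_CAMERA_SPACE branch; invProjection stands for g_mInvProjection):

    using UnityEngine;

    static class LinearZRange
    {
        // Camera-space [minZ, maxZ] of a clip-space AABB, mirroring the oblique path above.
        public static Vector2 Compute(Matrix4x4 invProjection, Vector3 vMin, Vector3 vMax,
                                      float sphereCenterZ, float sphereRadius)
        {
            float linMiZ = 0f, linMaZ = 0f;
            for (int i = 0; i < 8; i++) // 8 corners of the AABB
            {
                var p = new Vector4((i & 1) != 0 ? vMax.x : vMin.x,
                                    (i & 2) != 0 ? vMax.y : vMin.y,
                                    (i & 4) != 0 ? vMax.z : vMin.z, 1f);
                Vector4 c = invProjection * p;
                float linZ = c.z / c.w;
                if (i == 0) { linMiZ = linZ; linMaZ = linZ; }
                linMiZ = Mathf.Min(linMiZ, linZ);
                linMaZ = Mathf.Max(linMaZ, linZ);
            }
            // The light's bounding sphere gives a tighter, projection-independent z range.
            linMiZ = Mathf.Max(linMiZ, sphereCenterZ - sphereRadius);
            linMaZ = Mathf.Min(linMaZ, sphereCenterZ + sphereRadius);
            return new Vector2(linMiZ, linMaZ);
        }
    }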

unsigned int GetClip(const float4 P)
{
#ifdef USE_OBLIQUE_MODE
bool bIsObliqueClipPlane = true;
#else
bool bIsObliqueClipPlane = false;
#endif
return ((P.x<-P.w)?1:0) | ((P.x>P.w)?2:0) | ((P.y<-P.w)?4:0) | ((P.y>P.w)?8:0) | ((P.z<0)?16:0) | ((P.z>P.w)?32:0);
return (((P.x<-P.w)?1:0) | ((P.x>P.w)?2:0) | ((P.y<-P.w)?4:0) | ((P.y>P.w)?8:0) | ((P.z<0)?16:0) | ((P.z>P.w)?32:0)) & (bIsObliqueClipPlane ? 0x1f : 0x3f);
}
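GetClip produces one out-code bit per clip-space plane (left, right, bottom, top, near, far). When an oblique clip plane is in use, the 0x1f mask drops the far-plane bit (0x20), presumably because the far plane of an oblique projection is unreliable for rejecting light hulls. The same test in C# (illustrative):

    using UnityEngine;

    static class ClipFlags
    {
        // One bit per clip-space plane; the far-plane bit is masked away in oblique mode.
        public static uint GetClip(Vector4 p, bool obliqueClipPlane)
        {
            uint flags = (p.x < -p.w ? 1u  : 0u) | (p.x > p.w ? 2u  : 0u)
                       | (p.y < -p.w ? 4u  : 0u) | (p.y > p.w ? 8u  : 0u)
                       | (p.z <  0f  ? 16u : 0u) | (p.z > p.w ? 32u : 0u);
            return flags & (obliqueClipPlane ? 0x1fu : 0x3fu);
        }
    }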
float4 GenNewVert(const float4 vVisib, const float4 vInvisib, const int p)

18
com.unity.render-pipelines.high-definition/HDRP/Lighting/Reflection/HDAdditionalReflectionData.cs


int m_Version;
ReflectionProbe m_LegacyProbe;
ReflectionProbe legacyProbe { get { return m_LegacyProbe ?? (m_LegacyProbe = GetComponent<ReflectionProbe>()); } }
ReflectionProbe legacyProbe
{
get
{
if (m_LegacyProbe == null || m_LegacyProbe.Equals(null))
{
m_LegacyProbe = GetComponent<ReflectionProbe>();
return m_LegacyProbe;
}
else
{
return m_LegacyProbe;
}
}
}
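The rewritten getter avoids ??/?. on purpose: UnityEngine.Object overloads operator==, so a destroyed ReflectionProbe compares equal to null even though the managed reference is not, and the null-coalescing and null-conditional operators bypass that overload (the extra Equals(null) above is belt and braces). A minimal sketch of the pattern (illustrative):

    using UnityEngine;

    public class LegacyProbeHolder : MonoBehaviour
    {
        ReflectionProbe m_LegacyProbe;

        // Use the overloaded == (not ?? or ?.) so a destroyed ReflectionProbe is
        // treated as missing and re-fetched.
        ReflectionProbe legacyProbe
        {
            get
            {
                if (m_LegacyProbe == null)
                    m_LegacyProbe = GetComponent<ReflectionProbe>();
                return m_LegacyProbe;
            }
        }
    }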
#pragma warning disable 649 //never assigned
//data only kept for migration, to be removed in future version

internal override void UpdatedInfluenceVolumeShape(Vector3 size, Vector3 offset)
{
legacyProbe.size = size;
legacyProbe.center = offset;
legacyProbe.center = transform.rotation*offset;
}
}
}

4
com.unity.render-pipelines.high-definition/HDRP/Lighting/Reflection/HDProbe.cs


public InfluenceVolume influenceVolume { get { return m_InfluenceVolume; } private set { m_InfluenceVolume = value; } }
/// <summary>Multiplier factor of reflection (non PBR parameter).</summary>
public float multiplier { get { return m_Multiplier; } protected set { m_Multiplier = value; } }
public float multiplier { get { return m_Multiplier; } set { m_Multiplier = value; } }
public float weight { get { return m_Weight; } protected set { m_Weight = value; } }
public float weight { get { return m_Weight; } set { m_Weight = value; } }
/// <summary>The capture mode.</summary>
public virtual ReflectionProbeMode mode

2
com.unity.render-pipelines.high-definition/HDRP/Lighting/Reflection/PlanarReflectionProbe.cs


get
{
return proxyVolume != null
? proxyVolume.transform.localToWorldMatrix
? Matrix4x4.TRS(proxyVolume.transform.position, proxyVolume.transform.rotation, Vector3.one)
: influenceToWorld;
}
}

2
com.unity.render-pipelines.high-definition/HDRP/Lighting/Reflection/ProbeWrapper.cs


get
{
return additional.proxyVolume != null
? additional.proxyVolume.transform.localToWorldMatrix
? Matrix4x4.TRS(additional.proxyVolume.transform.position, additional.proxyVolume.transform.rotation, Vector3.one)
: influenceToWorld;
}
}
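Both proxy-volume paths now build the proxy-to-world matrix from position and rotation only, so any (possibly non-uniform) scale on the proxy's Transform no longer distorts the projection volume. In isolation (illustrative):

    using UnityEngine;

    static class ProxyTransform
    {
        // Translation and rotation only; Vector3.one drops the transform's scale.
        public static Matrix4x4 ProxyToWorld(Transform proxy)
            => Matrix4x4.TRS(proxy.position, proxy.rotation, Vector3.one);
    }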

4
com.unity.render-pipelines.high-definition/HDRP/Lighting/ScreenSpaceShadow.compute


contactShadow = ComputeContactShadow(posInput, direction);
#endif
_DeferredShadowTextureUAV[pixelCoord] = float4(shadow, contactShadow, 1.0, 1.0);
_DeferredShadowTextureUAV[pixelCoord] = float4(shadow, contactShadow, 1.0, 1.0); // Note: RT is RG16 format
}
[numthreads(DEFERRED_SHADOW_TILE_SIZE, DEFERRED_SHADOW_TILE_SIZE, 1)]

contactShadow = ComputeContactShadow(posInput, direction);
_DeferredShadowTextureUAV[pixelCoord] = float4(1.0, contactShadow, 1.0, 1.0);
_DeferredShadowTextureUAV[pixelCoord] = float4(1.0, contactShadow, 1.0, 1.0); // Note: RT is RG16 format
}

3
com.unity.render-pipelines.high-definition/HDRP/Lighting/Volumetrics/DensityVolume.cs


{
float animationTime = animate ? time : 0.0f;
volumeScrollingAmount = (textureScrollingSpeed * animationTime);
// Switch from right-handed to left-handed coordinate system.
volumeScrollingAmount.x = -volumeScrollingAmount.x;
volumeScrollingAmount.y = -volumeScrollingAmount.y;
}
}

2
com.unity.render-pipelines.high-definition/HDRP/Lighting/Volumetrics/DensityVolumeManager.cs


}
}
private void TriggerVolumeAtlasRefresh()
public void TriggerVolumeAtlasRefresh()
{
atlasNeedsRefresh = true;
}

1
com.unity.render-pipelines.high-definition/HDRP/Lighting/Volumetrics/VBuffer.hlsl


BiquadraticFilter(1 - fc, weights, offsets); // Inverse-translate the filter centered around 0.5
// Apply the viewport scale right at the end.
// TODO: perform per-sample (4, in this case) bilateral filtering, rather than per-pixel. This should reduce leaking.
// TODO: precompute (VBufferResolution.zw * VBufferUvScale).
result = (weights[0].x * weights[0].y) * SAMPLE_TEXTURE3D_LOD(VBuffer, clampSampler, float3(min((ic + float2(offsets[0].x, offsets[0].y)) * (VBufferResolution.zw * VBufferUvScale), VBufferUvLimit), w), 0) // Top left
+ (weights[1].x * weights[0].y) * SAMPLE_TEXTURE3D_LOD(VBuffer, clampSampler, float3(min((ic + float2(offsets[1].x, offsets[0].y)) * (VBufferResolution.zw * VBufferUvScale), VBufferUvLimit), w), 0) // Top right

6
com.unity.render-pipelines.high-definition/HDRP/Material/LayeredLit/LayeredLitData.hlsl


// If user provide bent normal then we process a better term
#if (defined(_BENTNORMALMAP0) || defined(_BENTNORMALMAP1) || defined(_BENTNORMALMAP2) || defined(_BENTNORMALMAP3)) && defined(_ENABLESPECULAROCCLUSION)
// If we have bent normal and ambient occlusion, process a specular occlusion
surfaceData.specularOcclusion = GetSpecularOcclusionFromBentAO(V, bentNormalWS, surfaceData);
#ifdef SPECULAR_OCCLUSION_USE_SPTD
surfaceData.specularOcclusion = GetSpecularOcclusionFromBentAOPivot(V, bentNormalWS, surfaceData.normalWS, surfaceData.ambientOcclusion, PerceptualSmoothnessToPerceptualRoughness(surfaceData.perceptualSmoothness));
#else
surfaceData.specularOcclusion = GetSpecularOcclusionFromBentAO(V, bentNormalWS, surfaceData.normalWS, surfaceData.ambientOcclusion, PerceptualSmoothnessToRoughness(surfaceData.perceptualSmoothness));
#endif
#elif defined(_MASKMAP0) || defined(_MASKMAP1) || defined(_MASKMAP2) || defined(_MASKMAP3)
surfaceData.specularOcclusion = GetSpecularOcclusionFromAmbientOcclusion(dot(surfaceData.normalWS, V), surfaceData.ambientOcclusion, PerceptualSmoothnessToRoughness(surfaceData.perceptualSmoothness));
#else

8
com.unity.render-pipelines.high-definition/HDRP/Material/Lit/Lit.hlsl


preLightData.ltcTransformSpecular._m00_m02_m11_m20 = SAMPLE_TEXTURE2D_ARRAY_LOD(_LtcData, s_linear_clamp_sampler, uv, LTC_GGX_MATRIX_INDEX, 0);
// Construct a right-handed view-dependent orthogonal basis around the normal
preLightData.orthoBasisViewNormal[0] = normalize(V - N * preLightData.NdotV); // Do not clamp NdotV here
preLightData.orthoBasisViewNormal[2] = N;
preLightData.orthoBasisViewNormal[1] = cross(preLightData.orthoBasisViewNormal[2], preLightData.orthoBasisViewNormal[0]);
preLightData.orthoBasisViewNormal = GetOrthoBasisViewNormal(V, N, preLightData.NdotV);
preLightData.ltcTransformCoat = 0.0;
if (HasFlag(bsdfData.materialFeatures, MATERIALFEATUREFLAGS_LIT_CLEAR_COAT))

// If refraction is enabled we use the transmittanceMask to lerp between the current diffuse lighting and the refraction value
// Physically speaking, transmittanceMask should be 1, but for artistic reasons, we let the value vary
//
// Note we also transfer the refracted light (lighting.indirect.specularTransmitted) into diffuseLighting
// since we know it won't be further processed: it is called at the end of the LightLoop(), but doing this
// enables opacity to affect it (in ApplyBlendMode()) while the rest of specularLighting escapes it.
#if HAS_REFRACTION
diffuseLighting = lerp(diffuseLighting, lighting.indirect.specularTransmitted, bsdfData.transmittanceMask);
#endif

1
com.unity.render-pipelines.high-definition/HDRP/Material/Lit/Lit.shader


#pragma target 4.5
#pragma only_renderers d3d11 ps4 xboxone vulkan metal switch
#pragma enable_d3d11_debug_symbols
//-------------------------------------------------------------------------------------
// Variant
//-------------------------------------------------------------------------------------

2
com.unity.render-pipelines.high-definition/HDRP/Material/Lit/LitBuiltinData.hlsl


#include "HDRP/Material/BuiltinUtilities.hlsl"
void GetBuiltinData(FragInputs input, float3 V, inout PositionInputs posInput, SurfaceData surfaceData, float alpha, float3 bentNormalWS, float depthOffset, out BuiltinData builtinData)
{
// For back lighting we use the opposite vertex normal

20
com.unity.render-pipelines.high-definition/HDRP/Material/Lit/LitData.hlsl


#include "HDRP/Material/MaterialUtilities.hlsl"
#include "HDRP/Material/Decal/DecalUtilities.hlsl"
// TODO: move this function to commonLighting.hlsl once validated it work correctly
float GetSpecularOcclusionFromBentAO(float3 V, float3 bentNormalWS, SurfaceData surfaceData)
{
// Retrieve cone angle
// Ambient occlusion is cosine weighted, thus use following equation. See slide 129
float cosAv = sqrt(1.0 - surfaceData.ambientOcclusion);
float roughness = max(PerceptualSmoothnessToRoughness(surfaceData.perceptualSmoothness), 0.01); // Clamp to 0.01 to avoid edge cases
float cosAs = exp2((-log(10.0)/log(2.0)) * Sq(roughness));
float cosB = dot(bentNormalWS, reflect(-V, surfaceData.normalWS));
return SphericalCapIntersectionSolidArea(cosAv, cosAs, cosB) / (TWO_PI * (1.0 - cosAs));
}
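As a reference for the cone-based term above, here is a direct C# transliteration (illustrative; PerceptualSmoothnessToRoughness is assumed to be (1 - smoothness)^2 as in the shader library, and the spherical-cap intersection helper is left as a placeholder rather than reproduced):

    using UnityEngine;

    static class BentAOSpecularOcclusion
    {
        public static float Evaluate(Vector3 V, Vector3 bentNormalWS, Vector3 normalWS,
                                     float ambientOcclusion, float perceptualSmoothness)
        {
            // Visibility cone aperture from cosine-weighted AO.
            float cosAv = Mathf.Sqrt(1.0f - ambientOcclusion);
            // Roughness from perceptual smoothness (assumed (1 - s)^2), clamped to avoid edge cases.
            float roughness = Mathf.Max((1.0f - perceptualSmoothness) * (1.0f - perceptualSmoothness), 0.01f);
            // exp2((-log(10)/log(2)) * r^2) == 10^(-r^2): aperture of the specular cone.
            float cosAs = Mathf.Pow(10.0f, -roughness * roughness);
            // Angle between the bent normal and the reflection direction.
            float cosB = Vector3.Dot(bentNormalWS, Vector3.Reflect(-V, normalWS));
            return SphericalCapIntersectionSolidArea(cosAv, cosAs, cosB) / (2.0f * Mathf.PI * (1.0f - cosAs));
        }

        // Placeholder for the shader-library helper of the same name; not reproduced here.
        static float SphericalCapIntersectionSolidArea(float cosA, float cosB, float cosC) => 0.0f;
    }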
//#include "HDRP/Material/SphericalCapPivot/SPTDistribution.hlsl"
//#define SPECULAR_OCCLUSION_USE_SPTD
// Struct that gather UVMapping info of all layers + common calculation
// This is use to abstract the mapping that can differ on layers

// If user provide bent normal then we process a better term
#if defined(_BENTNORMALMAP) && defined(_ENABLESPECULAROCCLUSION)
// If we have bent normal and ambient occlusion, process a specular occlusion
surfaceData.specularOcclusion = GetSpecularOcclusionFromBentAO(V, bentNormalWS, surfaceData);
#ifdef SPECULAR_OCCLUSION_USE_SPTD
surfaceData.specularOcclusion = GetSpecularOcclusionFromBentAOPivot(V, bentNormalWS, surfaceData.normalWS, surfaceData.ambientOcclusion, PerceptualSmoothnessToPerceptualRoughness(surfaceData.perceptualSmoothness));
#else
surfaceData.specularOcclusion = GetSpecularOcclusionFromBentAO(V, bentNormalWS, surfaceData.normalWS, surfaceData.ambientOcclusion, PerceptualSmoothnessToRoughness(surfaceData.perceptualSmoothness));
#endif
#elif defined(_MASKMAP)
surfaceData.specularOcclusion = GetSpecularOcclusionFromAmbientOcclusion(ClampNdotV(dot(surfaceData.normalWS, V)), surfaceData.ambientOcclusion, PerceptualSmoothnessToRoughness(surfaceData.perceptualSmoothness));
#else

16
com.unity.render-pipelines.high-definition/HDRP/Material/Lit/LitDataIndividualLayer.hlsl


#ifdef SURFACE_GRADIENT
normalTS += detailNormalTS * detailMask;
#else
normalTS = lerp(normalTS, BlendNormalRNM(normalTS, detailNormalTS), detailMask);
normalTS = lerp(normalTS, BlendNormalRNM(normalTS, detailNormalTS), detailMask); // todo: detailMask should lerp the angle of the quaternion rotation, not the normals
#endif
#endif
#else

surfaceData.baseColor = SAMPLE_UVMAPPING_TEXTURE2D(ADD_IDX(_BaseColorMap), ADD_ZERO_IDX(sampler_BaseColorMap), ADD_IDX(layerTexCoord.base)).rgb * ADD_IDX(_BaseColor).rgb;
#ifdef _DETAIL_MAP_IDX
// Goal: we want the detail albedo map to be able to darken down to black and brighten up to white the surface albedo.
// The scale controls the speed of the gradient. We simply remap detailAlbedo from [0..1] to [-1..1] then perform a lerp to black or white
// with a factor based on speed.
// Goal: we want the detail albedo map to be able to darken down to black and brighten up to white the surface albedo.
// The scale controls the speed of the gradient. We simply remap detailAlbedo from [0..1] to [-1..1] then perform a lerp to black or white
// with a factor based on speed.
float albedoDetailSpeed = saturate(abs(detailAlbedo) * ADD_IDX(_DetailAlbedoScale));
float3 baseColorOverlay = lerp(sqrt(surfaceData.baseColor), (detailAlbedo < 0.0) ? float3(0.0, 0.0, 0.0) : float3(1.0, 1.0, 1.0), albedoDetailSpeed * albedoDetailSpeed);
baseColorOverlay *= baseColorOverlay;
float albedoDetailSpeed = saturate(abs(detailAlbedo) * ADD_IDX(_DetailAlbedoScale));
float3 baseColorOverlay = lerp(sqrt(surfaceData.baseColor), (detailAlbedo < 0.0) ? float3(0.0, 0.0, 0.0) : float3(1.0, 1.0, 1.0), albedoDetailSpeed * albedoDetailSpeed);
baseColorOverlay *= baseColorOverlay;
// Lerp with details mask
surfaceData.baseColor = lerp(surfaceData.baseColor, saturate(baseColorOverlay), detailMask);
#endif
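Restating the detail-albedo overlay above as plain C# (illustrative; assumes detailAlbedo has already been remapped to [-1, 1] as the comment describes):

    using UnityEngine;

    static class DetailAlbedoOverlay
    {
        static Color Sqrt(Color c) => new Color(Mathf.Sqrt(c.r), Mathf.Sqrt(c.g), Mathf.Sqrt(c.b), c.a);

        // Negative detailAlbedo darkens toward black, positive brightens toward white;
        // the blend is done in sqrt(colour) space, squared back, then masked.
        public static Color Apply(Color baseColor, float detailAlbedo, float detailAlbedoScale, float detailMask)
        {
            float speed = Mathf.Clamp01(Mathf.Abs(detailAlbedo) * detailAlbedoScale);
            Color overlay = Color.Lerp(Sqrt(baseColor),
                                       detailAlbedo < 0f ? Color.black : Color.white,
                                       speed * speed);
            overlay *= overlay;
            overlay.r = Mathf.Clamp01(overlay.r);
            overlay.g = Mathf.Clamp01(overlay.g);
            overlay.b = Mathf.Clamp01(overlay.b);
            return Color.Lerp(baseColor, overlay, detailMask);
        }
    }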

#ifdef _DETAIL_MAP_IDX
// See comment for baseColorOverlay
float smoothnessDetailSpeed = saturate(abs(detailSmoothness) * ADD_IDX(_DetailSmoothnessScale));
float smoothnessOverlay = lerp(surfaceData.perceptualSmoothness, (detailSmoothness < 0.0) ? 0.0 : 1.0, smoothnessDetailSpeed);
float smoothnessOverlay = lerp(surfaceData.perceptualSmoothness, (detailSmoothness < 0.0) ? 0.0 : 1.0, smoothnessDetailSpeed);
// Lerp with details mask
surfaceData.perceptualSmoothness = lerp(surfaceData.perceptualSmoothness, saturate(smoothnessOverlay), detailMask);
#endif

40
com.unity.render-pipelines.high-definition/HDRP/Material/MaterialEvaluation.hlsl


float3 indirectSpecularOcclusion;
};
void GetScreenSpaceAmbientOcclusion(float2 positionSS, float NdotV, float perceptualRoughness, float ambientOcclusionFromData, float specularOcclusionFromData, out AmbientOcclusionFactor aoFactor)
// Get screen space ambient occlusion only:
float GetScreenSpaceDiffuseOcclusion(float2 positionSS)
// Ambient occlusion use for indirect lighting (reflection probe, baked diffuse lighting)
// Ambient occlusion use for indirect lighting (reflection probe, baked diffuse lighting)
// Ambient occlusion use for direct lighting (directional, punctual, area)
float directAmbientOcclusion = lerp(1.0, indirectAmbientOcclusion, _AmbientOcclusionParam.w);
float directAmbientOcclusion = 1.0;
return indirectAmbientOcclusion;
}
void GetScreenSpaceAmbientOcclusion(float2 positionSS, float NdotV, float perceptualRoughness, float ambientOcclusionFromData, float specularOcclusionFromData, out AmbientOcclusionFactor aoFactor)
{
float indirectAmbientOcclusion = GetScreenSpaceDiffuseOcclusion(positionSS);
float directAmbientOcclusion = lerp(1.0, indirectAmbientOcclusion, _AmbientOcclusionParam.w);
float roughness = PerceptualRoughnessToRoughness(perceptualRoughness);
float specularOcclusion = GetSpecularOcclusionFromAmbientOcclusion(ClampNdotV(NdotV), indirectAmbientOcclusion, roughness);

aoFactor.directAmbientOcclusion = lerp(_AmbientOcclusionParam.rgb, float3(1.0, 1.0, 1.0), directAmbientOcclusion);
}
// Use GTAOMultiBounce approximation for ambient occlusion (allow to get a tint from the diffuseColor)
// Use GTAOMultiBounce approximation for ambient occlusion (allow to get a tint from the diffuseColor)
// Note: When we ImageLoad outside of the texture size, the value returned by Load is 0 (Note: on Metal it may clamp to the texture value, which is also fine)
// We use this property to have a neutral value for AO that doesn't consume a sampler and also works with compute shaders (i.e. use ImageLoad)
// We store inverse AO so neutral is black. So whether we sample inside or outside the texture, it returns 0 in the neutral case
// Ambient occlusion use for indirect lighting (reflection probe, baked diffuse lighting)
#ifndef _SURFACE_TYPE_TRANSPARENT
float indirectAmbientOcclusion = 1.0 - LOAD_TEXTURE2D(_AmbientOcclusionTexture, positionSS).x;
// Ambient occlusion use for direct lighting (directional, punctual, area)
float indirectAmbientOcclusion = GetScreenSpaceDiffuseOcclusion(positionSS);
#else
float indirectAmbientOcclusion = 1.0;
float directAmbientOcclusion = 1.0;
#endif
float roughness = PerceptualRoughnessToRoughness(perceptualRoughness);
float specularOcclusion = GetSpecularOcclusionFromAmbientOcclusion(ClampNdotV(NdotV), indirectAmbientOcclusion, roughness);

void ApplyAmbientOcclusionFactor(AmbientOcclusionFactor aoFactor, inout BuiltinData builtinData, inout AggregateLighting lighting)
{
// Note: in case of deferred Lit, builtinData.bakeDiffuseLighting contain indirect diffuse * surfaceData.ambientOcclusion + emissive,
// so emissive is affected by SSAO and we get a double darkening from SSAO and from AO which is incorrect but we accept the tradeoff
// Note: In case of deferred Lit, builtinData.bakeDiffuseLighting contains indirect diffuse * surfaceData.ambientOcclusion + emissive,
// so SSAO is multiplied by emissive which is wrong.
// Also, we have double occlusion for diffuse lighting since it already had precomputed AO (aka "FromData") applied
// (the * surfaceData.ambientOcclusion above)
// This is a tradeoff to avoid storing the precomputed (from data) AO in the GBuffer.
// (This is also why GetScreenSpaceAmbientOcclusion*() is effectively called with AOFromData = 1.0 in Lit:PostEvaluateBSDF() in the
// deferred case since DecodeFromGBuffer will init bsdfData.ambientOcclusion to 1.0 and we will only have SSAO in the aoFactor here)
builtinData.bakeDiffuseLighting *= aoFactor.indirectAmbientOcclusion;
lighting.indirect.specularReflected *= aoFactor.indirectSpecularOcclusion;
lighting.direct.diffuse *= aoFactor.directAmbientOcclusion;

178
com.unity.render-pipelines.high-definition/HDRP/Material/MaterialUtilities.hlsl


// Return camera relative probe volume world to object transformation
float4x4 GetProbeVolumeWorldToObject()
{
return ApplyCameraTranslationToInverseMatrix(unity_ProbeVolumeWorldToObject);
}
// In unity we can have a mix of fully baked lightmap (static lightmap) + enlighten realtime lightmap (dynamic lightmap)
// for each case we can have directional lightmap or not.
// Else we have lightprobe for dynamic/moving entity. Either SH9 per object lightprobe or SH4 per pixel per object volume probe
float3 SampleBakedGI(float3 positionRWS, float3 normalWS, float2 uvStaticLightmap, float2 uvDynamicLightmap)
{
// If there is no lightmap, we assume light probes are used
#if !defined(LIGHTMAP_ON) && !defined(DYNAMICLIGHTMAP_ON)
// TODO: Confirm with Ionut, but it seems that UNITY_LIGHT_PROBE_PROXY_VOLUME is always defined for high end and
// unity_ProbeVolumeParams is always bound.
if (unity_ProbeVolumeParams.x == 0.0)
{
// TODO: pass a tab of coefficient instead!
real4 SHCoefficients[7];
SHCoefficients[0] = unity_SHAr;
SHCoefficients[1] = unity_SHAg;
SHCoefficients[2] = unity_SHAb;
SHCoefficients[3] = unity_SHBr;
SHCoefficients[4] = unity_SHBg;
SHCoefficients[5] = unity_SHBb;
SHCoefficients[6] = unity_SHC;
return SampleSH9(SHCoefficients, normalWS);
}
else
{
return SampleProbeVolumeSH4(TEXTURE3D_PARAM(unity_ProbeVolumeSH, samplerunity_ProbeVolumeSH), positionRWS, normalWS, GetProbeVolumeWorldToObject(),
unity_ProbeVolumeParams.y, unity_ProbeVolumeParams.z, unity_ProbeVolumeMin, unity_ProbeVolumeSizeInv);
}
#else
float3 bakeDiffuseLighting = float3(0.0, 0.0, 0.0);
#ifdef UNITY_LIGHTMAP_FULL_HDR
bool useRGBMLightmap = false;
float4 decodeInstructions = float4(0.0, 0.0, 0.0, 0.0); // Never used but needed for the interface since it supports gamma lightmaps
#else
bool useRGBMLightmap = true;
#if defined(UNITY_LIGHTMAP_RGBM_ENCODING)
float4 decodeInstructions = float4(34.493242, 2.2, 0.0, 0.0); // range^2.2 = 5^2.2, gamma = 2.2
#else
float4 decodeInstructions = float4(2.0, 2.2, 0.0, 0.0); // range = 2.0^2.2 = 4.59
#endif
#endif
#ifdef LIGHTMAP_ON
#ifdef DIRLIGHTMAP_COMBINED
bakeDiffuseLighting += SampleDirectionalLightmap(TEXTURE2D_PARAM(unity_Lightmap, samplerunity_Lightmap),
TEXTURE2D_PARAM(unity_LightmapInd, samplerunity_Lightmap),
uvStaticLightmap, unity_LightmapST, normalWS, useRGBMLightmap, decodeInstructions);
#else
bakeDiffuseLighting += SampleSingleLightmap(TEXTURE2D_PARAM(unity_Lightmap, samplerunity_Lightmap), uvStaticLightmap, unity_LightmapST, useRGBMLightmap, decodeInstructions);
#endif
#endif
#ifdef DYNAMICLIGHTMAP_ON
#ifdef DIRLIGHTMAP_COMBINED
bakeDiffuseLighting += SampleDirectionalLightmap(TEXTURE2D_PARAM(unity_DynamicLightmap, samplerunity_DynamicLightmap),
TEXTURE2D_PARAM(unity_DynamicDirectionality, samplerunity_DynamicLightmap),
uvDynamicLightmap, unity_DynamicLightmapST, normalWS, false, decodeInstructions);
#else
bakeDiffuseLighting += SampleSingleLightmap(TEXTURE2D_PARAM(unity_DynamicLightmap, samplerunity_DynamicLightmap), uvDynamicLightmap, unity_DynamicLightmapST, false, decodeInstructions);
#endif
#endif
return bakeDiffuseLighting;
#endif
}
float4 SampleShadowMask(float3 positionRWS, float2 uvStaticLightmap) // normalWS not use for now
{
#if defined(LIGHTMAP_ON)
float2 uv = uvStaticLightmap * unity_LightmapST.xy + unity_LightmapST.zw;
float4 rawOcclusionMask = SAMPLE_TEXTURE2D(unity_ShadowMask, samplerunity_Lightmap, uv); // Reuse sampler from Lightmap
#else
float4 rawOcclusionMask;
if (unity_ProbeVolumeParams.x == 1.0)
{
rawOcclusionMask = SampleProbeOcclusion(TEXTURE3D_PARAM(unity_ProbeVolumeSH, samplerunity_ProbeVolumeSH), positionRWS, GetProbeVolumeWorldToObject(),
unity_ProbeVolumeParams.y, unity_ProbeVolumeParams.z, unity_ProbeVolumeMin, unity_ProbeVolumeSizeInv);
}
else
{
// Note: Default value when the feature is not enabled is float(1.0, 1.0, 1.0, 1.0) in C++
rawOcclusionMask = unity_ProbesOcclusion;
}
#endif
return rawOcclusionMask;
}
// Calculate velocity in Clip space [-1..1]
float2 CalculateVelocity(float4 positionCS, float4 previousPositionCS)
{
// This test on the define is required to remove a divide-by-0 warning when initializing an empty struct
// TODO: Add forward opaque MRT case...
#if (SHADERPASS == SHADERPASS_VELOCITY)
// Encode velocity
positionCS.xy = positionCS.xy / positionCS.w;
previousPositionCS.xy = previousPositionCS.xy / previousPositionCS.w;
float2 velocity = (positionCS.xy - previousPositionCS.xy);
#if UNITY_UV_STARTS_AT_TOP
velocity.y = -velocity.y;
#endif
return velocity;
#else
return float2(0.0, 0.0);
#endif
}
// For builtinData we want to allow the user to overwrite default GI in the surface shader / shader graph.
// So we perform the following order of operation:
// 1. InitBuiltinData - Init bakeDiffuseLighting and backBakeDiffuseLighting
// 2. User can overwrite these value in the surface shader / shader graph
// 3. PostInitBuiltinData - Handle debug mode + allow the current lighting model to update the data with ModifyBakedDiffuseLighting
// This method initializes the usual BuiltinData values; after the caller has updated builtinData, it must be followed by PostInitBuiltinData
void InitBuiltinData( float alpha, float3 normalWS, float3 backNormalWS, float3 positionRWS, float2 texCoord1, float2 texCoord2,
out BuiltinData builtinData)
{
ZERO_INITIALIZE(BuiltinData, builtinData);
builtinData.opacity = alpha;
// Sample lightmap/lightprobe/volume proxy
builtinData.bakeDiffuseLighting = SampleBakedGI(positionRWS, normalWS, texCoord1, texCoord2);
// We also sample the back lighting in case we have transmission. If it is not used, this will be optimized out by the compiler.
// For now we simply call the function again with the inverted normal; the compiler should be able to optimize the lightmap case so the directional lightmap is not resampled,
// however it may not optimize the light probe case, since the proxy volume relies on a dynamic if (to verify): not a problem for SH9, but a problem for the proxy volume.
// TODO: optimize this code further.
builtinData.backBakeDiffuseLighting = SampleBakedGI(positionRWS, backNormalWS, texCoord1, texCoord2);
#ifdef SHADOWS_SHADOWMASK
float4 shadowMask = SampleShadowMask(positionRWS, texCoord1);
builtinData.shadowMask0 = shadowMask.x;
builtinData.shadowMask1 = shadowMask.y;
builtinData.shadowMask2 = shadowMask.z;
builtinData.shadowMask3 = shadowMask.w;
#endif
// Use the uniform directly - the float needs to be cast to uint (as Unity doesn't support setting a uint as a uniform)
builtinData.renderingLayers = _EnableLightLayers ? asuint(unity_RenderingLayer.x) : DEFAULT_LIGHT_LAYERS;
}
// InitBuiltinData must be called before calling PostInitBuiltinData
void PostInitBuiltinData( float3 V, inout PositionInputs posInput, SurfaceData surfaceData,
inout BuiltinData builtinData)
{
#ifdef DEBUG_DISPLAY
if (_DebugLightingMode == DEBUGLIGHTINGMODE_LUX_METER)
{
// The lighting in the SH or lightmap is assumed to contain bounced light only (i.e. no direct lighting),
// and is divided by PI (i.e. Lambert is applied), so multiply by PI here to get the illuminance back
builtinData.bakeDiffuseLighting *= PI; // don't take into account backBakeDiffuseLighting
}
else
#endif
{
// Apply control from the indirect lighting volume settings - this is applied here so we don't affect the emissive
// color (e.g. in the deferred Lit case) and so materials don't have to deal with it
builtinData.bakeDiffuseLighting *= _IndirectLightingMultiplier.x;
builtinData.backBakeDiffuseLighting *= _IndirectLightingMultiplier.x;
#ifdef MODIFY_BAKED_DIFFUSE_LIGHTING
ModifyBakedDiffuseLighting(V, posInput, surfaceData, builtinData);
#endif
}
}
// Flipping or mirroring a normal can be done directly in tangent space. This has the benefit of applying to the whole process, whether surface gradients are used or not.
// This function modifies FragInputs, and the change is not propagated outside of GetSurfaceAndBuiltinData(). This is ok as tangent space is not used outside of GetSurfaceAndBuiltinData().
void ApplyDoubleSidedFlipOrMirror(inout FragInputs input)

11
com.unity.render-pipelines.high-definition/HDRP/Material/NormalBuffer.hlsl


// RT1 - 8:8:8:8
// Our tangent encoding is based on our normal.
#if defined(SHADER_API_METAL) || defined(SHADER_API_VULKAN)
// With octahedral quad packing we get an artifact for reconstructed tangent at the center of this quad. We use rect packing instead to avoid it.
float2 octNormalWS = PackNormalOctRectEncode(normalData.normalWS);
#else
float2 octNormalWS = PackNormalOctQuadEncode(normalData.normalWS);
#endif
float2 octNormalWS = PackNormalOctQuadEncode(normalData.normalWS);
float3 packNormalWS = PackFloat2To888(saturate(octNormalWS * 0.5 + 0.5));
// We store perceptualRoughness instead of roughness because it is perceptually linear.
outNormalBuffer0 = float4(packNormalWS, normalData.perceptualRoughness);

{
float3 packNormalWS = normalBuffer.rgb;
float2 octNormalWS = Unpack888ToFloat2(packNormalWS);
#if defined(SHADER_API_METAL) || defined(SHADER_API_VULKAN)
normalData.normalWS = UnpackNormalOctRectEncode(octNormalWS * 2.0 - 1.0);
#else
#endif
normalData.perceptualRoughness = normalBuffer.a;
}
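For background on the packing above: octahedral encodings project the unit normal onto the octahedron |x| + |y| + |z| = 1 and fold the lower hemisphere into the same square; per the comment, the rect layout used on Metal/Vulkan arranges that square differently to avoid a tangent-reconstruction artifact at the centre of the quad. A generic sketch of the standard quad mapping (illustrative; not the exact PackNormalOctQuadEncode/PackNormalOctRectEncode code):

    using UnityEngine;

    static class OctahedralNormal
    {
        // Encode a unit normal to a point in [-1, 1]^2 via the octahedral mapping.
        public static Vector2 Encode(Vector3 n)
        {
            float l1 = Mathf.Abs(n.x) + Mathf.Abs(n.y) + Mathf.Abs(n.z);
            Vector2 p = new Vector2(n.x, n.y) / l1;
            if (n.z < 0f) // fold the lower hemisphere over the diagonals
                p = new Vector2((1f - Mathf.Abs(p.y)) * Mathf.Sign(p.x),
                                (1f - Mathf.Abs(p.x)) * Mathf.Sign(p.y));
            return p;
        }

        // Decode back to a unit normal (unfold, then normalize).
        public static Vector3 Decode(Vector2 p)
        {
            Vector3 n = new Vector3(p.x, p.y, 1f - Mathf.Abs(p.x) - Mathf.Abs(p.y));
            if (n.z < 0f)
            {
                float ox = (1f - Mathf.Abs(n.y)) * Mathf.Sign(n.x);
                float oy = (1f - Mathf.Abs(n.x)) * Mathf.Sign(n.y);
                n.x = ox; n.y = oy;
            }
            return n.normalized;
        }
    }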

1
com.unity.render-pipelines.high-definition/HDRP/Material/StackLit/StackLitData.hlsl


// Fill SurfaceData/Builtin data function
//-------------------------------------------------------------------------------------
#include "CoreRP/ShaderLibrary/Sampling/SampleUVMapping.hlsl"
#include "HDRP/Material/BuiltinUtilities.hlsl"
#include "HDRP/Material/MaterialUtilities.hlsl"
//-----------------------------------------------------------------------------

4
com.unity.render-pipelines.high-definition/HDRP/RenderPipeline/HDCustomSamplerId.cs


Forward,
RenderSSAO,
RenderShadows,
RenderDeferredDirectionalShadow,
ScreenSpaceShadows,
BuildLightList,
BlitToFinalRT,
Distortion,

TPPrepareLightsForGPU,
TPPushGlobalParameters,
TPTiledLightingDebug,
TPDeferredDirectionalShadow,
TPScreenSpaceShadows,
TPTileSettingsEnableTileAndCluster,
TPForwardPass,
TPForwardTiledClusterpass,

81
com.unity.render-pipelines.high-definition/HDRP/RenderPipeline/HDRenderPipeline.cs


using System.Linq;
using UnityEngine.Rendering.PostProcessing;
using UnityEngine.Experimental.GlobalIllumination;
using UnityEngine.XR;
namespace UnityEngine.Experimental.Rendering.HDPipeline
{

RTHandleSystem.RTHandle m_CameraStencilBufferCopy;
RTHandleSystem.RTHandle m_VelocityBuffer;
RTHandleSystem.RTHandle m_DeferredShadowBuffer;
RTHandleSystem.RTHandle m_ScreenSpaceShadowsBuffer;
RTHandleSystem.RTHandle m_AmbientOcclusionBuffer;
RTHandleSystem.RTHandle m_DistortionBuffer;

m_Asset = asset;
// Upgrade the resources (re-import every references in RenderPipelineResources) if the resource version mismatches
// It's done here because we know every HDRP assets have been imported before
UpgradeResourcesIfNeeded();
// Initial state of the RTHandle system.
// Tells the system that we will require MSAA or not so that we can avoid wasteful render texture allocation.
// TODO: Might want to initialize to at least the window resolution to avoid un-necessary re-alloc in the player

InitializeRenderStateBlocks();
}
void UpgradeResourcesIfNeeded()
{
#if UNITY_EDITOR
m_Asset.renderPipelineResources.UpgradeIfNeeded();
#endif
}
void InitializeRenderTextures()
{
if (!m_Asset.renderPipelineSettings.supportOnlyForward)

m_DistortionBuffer = RTHandles.Alloc(Vector2.one, filterMode: FilterMode.Point, colorFormat: Builtin.GetDistortionBufferFormat(), sRGB: Builtin.GetDistortionBufferSRGBFlag(), name: "Distortion");
// TODO: For MSAA, we'll need to add a Draw path in order to support MSAA properly
m_DeferredShadowBuffer = RTHandles.Alloc(Vector2.one, filterMode: FilterMode.Point, colorFormat: RenderTextureFormat.ARGB32, sRGB: false, enableRandomWrite: true, name: "DeferredShadow");
// TODO: For MSAA, we'll need to add a Draw path in order to support MSAA properly
// Use RG16 as we only have one deferred directional and one screen space shadow light currently
m_ScreenSpaceShadowsBuffer = RTHandles.Alloc(Vector2.one, filterMode: FilterMode.Point, colorFormat: RenderTextureFormat.RG16, sRGB: false, enableRandomWrite: true, name: "ScreenSpaceShadowsBuffer");
if (Debug.isDebugBuild)
{

RTHandles.Release(m_AmbientOcclusionBuffer);
RTHandles.Release(m_VelocityBuffer);
RTHandles.Release(m_DistortionBuffer);
RTHandles.Release(m_DeferredShadowBuffer);
RTHandles.Release(m_ScreenSpaceShadowsBuffer);
RTHandles.Release(m_DebugColorPickerBuffer);
RTHandles.Release(m_DebugFullScreenTempBuffer);

{
CoreUtils.DisplayUnsupportedAPIMessage();
// Display more information to the user when Metal should have been used instead of OpenGL
if (SystemInfo.graphicsDeviceType.ToString().StartsWith("OpenGL"))
{
if (SystemInfo.operatingSystem.StartsWith("Mac"))
CoreUtils.DisplayUnsupportedMessage("Use Metal API instead.");
else if (SystemInfo.operatingSystem.StartsWith("Windows"))
CoreUtils.DisplayUnsupportedMessage("Use Vulkan API instead.");
}
#if !UNITY_SWITCH
if (XRSettings.isDeviceActive)
if (XRGraphicsConfig.enabled)
#endif
return true;
}

{
currentFrameSettings.enablePostprocess = false;
}
// Disable SSS if luxmeter is enabled
if (debugDisplaySettings.lightingDebugSettings.debugLightingMode == DebugLightingMode.LuxMeter)
{
currentFrameSettings.enableSubsurfaceScattering = false;
}
var hdCamera = HDCamera.Get(camera);

hdCamera.SetupGlobalStereoParams(cmd);
}
using (new ProfilingSample(cmd, "Deferred directional shadows", CustomSamplerId.RenderDeferredDirectionalShadow.GetSampler()))
using (new ProfilingSample(cmd, "Screen space shadows", CustomSamplerId.ScreenSpaceShadows.GetSampler()))
if (m_CurrentDebugDisplaySettings.fullScreenDebugMode == FullScreenDebugMode.DeferredShadows)
if (m_CurrentDebugDisplaySettings.fullScreenDebugMode == FullScreenDebugMode.ScreenSpaceShadows)
HDUtils.SetRenderTarget(cmd, hdCamera, m_DeferredShadowBuffer, ClearFlag.Color, CoreUtils.clearColorAllBlack);
HDUtils.SetRenderTarget(cmd, hdCamera, m_ScreenSpaceShadowsBuffer, ClearFlag.Color, CoreUtils.clearColorAllBlack);
m_LightLoop.RenderDeferredDirectionalShadow(hdCamera, m_DeferredShadowBuffer, GetDepthTexture(), cmd);
PushFullScreenDebugTexture(hdCamera, cmd, m_DeferredShadowBuffer, FullScreenDebugMode.DeferredShadows);
m_LightLoop.RenderScreenSpaceShadows(hdCamera, m_ScreenSpaceShadowsBuffer, GetDepthTexture(), cmd);
PushFullScreenDebugTexture(hdCamera, cmd, m_ScreenSpaceShadowsBuffer, FullScreenDebugMode.ScreenSpaceShadows);
}
if (hdCamera.frameSettings.enableAsyncCompute)

public void ApplyDebugDisplaySettings(HDCamera hdCamera, CommandBuffer cmd)
{
// See ShaderPassForward.hlsl: for forward shaders, if DEBUG_DISPLAY is enabled and no DebugLightingMode or DebugMipMapMod
// modes have been set, lighting is automatically skipped (to avoid crashes due to the lighting RT not being set on console).
// However, debug modes like colorPickerMode and false color don't need DEBUG_DISPLAY and must work with the lighting.
// So we enable DEBUG_DISPLAY independently
// Enable globally the keyword DEBUG_DISPLAY on shader that support it with multicompile
CoreUtils.SetKeyword(cmd, "DEBUG_DISPLAY", m_CurrentDebugDisplaySettings.IsDebugDisplayEnabled());
// enable globally the keyword DEBUG_DISPLAY on shader that support it with multicompile
cmd.EnableShaderKeyword("DEBUG_DISPLAY");
// This is for texture streaming
m_CurrentDebugDisplaySettings.UpdateMaterials();

// The DebugNeedsExposure test allows us to set a neutral value if exposure is not needed. This way we don't need to make various tests inside shaders but only in this function.
cmd.SetGlobalFloat(HDShaderIDs._DebugExposure, m_CurrentDebugDisplaySettings.DebugNeedsExposure() ? lightingDebugSettings.debugExposure : 0.0f);
}
else
{
// TODO: Be sure that if there is no change in the state of this keyword, it doesn't imply any work on the CPU side! Otherwise we will need to save the state somewhere
cmd.DisableShaderKeyword("DEBUG_DISPLAY");
}
if (m_CurrentDebugDisplaySettings.colorPickerDebugSettings.colorPickerMode != ColorPickerDebugMode.None || m_DebugDisplaySettings.falseColorDebugSettings.falseColor)
if (m_CurrentDebugDisplaySettings.colorPickerDebugSettings.colorPickerMode != ColorPickerDebugMode.None || m_DebugDisplaySettings.falseColorDebugSettings.falseColor || m_DebugDisplaySettings.lightingDebugSettings.debugLightingMode == DebugLightingMode.LuminanceMeter)
{
using (new ProfilingSample(cmd, "Push To Color Picker"))
{

// TODO TEMP: Not sure I want to keep this special case. Need to see how to get rid of it (not sure it will work correctly for non-full viewports).
public void PushColorPickerDebugTexture(HDCamera hdCamera, CommandBuffer cmd, RenderTargetIdentifier textureID)
{
if (m_CurrentDebugDisplaySettings.colorPickerDebugSettings.colorPickerMode != ColorPickerDebugMode.None || m_DebugDisplaySettings.falseColorDebugSettings.falseColor)
if (m_CurrentDebugDisplaySettings.colorPickerDebugSettings.colorPickerMode != ColorPickerDebugMode.None || m_DebugDisplaySettings.falseColorDebugSettings.falseColor || m_DebugDisplaySettings.lightingDebugSettings.debugLightingMode == DebugLightingMode.LuminanceMeter)
{
using (new ProfilingSample(cmd, "Push To Color Picker"))
{

DecalSystem.instance.RenderDebugOverlay(hdCamera, cmd, m_CurrentDebugDisplaySettings, ref x, ref y, overlaySize, hdCamera.actualWidth);
if (m_CurrentDebugDisplaySettings.colorPickerDebugSettings.colorPickerMode != ColorPickerDebugMode.None || m_CurrentDebugDisplaySettings.falseColorDebugSettings.falseColor)
if (m_CurrentDebugDisplaySettings.colorPickerDebugSettings.colorPickerMode != ColorPickerDebugMode.None || m_CurrentDebugDisplaySettings.falseColorDebugSettings.falseColor || m_CurrentDebugDisplaySettings.lightingDebugSettings.debugLightingMode == DebugLightingMode.LuminanceMeter)
{
ColorPickerDebugSettings colorPickerDebugSettings = m_CurrentDebugDisplaySettings.colorPickerDebugSettings;
FalseColorDebugSettings falseColorDebugSettings = m_CurrentDebugDisplaySettings.falseColorDebugSettings;

using (new ProfilingSample(cmd, "Clear HDR target", CustomSamplerId.ClearHDRTarget.GetSampler()))
{
if (hdCamera.clearColorMode == HDAdditionalCameraData.ClearColorMode.BackgroundColor ||
// If the luxmeter is enabled, the sky isn't rendered, so we clear with the background color
m_DebugDisplaySettings.lightingDebugSettings.debugLightingMode == DebugLightingMode.LuxMeter ||
// If we want the sky but the sky doesn't exist, still clear with the background color
(hdCamera.clearColorMode == HDAdditionalCameraData.ClearColorMode.Sky && !m_SkyManager.IsVisualSkyValid()) ||
// Special handling for Preview: we force a clear with the background color (i.e. black)

{
Color clearColor = hdCamera.backgroundColorHDR;
// We set the background color to black when the luxmeter is enabled to avoid picking the sky color
if (m_DebugDisplaySettings.lightingDebugSettings.debugLightingMode == DebugLightingMode.LuxMeter)
clearColor = Color.black;
HDUtils.SetRenderTarget(cmd, hdCamera, m_CameraColorBuffer, m_CameraDepthStencilBuffer, ClearFlag.Color, clearColor);
}
}
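
Taken together, the hunks above toggle the global DEBUG_DISPLAY multi_compile keyword per frame, push a neutral exposure value when the active debug mode doesn't need one, and clear to black when the luxmeter is enabled. A minimal sketch of the keyword/exposure part, assuming a stand-in settings class rather than HDRP's actual DebugDisplaySettings (the names below are placeholders):

using UnityEngine;
using UnityEngine.Rendering;

// Hypothetical stand-in for the subset of debug display settings used here.
class DebugSettingsStub
{
    public bool displayEnabled;   // any material/lighting debug view is active
    public bool needsExposure;    // the current debug mode wants the real exposure
    public float debugExposure;
}

static class DebugDisplayExample
{
    static readonly int k_DebugExposure = Shader.PropertyToID("_DebugExposure");

    // Called once per frame before rendering. Keywords set on a command buffer are
    // global, so every shader compiled with "#pragma multi_compile _ DEBUG_DISPLAY"
    // picks up the debug variant.
    public static void Apply(CommandBuffer cmd, DebugSettingsStub settings)
    {
        if (settings.displayEnabled)
            cmd.EnableShaderKeyword("DEBUG_DISPLAY");
        else
            cmd.DisableShaderKeyword("DEBUG_DISPLAY");

        // Neutral value when exposure is irrelevant, so shaders never have to branch on the mode.
        cmd.SetGlobalFloat(k_DebugExposure,
            settings.needsExposure ? settings.debugExposure : 0.0f);
    }
}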

19
com.unity.render-pipelines.high-definition/HDRP/RenderPipeline/HDUtils.cs


s_PropertyBlock.SetVector(HDShaderIDs._BlitScaleBias, scaleBiasTex);
s_PropertyBlock.SetVector(HDShaderIDs._BlitScaleBiasRt, scaleBiasRT);
s_PropertyBlock.SetFloat(HDShaderIDs._BlitMipLevel, mipLevelTex);
cmd.DrawProcedural(Matrix4x4.identity, GetBlitMaterial(), bilinear ? 2 : 3, MeshTopology.Quads, 4, 1, s_PropertyBlock);
cmd.DrawProcedural(Matrix4x4.identity, GetBlitMaterial(), bilinear ? 3 : 2, MeshTopology.Quads, 4, 1, s_PropertyBlock);
}
public static void BlitTexture(CommandBuffer cmd, RTHandleSystem.RTHandle source, RTHandleSystem.RTHandle destination, Vector4 scaleBias, float mipLevel, bool bilinear)

&& layer.antialiasingMode == PostProcessLayer.Antialiasing.TemporalAntialiasing
&& layer.temporalAntialiasing.IsSupported();
}
// We need these at runtime for RenderPipelineResources upgrade
public static string GetHDRenderPipelinePath()
{
return "Packages/com.unity.render-pipelines.high-definition/HDRP/";
}
public static string GetPostProcessingPath()
{
return "Packages/com.unity.postprocessing/";
}
public static string GetCorePath()
{
return "Packages/com.unity.render-pipelines.core/CoreRP/";
}
}
}
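
The BlitTexture change above swaps the shader pass index used for bilinear versus nearest filtering and drives everything through one shared MaterialPropertyBlock and a procedural quad. A hedged sketch of that draw pattern, assuming an illustrative blit material whose vertex shader builds the quad from the vertex index (the sampler name and pass indices below are assumptions, not HDRP's):

using UnityEngine;
using UnityEngine.Rendering;

static class BlitQuadExample
{
    static readonly MaterialPropertyBlock s_Props = new MaterialPropertyBlock();
    static readonly int k_BlitTexture = Shader.PropertyToID("_BlitTexture");   // sampler name is an assumption
    static readonly int k_ScaleBias   = Shader.PropertyToID("_BlitScaleBias");
    static readonly int k_MipLevel    = Shader.PropertyToID("_BlitMipLevel");

    // Draws a single full-screen quad with a blit shader; no mesh is bound because the
    // vertex shader is expected to build positions/UVs from the vertex index.
    public static void BlitQuad(CommandBuffer cmd, Texture source, RenderTargetIdentifier destination,
                                Material blitMaterial, Vector4 scaleBias, float mipLevel, bool bilinear)
    {
        cmd.SetRenderTarget(destination);
        s_Props.SetTexture(k_BlitTexture, source);
        s_Props.SetVector(k_ScaleBias, scaleBias);
        s_Props.SetFloat(k_MipLevel, mipLevel);
        // Pass selection mirrors the bilinear/nearest split above; the exact indices
        // depend on the blit shader, so 0/1 here is purely illustrative.
        cmd.DrawProcedural(Matrix4x4.identity, blitMaterial, bilinear ? 1 : 0,
            MeshTopology.Quads, 4, 1, s_Props);
    }
}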

7
com.unity.render-pipelines.high-definition/HDRP/RenderPipeline/Settings/FrameSettings.cs


using System;
using System.Collections.Generic;
using UnityEngine.XR;
using UnityEngine.Serialization;
namespace UnityEngine.Experimental.Rendering.HDPipeline

// Planar and real-time cubemap reflections don't need post-processing and render in FP16
aggregate.enablePostprocess = camera.cameraType != CameraType.Reflection && srcFrameSettings.enablePostprocess;
#if UNITY_SWITCH
aggregate.enableStereo = false;
#else
aggregate.enableStereo = camera.cameraType != CameraType.Reflection && srcFrameSettings.enableStereo && XRSettings.isDeviceActive && (camera.stereoTargetEye == StereoTargetEyeMask.Both) && renderPipelineSettings.supportStereo;
#endif
aggregate.enableStereo = camera.cameraType != CameraType.Reflection && srcFrameSettings.enableStereo && XRGraphicsConfig.enabled && (camera.stereoTargetEye == StereoTargetEyeMask.Both);
aggregate.enableAsyncCompute = srcFrameSettings.enableAsyncCompute && SystemInfo.supportsAsyncCompute;
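
Each aggregated frame setting above is the logical AND of what the user asked for and what the camera type, platform, and pipeline actually support. A simplified sketch of that aggregation with a placeholder struct covering only the fields visible in the hunk:

using UnityEngine;

// Placeholder mirroring only the frame settings visible in the hunk above.
struct FrameSettingsSketch
{
    public bool enablePostprocess;
    public bool enableStereo;
    public bool enableAsyncCompute;

    // A requested setting is only honoured when the camera type and platform allow it.
    public static FrameSettingsSketch Aggregate(FrameSettingsSketch requested, Camera camera,
                                                bool stereoSupported)
    {
        bool isReflection = camera.cameraType == CameraType.Reflection;

        FrameSettingsSketch result;
        // Planar and real-time cubemap reflections skip post-processing.
        result.enablePostprocess = !isReflection && requested.enablePostprocess;
        result.enableStereo = !isReflection && requested.enableStereo && stereoSupported
            && camera.stereoTargetEye == StereoTargetEyeMask.Both;
        result.enableAsyncCompute = requested.enableAsyncCompute && SystemInfo.supportsAsyncCompute;
        return result;
    }
}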

107
com.unity.render-pipelines.high-definition/HDRP/RenderPipelineResources/RenderPipelineResources.cs


using UnityEngine.Serialization;
#if UNITY_EDITOR
using UnityEditor;
#endif
[HideInInspector]
public float version = 1.0f;
const int currentVersion = 2;
[SerializeField]
[FormerlySerializedAs("version")]
int m_Version = 1;
// Default Material / Shader
public Material defaultDiffuseMaterial;

// Decal
public Shader decalNormalBuffer;
#if UNITY_EDITOR
public void UpgradeIfNeeded()
{
if (m_Version != currentVersion)
{
Init();
m_Version = currentVersion;
}
}
// Note: move this to a static using once we can target C#6+
T Load<T>(string path) where T : UnityEngine.Object
{
return AssetDatabase.LoadAssetAtPath<T>(path);
}
public void Init()
{
// Load default renderPipelineResources / Material / Shader
string HDRenderPipelinePath = HDUtils.GetHDRenderPipelinePath();
string CorePath = HDUtils.GetCorePath();
defaultDiffuseMaterial = Load<Material>(HDRenderPipelinePath + "RenderPipelineResources/DefaultHDMaterial.mat");
defaultMirrorMaterial = Load<Material>(HDRenderPipelinePath + "RenderPipelineResources/DefaultHDMirrorMaterial.mat");
defaultDecalMaterial = Load<Material>(HDRenderPipelinePath + "RenderPipelineResources/DefaultHDDecalMaterial.mat");
defaultShader = Load<Shader>(HDRenderPipelinePath + "Material/Lit/Lit.shader");
debugFontTexture = Load<Texture2D>(HDRenderPipelinePath + "RenderPipelineResources/DebugFont.tga");
debugDisplayLatlongShader = Load<Shader>(HDRenderPipelinePath + "Debug/DebugDisplayLatlong.Shader");
debugViewMaterialGBufferShader = Load<Shader>(HDRenderPipelinePath + "Debug/DebugViewMaterialGBuffer.Shader");
debugViewTilesShader = Load<Shader>(HDRenderPipelinePath + "Debug/DebugViewTiles.Shader");
debugFullScreenShader = Load<Shader>(HDRenderPipelinePath + "Debug/DebugFullScreen.Shader");
debugColorPickerShader = Load<Shader>(HDRenderPipelinePath + "Debug/DebugColorPicker.Shader");
deferredShader = Load<Shader>(HDRenderPipelinePath + "Lighting/Deferred.Shader");
colorPyramidCS = Load<ComputeShader>(HDRenderPipelinePath + "RenderPipelineResources/ColorPyramid.compute");
depthPyramidCS = Load<ComputeShader>(HDRenderPipelinePath + "RenderPipelineResources/DepthPyramid.compute");
copyChannelCS = Load<ComputeShader>(CorePath + "CoreResources/GPUCopy.compute");
texturePaddingCS = Load<ComputeShader>(CorePath + "CoreResources/TexturePadding.compute");
applyDistortionCS = Load<ComputeShader>(HDRenderPipelinePath + "RenderPipelineResources/ApplyDistorsion.compute");
clearDispatchIndirectShader = Load<ComputeShader>(HDRenderPipelinePath + "Lighting/LightLoop/cleardispatchindirect.compute");
buildDispatchIndirectShader = Load<ComputeShader>(HDRenderPipelinePath + "Lighting/LightLoop/builddispatchindirect.compute");
buildScreenAABBShader = Load<ComputeShader>(HDRenderPipelinePath + "Lighting/LightLoop/scrbound.compute");
buildPerTileLightListShader = Load<ComputeShader>(HDRenderPipelinePath + "Lighting/LightLoop/lightlistbuild.compute");
buildPerBigTileLightListShader = Load<ComputeShader>(HDRenderPipelinePath + "Lighting/LightLoop/lightlistbuild-bigtile.compute");
buildPerVoxelLightListShader = Load<ComputeShader>(HDRenderPipelinePath + "Lighting/LightLoop/lightlistbuild-clustered.compute");
buildMaterialFlagsShader = Load<ComputeShader>(HDRenderPipelinePath + "Lighting/LightLoop/materialflags.compute");
deferredComputeShader = Load<ComputeShader>(HDRenderPipelinePath + "Lighting/LightLoop/Deferred.compute");
screenSpaceShadowComputeShader = Load<ComputeShader>(HDRenderPipelinePath + "Lighting/ScreenSpaceShadow.compute");
volumeVoxelizationCS = Load<ComputeShader>(HDRenderPipelinePath + "Lighting/Volumetrics/VolumeVoxelization.compute");
volumetricLightingCS = Load<ComputeShader>(HDRenderPipelinePath + "Lighting/Volumetrics/VolumetricLighting.compute");
subsurfaceScatteringCS = Load<ComputeShader>(HDRenderPipelinePath + "Material/SubsurfaceScattering/SubsurfaceScattering.compute");
subsurfaceScattering = Load<Shader>(HDRenderPipelinePath + "Material/SubsurfaceScattering/SubsurfaceScattering.shader");
combineLighting = Load<Shader>(HDRenderPipelinePath + "Material/SubsurfaceScattering/CombineLighting.shader");
// General
cameraMotionVectors = Load<Shader>(HDRenderPipelinePath + "RenderPipelineResources/CameraMotionVectors.shader");
copyStencilBuffer = Load<Shader>(HDRenderPipelinePath + "RenderPipelineResources/CopyStencilBuffer.shader");
copyDepthBuffer = Load<Shader>(HDRenderPipelinePath + "RenderPipelineResources/CopyDepthBuffer.shader");
blit = Load<Shader>(HDRenderPipelinePath + "RenderPipelineResources/Blit.shader");
// Sky
blitCubemap = Load<Shader>(HDRenderPipelinePath + "Sky/BlitCubemap.shader");
buildProbabilityTables = Load<ComputeShader>(HDRenderPipelinePath + "Material/GGXConvolution/BuildProbabilityTables.compute");
computeGgxIblSampleData = Load<ComputeShader>(HDRenderPipelinePath + "Material/GGXConvolution/ComputeGgxIblSampleData.compute");
GGXConvolve = Load<Shader>(HDRenderPipelinePath + "Material/GGXConvolution/GGXConvolve.shader");
opaqueAtmosphericScattering = Load<Shader>(HDRenderPipelinePath + "Lighting/AtmosphericScattering/OpaqueAtmosphericScattering.shader");
hdriSky = Load<Shader>(HDRenderPipelinePath + "Sky/HDRISky/HDRISky.shader");
integrateHdriSky = Load<Shader>(HDRenderPipelinePath + "Sky/HDRISky/IntegrateHDRISky.shader");
proceduralSky = Load<Shader>(HDRenderPipelinePath + "Sky/ProceduralSky/ProceduralSky.shader");
gradientSky = Load<Shader>(HDRenderPipelinePath + "Sky/GradientSky/GradientSky.shader");
// Skybox/Cubemap is a builtin shader, must use Shader.Find to access it. It is fine because we are in the editor
skyboxCubemap = Shader.Find("Skybox/Cubemap");
// Material
preIntegratedFGD_GGXDisneyDiffuse = Load<Shader>(HDRenderPipelinePath + "Material/PreIntegratedFGD/PreIntegratedFGD_GGXDisneyDiffuse.shader");
preIntegratedFGD_CharlieClothLambert = Load<Shader>(HDRenderPipelinePath + "Material/PreIntegratedFGD/PreIntegratedFGD_CharlieClothLambert.shader");
// Utilities / Core
encodeBC6HCS = Load<ComputeShader>(CorePath + "CoreResources/EncodeBC6H.compute");
cubeToPanoShader = Load<Shader>(CorePath + "CoreResources/CubeToPano.shader");
blitCubeTextureFace = Load<Shader>(CorePath + "CoreResources/BlitCubeTextureFace.shader");
// Shadow
shadowClearShader = Load<Shader>(CorePath + "Shadow/ShadowClear.shader");
shadowBlurMoments = Load<ComputeShader>(CorePath + "Shadow/ShadowBlurMoments.compute");
debugShadowMapShader = Load<Shader>(CorePath + "Shadow/DebugDisplayShadowMap.shader");
}
#endif
}
}
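
The resources asset above keeps an integer m_Version (formerly serialized as version) and re-runs Init() whenever it lags behind currentVersion, so references added in newer packages, such as decalNormalBuffer, get filled in on existing assets. The same editor-side upgrade pattern in isolation; the asset path and field here are illustrative, not HDRP's:

#if UNITY_EDITOR
using UnityEditor;
#endif
using UnityEngine;

public class VersionedResourcesExample : ScriptableObject
{
    const int currentVersion = 2;

    [SerializeField]
    int m_Version = 1;

    public Shader exampleShader;   // illustrative reference added in version 2

#if UNITY_EDITOR
    // Old assets (m_Version < currentVersion) get their newly added references
    // populated, then the version is stamped so the upgrade runs only once.
    public void UpgradeIfNeeded()
    {
        if (m_Version != currentVersion)
        {
            Init();
            m_Version = currentVersion;
            EditorUtility.SetDirty(this);   // persist the upgrade
        }
    }

    void Init()
    {
        // The path is an assumption for this sketch.
        exampleShader = AssetDatabase.LoadAssetAtPath<Shader>("Assets/Shaders/Example.shader");
    }
#endif
}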

9
com.unity.render-pipelines.high-definition/HDRP/Sky/SkyRenderingContext.cs


m_BuiltinParameters.debugSettings = debugSettings;
skyContext.renderer.SetRenderTargets(m_BuiltinParameters);
// When rendering the visual sky for reflection probes, we need to remove the sun disk if skySettings.includeSunInBaking is false.
skyContext.renderer.RenderSky(m_BuiltinParameters, false, hdCamera.camera.cameraType != CameraType.Reflection || skyContext.skySettings.includeSunInBaking);
// If the luxmeter is enabled, we don't render the sky
if (debugSettings.lightingDebugSettings.debugLightingMode != DebugLightingMode.LuxMeter)
{
// When rendering the visual sky for reflection probes, we need to remove the sun disk if skySettings.includeSunInBaking is false.
skyContext.renderer.RenderSky(m_BuiltinParameters, false, hdCamera.camera.cameraType != CameraType.Reflection || skyContext.skySettings.includeSunInBaking);
}
}
}
}

8
com.unity.render-pipelines.high-definition/package.json


{
"name": "com.unity.render-pipelines.high-definition",
"description": "HD Render Pipeline for Unity.",
"version": "3.1.0-preview",
"version": "3.3.0-preview",
"com.unity.postprocessing": "2.0.9-preview",
"com.unity.render-pipelines.core": "3.1.0-preview",
"com.unity.shadergraph": "3.1.0-preview"
"com.unity.postprocessing": "2.0.10-preview",
"com.unity.render-pipelines.core": "3.3.0-preview",
"com.unity.shadergraph": "3.3.0-preview"
}
}

18
com.unity.render-pipelines.lightweight/CHANGELOG.md


The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/)
and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html).
## [3.3.0-preview]
## [3.2.0-preview]
### Changed
- Receive Shadows property is now exposed in the material instead of in the renderer.
- The UI for the Lightweight asset has been updated with new categories. A cleaner structure with foldouts has been added to keep things organized.
### Fixed
- Shadow casters are now properly culled per cascade. (case 1059142)
- Rendering no longer breaks when Android platform is selected in Build Settings. (case 1058812)
- Scriptable passes no longer have missing material references. Now they access cached materials in the renderer.(case 1061353)
- When you change a Shadow Cascade option in the Pipeline Asset, this no longer warns you that you've exceeded the array size for the _WorldToShadow property.
- Terrain shader optimizations.
### Fixed
- Fixed assert errors caused by multi spot lights
- Fixed LWRP-DirectionalShadowConstantBuffer params setting
## [3.0.0-preview]
### Added

49
com.unity.render-pipelines.lightweight/LWRP/DefaultRendererSetup.cs


[NonSerialized]
private bool m_Initialized = false;
private void Init(LightweightForwardRenderer renderer)
private void Init()
m_DepthOnlyPass = new DepthOnlyPass(renderer);
m_DirectionalShadowPass = new DirectionalShadowsPass(renderer);
m_LocalShadowPass = new LocalShadowsPass(renderer);
m_SetupForwardRenderingPass = new SetupForwardRenderingPass(renderer);
m_ScreenSpaceShadowResovePass = new ScreenSpaceShadowResolvePass(renderer);
m_CreateLightweightRenderTexturesPass = new CreateLightweightRenderTexturesPass(renderer);
m_BeginXrRenderingPass = new BeginXRRenderingPass(renderer);
m_SetupLightweightConstants = new SetupLightweightConstanstPass(renderer);
m_RenderOpaqueForwardPass = new RenderOpaqueForwardPass(renderer);
m_OpaquePostProcessPass = new OpaquePostProcessPass(renderer);
m_DrawSkyboxPass = new DrawSkyboxPass(renderer);
m_CopyDepthPass = new CopyDepthPass(renderer);
m_CopyColorPass = new CopyColorPass(renderer);
m_RenderTransparentForwardPass = new RenderTransparentForwardPass(renderer);
m_TransparentPostProcessPass = new TransparentPostProcessPass(renderer);
m_FinalBlitPass = new FinalBlitPass(renderer);
m_EndXrRenderingPass = new EndXRRenderingPass(renderer);
m_DepthOnlyPass = new DepthOnlyPass();
m_DirectionalShadowPass = new DirectionalShadowsPass();
m_LocalShadowPass = new LocalShadowsPass();
m_SetupForwardRenderingPass = new SetupForwardRenderingPass();
m_ScreenSpaceShadowResovePass = new ScreenSpaceShadowResolvePass();
m_CreateLightweightRenderTexturesPass = new CreateLightweightRenderTexturesPass();
m_BeginXrRenderingPass = new BeginXRRenderingPass();
m_SetupLightweightConstants = new SetupLightweightConstanstPass();
m_RenderOpaqueForwardPass = new RenderOpaqueForwardPass();
m_OpaquePostProcessPass = new OpaquePostProcessPass();
m_DrawSkyboxPass = new DrawSkyboxPass();
m_CopyDepthPass = new CopyDepthPass();
m_CopyColorPass = new CopyColorPass();
m_RenderTransparentForwardPass = new RenderTransparentForwardPass();
m_TransparentPostProcessPass = new TransparentPostProcessPass();
m_FinalBlitPass = new FinalBlitPass();
m_EndXrRenderingPass = new EndXRRenderingPass();
m_SceneViewDepthCopyPass = new SceneViewDepthCopyPass(renderer);
m_SceneViewDepthCopyPass = new SceneViewDepthCopyPass();
#endif
// RenderTexture format depends on camera and pipeline (HDR, non HDR, etc)

public void Setup(LightweightForwardRenderer renderer, ref ScriptableRenderContext context,
ref CullResults cullResults, ref RenderingData renderingData)
{
Init(renderer);
Init();
RenderTextureDescriptor baseDescriptor = renderer.CreateRTDesc(ref renderingData.cameraData);
RenderTextureDescriptor baseDescriptor = LightweightForwardRenderer.CreateRTDesc(ref renderingData.cameraData);
RenderTextureDescriptor shadowDescriptor = baseDescriptor;
shadowDescriptor.dimension = TextureDimension.Tex2D;

if (renderingData.shadowData.renderLocalShadows)
{
m_LocalShadowPass.Setup(LocalShadowmap);
m_LocalShadowPass.Setup(LocalShadowmap, renderer.maxVisibleLocalLights);
renderer.EnqueuePass(m_LocalShadowPass);
}

bool dynamicBatching = renderingData.supportsDynamicBatching;
RendererConfiguration rendererConfiguration = LightweightForwardRenderer.GetRendererConfiguration(renderingData.lightData.totalAdditionalLightsCount);
m_SetupLightweightConstants.Setup(renderer.maxVisibleLocalLights, renderer.perObjectLightIndices);
renderer.EnqueuePass(m_SetupLightweightConstants);
m_RenderOpaqueForwardPass.Setup(baseDescriptor, colorHandle, depthHandle, LightweightForwardRenderer.GetCameraClearFlag(camera), camera.backgroundColor, rendererConfiguration,dynamicBatching);

renderingData.cameraData.postProcessLayer.HasOpaqueOnlyEffects(renderer.postProcessRenderContext))
{
m_OpaquePostProcessPass.Setup(baseDescriptor, colorHandle);
m_OpaquePostProcessPass.Setup(renderer.postProcessRenderContext, baseDescriptor, colorHandle);
renderer.EnqueuePass(m_OpaquePostProcessPass);
}

if (renderingData.cameraData.postProcessEnabled)
{
m_TransparentPostProcessPass.Setup(baseDescriptor, colorHandle);
m_TransparentPostProcessPass.Setup(renderer.postProcessRenderContext, baseDescriptor, colorHandle);
renderer.EnqueuePass(m_TransparentPostProcessPass);
}
else if (!renderingData.cameraData.isOffscreenRender && colorHandle != RenderTargetHandle.CameraTarget)
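
The LWRP refactor above removes the renderer argument from every pass constructor and the Dispose override; passes are now constructed parameterless, receive the renderer when they execute, and release per-frame resources in FrameCleanup. A hedged sketch of the resulting pass shape with simplified stand-in types:

using UnityEngine;
using UnityEngine.Rendering;

// Simplified stand-in for the forward renderer a pass queries at execute time.
class RendererStub
{
    public Material GetMaterial(int handle) { return null; /* lookup omitted */ }
}

abstract class RenderPassSketch
{
    // The renderer is now an Execute argument instead of constructor state,
    // so pass instances can be created once without a renderer.
    public abstract void Execute(RendererStub renderer, CommandBuffer cmd);

    // Renamed from Dispose(): releases per-frame temporary render targets.
    public virtual void FrameCleanup(CommandBuffer cmd) { }
}

class ExamplePassSketch : RenderPassSketch
{
    public override void Execute(RendererStub renderer, CommandBuffer cmd)
    {
        // Materials are fetched from the renderer when the pass runs,
        // not cached in the constructor.
        Material material = renderer.GetMaterial(0);
        // ... record draw commands with cmd and material ...
    }
}

With this shape, DefaultRendererSetup above only needs to run Init() once, since no pass holds a reference to a particular renderer.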

4
com.unity.render-pipelines.lightweight/LWRP/Editor/LightweightLightEditor.cs


public bool spotOptionsValue { get { return typeIsSame && lightProperty.type == LightType.Spot; } }
public bool pointOptionsValue { get { return typeIsSame && lightProperty.type == LightType.Point; } }
public bool dirOptionsValue { get { return typeIsSame && lightProperty.type == LightType.Directional; } }
public bool areaOptionsValue { get { return typeIsSame && lightProperty.type == LightType.Area; } }
public bool areaOptionsValue { get { return typeIsSame && lightProperty.type == LightType.Rectangle; } }
public bool runtimeOptionsValue { get { return typeIsSame && (lightProperty.type != LightType.Area && lightProperty.type != LightType.Point && !settings.isCompletelyBaked); } }
public bool runtimeOptionsValue { get { return typeIsSame && (lightProperty.type != LightType.Rectangle && lightProperty.type != LightType.Point && !settings.isCompletelyBaked); } }
public bool bakedShadowRadius { get { return typeIsSame && (lightProperty.type == LightType.Point || lightProperty.type == LightType.Spot) && settings.isBakedOrMixed; } }
public bool bakedShadowAngle { get { return typeIsSame && lightProperty.type == LightType.Directional && settings.isBakedOrMixed; } }
public bool shadowOptionsValue { get { return shadowTypeIsSame && lightProperty.shadows != LightShadows.None; } }

130
com.unity.render-pipelines.lightweight/LWRP/Editor/LightweightPipelineAssetEditor.cs


[CustomEditor(typeof(LightweightPipelineAsset))]
public class LightweightPipelineAssetEditor : Editor
{
bool generalSettingsFoldout = false;
bool qualitySettingsFoldout = false;
bool shadowsSettingsFoldout = false;
public static GUIContent renderingLabel = new GUIContent("Rendering");
public static GUIContent qualityLabel = new GUIContent("Quality");
public static GUIContent capabilitiesLabel = new GUIContent("Capabilities");
public static GUIContent featuresLabel = new GUIContent("Shader Features");
public static GUIContent renderScaleLabel = new GUIContent("Render Scale", "Scales the camera render target allowing the game to render at a resolution different than native resolution. UI is always rendered at native resolution.");

SerializedProperty m_CustomShaderVariantStripSettingsProp;
SerializedProperty m_XRConfig;
public override void OnInspectorGUI()
{
serializedObject.Update();

DrawCapabilitiesSettings();
DrawShaderFeaturesSettings();
DrawQualitySettings();
DrawShadowSettings();
EditorGUILayout.PropertyField(m_XRConfig);
serializedObject.ApplyModifiedProperties();

{
{
m_RenderScale = serializedObject.FindProperty("m_RenderScale");
m_MaxPixelLights = serializedObject.FindProperty("m_MaxPixelLights");
m_SupportsVertexLightProp = serializedObject.FindProperty("m_SupportsVertexLight");

m_ShowOpaqueTextureScale.valueChanged.AddListener(Repaint);
m_ShowOpaqueTextureScale.value = m_RequireOpaqueTextureProp.boolValue;
m_XRConfig = serializedObject.FindProperty("m_SavedXRConfig");
}
void OnDisable()

m_ShowOpaqueTextureScale.target = m_RequireOpaqueTextureProp.boolValue;
}
void DrawGeneralSettings()
void DrawShaderFeaturesSettings()
EditorGUILayout.LabelField(Styles.generalSettingsLabel, EditorStyles.boldLabel);
EditorGUILayout.LabelField(Styles.featuresLabel, EditorStyles.boldLabel);
m_RenderScale.floatValue = EditorGUILayout.Slider(Styles.renderScaleLabel, m_RenderScale.floatValue, k_MinRenderScale, k_MaxRenderScale);
m_MaxPixelLights.intValue = EditorGUILayout.IntSlider(Styles.maxPixelLightsLabel, m_MaxPixelLights.intValue, 0, k_MaxSupportedPixelLights);
EditorGUILayout.PropertyField(m_SupportsVertexLightProp, Styles.enableVertexLightLabel);
EditorGUILayout.PropertyField(m_DirectionalShadowsSupportedProp, Styles.supportsDirectionalShadows);
EditorGUILayout.PropertyField(m_LocalShadowSupportedProp, Styles.supportsLocalShadows);
EditorGUI.BeginDisabledGroup(!(m_DirectionalShadowsSupportedProp.boolValue || m_LocalShadowSupportedProp.boolValue));
EditorGUILayout.PropertyField(m_SoftShadowsSupportedProp, Styles.supportsSoftShadows);
EditorGUI.EndDisabledGroup();
EditorGUI.indentLevel--;
EditorGUILayout.Space();
}
bool directionalShadows = m_DirectionalShadowsSupportedProp.boolValue;
if (directionalShadows)
void DrawGeneralSettings()
{
generalSettingsFoldout = EditorGUILayout.Foldout(generalSettingsFoldout, Styles.generalSettingsLabel, true);
if (generalSettingsFoldout)
EditorGUI.indentLevel++;
EditorGUILayout.PropertyField(m_SupportsDynamicBatching, Styles.dynamicBatching);
EditorGUILayout.PropertyField(m_RequireDepthTextureProp, Styles.requireDepthTexture);
EditorGUI.BeginDisabledGroup(!m_RequireDepthTextureProp.boolValue);
EditorGUILayout.PropertyField(m_RequireSoftParticlesProp, Styles.requireSoftParticles);
EditorGUI.EndDisabledGroup();
EditorGUILayout.PropertyField(m_RequireOpaqueTextureProp, Styles.requireOpaqueTexture);
EditorGUI.indentLevel++;
EditorGUI.BeginDisabledGroup(!m_RequireOpaqueTextureProp.boolValue);
EditorGUILayout.PropertyField(m_OpaqueDownsamplingProp, Styles.opaqueDownsampling);
EditorGUI.EndDisabledGroup();
EditorGUI.indentLevel--;
EditorGUI.indentLevel--;
EditorGUILayout.Space();
EditorGUILayout.Space();
}
}
void DrawQualitySettings()
{
qualitySettingsFoldout = EditorGUILayout.Foldout(qualitySettingsFoldout, Styles.qualityLabel, true);
if (qualitySettingsFoldout)
{
EditorGUI.indentLevel++;
EditorGUILayout.PropertyField(m_HDR, Styles.hdrContent);
EditorGUILayout.PropertyField(m_MSAA, Styles.msaaContent);
m_RenderScale.floatValue = EditorGUILayout.Slider(Styles.renderScaleLabel, m_RenderScale.floatValue, k_MinRenderScale, k_MaxRenderScale);
m_MaxPixelLights.intValue = EditorGUILayout.IntSlider(Styles.maxPixelLightsLabel, m_MaxPixelLights.intValue, 0, k_MaxSupportedPixelLights);
EditorGUI.indentLevel--;
EditorGUILayout.Space();
EditorGUILayout.Space();
}
}
void DrawShadowSettings()
{
shadowsSettingsFoldout = EditorGUILayout.Foldout(shadowsSettingsFoldout, Styles.shadowLabel, true);
if (shadowsSettingsFoldout)
{
// Directional Shadows
EditorGUI.BeginDisabledGroup(!m_DirectionalShadowsSupportedProp.boolValue);
EditorGUI.indentLevel++;
EditorGUILayout.LabelField(Styles.directionalShadowLabel);
EditorGUI.indentLevel++;
EditorGUILayout.PropertyField(m_DirectionalShadowAtlasResolutionProp, Styles.directionalShadowAtlasResolution);

EditorGUI.indentLevel--;
EditorGUILayout.Space();
}
EditorGUI.EndDisabledGroup();
bool localShadows = m_LocalShadowSupportedProp.boolValue;
if (localShadows)
{
// Local Shadows
EditorGUI.BeginDisabledGroup(!m_LocalShadowSupportedProp.boolValue);
EditorGUILayout.Space();
EditorGUI.indentLevel--;
EditorGUI.EndDisabledGroup();
if (directionalShadows || localShadows)
EditorGUILayout.PropertyField(m_SoftShadowsSupportedProp, Styles.supportsSoftShadows);
EditorGUI.indentLevel--;
EditorGUILayout.Space();
EditorGUILayout.Space();
void DrawCapabilitiesSettings()
{
EditorGUILayout.LabelField(Styles.capabilitiesLabel, EditorStyles.boldLabel);
EditorGUI.indentLevel++;
EditorGUILayout.PropertyField(m_SupportsVertexLightProp, Styles.enableVertexLightLabel);
EditorGUILayout.PropertyField(m_RequireDepthTextureProp, Styles.requireDepthTexture);
EditorGUI.BeginDisabledGroup(!m_RequireDepthTextureProp.boolValue);
EditorGUILayout.PropertyField(m_RequireSoftParticlesProp, Styles.requireSoftParticles);
EditorGUI.EndDisabledGroup();
EditorGUILayout.PropertyField(m_RequireOpaqueTextureProp, Styles.requireOpaqueTexture);
EditorGUI.indentLevel++;
EditorGUI.BeginDisabledGroup(!m_RequireOpaqueTextureProp.boolValue);
EditorGUILayout.PropertyField(m_OpaqueDownsamplingProp, Styles.opaqueDownsampling);
EditorGUI.EndDisabledGroup();
EditorGUI.indentLevel--;
EditorGUILayout.PropertyField(m_HDR, Styles.hdrContent);
EditorGUILayout.PropertyField(m_MSAA, Styles.msaaContent);
EditorGUILayout.PropertyField(m_SupportsDynamicBatching, Styles.dynamicBatching);
EditorGUILayout.PropertyField(m_DirectionalShadowsSupportedProp, Styles.supportsDirectionalShadows);
EditorGUILayout.PropertyField(m_LocalShadowSupportedProp, Styles.supportsLocalShadows);
EditorGUI.indentLevel--;
EditorGUILayout.Space();
EditorGUILayout.Space();
}
}
}
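
The inspector rework above splits the flat property list into foldout sections (General, Quality, Shadows, Capabilities). A small sketch of that foldout pattern for a custom asset editor; the serialized property name is a placeholder:

using UnityEditor;
using UnityEngine;

// A [CustomEditor(typeof(...))] attribute would bind this to a real asset type.
public class FoldoutInspectorExample : Editor
{
    bool m_GeneralFoldout = true;
    SerializedProperty m_DynamicBatching;   // placeholder property name

    void OnEnable()
    {
        m_DynamicBatching = serializedObject.FindProperty("m_SupportsDynamicBatching");
    }

    public override void OnInspectorGUI()
    {
        serializedObject.Update();

        // Passing 'true' makes the whole label clickable, as in the LWRP editor above.
        m_GeneralFoldout = EditorGUILayout.Foldout(m_GeneralFoldout, "General", true);
        if (m_GeneralFoldout)
        {
            EditorGUI.indentLevel++;
            EditorGUILayout.PropertyField(m_DynamicBatching);
            EditorGUI.indentLevel--;
            EditorGUILayout.Space();
        }

        serializedObject.ApplyModifiedProperties();
    }
}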

8
com.unity.render-pipelines.lightweight/LWRP/Editor/ShaderGUI/LightweightShaderGUI.cs


public static GUIContent twoSidedText = new GUIContent("Two Sided", "Render front and back faces");
public static GUIContent alphaClipText = new GUIContent("Alpha Clip", "Enable Alpha Clip");
public static GUIContent alphaClipThresholdText = new GUIContent("Clip Threshold", "Threshold for alpha clip");
public static GUIContent receiveShadowText = new GUIContent("Receive Shadows", "Enables this material to receive shadows if there is at least one shadow casting light affecting it.");
public static readonly string[] surfaceNames = Enum.GetNames(typeof(SurfaceType));
public static readonly string[] blendNames = Enum.GetNames(typeof(BlendMode));
}

protected MaterialProperty cullingProp;
protected MaterialProperty alphaClipProp;
protected MaterialProperty alphaCutoffProp;
protected MaterialProperty receiveShadowsProp;
private bool m_FirstTimeApply = true;
public abstract void MaterialChanged(Material material);

cullingProp = FindProperty("_Cull", properties);
alphaClipProp = FindProperty("_AlphaClip", properties);
alphaCutoffProp = FindProperty("_Cutoff", properties);
receiveShadowsProp = FindProperty("_ReceiveShadows", properties);
}
public virtual void ShaderPropertiesGUI(Material material)

if (alphaClipProp.floatValue == 1)
m_MaterialEditor.ShaderProperty(alphaCutoffProp, Styles.alphaClipThresholdText, MaterialEditor.kMiniTextureFieldLabelIndentLevel + 1);
EditorGUI.BeginChangeCheck();
bool receiveShadows = EditorGUILayout.Toggle(Styles.receiveShadowText, receiveShadowsProp.floatValue == 1.0f);
if (EditorGUI.EndChangeCheck())
receiveShadowsProp.floatValue = receiveShadows ? 1.0f : 0.0f;
EditorGUILayout.Space();
}

2
com.unity.render-pipelines.lightweight/LWRP/Editor/ShaderGUI/LightweightStandardGUI.cs


CoreUtils.SetKeyword(material, "_PARALLAXMAP", material.GetTexture("_ParallaxMap"));
CoreUtils.SetKeyword(material, "_DETAIL_MULX2", material.GetTexture("_DetailAlbedoMap") || material.GetTexture("_DetailNormalMap"));
CoreUtils.SetKeyword(material, "_RECEIVE_SHADOWS_OFF", material.GetFloat("_ReceiveShadows") == 0.0f);
// A material's GI flag internally keeps track of whether emission is enabled at all, enabled but with no effect,
// or enabled and possibly modified at runtime. This state depends on the values of the current flag and emissive color.
// The fixup routine makes sure that the material is in the correct state if/when changes are made to the mode or color.
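
The GUI code above serializes the Receive Shadows checkbox as a float property (_ReceiveShadows) and derives the _RECEIVE_SHADOWS_OFF keyword from it when the material changes, keeping serialized state and shader variants in sync. A hedged sketch of that property-to-keyword step:

using UnityEngine;

static class ReceiveShadowsKeywordExample
{
    // Keeps the compiled shader variant in sync with the serialized float toggle.
    // The property and keyword names follow the hunks above.
    public static void SyncReceiveShadows(Material material)
    {
        bool receiveShadows = material.GetFloat("_ReceiveShadows") != 0.0f;
        if (receiveShadows)
            material.DisableKeyword("_RECEIVE_SHADOWS_OFF");
        else
            material.EnableKeyword("_RECEIVE_SHADOWS_OFF");
    }
}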

2
com.unity.render-pipelines.lightweight/LWRP/Editor/ShaderGUI/LightweightStandardSimpleLightingGUI.cs


MaterialEditor.FixupEmissiveFlag(material);
bool shouldEmissionBeEnabled = (material.globalIlluminationFlags & MaterialGlobalIlluminationFlags.EmissiveIsBlack) == 0;
CoreUtils.SetKeyword(material, "_EMISSION", shouldEmissionBeEnabled);
CoreUtils.SetKeyword(material, "_RECEIVE_SHADOWS_OFF", material.GetFloat("_ReceiveShadows") == 0.0f);
}
private void UpdateMaterialSpecularSource(Material material)

1
com.unity.render-pipelines.lightweight/LWRP/Editor/ShaderGraph/LightWeightPBRSubShader.cs


[FormerName("UnityEditor.ShaderGraph.LightWeightPBRSubShader")]
public class LightWeightPBRSubShader : IPBRSubShader
{
static NeededCoordinateSpace m_VertexCoordinateSpace = NeededCoordinateSpace.Object;
static NeededCoordinateSpace m_PixelCoordinateSpace = NeededCoordinateSpace.World;
struct Pass

1
com.unity.render-pipelines.lightweight/LWRP/Editor/ShaderGraph/LightWeightUnlitSubShader.cs


[FormerName("UnityEditor.ShaderGraph.LightWeightUnlitSubShader")]
public class LightWeightUnlitSubShader : IUnlitSubShader
{
static NeededCoordinateSpace m_VertexCoordinateSpace = NeededCoordinateSpace.Object;
static NeededCoordinateSpace m_PixelCoordinateSpace = NeededCoordinateSpace.World;
struct Pass

2
com.unity.render-pipelines.lightweight/LWRP/IRendererSetup.cs


{
public interface IRendererSetup
{
}
}

40
com.unity.render-pipelines.lightweight/LWRP/LightweightForwardRenderer.cs


using System;
using System.Collections.Generic;
using System.Runtime.InteropServices;
using UnityEngine.Rendering;
using UnityEngine.Rendering.PostProcessing;

{
get
{
return SystemInfo.supportsComputeShaders &&
SystemInfo.graphicsDeviceType != GraphicsDeviceType.OpenGLCore &&
!Application.isMobilePlatform &&
Application.platform != RuntimePlatform.WebGLPlayer;
// TODO: Graphics Emulation breaks StructuredBuffers; disabling this for now until
// we have a fix for it.
return false;
// return SystemInfo.supportsComputeShaders &&
// SystemInfo.graphicsDeviceType != GraphicsDeviceType.OpenGLCore &&
// !Application.isMobilePlatform &&
// Application.platform != RuntimePlatform.WebGLPlayer;
}
}

public ComputeBuffer perObjectLightIndices { get; private set; }
public FilterRenderersSettings opaqueFilterSettings { get; private set; }
public FilterRenderersSettings transparentFilterSettings { get; private set; }
Material[] m_Materials;
readonly Material[] m_Materials;
m_Materials = new Material[(int)MaterialHandles.Count]
this.pipelineAsset = pipelineAsset;
m_Materials = new[]
{
CoreUtils.CreateEngineMaterial("Hidden/InternalErrorShader"),
CoreUtils.CreateEngineMaterial(pipelineAsset.copyDepthShader),

};
postProcessRenderContext = new PostProcessRenderContext();
opaqueFilterSettings = new FilterRenderersSettings(true)
{
renderQueueRange = RenderQueueRange.opaque,
};
transparentFilterSettings = new FilterRenderersSettings(true)
{
renderQueueRange = RenderQueueRange.transparent,
};
public LightweightPipelineAsset pipelineAsset { get; private set; }
public void Dispose()
{

CoreUtils.Destroy(m_Materials[i]);
}
public RenderTextureDescriptor CreateRTDesc(ref CameraData cameraData, float scaler = 1.0f)
public static RenderTextureDescriptor CreateRTDesc(ref CameraData cameraData, float scaler = 1.0f)
{
Camera camera = cameraData.camera;
RenderTextureDescriptor desc;

public void Execute(ref ScriptableRenderContext context, ref CullResults cullResults, ref RenderingData renderingData)
{
for (int i = 0; i < m_ActiveRenderPassQueue.Count; ++i)
m_ActiveRenderPassQueue[i].Execute(ref context, ref cullResults, ref renderingData);
m_ActiveRenderPassQueue[i].Execute(this, ref context, ref cullResults, ref renderingData);
public Material GetMaterial(MaterialHandles handle)
{
int handleID = (int)handle;

CommandBuffer cmd = CommandBufferPool.Get("Release Resources");
for (int i = 0; i < m_ActiveRenderPassQueue.Count; ++i)
m_ActiveRenderPassQueue[i].Dispose(cmd);
m_ActiveRenderPassQueue[i].FrameCleanup(cmd);
context.ExecuteCommandBuffer(cmd);
CommandBufferPool.Release(cmd);
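
The renderer change above hard-disables the structured-buffer path because Graphics Emulation breaks StructuredBuffers, keeping the original platform test commented out for when a fix lands, and it makes CreateRTDesc static so passes no longer need a renderer instance for it. A sketch of the guarded capability property (the property name is illustrative):

using UnityEngine;
using UnityEngine.Rendering;

static class StructuredBufferCapabilityExample
{
    // Illustrative capability gate: a known issue forces the feature off while the
    // real platform test stays commented out, ready to be restored once fixed.
    public static bool useStructuredBuffers
    {
        get
        {
            // Workaround: Graphics Emulation currently breaks StructuredBuffers.
            return false;

            // Intended check once the emulation issue is fixed:
            // return SystemInfo.supportsComputeShaders
            //     && SystemInfo.graphicsDeviceType != GraphicsDeviceType.OpenGLCore
            //     && !Application.isMobilePlatform
            //     && Application.platform != RuntimePlatform.WebGLPlayer;
        }
    }
}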

6
com.unity.render-pipelines.lightweight/LWRP/LightweightPipeline.cs


public LightweightPipelineAsset pipelineAsset { get; private set; }
private IRendererSetup m_DefaultRendererSetup;
private IRendererSetup defaultRendererSetup
{
get

List<int> m_LocalLightIndices = new List<int>();
bool m_IsCameraRendering;
private IRendererSetup m_DefaultRendererSetup;
public LightweightPipeline(LightweightPipelineAsset asset)
{

setup = defaultRendererSetup;
setup.Setup(m_Renderer, ref context, ref m_CullResults, ref renderingData);
m_Renderer.Execute(ref context, ref m_CullResults, ref renderingData);
}
#if UNITY_EDITOR

supportedLightmapsModes = LightmapsMode.CombinedDirectional | LightmapsMode.NonDirectional,
rendererSupportsLightProbeProxyVolumes = false,
rendererSupportsMotionVectors = false,
rendererSupportsReceiveShadows = true,
rendererSupportsReceiveShadows = false,
rendererSupportsReflectionProbes = true
};
SceneViewDrawMode.SetupDrawMode();

7
com.unity.render-pipelines.lightweight/LWRP/LightweightShadowUtils.cs


cascadeIndex, shadowData.directionalLightCascadeCount, shadowData.directionalLightCascades, shadowResolution, shadowNearPlane, out viewMatrix, out projMatrix,
out splitData);
float cullingSphereRadius = splitData.cullingSphere.w;
cascadeSplitDistance.w = cullingSphereRadius * cullingSphereRadius;
shadowSliceData.offsetX = (cascadeIndex % 2) * shadowResolution;
shadowSliceData.offsetY = (cascadeIndex / 2) * shadowResolution;
shadowSliceData.resolution = shadowResolution;

}
public static void RenderShadowSlice(CommandBuffer cmd, ref ScriptableRenderContext context,
ref ShadowSliceData shadowSliceData,
Matrix4x4 proj, Matrix4x4 view, DrawShadowsSettings settings)
ref ShadowSliceData shadowSliceData, ref DrawShadowsSettings settings,
Matrix4x4 proj, Matrix4x4 view)
{
cmd.SetViewport(new Rect(shadowSliceData.offsetX, shadowSliceData.offsetY, shadowSliceData.resolution, shadowSliceData.resolution));
cmd.EnableScissorRect(new Rect(shadowSliceData.offsetX + 4, shadowSliceData.offsetY + 4, shadowSliceData.resolution - 8, shadowSliceData.resolution - 8));
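
Note that the culling sphere radius above is squared before being stored in cascadeSplitDistance.w (and the per-cascade radii are squared again when uploaded as _DirShadowSplitSphereRadii further down), so cascade selection can compare a squared distance against it and avoid a square root. A small CPU-side sketch of that comparison, with (center.xyz, radius^2) packed into a Vector4 as in the diff:

using UnityEngine;

static class CascadeSelectionExample
{
    // spheres[i] = (center.xyz, radius^2): returns the first cascade whose culling
    // sphere contains the position, comparing squared distances to avoid a sqrt.
    public static int SelectCascade(Vector3 positionWS, Vector4[] spheres)
    {
        for (int i = 0; i < spheres.Length; ++i)
        {
            Vector3 toCenter = positionWS - new Vector3(spheres[i].x, spheres[i].y, spheres[i].z);
            if (Vector3.Dot(toCenter, toCenter) < spheres[i].w)
                return i;
        }
        return spheres.Length - 1;   // outside every sphere: fall back to the last cascade
    }
}

The shader side works the same way: because _DirShadowSplitSphereRadii now holds squared radii, the per-pixel test stays a single dot-product comparison.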

7
com.unity.render-pipelines.lightweight/LWRP/Passes/BeginXRRenderingPass.cs


{
public class BeginXRRenderingPass : ScriptableRenderPass
{
public BeginXRRenderingPass(LightweightForwardRenderer renderer) : base(renderer)
{}
public override void Execute(ref ScriptableRenderContext context, ref CullResults cullResults, ref RenderingData renderingData)
public override void Execute(LightweightForwardRenderer renderer, ref ScriptableRenderContext context,
ref CullResults cullResults,
ref RenderingData renderingData)
{
Camera camera = renderingData.cameraData.camera;
context.StartMultiEye(camera);

21
com.unity.render-pipelines.lightweight/LWRP/Passes/CopyColorPass.cs


{
public class CopyColorPass : ScriptableRenderPass
{
Material m_SamplingMaterial;
public CopyColorPass(LightweightForwardRenderer renderer) : base(renderer)
public CopyColorPass()
m_SamplingMaterial = renderer.GetMaterial(MaterialHandles.Sampling);
m_SampleOffsetShaderHandle = Shader.PropertyToID("_SampleOffset");
}

this.destination = destination;
}
public override void Execute(ref ScriptableRenderContext context, ref CullResults cullResults, ref RenderingData renderingData)
public override void Execute(LightweightForwardRenderer renderer, ref ScriptableRenderContext context,
ref CullResults cullResults,
ref RenderingData renderingData)
{
CommandBuffer cmd = CommandBufferPool.Get("Copy Color");

RenderTextureDescriptor opaqueDesc = renderer.CreateRTDesc(ref renderingData.cameraData, opaqueScaler);
RenderTextureDescriptor opaqueDesc = LightweightForwardRenderer.CreateRTDesc(ref renderingData.cameraData, opaqueScaler);
RenderTargetIdentifier colorRT = source.Identifier();
RenderTargetIdentifier opaqueColorRT = destination.Identifier();

cmd.Blit(colorRT, opaqueColorRT);
break;
case Downsampling._4xBox:
m_SamplingMaterial.SetFloat(m_SampleOffsetShaderHandle, 2);
cmd.Blit(colorRT, opaqueColorRT, m_SamplingMaterial, 0);
Material samplingMaterial = renderer.GetMaterial(MaterialHandles.Sampling);
samplingMaterial.SetFloat(m_SampleOffsetShaderHandle, 2);
cmd.Blit(colorRT, opaqueColorRT, samplingMaterial, 0);
break;
case Downsampling._4xBilinear:
cmd.Blit(colorRT, opaqueColorRT);

}
public override void Dispose(CommandBuffer cmd)
public override void FrameCleanup(CommandBuffer cmd)
{
if (destination != RenderTargetHandle.CameraTarget)
{

}
}
}
}
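
The opaque color copy above chooses between a plain blit and the shared sampling material depending on the downsampling mode, widening the box filter through a _SampleOffset uniform for the 4x box case. A hedged sketch of that switch; the enum and material are assumed to come from the pipeline asset:

using UnityEngine;
using UnityEngine.Rendering;

// Simplified stand-in for the pipeline's downsampling enum.
enum DownsamplingSketch { None, FourXBox, FourXBilinear }

static class CopyColorExample
{
    static readonly int k_SampleOffset = Shader.PropertyToID("_SampleOffset");

    // Copies the opaque color target, optionally downsampling. For the 4x modes the
    // destination is expected to already be allocated at the reduced resolution.
    public static void CopyColor(CommandBuffer cmd, RenderTargetIdentifier source,
                                 RenderTargetIdentifier destination,
                                 Material samplingMaterial, DownsamplingSketch mode)
    {
        switch (mode)
        {
            case DownsamplingSketch.FourXBox:
                // The sampling shader averages a footprint widened by _SampleOffset.
                samplingMaterial.SetFloat(k_SampleOffset, 2);
                cmd.Blit(source, destination, samplingMaterial, 0);
                break;
            default:
                // None and bilinear both rely on the blit itself for filtering.
                cmd.Blit(source, destination);
                break;
        }
    }
}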

23
com.unity.render-pipelines.lightweight/LWRP/Passes/CopyDepthPass.cs


{
public class CopyDepthPass : ScriptableRenderPass
{
Material m_DepthCopyMaterial;
public CopyDepthPass(LightweightForwardRenderer renderer) : base(renderer)
{
m_DepthCopyMaterial = renderer.GetMaterial(MaterialHandles.DepthCopy);
}
const string k_DepthCopyTag = "Depth Copy";
public void Setup(RenderTargetHandle source, RenderTargetHandle destination)
{

public override void Execute(ref ScriptableRenderContext context, ref CullResults cullResults, ref RenderingData renderingData)
public override void Execute(LightweightForwardRenderer renderer, ref ScriptableRenderContext context,
ref CullResults cullResults,
ref RenderingData renderingData)
CommandBuffer cmd = CommandBufferPool.Get("Depth Copy");
CommandBuffer cmd = CommandBufferPool.Get(k_DepthCopyTag);
Material depthCopyMaterial = renderer.GetMaterial(MaterialHandles.DepthCopy);
RenderTextureDescriptor descriptor = renderer.CreateRTDesc(ref renderingData.cameraData);
RenderTextureDescriptor descriptor = LightweightForwardRenderer.CreateRTDesc(ref renderingData.cameraData);
descriptor.colorFormat = RenderTextureFormat.Depth;
descriptor.depthBufferBits = 32; // TODO: fix this
descriptor.msaaSamples = 1;

cmd.EnableShaderKeyword(LightweightKeywordStrings.DepthMsaa2);
cmd.DisableShaderKeyword(LightweightKeywordStrings.DepthMsaa4);
}
cmd.Blit(depthSurface, copyDepthSurface, m_DepthCopyMaterial);
cmd.Blit(depthSurface, copyDepthSurface, depthCopyMaterial);
}
else
{

LightweightPipeline.CopyTexture(cmd, depthSurface, copyDepthSurface, m_DepthCopyMaterial);
LightweightPipeline.CopyTexture(cmd, depthSurface, copyDepthSurface, depthCopyMaterial);
public override void Dispose(CommandBuffer cmd)
public override void FrameCleanup(CommandBuffer cmd)
{
if (destination != RenderTargetHandle.CameraTarget)
{

9
com.unity.render-pipelines.lightweight/LWRP/Passes/CreateLightweightRenderTexturesPass.cs


{
public class CreateLightweightRenderTexturesPass : ScriptableRenderPass
{
public CreateLightweightRenderTexturesPass(LightweightForwardRenderer renderer) : base(renderer)
{}
const int k_DepthStencilBufferBits = 32;
private RenderTargetHandle colorAttachmentHandle { get; set; }
private RenderTargetHandle depthAttachmentHandle { get; set; }

descriptor = baseDescriptor;
}
public override void Execute(ref ScriptableRenderContext context, ref CullResults cullResults, ref RenderingData renderingData)
public override void Execute(LightweightForwardRenderer renderer, ref ScriptableRenderContext context,
ref CullResults cullResults,
ref RenderingData renderingData)
{
CommandBuffer cmd = CommandBufferPool.Get("");
if (colorAttachmentHandle != RenderTargetHandle.CameraTarget)

CommandBufferPool.Release(cmd);
}
public override void Dispose(CommandBuffer cmd)
public override void FrameCleanup(CommandBuffer cmd)
{
if (colorAttachmentHandle != RenderTargetHandle.CameraTarget)
{

17
com.unity.render-pipelines.lightweight/LWRP/Passes/DepthOnlyPass.cs


private RenderTargetHandle depthAttachmentHandle { get; set; }
private RenderTextureDescriptor descriptor { get; set; }
public FilterRenderersSettings opaqueFilterSettings { get; private set; }
public void Setup(
RenderTextureDescriptor baseDescriptor,

descriptor = baseDescriptor;
}
public DepthOnlyPass(LightweightForwardRenderer renderer) : base(renderer)
public DepthOnlyPass()
opaqueFilterSettings = new FilterRenderersSettings(true)
{
renderQueueRange = RenderQueueRange.opaque,
};
public override void Execute(ref ScriptableRenderContext context, ref CullResults cullResults, ref RenderingData renderingData)
public override void Execute(LightweightForwardRenderer renderer, ref ScriptableRenderContext context,
ref CullResults cullResults,
ref RenderingData renderingData)
{
CommandBuffer cmd = CommandBufferPool.Get(k_DepthPrepassTag);
using (new ProfilingSample(cmd, k_DepthPrepassTag))

{
Camera camera = renderingData.cameraData.camera;
context.StartMultiEye(camera);
context.DrawRenderers(cullResults.visibleRenderers, ref drawSettings, renderer.opaqueFilterSettings);
context.DrawRenderers(cullResults.visibleRenderers, ref drawSettings, opaqueFilterSettings);
context.DrawRenderers(cullResults.visibleRenderers, ref drawSettings, renderer.opaqueFilterSettings);
context.DrawRenderers(cullResults.visibleRenderers, ref drawSettings, opaqueFilterSettings);
}
context.ExecuteCommandBuffer(cmd);
CommandBufferPool.Release(cmd);

public override void Dispose(CommandBuffer cmd)
public override void FrameCleanup(CommandBuffer cmd)
{
if (depthAttachmentHandle != RenderTargetHandle.CameraTarget)
{

28
com.unity.render-pipelines.lightweight/LWRP/Passes/DirectionalShadowsPass.cs


private RenderTargetHandle destination { get; set; }
public DirectionalShadowsPass(LightweightForwardRenderer renderer) : base(renderer)
public DirectionalShadowsPass()
{
RegisterShaderPassName("ShadowCaster");

this.destination = destination;
}
public override void Execute(ref ScriptableRenderContext context, ref CullResults cullResults, ref RenderingData renderingData)
public override void Execute(LightweightForwardRenderer renderer, ref ScriptableRenderContext context,
ref CullResults cullResults,
ref RenderingData renderingData)
{
if (renderingData.shadowData.renderDirectionalShadows)
{

}
public override void Dispose(CommandBuffer cmd)
public override void FrameCleanup(CommandBuffer cmd)
{
if (m_DirectionalShadowmapTexture)
{

success = LightweightShadowUtils.ExtractDirectionalLightMatrix(ref cullResults, ref shadowData, shadowLightIndex, cascadeIndex, shadowResolution, shadowNearPlane, out m_CascadeSplitDistances[cascadeIndex], out m_CascadeSlices[cascadeIndex], out view, out proj);
if (success)
{
settings.splitData.cullingSphere = m_CascadeSplitDistances[cascadeIndex];
LightweightShadowUtils.RenderShadowSlice(cmd, ref context, ref m_CascadeSlices[cascadeIndex], proj,
view, settings);
LightweightShadowUtils.RenderShadowSlice(cmd, ref context, ref m_CascadeSlices[cascadeIndex], ref settings, proj, view);
}
}

float invHalfShadowAtlasWidth = 0.5f * invShadowAtlasWidth;
float invHalfShadowAtlasHeight = 0.5f * invShadowAtlasHeight;
cmd.SetGlobalTexture(destination.id, m_DirectionalShadowmapTexture);
if (shadowData.directionalLightCascadeCount > 1)
cmd.SetGlobalMatrixArray(DirectionalShadowConstantBuffer._WorldToShadow, m_DirectionalShadowMatrices);
else
cmd.SetGlobalMatrix(DirectionalShadowConstantBuffer._WorldToShadow, m_DirectionalShadowMatrices[0]);
cmd.SetGlobalMatrixArray(DirectionalShadowConstantBuffer._WorldToShadow, m_DirectionalShadowMatrices);
cmd.SetGlobalVector(DirectionalShadowConstantBuffer._DirShadowSplitSpheres0, m_CascadeSplitDistances[1]);
cmd.SetGlobalVector(DirectionalShadowConstantBuffer._DirShadowSplitSpheres0, m_CascadeSplitDistances[2]);
cmd.SetGlobalVector(DirectionalShadowConstantBuffer._DirShadowSplitSpheres0, m_CascadeSplitDistances[3]);
cmd.SetGlobalVector(DirectionalShadowConstantBuffer._DirShadowSplitSphereRadii, new Vector4(m_CascadeSplitDistances[0].w, m_CascadeSplitDistances[1].w, m_CascadeSplitDistances[2].w, m_CascadeSplitDistances[3].w));
cmd.SetGlobalVector(DirectionalShadowConstantBuffer._DirShadowSplitSpheres1, m_CascadeSplitDistances[1]);
cmd.SetGlobalVector(DirectionalShadowConstantBuffer._DirShadowSplitSpheres2, m_CascadeSplitDistances[2]);
cmd.SetGlobalVector(DirectionalShadowConstantBuffer._DirShadowSplitSpheres3, m_CascadeSplitDistances[3]);
cmd.SetGlobalVector(DirectionalShadowConstantBuffer._DirShadowSplitSphereRadii, new Vector4(m_CascadeSplitDistances[0].w * m_CascadeSplitDistances[0].w,
m_CascadeSplitDistances[1].w * m_CascadeSplitDistances[1].w,
m_CascadeSplitDistances[2].w * m_CascadeSplitDistances[2].w,
m_CascadeSplitDistances[3].w * m_CascadeSplitDistances[3].w));
cmd.SetGlobalVector(DirectionalShadowConstantBuffer._ShadowOffset0, new Vector4(-invHalfShadowAtlasWidth, -invHalfShadowAtlasHeight, 0.0f, 0.0f));
cmd.SetGlobalVector(DirectionalShadowConstantBuffer._ShadowOffset1, new Vector4(invHalfShadowAtlasWidth, -invHalfShadowAtlasHeight, 0.0f, 0.0f));
cmd.SetGlobalVector(DirectionalShadowConstantBuffer._ShadowOffset2, new Vector4(-invHalfShadowAtlasWidth, invHalfShadowAtlasHeight, 0.0f, 0.0f));

Some files were not shown because too many files changed in this diff.
