
Merge remote-tracking branch 'origin/master' into HDRP-GraphicTests

Remy, 7 years ago
Current commit
c22205d7
105 changed files, with 9,728 additions and 1,071 deletions
  1. ImageTemplates/HDRenderPipeline/Scenes/1xxx_Materials/1101_Unlit.unity.png.meta (6)
  2. ImageTemplates/HDRenderPipeline/Scenes/1xxx_Materials/1102_Unlit_Distortion.unity.png.meta (6)
  3. ImageTemplates/HDRenderPipeline/Scenes/1xxx_Materials/1103_Unlit_Distortion_DepthTest.unity.png.meta (6)
  4. ImageTemplates/HDRenderPipeline/Scenes/1xxx_Materials/1201_Lit_Features.unity.png.meta (6)
  5. ImageTemplates/HDRenderPipeline/Scenes/1xxx_Materials/1202_Lit_DoubleSideNormalMode.unity.png.meta (6)
  6. ImageTemplates/HDRenderPipeline/Scenes/1xxx_Materials/1203_Lit_Transparent.unity.png.meta (6)
  7. ImageTemplates/HDRenderPipeline/Scenes/1xxx_Materials/1204_Lit_Transparent_Fog.unity.png.meta (6)
  8. ImageTemplates/HDRenderPipeline/Scenes/1xxx_Materials/1205_Lit_Transparent_Refraction.unity.png.meta (6)
  9. ImageTemplates/HDRenderPipeline/Scenes/1xxx_Materials/1206_Lit_Transparent_Distortion.unity.png.meta (6)
  10. ImageTemplates/LightweightPipeline/Scenes/023_Lighting_Mixed.unity.png.meta (6)
  11. ImageTemplates/LightweightPipeline/Scenes/027_PostProcessing.unity.png.meta (6)
  12. ImageTemplates/LightweightPipeline/Scenes/036_Lighting_Scene_DirectionalBakedDirectional.unity.png.meta (6)
  13. SampleScenes/HDTest/BasicProfiling.unity (625)
  14. ScriptableRenderPipeline/Core/CoreUtils.cs (24)
  15. ScriptableRenderPipeline/Core/EncodeBC6H.cs.meta (4)
  16. ScriptableRenderPipeline/Core/Resources/EncodeBC6H.compute.meta (6)
  17. ScriptableRenderPipeline/Core/ShaderLibrary/BC6H.hlsl (3)
  18. ScriptableRenderPipeline/Core/ShaderLibrary/Common.hlsl (68)
  19. ScriptableRenderPipeline/Core/ShaderLibrary/CommonMaterial.hlsl (5)
  20. ScriptableRenderPipeline/Core/ShaderLibrary/Macros.hlsl (6)
  21. ScriptableRenderPipeline/Core/ShaderLibrary/Shadow/Shadow.hlsl (8)
  22. ScriptableRenderPipeline/Core/ShaderLibrary/Tessellation.hlsl (6)
  23. ScriptableRenderPipeline/Core/TextureCache.cs (150)
  24. ScriptableRenderPipeline/HDRenderPipeline/Debug/DebugViewMaterialGBuffer.shader (6)
  25. ScriptableRenderPipeline/HDRenderPipeline/Debug/DebugViewTiles.shader (8)
  26. ScriptableRenderPipeline/HDRenderPipeline/Editor/HDRenderPipelineInspector.Styles.cs (1)
  27. ScriptableRenderPipeline/HDRenderPipeline/Editor/HDRenderPipelineInspector.cs (17)
  28. ScriptableRenderPipeline/HDRenderPipeline/HDRenderPipeline.cs (458)
  29. ScriptableRenderPipeline/HDRenderPipeline/HDRenderPipelineAsset.asset (16)
  30. ScriptableRenderPipeline/HDRenderPipeline/HDRenderPipelineAsset.asset.meta (3)
  31. ScriptableRenderPipeline/HDRenderPipeline/HDRenderPipelineAsset.cs (9)
  32. ScriptableRenderPipeline/HDRenderPipeline/HDUtils.cs (40)
  33. ScriptableRenderPipeline/HDRenderPipeline/Lighting/Deferred.shader (6)
  34. ScriptableRenderPipeline/HDRenderPipeline/Lighting/TilePass/Deferred.compute (6)
  35. ScriptableRenderPipeline/HDRenderPipeline/Lighting/TilePass/DeferredDirectionalShadow.compute (2)
  36. ScriptableRenderPipeline/HDRenderPipeline/Lighting/TilePass/Shadow.hlsl (4)
  37. ScriptableRenderPipeline/HDRenderPipeline/Lighting/TilePass/TilePass.cs (143)
  38. ScriptableRenderPipeline/HDRenderPipeline/Lighting/TilePass/TilePassLoop.hlsl (87)
  39. ScriptableRenderPipeline/HDRenderPipeline/Lighting/TilePass/materialflags.compute (2)
  40. ScriptableRenderPipeline/HDRenderPipeline/Material/LayeredLit/LayeredLitData.hlsl (2)
  41. ScriptableRenderPipeline/HDRenderPipeline/Material/Lit/Lit.hlsl (48)
  42. ScriptableRenderPipeline/HDRenderPipeline/Material/Lit/LitData.hlsl (2)
  43. ScriptableRenderPipeline/HDRenderPipeline/Material/Lit/Resources/CopyStencilBuffer.shader (4)
  44. ScriptableRenderPipeline/HDRenderPipeline/Material/Lit/Resources/SubsurfaceScattering.compute (6)
  45. ScriptableRenderPipeline/HDRenderPipeline/Material/Lit/Resources/SubsurfaceScattering.shader (10)
  46. ScriptableRenderPipeline/HDRenderPipeline/RenderPipelineResources/CameraMotionVectors.shader (2)
  47. ScriptableRenderPipeline/HDRenderPipeline/RenderPipelineResources/DefaultHDMaterial.mat (29)
  48. ScriptableRenderPipeline/HDRenderPipeline/SceneSettings/SceneSettings.cs (7)
  49. ScriptableRenderPipeline/HDRenderPipeline/ShaderPass/FragInputs.hlsl (2)
  50. ScriptableRenderPipeline/HDRenderPipeline/ShaderPass/ShaderPassDepthOnly.hlsl (6)
  51. ScriptableRenderPipeline/HDRenderPipeline/ShaderPass/ShaderPassDistortion.hlsl (6)
  52. ScriptableRenderPipeline/HDRenderPipeline/ShaderPass/ShaderPassForward.hlsl (6)
  53. ScriptableRenderPipeline/HDRenderPipeline/ShaderPass/ShaderPassForwardUnlit.hlsl (6)
  54. ScriptableRenderPipeline/HDRenderPipeline/ShaderPass/ShaderPassGBuffer.hlsl (8)
  55. ScriptableRenderPipeline/HDRenderPipeline/ShaderPass/ShaderPassLightTransport.hlsl (6)
  56. ScriptableRenderPipeline/HDRenderPipeline/ShaderPass/ShaderPassVelocity.hlsl (8)
  57. ScriptableRenderPipeline/HDRenderPipeline/ShaderPass/VaryingMesh.hlsl (2)
  58. ScriptableRenderPipeline/HDRenderPipeline/Sky/BlacksmithlSky/Resources/SkyBlacksmith.shader (2)
  59. ScriptableRenderPipeline/HDRenderPipeline/Sky/OpaqueAtmosphericScattering.shader (2)
  60. ScriptableRenderPipeline/HDRenderPipeline/Sky/RuntimeFilterIBL.cs (58)
  61. ScriptableRenderPipeline/HDRenderPipeline/Sky/SkyManager.cs (125)
  62. TestbedPipelines/Fptl/FptlLighting.cs (15)
  63. TestbedPipelines/OnTileDeferredPipeline/OnTileDeferredRenderPipeline.cs (74)
  64. SampleScenes/HDTest/GraphicTest/Common/AnimationController/Props.controller (69)
  65. SampleScenes/HDTest/GraphicTest/Common/AnimationController/Props.controller.meta (8)
  66. SampleScenes/HDTest/GraphicTest/Common/AnimationController/RealtimeCubemap@main.anim (217)
  67. SampleScenes/HDTest/GraphicTest/Common/AnimationController/RealtimeCubemap@main.anim.meta (8)
  68. SampleScenes/HDTest/GraphicTest/Common/Material/Mat_Mirror.mat (172)
  69. SampleScenes/HDTest/GraphicTest/Common/Material/Mat_Mirror.mat.meta (8)
  70. SampleScenes/HDTest/GraphicTest/RealtimeCubemaps.meta (10)
  71. SampleScenes/HDTest/GraphicTest/RealtimeCubemaps.unity (1001)
  72. SampleScenes/HDTest/GraphicTest/RealtimeCubemaps.unity.meta (7)
  73. ScriptableRenderPipeline/Core/Resources/BC6H.compute (38)
  74. ScriptableRenderPipeline/Core/Resources/BC6H.compute.meta (10)
  75. ScriptableRenderPipeline/Core/Resources/BlitCubeTextureFace.shader (66)
  76. ScriptableRenderPipeline/Core/Resources/BlitCubeTextureFace.shader.meta (8)
  77. ScriptableRenderPipeline/HDRenderPipeline/FrameSettings.cs (14)
  78. ScriptableRenderPipeline/HDRenderPipeline/FrameSettings.cs.meta (11)
  79. ScriptableRenderPipeline/HDRenderPipeline/GlobalSettings.cs (38)
  80. ScriptableRenderPipeline/HDRenderPipeline/GlobalSettings.cs.meta (11)
  81. ScriptableRenderPipeline/HDRenderPipeline/Lighting/ReflectionProbeCache.cs (209)
  82. ScriptableRenderPipeline/HDRenderPipeline/Lighting/ReflectionProbeCache.cs.meta (11)
  83. SampleScenes/HDTest/GraphicTest/RealtimeCubemaps/LightingData.asset (1001)
  84. SampleScenes/HDTest/GraphicTest/RealtimeCubemaps/LightingData.asset.meta (10)
  85. SampleScenes/HDTest/GraphicTest/RealtimeCubemaps/Lightmap-0_comp_dir.png (941)
  86. SampleScenes/HDTest/GraphicTest/RealtimeCubemaps/Lightmap-0_comp_dir.png.meta (77)
  87. SampleScenes/HDTest/GraphicTest/RealtimeCubemaps/Lightmap-0_comp_light.exr (1001)
  88. SampleScenes/HDTest/GraphicTest/RealtimeCubemaps/Lightmap-0_comp_light.exr.meta (77)
  89. SampleScenes/HDTest/GraphicTest/RealtimeCubemaps/ReflectionProbe-0.exr (845)
  90. SampleScenes/HDTest/GraphicTest/RealtimeCubemaps/ReflectionProbe-0.exr.meta (78)
  91. SampleScenes/HDTest/GraphicTest/RealtimeCubemaps/Material RT1.mat (192)
  92. SampleScenes/HDTest/GraphicTest/RealtimeCubemaps/Material RT1.mat.meta (8)
  93. SampleScenes/HDTest/GraphicTest/RealtimeCubemaps/Material RT2.mat (192)
  94. SampleScenes/HDTest/GraphicTest/RealtimeCubemaps/Material RT2.mat.meta (8)
  95. SampleScenes/HDTest/GraphicTest/RealtimeCubemaps/RealtimeCubeMapSky.asset.meta (8)
  96. SampleScenes/HDTest/GraphicTest/RealtimeCubemaps/ReflectionProbe-1.exr (1001)
  97. SampleScenes/HDTest/GraphicTest/RealtimeCubemaps/ReflectionProbe-1.exr.meta (111)
  98. SampleScenes/HDTest/GraphicTest/RealtimeCubemaps/ReflectionProbe-2.exr (1001)
  99. SampleScenes/HDTest/GraphicTest/RealtimeCubemaps/ReflectionProbe-2.exr.meta (111)

6
ImageTemplates/HDRenderPipeline/Scenes/1xxx_Materials/1101_Unlit.unity.png.meta


sprites: []
outline: []
physicsShape: []
bones: []
spriteID:
vertices: []
indices:
edges: []
weights: []
spritePackingTag:
userData:
assetBundleName:

6
ImageTemplates/HDRenderPipeline/Scenes/1xxx_Materials/1102_Unlit_Distortion.unity.png.meta


sprites: []
outline: []
physicsShape: []
bones: []
spriteID:
vertices: []
indices:
edges: []
weights: []
spritePackingTag:
userData:
assetBundleName:

6
ImageTemplates/HDRenderPipeline/Scenes/1xxx_Materials/1103_Unlit_Distortion_DepthTest.unity.png.meta


sprites: []
outline: []
physicsShape: []
bones: []
spriteID:
vertices: []
indices:
edges: []
weights: []
spritePackingTag:
userData:
assetBundleName:

6
ImageTemplates/HDRenderPipeline/Scenes/1xxx_Materials/1201_Lit_Features.unity.png.meta


sprites: []
outline: []
physicsShape: []
bones: []
spriteID:
vertices: []
indices:
edges: []
weights: []
spritePackingTag:
userData:
assetBundleName:

6
ImageTemplates/HDRenderPipeline/Scenes/1xxx_Materials/1202_Lit_DoubleSideNormalMode.unity.png.meta


sprites: []
outline: []
physicsShape: []
bones: []
spriteID:
vertices: []
indices:
edges: []
weights: []
spritePackingTag:
userData:
assetBundleName:

6
ImageTemplates/HDRenderPipeline/Scenes/1xxx_Materials/1203_Lit_Transparent.unity.png.meta


sprites: []
outline: []
physicsShape: []
bones: []
spriteID:
vertices: []
indices:
edges: []
weights: []
spritePackingTag:
userData:
assetBundleName:

6
ImageTemplates/HDRenderPipeline/Scenes/1xxx_Materials/1204_Lit_Transparent_Fog.unity.png.meta


sprites: []
outline: []
physicsShape: []
bones: []
spriteID:
vertices: []
indices:
edges: []
weights: []
spritePackingTag:
userData:
assetBundleName:

6
ImageTemplates/HDRenderPipeline/Scenes/1xxx_Materials/1205_Lit_Transparent_Refraction.unity.png.meta


sprites: []
outline: []
physicsShape: []
bones: []
spriteID:
vertices: []
indices:
edges: []
weights: []
spritePackingTag:
userData:
assetBundleName:

6
ImageTemplates/HDRenderPipeline/Scenes/1xxx_Materials/1206_Lit_Transparent_Distortion.unity.png.meta


sprites: []
outline: []
physicsShape: []
bones: []
spriteID:
vertices: []
indices:
edges: []
weights: []
spritePackingTag:
userData:
assetBundleName:

6
ImageTemplates/LightweightPipeline/Scenes/023_Lighting_Mixed.unity.png.meta


sprites: []
outline: []
physicsShape: []
bones: []
spriteID:
vertices: []
indices:
edges: []
weights: []
spritePackingTag:
userData:
assetBundleName:

6
ImageTemplates/LightweightPipeline/Scenes/027_PostProcessing.unity.png.meta


sprites: []
outline: []
physicsShape: []
bones: []
spriteID:
vertices: []
indices:
edges: []
weights: []
spritePackingTag:
userData:
assetBundleName:

6
ImageTemplates/LightweightPipeline/Scenes/036_Lighting_Scene_DirectionalBakedDirectional.unity.png.meta


sprites: []
outline: []
physicsShape: []
bones: []
spriteID:
vertices: []
indices:
edges: []
weights: []
spritePackingTag:
userData:
assetBundleName:

625
SampleScenes/HDTest/BasicProfiling.unity
File diff is too large to display.

24
ScriptableRenderPipeline/Core/CoreUtils.cs


public static class CoreUtils
{
// Data useful for various cubemap processes.
// Ref: https://msdn.microsoft.com/en-us/library/windows/desktop/bb204881(v=vs.85).aspx
static public readonly Vector3[] lookAtList =
{
new Vector3(1.0f, 0.0f, 0.0f),
new Vector3(-1.0f, 0.0f, 0.0f),
new Vector3(0.0f, 1.0f, 0.0f),
new Vector3(0.0f, -1.0f, 0.0f),
new Vector3(0.0f, 0.0f, 1.0f),
new Vector3(0.0f, 0.0f, -1.0f),
};
static public readonly Vector3[] upVectorList =
{
new Vector3(0.0f, 1.0f, 0.0f),
new Vector3(0.0f, 1.0f, 0.0f),
new Vector3(0.0f, 0.0f, -1.0f),
new Vector3(0.0f, 0.0f, 1.0f),
new Vector3(0.0f, 1.0f, 0.0f),
new Vector3(0.0f, 1.0f, 0.0f),
};
// Note: Color.black has its alpha channel set to 1. Most of the time we want the alpha channel set to 0, as we use black to clear render targets
public static Color clearColorAllBlack { get { return new Color(0f, 0f, 0f, 0f); } }

public static void ClearCubemap(CommandBuffer cmd, RenderTargetIdentifier buffer, Color clearColor)
{
for(int i = 0; i < 6; ++i)
SetRenderTarget(cmd, buffer, ClearFlag.Color, clearColorAllBlack, 0, (CubemapFace)i);
SetRenderTarget(cmd, buffer, ClearFlag.Color, clearColor, 0, (CubemapFace)i);
}
// Draws a full screen triangle as a faster alternative to drawing a full screen quad.
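
For context, a minimal C# sketch (not part of the diff) of how the fixed ClearCubemap could be driven. The cubemapRT target and the wrapper method are hypothetical; only CoreUtils.ClearCubemap, SetRenderTarget and the CommandBuffer flow come from the hunk above, and namespaces may differ in this SRP snapshot.

using UnityEngine;
using UnityEngine.Rendering;
using UnityEngine.Experimental.Rendering; // assumed namespace of CoreUtils in this snapshot

public static class ClearCubemapExample
{
    // Clears all six faces of a cubemap render target to the requested color and submits the work.
    public static void ClearRealtimeCubemap(ScriptableRenderContext renderContext, RenderTexture cubemapRT)
    {
        var cmd = new CommandBuffer { name = "Clear realtime cubemap" };

        // With the change above, the clearColor argument is honored for every face
        // instead of the hard-coded clearColorAllBlack.
        CoreUtils.ClearCubemap(cmd, cubemapRT, new Color(0f, 0f, 0f, 0f));

        renderContext.ExecuteCommandBuffer(cmd);
        cmd.Release();
    }
}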

4
ScriptableRenderPipeline/Core/EncodeBC6H.cs.meta


fileFormatVersion: 2
guid: ef7e375d470b6404a9e355690703502b
timeCreated: 1507290672
licenseType: Pro
guid: 379274e2dbcfbc247acccd1aab4243ee
MonoImporter:
externalObjects: {}
serializedVersion: 2

6
ScriptableRenderPipeline/Core/Resources/EncodeBC6H.compute.meta


fileFormatVersion: 2
guid: b69b95b3420fd904e8530b79f665a1f8
timeCreated: 1507123133
licenseType: Pro
guid: aa922d239de60304f964e24488559eeb
currentAPIMask: 4
currentAPIMask: 8196
userData:
assetBundleName:
assetBundleVariant:

3
ScriptableRenderPipeline/Core/ShaderLibrary/BC6H.hlsl


// compute endpoints (min/max RGB bbox)
float3 blockMin = texels[ 0 ];
float3 blockMax = texels[ 0 ];
for ( uint i = 1; i < 16; ++i )
uint i;
for (i = 1; i < 16; ++i )
{
blockMin = min( blockMin, texels[ i ] );
blockMax = max( blockMax, texels[ i ] );

68
ScriptableRenderPipeline/Core/ShaderLibrary/Common.hlsl


// Z buffer to linear depth.
// Correctly handles oblique view frustums. Only valid for projection matrices!
// Ref: An Efficient Depth Linearization Method for Oblique View Frustums, Eq. 6.
float LinearEyeDepth(float2 positionSS, float deviceDepth, float4 invProjParam)
float LinearEyeDepth(float2 positionNDC, float deviceDepth, float4 invProjParam)
float4 positionCS = float4(positionSS * 2.0 - 1.0, deviceDepth, 1.0);
float4 positionCS = float4(positionNDC * 2.0 - 1.0, deviceDepth, 1.0);
float viewSpaceZ = rcp(dot(positionCS, invProjParam));
// The view space uses a right-handed coordinate system.
return -viewSpaceZ;

// (position = positionCS) => (clipSpaceTransform = use default)
// (position = positionVS) => (clipSpaceTransform = UNITY_MATRIX_P)
// (position = positionWS) => (clipSpaceTransform = UNITY_MATRIX_VP)
float2 ComputeScreenSpacePosition(float3 position, float4x4 clipSpaceTransform = k_identity4x4)
float2 ComputeNormalizedDeviceCoordinates(float3 position, float4x4 clipSpaceTransform = k_identity4x4)
float2 positionSS = positionCS.xy * (rcp(positionCS.w) * 0.5) + 0.5;
float2 positionNDC = positionCS.xy * (rcp(positionCS.w) * 0.5) + 0.5;
positionSS.y = 1.0 - positionSS.y;
positionNDC.y = 1.0 - positionNDC.y;
return positionSS;
return positionNDC;
float4 ComputeClipSpacePosition(float2 positionSS, float deviceDepth)
float4 ComputeClipSpacePosition(float2 positionNDC, float deviceDepth)
positionSS.y = 1.0 - positionSS.y;
positionNDC.y = 1.0 - positionNDC.y;
return float4(positionSS * 2.0 - 1.0, deviceDepth, 1.0);
return float4(positionNDC * 2.0 - 1.0, deviceDepth, 1.0);
float3 ComputeViewSpacePosition(float2 positionSS, float deviceDepth, float4x4 invProjMatrix)
float3 ComputeViewSpacePosition(float2 positionNDC, float deviceDepth, float4x4 invProjMatrix)
float4 positionCS = ComputeClipSpacePosition(positionSS, deviceDepth);
float4 positionCS = ComputeClipSpacePosition(positionNDC, deviceDepth);
float4 positionVS = mul(invProjMatrix, positionCS);
// The view space uses a right-handed coordinate system.
positionVS.z = -positionVS.z;

float3 ComputeWorldSpacePosition(float2 positionSS, float deviceDepth, float4x4 invViewProjMatrix)
float3 ComputeWorldSpacePosition(float2 positionNDC, float deviceDepth, float4x4 invViewProjMatrix)
float4 positionCS = ComputeClipSpacePosition(positionSS, deviceDepth);
float4 positionCS = ComputeClipSpacePosition(positionNDC, deviceDepth);
float4 hpositionWS = mul(invViewProjMatrix, positionCS);
return hpositionWS.xyz / hpositionWS.w;
}

struct PositionInputs
{
// TODO: improve the naming convention.
// Some options:
// positionNDC, positionSS, tileCoordSS
// pixelCoordUV, pixelCoordSS, tileCoordSS
// pixelCoordSS, pixelIndexSS, tileIndexSS
float3 positionWS; // World space position (could be camera-relative)
float2 positionSS; // Screen space pixel position : [0, 1) (with the half-pixel offset)
uint2 unPositionSS; // Screen space pixel index : [0, NumPixels)
uint2 unTileCoord; // Screen space tile index : [0, NumTiles)
float deviceDepth; // Depth from the depth buffer : [0, 1]
float linearDepth; // View space Z coordinate : [Near, Far]
float3 positionWS; // World space position (could be camera-relative)
float2 positionNDC; // Normalized screen space UVs : [0, 1) (with the half-pixel offset)
uint2 positionSS; // Screen space pixel coordinates : [0, NumPixels)
uint2 tileCoord; // Screen space tile coordinates : [0, NumTiles)
float deviceDepth; // Depth from the depth buffer : [0, 1]
float linearDepth; // View space Z coordinate : [Near, Far]
// If a compute shader call this function unPositionSS is an integer usually calculate like: uint2 unPositionSS = groupId.xy * BLOCK_SIZE + groupThreadId.xy
// If a compute shader call this function positionSS is an integer usually calculate like: uint2 positionSS = groupId.xy * BLOCK_SIZE + groupThreadId.xy
PositionInputs GetPositionInput(float2 unPositionSS, float2 invScreenSize, uint2 unTileCoord) // Specify explicit tile coordinates so that we can easily make it lane invariant for compute evaluation.
PositionInputs GetPositionInput(float2 positionSS, float2 invScreenSize, uint2 tileCoord) // Specify explicit tile coordinates so that we can easily make it lane invariant for compute evaluation.
posInput.positionSS = unPositionSS;
posInput.positionNDC = positionSS;
posInput.positionSS.xy += float2(0.5, 0.5);
posInput.positionNDC.xy += float2(0.5, 0.5);
posInput.positionSS *= invScreenSize;
posInput.positionNDC *= invScreenSize;
posInput.unPositionSS = uint2(unPositionSS);
posInput.unTileCoord = unTileCoord;
posInput.positionSS = uint2(positionSS);
posInput.tileCoord = tileCoord;
PositionInputs GetPositionInput(float2 unPositionSS, float2 invScreenSize)
PositionInputs GetPositionInput(float2 positionSS, float2 invScreenSize)
return GetPositionInput(unPositionSS, invScreenSize, uint2(0, 0));
return GetPositionInput(positionSS, invScreenSize, uint2(0, 0));
}
// From forward

void UpdatePositionInput(float deviceDepth, float4x4 invViewProjMatrix, float4x4 viewProjMatrix, inout PositionInputs posInput)
{
posInput.deviceDepth = deviceDepth;
posInput.positionWS = ComputeWorldSpacePosition(posInput.positionSS, deviceDepth, invViewProjMatrix);
posInput.positionWS = ComputeWorldSpacePosition(posInput.positionNDC, deviceDepth, invViewProjMatrix);
// The compiler should optimize this (less expensive than reconstruct depth VS from depth buffer)
posInput.linearDepth = mul(viewProjMatrix, float4(posInput.positionWS, 1.0)).w;

// LOD dithering transition helper
// LOD0 must use this function with ditherFactor 1..0
// LOD1 must use this function with ditherFactor 0..1
void LODDitheringTransition(uint2 unPositionSS, float ditherFactor)
void LODDitheringTransition(uint2 positionSS, float ditherFactor)
float p = GenerateHashedRandomFloat(unPositionSS);
float p = GenerateHashedRandomFloat(positionSS);
// We want to have a symmetry between 0..0.5 ditherFactor and 0.5..1 so no pixels are transparent during the transition
// this is handled by this test which reverse the pattern

#endif // UNITY_COMMON_INCLUDED

5
ScriptableRenderPipeline/Core/ShaderLibrary/CommonMaterial.hlsl


return subsurfaceLighting.b > 0;
}
// MACRO from legacy Unity
// Transforms 2D UV by scale/bias property
#define TRANSFORM_TEX(tex, name) ((tex.xy) * name##_ST.xy + name##_ST.zw)
#define GET_TEXELSIZE_NAME(name) (name##_TexelSize)
#endif // UNITY_COMMON_MATERIAL_INCLUDED

6
ScriptableRenderPipeline/Core/ShaderLibrary/Macros.hlsl


void FunctionName(inout bool3 a, inout bool3 b) { bool3 t = a; a = b; b = t; } \
void FunctionName(inout bool4 a, inout bool4 b) { bool4 t = a; a = b; b = t; }
// MACRO from legacy Unity
// Transforms 2D UV by scale/bias property
#define TRANSFORM_TEX(tex, name) ((tex.xy) * name##_ST.xy + name##_ST.zw)
#define GET_TEXELSIZE_NAME(name) (name##_TexelSize)
#endif // UNITY_MACROS_INCLUDED

8
ScriptableRenderPipeline/Core/ShaderLibrary/Shadow/Shadow.hlsl


// shadow sampling prototypes
float GetPunctualShadowAttenuation( ShadowContext shadowContext, float3 positionWS, float3 normalWS, int shadowDataIndex, float4 L );
float GetPunctualShadowAttenuation( ShadowContext shadowContext, float3 positionWS, float3 normalWS, int shadowDataIndex, float4 L, float2 unPositionSS );
float GetPunctualShadowAttenuation( ShadowContext shadowContext, float3 positionWS, float3 normalWS, int shadowDataIndex, float4 L, float2 positionSS );
float GetDirectionalShadowAttenuation( ShadowContext shadowContext, float3 positionWS, float3 normalWS, int shadowDataIndex, float3 L, float2 unPositionSS );
float GetDirectionalShadowAttenuation( ShadowContext shadowContext, float3 positionWS, float3 normalWS, int shadowDataIndex, float3 L, float2 positionSS );
#include "ShadowSampling.hlsl" // sampling patterns (don't modify)
#include "ShadowAlgorithms.hlsl" // engine default algorithms (don't modify)

return EvalShadow_PunctualDepth(shadowContext, positionWS, normalWS, shadowDataIndex, L);
}
float GetPunctualShadowAttenuation( ShadowContext shadowContext, float3 positionWS, float3 normalWS, int shadowDataIndex, float4 L, float2 unPositionSS )
float GetPunctualShadowAttenuation( ShadowContext shadowContext, float3 positionWS, float3 normalWS, int shadowDataIndex, float4 L, float2 positionSS )
{
return GetPunctualShadowAttenuation( shadowContext, positionWS, normalWS, shadowDataIndex, L );
}

return EvalShadow_CascadedDepth_Blend( shadowContext, positionWS, normalWS, shadowDataIndex, L );
}
float GetDirectionalShadowAttenuation( ShadowContext shadowContext, float3 positionWS, float3 normalWS, int shadowDataIndex, float3 L, float2 unPositionSS )
float GetDirectionalShadowAttenuation( ShadowContext shadowContext, float3 positionWS, float3 normalWS, int shadowDataIndex, float3 L, float2 positionSS )
{
return GetDirectionalShadowAttenuation( shadowContext, positionWS, normalWS, shadowDataIndex, L );
}

6
ScriptableRenderPipeline/Core/ShaderLibrary/Tessellation.hlsl


float3 GetScreenSpaceTessFactor(float3 p0, float3 p1, float3 p2, float4x4 viewProjectionMatrix, float4 screenSize, float triangleSize)
{
// Get screen space adaptive scale factor
float2 edgeScreenPosition0 = ComputeScreenSpacePosition(p0, viewProjectionMatrix) * screenSize.xy;
float2 edgeScreenPosition1 = ComputeScreenSpacePosition(p1, viewProjectionMatrix) * screenSize.xy;
float2 edgeScreenPosition2 = ComputeScreenSpacePosition(p2, viewProjectionMatrix) * screenSize.xy;
float2 edgeScreenPosition0 = ComputeNormalizedDeviceCoordinates(p0, viewProjectionMatrix) * screenSize.xy;
float2 edgeScreenPosition1 = ComputeNormalizedDeviceCoordinates(p1, viewProjectionMatrix) * screenSize.xy;
float2 edgeScreenPosition2 = ComputeNormalizedDeviceCoordinates(p2, viewProjectionMatrix) * screenSize.xy;
float EdgeScale = 1.0 / triangleSize; // Edge size in reality, but name is simpler
float3 tessFactor;

150
ScriptableRenderPipeline/Core/TextureCache.cs


//#define NATIVE_CODE_FOR_CMD_CONVERT_TEXTURE
using UnityEngine.Rendering;
#if UNITY_EDITOR
using UnityEditor;
#endif

{
private Texture2DArray m_Cache;
public override void TransferToSlice(int sliceIndex, Texture texture)
public override void TransferToSlice(CommandBuffer cmd, int sliceIndex, Texture texture)
{
var mismatch = (m_Cache.width != texture.width) || (m_Cache.height != texture.height);

if (mismatch)
{
if (!UnityEngine.Graphics.ConvertTexture(texture, 0, m_Cache, sliceIndex))
{
Debug.LogErrorFormat(texture, "Unable to convert texture \"{0}\" to match renderloop settings ({1}x{2} {3})",
texture.name, m_Cache.width, m_Cache.height, m_Cache.format);
}
#if NATIVE_CODE_FOR_CMD_CONVERT_TEXTURE
cmd.ConvertTexture(texture, 0, m_Cache, sliceIndex);
#else
UnityEngine.Graphics.ConvertTexture(texture, 0, m_Cache, sliceIndex);
#endif
UnityEngine.Graphics.CopyTexture(texture, 0, m_Cache, sliceIndex);
cmd.CopyTexture(texture, 0, m_Cache, sliceIndex);
}
}

private int m_CubeMipLevelPropName;
private int m_cubeSrcTexPropName;
public override void TransferToSlice(int sliceIndex, Texture texture)
public override void TransferToSlice(CommandBuffer cmd, int sliceIndex, Texture texture)
TransferToPanoCache(sliceIndex, texture);
TransferToPanoCache(cmd, sliceIndex, texture);
else
{
var mismatch = (m_Cache.width != texture.width) || (m_Cache.height != texture.height);

if (mismatch)
{
bool failed = false;
if (!UnityEngine.Graphics.ConvertTexture(texture, f, m_Cache, 6 * sliceIndex + f))
{
failed = true;
break;
}
}
if (failed)
{
Debug.LogErrorFormat(texture, "Unable to convert texture \"{0}\" to match renderloop settings ({1}x{2} {3})",
texture.name, m_Cache.width, m_Cache.height, m_Cache.format);
#if NATIVE_CODE_FOR_CMD_CONVERT_TEXTURE
cmd.ConvertTexture(texture, f, m_Cache, 6 * sliceIndex + f);
#else
UnityEngine.Graphics.ConvertTexture(texture, f, m_Cache, 6 * sliceIndex + f);
#endif
UnityEngine.Graphics.CopyTexture(texture, f, m_Cache, 6 * sliceIndex + f);
cmd.CopyTexture(texture, f, m_Cache, 6 * sliceIndex + f);
}
}
}

Texture.DestroyImmediate(m_Cache);
}
private void TransferToPanoCache(int sliceIndex, Texture texture)
private void TransferToPanoCache(CommandBuffer cmd, int sliceIndex, Texture texture)
UnityEngine.Graphics.SetRenderTarget(m_StagingRTs[m]);
UnityEngine.Graphics.Blit(null, m_CubeBlitMaterial, 0);
cmd.Blit(null, m_StagingRTs[m], m_CubeBlitMaterial, 0);
UnityEngine.Graphics.CopyTexture(m_StagingRTs[m], 0, 0, m_CacheNoCubeArray, sliceIndex, m);
cmd.CopyTexture(m_StagingRTs[m], 0, 0, m_CacheNoCubeArray, sliceIndex, m);
}
}

}
}
public static TextureFormat GetPreferredHdrCompressedTextureFormat
public static TextureFormat GetPreferredHDRCompressedTextureFormat
{
get
{

// On editor the texture is uncompressed when operating against mobile build targets
// // On editor the texture is uncompressed when operating against mobile build targets
//#if UNITY_2017_2_OR_NEWER
if (SystemInfo.SupportsTextureFormat(probeFormat) && !UnityEngine.Rendering.GraphicsSettings.HasShaderDefine(UnityEngine.Rendering.BuiltinShaderDefine.UNITY_NO_DXT5nm))
format = probeFormat;

{
public uint texId;
public uint countLRU;
#if UNITY_EDITOR
public Hash128 hash;
#endif
public uint updateCount;
};
private int m_NumTextures;

private static uint g_MaxFrameCount = unchecked((uint)(-1));
private static uint g_InvalidTexID = (uint)0;
public int FetchSlice(Texture texture, bool forceReinject=false)
public uint GetTextureUpdateCount(Texture texture)
var sliceIndex = -1;
uint updateCount = texture.updateCount;
// For baked probes in the editor we need to factor in the actual hash of texture because we can't increment the update count of a texture that's baked on the disk.
// This code leaks logic from reflection probe baking into the texture cache which is not good... TODO: Find a way to do that outside of the texture cache.
#if UNITY_EDITOR
updateCount += (uint)texture.imageContentsHash.GetHashCode();
#endif
return updateCount;
}
public int ReserveSlice(Texture texture, out bool needUpdate)
{
needUpdate = false;
return sliceIndex;
return -1;
#if UNITY_EDITOR
var hash = texture.imageContentsHash;
#endif
//assert(TexID!=g_InvalidTexID);
if (texId == g_InvalidTexID) return 0;
var bSwapSlice = forceReinject;
var bFoundAvailOrExistingSlice = false;
if (texId == g_InvalidTexID)
return -1;
int cachedSlice;
if (m_LocatorInSliceArray.TryGetValue(texId, out cachedSlice))
var sliceIndex = -1;
var foundIndex = -1;
if (m_LocatorInSliceArray.TryGetValue(texId, out foundIndex))
sliceIndex = cachedSlice;
Debug.Assert(m_SliceArray[sliceIndex].texId == texId);
sliceIndex = foundIndex;
bFoundAvailOrExistingSlice = true;
#if UNITY_EDITOR
bSwapSlice = bSwapSlice || (m_SliceArray[sliceIndex].hash != hash);
#endif
var updateCount = GetTextureUpdateCount(texture);
needUpdate |= (m_SliceArray[sliceIndex].updateCount != updateCount);
Debug.Assert(m_SliceArray[sliceIndex].texId == texId);
if (!bFoundAvailOrExistingSlice)
if(sliceIndex == -1)
{
// look for the first non-zero entry. It will be the least recently used entry
// since the array was pre-sorted (in linear time) in NewFrame()

{
idx = m_SortedIdxArray[j];
if (m_SliceArray[idx].countLRU == 0) ++j; // if entry already snagged by a new texture in this frame then ++j
else bFound = true;
if (m_SliceArray[idx].countLRU == 0)
++j; // if entry already snagged by a new texture in this frame then ++j
else
bFound = true;
needUpdate = true;
// if we are replacing an existing entry delete it from m_locatorInSliceArray.
if (m_SliceArray[idx].texId != g_InvalidTexID)
{

m_SliceArray[idx].texId = texId;
sliceIndex = idx;
bFoundAvailOrExistingSlice = true;
bSwapSlice = true;
// wrap up
Debug.Assert(bFoundAvailOrExistingSlice, "The texture cache doesn't have enough space to store all textures. Please either increase the size of the texture cache, or use fewer unique textures.");
if (bFoundAvailOrExistingSlice)
if(sliceIndex != -1)
}
if (bSwapSlice) // if this was a miss
return sliceIndex;
}
// In case the texture content with which we update the cache is not the input texture, we need to provide the right update count.
public void UpdateSlice(CommandBuffer cmd, int sliceIndex, Texture content, uint updateCount)
#if UNITY_EDITOR
m_SliceArray[sliceIndex].hash = hash;
#endif
// transfer new slice to sliceIndex from source texture
m_SliceArray[sliceIndex].updateCount = updateCount;
TransferToSlice(cmd, sliceIndex, content);
}
// transfer new slice to sliceIndex from source texture
TransferToSlice(sliceIndex, texture);
public void UpdateSlice(CommandBuffer cmd, int sliceIndex, Texture content)
{
UpdateSlice(cmd, sliceIndex, content, GetTextureUpdateCount(content));
public int FetchSlice(CommandBuffer cmd, Texture texture, bool forceReinject=false)
{
bool needUpdate = false;
var sliceIndex = ReserveSlice(texture, out needUpdate);
var bSwapSlice = forceReinject || needUpdate;
// wrap up
Debug.Assert(sliceIndex != -1, "The texture cache doesn't have enough space to store all textures. Please either increase the size of the texture cache, or use fewer unique textures.");
if (sliceIndex != -1 && bSwapSlice)
{
UpdateSlice(cmd, sliceIndex, texture);
}
return sliceIndex;

m_NumMipLevels = 0;
}
public virtual void TransferToSlice(int sliceIndex, Texture texture)
public virtual void TransferToSlice(CommandBuffer cmd, int sliceIndex, Texture texture)
{
}
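
To make the TextureCache refactor concrete, here is a hedged C# sketch of how a caller might use the new CommandBuffer-based API split into ReserveSlice / UpdateSlice / FetchSlice. The TextureCache2D subclass name, the m_CookieTexArray field and the surrounding class are assumptions for illustration; the method signatures themselves come from the hunk above.

using UnityEngine;
using UnityEngine.Rendering;
using UnityEngine.Experimental.Rendering; // assumed namespace of the texture cache classes

public class CookieCacheExample
{
    TextureCache2D m_CookieTexArray = new TextureCache2D(); // hypothetical cache instance

    // Simple path: let the cache decide whether the slice needs to be (re)uploaded.
    // FetchSlice now records any copy/convert work on the command buffer instead of
    // issuing immediate Graphics.* calls.
    public int GetCookieSlice(CommandBuffer cmd, Texture cookie)
    {
        return m_CookieTexArray.FetchSlice(cmd, cookie);
    }

    // Explicit path, useful when the uploaded content differs from the lookup texture
    // (e.g. a convolved reflection probe): reserve a slice keyed on 'key', then upload
    // 'content' with the key's update count so the cache stays in sync.
    public int GetProcessedSlice(CommandBuffer cmd, Texture key, Texture content)
    {
        bool needUpdate;
        int slice = m_CookieTexArray.ReserveSlice(key, out needUpdate);
        if (slice != -1 && needUpdate)
            m_CookieTexArray.UpdateSlice(cmd, slice, content, m_CookieTexArray.GetTextureUpdateCount(key));
        return slice;
    }
}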

6
ScriptableRenderPipeline/HDRenderPipeline/Debug/DebugViewMaterialGBuffer.shader


{
// input.positionCS is SV_Position
PositionInputs posInput = GetPositionInput(input.positionCS.xy, _ScreenSize.zw);
float depth = LOAD_TEXTURE2D(_MainDepthTexture, posInput.unPositionSS).x;
float depth = LOAD_TEXTURE2D(_MainDepthTexture, posInput.positionSS).x;
DECODE_FROM_GBUFFER(posInput.unPositionSS, UINT_MAX, bsdfData, bakeLightingData.bakeDiffuseLighting);
DECODE_FROM_GBUFFER(posInput.positionSS, UINT_MAX, bsdfData, bakeLightingData.bakeDiffuseLighting);
DecodeShadowMask(LOAD_TEXTURE2D(_ShadowMaskTexture, posInput.unPositionSS), bakeLightingData.bakeShadowMask);
DecodeShadowMask(LOAD_TEXTURE2D(_ShadowMaskTexture, posInput.positionSS), bakeLightingData.bakeShadowMask);
#endif
// Init to not expected value

8
ScriptableRenderPipeline/HDRenderPipeline/Debug/DebugViewTiles.shader


{
// positionCS is SV_Position
PositionInputs posInput = GetPositionInput(input.positionCS.xy, _ScreenSize.zw, uint2(input.positionCS.xy) / GetTileSize());
float depth = LOAD_TEXTURE2D(_MainDepthTexture, posInput.unPositionSS).x;
float depth = LOAD_TEXTURE2D(_MainDepthTexture, posInput.positionSS).x;
int2 pixelCoord = posInput.unPositionSS.xy;
int2 pixelCoord = posInput.positionSS.xy;
int2 tileCoord = (float2)pixelCoord / GetTileSize();
int2 mouseTileCoord = _MousePixelCoord / GetTileSize();
int2 offsetInTile = pixelCoord - tileCoord * GetTileSize();

// Tile overlap counter
if (n >= 0)
{
result = OverlayHeatMap(int2(posInput.unPositionSS.xy) & (GetTileSize() - 1), n);
result = OverlayHeatMap(int2(posInput.positionSS.xy) & (GetTileSize() - 1), n);
}
#ifdef SHOW_LIGHT_CATEGORIES

if (tileCoord.y < LIGHTCATEGORY_COUNT && tileCoord.x < maxLights + 3)
{
PositionInputs mousePosInput = GetPositionInput(_MousePixelCoord, _ScreenSize.zw, mouseTileCoord);
float depthMouse = LOAD_TEXTURE2D(_MainDepthTexture, mousePosInput.unPositionSS).x;
float depthMouse = LOAD_TEXTURE2D(_MainDepthTexture, mousePosInput.positionSS).x;
UpdatePositionInput(depthMouse, UNITY_MATRIX_I_VP, UNITY_MATRIX_VP, mousePosInput);
uint category = (LIGHTCATEGORY_COUNT - 1) - tileCoord.y;

1
ScriptableRenderPipeline/HDRenderPipeline/Editor/HDRenderPipelineInspector.Styles.cs


public readonly GUIContent spotCookieSize = new GUIContent("Spot Cookie Size");
public readonly GUIContent pointCookieSize = new GUIContent("Point Cookie Size");
public readonly GUIContent reflectionCubemapSize = new GUIContent("Reflection Cubemap Size");
public readonly GUIContent reflectionCacheCompressed = new GUIContent("Compress Reflection Probe Cache");
public readonly GUIContent sssSettings = new GUIContent("Subsurface Scattering Settings");

17
ScriptableRenderPipeline/HDRenderPipeline/Editor/HDRenderPipelineInspector.cs


SerializedProperty m_SpotCookieSize;
SerializedProperty m_PointCookieSize;
SerializedProperty m_ReflectionCubemapSize;
SerializedProperty m_ReflectionCacheCompressed;
void InitializeProperties()
{

m_ShadowAtlasHeight = properties.Find(x => x.shadowInitParams.shadowAtlasHeight);
// Texture settings
m_SpotCookieSize = properties.Find(x => x.textureSettings.spotCookieSize);
m_PointCookieSize = properties.Find(x => x.textureSettings.pointCookieSize);
m_ReflectionCubemapSize = properties.Find(x => x.textureSettings.reflectionCubemapSize);
m_SpotCookieSize = properties.Find(x => x.globalTextureSettings.spotCookieSize);
m_PointCookieSize = properties.Find(x => x.globalTextureSettings.pointCookieSize);
m_ReflectionCubemapSize = properties.Find(x => x.globalTextureSettings.reflectionCubemapSize);
m_ReflectionCacheCompressed = properties.Find(x => x.globalTextureSettings.reflectionCacheCompressed);
m_RenderingUseForwardOnly = properties.Find(x => x.renderingSettings.useForwardRenderingOnly);
m_RenderingUseDepthPrepass = properties.Find(x => x.renderingSettings.useDepthPrepassWithDeferredRendering);
m_RenderingUseDepthPrepassAlphaTestOnly = properties.Find(x => x.renderingSettings.renderAlphaTestOnlyInDeferredPrepass);
m_RenderingUseForwardOnly = properties.Find(x => x.globalRenderingSettings.useForwardRenderingOnly);
m_RenderingUseDepthPrepass = properties.Find(x => x.globalRenderingSettings.useDepthPrepassWithDeferredRendering);
m_RenderingUseDepthPrepassAlphaTestOnly = properties.Find(x => x.globalRenderingSettings.renderAlphaTestOnlyInDeferredPrepass);
// Subsurface Scattering Settings
m_SubsurfaceScatteringSettings = properties.Find(x => x.sssSettings);

EditorGUILayout.PropertyField(m_SpotCookieSize, s_Styles.spotCookieSize);
EditorGUILayout.PropertyField(m_PointCookieSize, s_Styles.pointCookieSize);
EditorGUILayout.PropertyField(m_ReflectionCubemapSize, s_Styles.reflectionCubemapSize);
// Commented out until we have proper realtime BC6H compression
//EditorGUILayout.PropertyField(m_ReflectionCacheCompressed, s_Styles.reflectionCacheCompressed);
if (EditorGUI.EndChangeCheck())
{

458
ScriptableRenderPipeline/HDRenderPipeline/HDRenderPipeline.cs


using System.Collections.Generic;
using System.Collections.Generic;
using UnityEngine.Rendering;
using System;
using System.Diagnostics;

namespace UnityEngine.Experimental.Rendering.HDPipeline
{
[Serializable]
public class RenderingSettings
{
public bool useForwardRenderingOnly; // TODO: Currently there is no way to strip the extra forward shaders generated by the shaders compiler, so we can switch dynamically.
public bool useDepthPrepassWithDeferredRendering;
public bool renderAlphaTestOnlyInDeferredPrepass;
// We have to fall back to forward-only rendering when scene view is using wireframe rendering mode --
// as rendering everything in wireframe + deferred do not play well together
public bool ShouldUseForwardRenderingOnly()
{
return useForwardRenderingOnly || GL.wireframe;
}
}
public class GBufferManager
{
public const int k_MaxGbuffer = 8;

for (int gbufferIndex = 0; gbufferIndex < gbufferCount; ++gbufferIndex)
{
cmd.ReleaseTemporaryRT(HDShaderIDs._GBufferTexture[gbufferIndex]);
cmd.GetTemporaryRT(HDShaderIDs._GBufferTexture[gbufferIndex], width, height, 0, FilterMode.Point, rtFormat[gbufferIndex], rtReadWrite[gbufferIndex]);
m_RTIDs[gbufferIndex] = new RenderTargetIdentifier(HDShaderIDs._GBufferTexture[gbufferIndex]);
}

cmd.ReleaseTemporaryRT(HDShaderIDs._ShadowMaskTexture);
cmd.GetTemporaryRT(HDShaderIDs._ShadowMaskTexture, width, height, 0, FilterMode.Point, Builtin.GetShadowMaskBufferFormat(), Builtin.GetShadowMaskBufferReadWrite());
m_RTIDs[gbufferCount++] = new RenderTargetIdentifier(HDShaderIDs._ShadowMaskTexture);
}

// If velocity is in GBuffer then it is in the last RT. Assign a different name to it.
cmd.ReleaseTemporaryRT(HDShaderIDs._VelocityTexture);
cmd.GetTemporaryRT(HDShaderIDs._VelocityTexture, width, height, 0, FilterMode.Point, Builtin.GetVelocityBufferFormat(), Builtin.GetVelocityBufferReadWrite());
m_RTIDs[gbufferCount++] = new RenderTargetIdentifier(HDShaderIDs._VelocityTexture);
}

// Renderer bake configuration can vary depending on whether shadow mask is enabled or not
RendererConfiguration m_currentRendererConfigurationBakedLighting = HDUtils.k_RendererConfigurationBakedLighting;
// Various set of material use in render loop
IBLFilterGGX m_IBLFilterGGX = null;
// Various set of material use in render loop
ComputeShader m_SubsurfaceScatteringCS { get { return m_Asset.renderPipelineResources.subsurfaceScatteringCS; } }
int m_SubsurfaceScatteringKernel;
Material m_CombineLightingPass;

// The pass "SRPDefaultUnlit" is a fall back to legacy unlit rendering and is required to support unity 2d + unity UI that render in the scene.
ShaderPassName[] m_ForwardAndForwardOnlyPassNames = { new ShaderPassName(), new ShaderPassName(), HDShaderPassNames.s_SRPDefaultUnlitName};
ShaderPassName[] m_ForwardOnlyPassNames = { new ShaderPassName(), HDShaderPassNames.s_SRPDefaultUnlitName };
ShaderPassName[] m_ForwardOnlyPassNames = { new ShaderPassName(), HDShaderPassNames.s_SRPDefaultUnlitName};
ShaderPassName[] m_AllTransparentPassNames = { HDShaderPassNames.s_TransparentBackfaceName,
HDShaderPassNames.s_ForwardOnlyName,

m_MaterialList.ForEach(material => material.Build(asset.renderPipelineResources));
m_LightLoop.Build(asset.renderPipelineResources, asset.renderingSettings, asset.tileSettings, asset.textureSettings, asset.shadowInitParams, m_ShadowSettings);
m_IBLFilterGGX = new IBLFilterGGX(asset.renderPipelineResources);
m_SkyManager.Build(asset.renderPipelineResources);
m_LightLoop.Build(asset.renderPipelineResources, asset.globalRenderingSettings, asset.tileSettings, asset.globalTextureSettings, asset.shadowInitParams, m_ShadowSettings, m_IBLFilterGGX);
m_SkyManager.Build(asset.renderPipelineResources, m_IBLFilterGGX);
m_SkyManager.skySettings = skySettingsToUse;
m_DebugDisplaySettings.RegisterDebug();

void RegisterDebug()
{
// These need to be Runtime Only because those values are hold by the HDRenderPipeline asset so if user change them through the editor debug menu they might change the value in the asset without noticing it.
DebugMenuManager.instance.AddDebugItem<bool>("HDRP", "Forward Only", () => m_Asset.renderingSettings.useForwardRenderingOnly, (value) => m_Asset.renderingSettings.useForwardRenderingOnly = (bool)value, DebugItemFlag.RuntimeOnly);
DebugMenuManager.instance.AddDebugItem<bool>("HDRP", "Deferred Depth Prepass", () => m_Asset.renderingSettings.useDepthPrepassWithDeferredRendering, (value) => m_Asset.renderingSettings.useDepthPrepassWithDeferredRendering = (bool)value, DebugItemFlag.RuntimeOnly);
DebugMenuManager.instance.AddDebugItem<bool>("HDRP", "Deferred Depth Prepass ATest Only", () => m_Asset.renderingSettings.renderAlphaTestOnlyInDeferredPrepass, (value) => m_Asset.renderingSettings.renderAlphaTestOnlyInDeferredPrepass = (bool)value, DebugItemFlag.RuntimeOnly);
// These need to be Runtime Only because those values are held by the HDRenderPipeline asset so if user change them through the editor debug menu they might change the value in the asset without noticing it.
DebugMenuManager.instance.AddDebugItem<bool>("HDRP", "Forward Only", () => m_Asset.globalRenderingSettings.useForwardRenderingOnly, (value) => m_Asset.globalRenderingSettings.useForwardRenderingOnly = (bool)value, DebugItemFlag.RuntimeOnly);
DebugMenuManager.instance.AddDebugItem<bool>("HDRP", "Deferred Depth Prepass", () => m_Asset.globalRenderingSettings.useDepthPrepassWithDeferredRendering, (value) => m_Asset.globalRenderingSettings.useDepthPrepassWithDeferredRendering = (bool)value, DebugItemFlag.RuntimeOnly);
DebugMenuManager.instance.AddDebugItem<bool>("HDRP", "Deferred Depth Prepass ATest Only", () => m_Asset.globalRenderingSettings.renderAlphaTestOnlyInDeferredPrepass, (value) => m_Asset.globalRenderingSettings.renderAlphaTestOnlyInDeferredPrepass = (bool)value, DebugItemFlag.RuntimeOnly);
DebugMenuManager.instance.AddDebugItem<bool>("HDRP", "Enable Tile/Cluster", () => m_Asset.tileSettings.enableTileAndCluster, (value) => m_Asset.tileSettings.enableTileAndCluster = (bool)value, DebugItemFlag.RuntimeOnly);
DebugMenuManager.instance.AddDebugItem<bool>("HDRP", "Enable Big Tile", () => m_Asset.tileSettings.enableBigTilePrepass, (value) => m_Asset.tileSettings.enableBigTilePrepass = (bool)value, DebugItemFlag.RuntimeOnly);

CullResults m_CullResults;
public override void Render(ScriptableRenderContext renderContext, Camera[] cameras)
{
base.Render(renderContext, cameras);
#if UNITY_EDITOR
SupportedRenderingFeatures.active = s_NeededFeatures;
#endif
// HD use specific GraphicsSettings. This is init here.
// TODO: This should not be set at each Frame but is there another place for these config setup ?
GraphicsSettings.lightsUseLinearIntensity = true;
GraphicsSettings.lightsUseColorTemperature = true;
if (m_FrameCount != Time.frameCount)
{
HDCamera.CleanUnused();
m_FrameCount = Time.frameCount;
}
foreach (var camera in cameras)
{
base.Render(renderContext, cameras);
#if UNITY_EDITOR
SupportedRenderingFeatures.active = s_NeededFeatures;
#endif
// HD use specific GraphicsSettings. This is init here.
// TODO: This should not be set at each Frame but is there another place for these config setup ?
GraphicsSettings.lightsUseLinearIntensity = true;
GraphicsSettings.lightsUseColorTemperature = true;
if (m_FrameCount != Time.frameCount)
{
HDCamera.CleanUnused();
m_FrameCount = Time.frameCount;
}
foreach (var material in m_MaterialList)
material.RenderInit(cmd);
foreach (var material in m_MaterialList)
material.RenderInit(cmd);
// Do anything we need to do upon a new frame.
m_LightLoop.NewFrame();
// Do anything we need to do upon a new frame.
m_LightLoop.NewFrame();
// we only want to render one camera for now
// select the most main camera!
Camera camera = null;
foreach (var cam in cameras)
{
if (cam == Camera.main)
{
camera = cam;
break;
}
}
if (camera == null)
{
renderContext.Submit();
continue;
}
if (camera == null && cameras.Length > 0)
camera = cameras[0];
// If we render a reflection view or a preview we should not display any debug information
// This need to be call before ApplyDebugDisplaySettings()
if (camera.cameraType == CameraType.Reflection || camera.cameraType == CameraType.Preview)
{
// Neutral allow to disable all debug settings
m_CurrentDebugDisplaySettings = s_NeutralDebugDisplaySettings;
}
else
{
m_CurrentDebugDisplaySettings = m_DebugDisplaySettings;
}
if (camera == null)
{
renderContext.Submit();
return;
}
ApplyDebugDisplaySettings(cmd);
UpdateCommonSettings();
// If we render a reflection view or a preview we should not display any debug information
// This need to be call before ApplyDebugDisplaySettings()
if (camera.cameraType == CameraType.Reflection || camera.cameraType == CameraType.Preview)
{
// Neutral allow to disable all debug settings
m_CurrentDebugDisplaySettings = s_NeutralDebugDisplaySettings;
}
else
{
m_CurrentDebugDisplaySettings = m_DebugDisplaySettings;
}
if (!m_IBLFilterGGX.IsInitialized())
m_IBLFilterGGX.Initialize(cmd);
ApplyDebugDisplaySettings(cmd);
UpdateCommonSettings();
ScriptableCullingParameters cullingParams;
if (!CullResults.GetCullingParameters(camera, out cullingParams))
{
renderContext.Submit();
continue;
}
ScriptableCullingParameters cullingParams;
if (!CullResults.GetCullingParameters(camera, out cullingParams))
{
renderContext.Submit();
return;
}
m_LightLoop.UpdateCullingParameters( ref cullingParams );
m_LightLoop.UpdateCullingParameters( ref cullingParams );
// emit scene view UI
if (camera.cameraType == CameraType.SceneView)
{
ScriptableRenderContext.EmitWorldGeometryForSceneView(camera);
}
// emit scene view UI
if (camera.cameraType == CameraType.SceneView)
{
ScriptableRenderContext.EmitWorldGeometryForSceneView(camera);
}
using (new ProfilingSample(cmd, "CullResults.Cull", GetSampler(CustomSamplerId.CullResultsCull)))
{
CullResults.Cull(ref cullingParams, renderContext,ref m_CullResults);
}
using (new ProfilingSample(cmd, "CullResults.Cull", GetSampler(CustomSamplerId.CullResultsCull)))
{
CullResults.Cull(ref cullingParams, renderContext,ref m_CullResults);
}
Resize(camera);
Resize(camera);
renderContext.SetupCameraProperties(camera);
renderContext.SetupCameraProperties(camera);
var postProcessLayer = camera.GetComponent<PostProcessLayer>();
var hdCamera = HDCamera.Get(camera, postProcessLayer);
PushGlobalParams(hdCamera, cmd, sssSettings);
var postProcessLayer = camera.GetComponent<PostProcessLayer>();
var hdCamera = HDCamera.Get(camera, postProcessLayer);
PushGlobalParams(hdCamera, cmd, sssSettings);
// TODO: Find a correct place to bind these material textures
// We have to bind the material specific global parameters in this mode
m_MaterialList.ForEach(material => material.Bind());
// TODO: Find a correct place to bind these material textures
// We have to bind the material specific global parameters in this mode
m_MaterialList.ForEach(material => material.Bind());
var additionalCameraData = camera.GetComponent<HDAdditionalCameraData>();
if (additionalCameraData && additionalCameraData.renderingPath == RenderingPathHDRP.Unlit)
{
// TODO: Add another path dedicated to planar reflection / real time cubemap that implement simpler lighting
// It is up to the users to only send unlit object for this camera path
var additionalCameraData = camera.GetComponent<HDAdditionalCameraData>();
if (additionalCameraData && additionalCameraData.renderingPath == RenderingPathHDRP.Unlit)
using (new ProfilingSample(cmd, "Forward", GetSampler(CustomSamplerId.Forward)))
// TODO: Add another path dedicated to planar reflection / real time cubemap that implement simpler lighting
// It is up to the users to only send unlit object for this camera path
using (new ProfilingSample(cmd, "Forward", GetSampler(CustomSamplerId.Forward)))
{
CoreUtils.SetRenderTarget(cmd, m_CameraColorBufferRT, m_CameraDepthStencilBufferRT, ClearFlag.Color | ClearFlag.Depth);
RenderOpaqueRenderList(m_CullResults, camera, renderContext, cmd, HDShaderPassNames.s_ForwardName);
CoreUtils.SetRenderTarget(cmd, m_CameraColorBufferRT, m_CameraDepthStencilBufferRT, ClearFlag.Color | ClearFlag.Depth);
RenderOpaqueRenderList(m_CullResults, camera, renderContext, cmd, HDShaderPassNames.s_ForwardName);
}
renderContext.ExecuteCommandBuffer(cmd);
CommandBufferPool.Release(cmd);
renderContext.Submit();
return;
// Note: Legacy Unity behave like this for ShadowMask
// When you select ShadowMask in Lighting panel it recompile shaders on the fly with the SHADOW_MASK keyword.
// However there is no C# function that we can query to know what mode have been select in Lighting Panel and it will be wrong anyway. Lighting Panel setup what will be the next bake mode. But until light is bake, it is wrong.
// Currently to know if you need shadow mask you need to go through all visible lights (of CullResult), check the LightBakingOutput struct and look at lightmapBakeType/mixedLightingMode. If one light have shadow mask bake mode, then you need shadow mask features (i.e extra Gbuffer).
// It mean that when we build a standalone player, if we detect a light with bake shadow mask, we generate all shader variant (with and without shadow mask) and at runtime, when a bake shadow mask light is visible, we dynamically allocate an extra GBuffer and switch the shader.
// So the first thing to do is to go through all the light: PrepareLightsForGPU
bool enableBakeShadowMask;
using (new ProfilingSample(cmd, "TP_PrepareLightsForGPU", GetSampler(CustomSamplerId.TPPrepareLightsForGPU)))
{
enableBakeShadowMask = m_LightLoop.PrepareLightsForGPU(m_ShadowSettings, m_CullResults, camera);
}
ConfigureForShadowMask(enableBakeShadowMask, cmd);
renderContext.ExecuteCommandBuffer(cmd);
CommandBufferPool.Release(cmd);
renderContext.Submit();
continue;
}
InitAndClearBuffer(hdCamera, enableBakeShadowMask, cmd);
// Note: Legacy Unity behave like this for ShadowMask
// When you select ShadowMask in Lighting panel it recompile shaders on the fly with the SHADOW_MASK keyword.
// However there is no C# function that we can query to know what mode have been select in Lighting Panel and it will be wrong anyway. Lighting Panel setup what will be the next bake mode. But until light is bake, it is wrong.
// Currently to know if you need shadow mask you need to go through all visible lights (of CullResult), check the LightBakingOutput struct and look at lightmapBakeType/mixedLightingMode. If one light have shadow mask bake mode, then you need shadow mask features (i.e extra Gbuffer).
// It mean that when we build a standalone player, if we detect a light with bake shadow mask, we generate all shader variant (with and without shadow mask) and at runtime, when a bake shadow mask light is visible, we dynamically allocate an extra GBuffer and switch the shader.
// So the first thing to do is to go through all the light: PrepareLightsForGPU
bool enableBakeShadowMask;
using (new ProfilingSample(cmd, "TP_PrepareLightsForGPU", GetSampler(CustomSamplerId.TPPrepareLightsForGPU)))
{
enableBakeShadowMask = m_LightLoop.PrepareLightsForGPU(cmd, m_ShadowSettings, m_CullResults, camera);
}
ConfigureForShadowMask(enableBakeShadowMask, cmd);
RenderDepthPrepass(m_CullResults, camera, renderContext, cmd);
InitAndClearBuffer(hdCamera, enableBakeShadowMask, cmd);
RenderGBuffer(m_CullResults, camera, renderContext, cmd);
RenderDepthPrepass(m_CullResults, camera, renderContext, cmd);
RenderGBuffer(m_CullResults, camera, renderContext, cmd);
// In both forward and deferred, everything opaque should have been rendered at this point so we can safely copy the depth buffer for later processing.
CopyDepthBufferIfNeeded(cmd);
// In both forward and deferred, everything opaque should have been rendered at this point so we can safely copy the depth buffer for later processing.
CopyDepthBufferIfNeeded(cmd);
RenderPyramidDepth(camera, cmd, renderContext, FullScreenDebugMode.DepthPyramid);
RenderPyramidDepth(camera, cmd, renderContext, FullScreenDebugMode.DepthPyramid);
// Required for the SSS and the shader feature classification pass.
PrepareAndBindStencilTexture(cmd);
// Required for the SSS and the shader feature classification pass.
PrepareAndBindStencilTexture(cmd);
if (m_CurrentDebugDisplaySettings.IsDebugMaterialDisplayEnabled())
if (m_CurrentDebugDisplaySettings.IsDebugMaterialDisplayEnabled())
{
RenderDebugViewMaterial(m_CullResults, hdCamera, renderContext, cmd);
}
else
{
using (new ProfilingSample(cmd, "Render SSAO", GetSampler(CustomSamplerId.RenderSSAO)))
RenderDebugViewMaterial(m_CullResults, hdCamera, renderContext, cmd);
// TODO: Everything here (SSAO, Shadow, Build light list, deferred shadow, material and light classification can be parallelize with Async compute)
RenderSSAO(cmd, camera, renderContext, postProcessLayer);
else
{
using (new ProfilingSample(cmd, "Render SSAO", GetSampler(CustomSamplerId.RenderSSAO)))
{
// TODO: Everything here (SSAO, Shadow, Build light list, deferred shadow, material and light classification can be parallelize with Async compute)
RenderSSAO(cmd, camera, renderContext, postProcessLayer);
}
using (new ProfilingSample(cmd, "Render shadows", GetSampler(CustomSamplerId.RenderShadows)))
using (new ProfilingSample(cmd, "Render shadows", GetSampler(CustomSamplerId.RenderShadows)))
m_LightLoop.RenderShadows(renderContext, cmd, m_CullResults);
// TODO: check if statement below still apply
renderContext.SetupCameraProperties(camera); // Need to recall SetupCameraProperties after RenderShadows as it modify our view/proj matrix
}
m_LightLoop.RenderShadows(renderContext, cmd, m_CullResults);
// TODO: check if statement below still apply
renderContext.SetupCameraProperties(camera); // Need to recall SetupCameraProperties after RenderShadows as it modify our view/proj matrix
}
using (new ProfilingSample(cmd, "Deferred directional shadows", GetSampler(CustomSamplerId.RenderDeferredDirectionalShadow)))
{
cmd.GetTemporaryRT(m_DeferredShadowBuffer, camera.pixelWidth, camera.pixelHeight, 0, FilterMode.Point, RenderTextureFormat.ARGB32, RenderTextureReadWrite.Linear, 1, true);
m_LightLoop.RenderDeferredDirectionalShadow(hdCamera, m_DeferredShadowBufferRT, GetDepthTexture(), cmd);
PushFullScreenDebugTexture(cmd, m_DeferredShadowBuffer, hdCamera.camera, renderContext, FullScreenDebugMode.DeferredShadows);
}
using (new ProfilingSample(cmd, "Deferred directional shadows", GetSampler(CustomSamplerId.RenderDeferredDirectionalShadow)))
{
cmd.ReleaseTemporaryRT(m_DeferredShadowBuffer);
cmd.GetTemporaryRT(m_DeferredShadowBuffer, camera.pixelWidth, camera.pixelHeight, 0, FilterMode.Point, RenderTextureFormat.ARGB32, RenderTextureReadWrite.Linear, 1 , true);
m_LightLoop.RenderDeferredDirectionalShadow(hdCamera, m_DeferredShadowBufferRT, GetDepthTexture(), cmd);
PushFullScreenDebugTexture(cmd, m_DeferredShadowBuffer, hdCamera.camera, renderContext, FullScreenDebugMode.DeferredShadows);
}
using (new ProfilingSample(cmd, "Build Light list", GetSampler(CustomSamplerId.BuildLightList)))
{
m_LightLoop.BuildGPULightLists(camera, cmd, m_CameraDepthStencilBufferRT, GetStencilTexture());
}
using (new ProfilingSample(cmd, "Build Light list", GetSampler(CustomSamplerId.BuildLightList)))
{
m_LightLoop.BuildGPULightLists(camera, cmd, m_CameraDepthStencilBufferRT, GetStencilTexture());
}
// Don't update the sky environment if we are rendering a cubemap (it should be update already)
if (camera.cameraType != CameraType.Reflection)
{
// Caution: We require sun light here as some sky use the sun light to render, mean UpdateSkyEnvironment
// must be call after BuildGPULightLists.
// TODO: Try to arrange code so we can trigger this call earlier and use async compute here to run sky convolution during other passes (once we move convolution shader to compute).
UpdateSkyEnvironment(hdCamera, cmd);
}
// Caution: We require sun light here as some sky use the sun light to render, mean UpdateSkyEnvironment
// must be call after BuildGPULightLists.
// TODO: Try to arrange code so we can trigger this call earlier and use async compute here to run sky convolution during other passes (once we move convolution shader to compute).
UpdateSkyEnvironment(hdCamera, cmd);
RenderDeferredLighting(hdCamera, cmd);
RenderDeferredLighting(hdCamera, cmd);
// We compute subsurface scattering here. Therefore, no objects rendered afterwards will exhibit SSS.
// Currently, there is no efficient way to switch between SRT and MRT for the forward pass;
// therefore, forward-rendered objects do not output split lighting required for the SSS pass.
SubsurfaceScatteringPass(hdCamera, cmd, sssSettings);
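// Illustrative sketch only (not part of this change): the "split lighting" MRT layout the
// comment above refers to. The deferred lighting pass writes specular into the camera color
// buffer and diffuse into a separate buffer that SubsurfaceScatteringPass then filters.
// m_MRTCache2 and m_CameraColorBufferRT appear elsewhere in this file; the name of the SSS
// diffuse render target identifier below is an assumption.
// m_MRTCache2[0] = m_CameraColorBufferRT;              // specular (+ everything that is not SSS diffuse)
// m_MRTCache2[1] = m_CameraSssDiffuseLightingBufferRT; // diffuse lighting only, consumed by the SSS pass
// cmd.SetRenderTarget(m_MRTCache2, m_CameraDepthStencilBufferRT);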
RenderForward(m_CullResults, camera, renderContext, cmd, ForwardPass.Opaque);
RenderForwardError(m_CullResults, camera, renderContext, cmd, ForwardPass.Opaque);
RenderSky(hdCamera, cmd);
// Render pre refraction objects
RenderForward(m_CullResults, camera, renderContext, cmd, ForwardPass.PreRefraction);
RenderForwardError(m_CullResults, camera, renderContext, cmd, ForwardPass.PreRefraction);
RenderGaussianPyramidColor(camera, cmd, renderContext, FullScreenDebugMode.PreRefractionColorPyramid);
// Render all types of forward transparents (unlit, lit, complex (hair...)) together to keep the sorting between transparent objects.
RenderForward(m_CullResults, camera, renderContext, cmd, ForwardPass.Transparent);
RenderForwardError(m_CullResults, camera, renderContext, cmd, ForwardPass.Transparent);
PushFullScreenDebugTexture(cmd, m_CameraColorBuffer, camera, renderContext, FullScreenDebugMode.NanTracker);
// Planar and real-time cubemap probes don't need post-processing and render in FP16
if (camera.cameraType == CameraType.Reflection)
{
    using (new ProfilingSample(cmd, "Blit to final RT", GetSampler(CustomSamplerId.BlitToFinalRT)))
    {
        // Simple blit
        cmd.Blit(m_CameraColorBufferRT, BuiltinRenderTextureType.CameraTarget);
    }
}
else
{
    RenderVelocity(m_CullResults, hdCamera, renderContext, cmd); // Note: we may have to render velocity earlier if we do temporal AO, temporal volumetrics, etc. That would mean not taking forward opaques into account in the deferred rendering case?

    RenderGaussianPyramidColor(camera, cmd, renderContext, FullScreenDebugMode.FinalColorPyramid);

    // TODO: Check with VFX team.
    // Rendering distortion here of course produces a lot of artifacts.
    // But resolving per object that writes distortion is not possible (we would need to sort transparents, render those that do not distort, resolve, and so on).
    // Instead we choose to apply distortion at the end, after accumulating the distortion vectors and the desired blurriness.
    AccumulateDistortion(m_CullResults, camera, renderContext, cmd);
    RenderDistortion(cmd, m_Asset.renderPipelineResources);
}
}
RenderDebug(hdCamera, cmd);
// Make sure to unbind every render texture here because in the next iteration of the loop we might have to reallocate render textures (if the camera size is different)
cmd.SetRenderTarget(new RenderTargetIdentifier(-1), new RenderTargetIdentifier(-1));

// We still need to bind the default camera target together with our depth buffer while rendering the scene view, so that editor grid/gizmo/selection rendering works. It should be the last camera here.
if (camera.cameraType == CameraType.SceneView)
    cmd.SetRenderTarget(BuiltinRenderTextureType.CameraTarget, m_CameraDepthStencilBufferRT);
renderContext.ExecuteCommandBuffer(cmd);
} // For each camera
}
void RenderOpaqueRenderList(CullResults cull,

int w = camera.pixelWidth;
int h = camera.pixelHeight;
cmd.ReleaseTemporaryRT(m_DistortionBuffer);
cmd.GetTemporaryRT(m_DistortionBuffer, w, h, 0, FilterMode.Point, Builtin.GetDistortionBufferFormat(), Builtin.GetDistortionBufferReadWrite());
cmd.SetRenderTarget(m_DistortionBufferRT, m_CameraDepthStencilBufferRT);
cmd.ClearRenderTarget(false, true, Color.clear);

// It must also have a "DepthForwardOnly" and no "DepthOnly" pass as forward material (either deferred or forward only rendering) have always a depth pass.
// In case of forward only rendering we have a depth prepass. In case of deferred renderer, it is optional
bool addFullDepthPrepass = m_Asset.renderingSettings.ShouldUseForwardRenderingOnly() || m_Asset.renderingSettings.useDepthPrepassWithDeferredRendering;
bool addAlphaTestedOnly = !m_Asset.renderingSettings.ShouldUseForwardRenderingOnly() && m_Asset.renderingSettings.useDepthPrepassWithDeferredRendering && m_Asset.renderingSettings.renderAlphaTestOnlyInDeferredPrepass;
bool addFullDepthPrepass = m_Asset.globalRenderingSettings.ShouldUseForwardRenderingOnly() || m_Asset.globalRenderingSettings.useDepthPrepassWithDeferredRendering;
bool addAlphaTestedOnly = !m_Asset.globalRenderingSettings.ShouldUseForwardRenderingOnly() && m_Asset.globalRenderingSettings.useDepthPrepassWithDeferredRendering && m_Asset.globalRenderingSettings.renderAlphaTestOnlyInDeferredPrepass;
using (new ProfilingSample(cmd, addAlphaTestedOnly ? "Depth Prepass alpha test" : "Depth Prepass", GetSampler(CustomSamplerId.DepthPrepass)))
{

// during Gbuffer pass. This is handled in the shader and the depth test (equal and no depth write) is done here.
void RenderGBuffer(CullResults cull, Camera camera, ScriptableRenderContext renderContext, CommandBuffer cmd)
{
if (m_Asset.renderingSettings.ShouldUseForwardRenderingOnly())
if (m_Asset.globalRenderingSettings.ShouldUseForwardRenderingOnly())
return;
using (new ProfilingSample(cmd, m_CurrentDebugDisplaySettings.IsDebugDisplayEnabled() ? "GBufferDebugDisplay" : "GBuffer", GetSampler(CustomSamplerId.GBuffer)))

}
else
{
if (m_Asset.renderingSettings.useDepthPrepassWithDeferredRendering)
if (m_Asset.globalRenderingSettings.useDepthPrepassWithDeferredRendering)
RenderOpaqueRenderList(cull, camera, renderContext, cmd, HDShaderPassNames.s_GBufferName, m_currentRendererConfigurationBakedLighting, rangeOpaqueNoAlphaTest, m_Asset.renderingSettings.renderAlphaTestOnlyInDeferredPrepass ? m_DepthStateOpaque : m_DepthStateOpaqueWithPrepass);
RenderOpaqueRenderList(cull, camera, renderContext, cmd, HDShaderPassNames.s_GBufferName, m_currentRendererConfigurationBakedLighting, rangeOpaqueNoAlphaTest, m_Asset.globalRenderingSettings.renderAlphaTestOnlyInDeferredPrepass ? m_DepthStateOpaque : m_DepthStateOpaqueWithPrepass);
// but for opaque alpha-tested objects we use depth-equal and no depth write, and we rely on the shader pass GbufferWithDepthPrepass
RenderOpaqueRenderList(cull, camera, renderContext, cmd, HDShaderPassNames.s_GBufferWithPrepassName, m_currentRendererConfigurationBakedLighting, rangeOpaqueAlphaTest, m_DepthStateOpaqueWithPrepass);
}

{
using (new ProfilingSample(cmd, "DisplayDebug ViewMaterial", GetSampler(CustomSamplerId.DisplayDebugViewMaterial)))
{
if (m_CurrentDebugDisplaySettings.materialDebugSettings.IsDebugGBufferEnabled() && !m_Asset.renderingSettings.ShouldUseForwardRenderingOnly())
if (m_CurrentDebugDisplaySettings.materialDebugSettings.IsDebugGBufferEnabled() && !m_Asset.globalRenderingSettings.ShouldUseForwardRenderingOnly())
{
using (new ProfilingSample(cmd, "DebugViewMaterialGBuffer", GetSampler(CustomSamplerId.DebugViewMaterialGBuffer)))
{

if (settings.IsEnabledAndSupported(null))
{
cmd.ReleaseTemporaryRT(HDShaderIDs._AmbientOcclusionTexture);
cmd.GetTemporaryRT(HDShaderIDs._AmbientOcclusionTexture, new RenderTextureDescriptor(camera.pixelWidth, camera.pixelHeight, RenderTextureFormat.R8, 0)
{
sRGB = false,

void RenderDeferredLighting(HDCamera hdCamera, CommandBuffer cmd)
{
if (m_Asset.renderingSettings.ShouldUseForwardRenderingOnly())
if (m_Asset.globalRenderingSettings.ShouldUseForwardRenderingOnly())
return;
m_MRTCache2[0] = m_CameraColorBufferRT;

void SubsurfaceScatteringPass(HDCamera hdCamera, CommandBuffer cmd, SubsurfaceScatteringSettings sssParameters)
{
// Currently, forward-rendered objects do not output split lighting required for the SSS pass.
if (!m_CurrentDebugDisplaySettings.renderingDebugSettings.enableSSSAndTransmission || m_Asset.renderingSettings.ShouldUseForwardRenderingOnly())
if (!m_CurrentDebugDisplaySettings.renderingDebugSettings.enableSSSAndTransmission || m_Asset.globalRenderingSettings.ShouldUseForwardRenderingOnly())
return;
using (new ProfilingSample(cmd, "Subsurface Scattering", GetSampler(CustomSamplerId.SubsurfaceScattering)))

cmd.SetComputeFloatParam(m_SubsurfaceScatteringCS, HDShaderIDs._TexturingModeFlags, *(float*)&texturingModeFlags);
}
cmd.SetComputeVectorArrayParam(m_SubsurfaceScatteringCS, HDShaderIDs._WorldScales, sssParameters.worldScales);
cmd.SetComputeVectorArrayParam(m_SubsurfaceScatteringCS, HDShaderIDs._FilterKernels, sssParameters.filterKernels);
cmd.SetComputeVectorArrayParam(m_SubsurfaceScatteringCS, HDShaderIDs._ShapeParams, sssParameters.shapeParams);
cmd.SetComputeTextureParam(m_SubsurfaceScatteringCS, m_SubsurfaceScatteringKernel, HDShaderIDs._GBufferTexture0, m_GbufferManager.GetGBuffers()[0]);
cmd.SetComputeTextureParam(m_SubsurfaceScatteringCS, m_SubsurfaceScatteringKernel, HDShaderIDs._GBufferTexture1, m_GbufferManager.GetGBuffers()[1]);

if (pass == ForwardPass.Opaque)
{
if (m_CurrentDebugDisplaySettings.IsDebugDisplayEnabled())
{
m_ForwardAndForwardOnlyPassNames[0] = m_ForwardOnlyPassNames[0] = HDShaderPassNames.s_ForwardOnlyDebugDisplayName;
m_ForwardAndForwardOnlyPassNames[1] = HDShaderPassNames.s_ForwardDebugDisplayName;
}
else
{
m_ForwardAndForwardOnlyPassNames[0] = m_ForwardOnlyPassNames[0] = HDShaderPassNames.s_ForwardOnlyName;
m_ForwardAndForwardOnlyPassNames[1] = HDShaderPassNames.s_ForwardName;
}
var passNames = m_Asset.renderingSettings.ShouldUseForwardRenderingOnly() ? m_ForwardAndForwardOnlyPassNames : m_ForwardOnlyPassNames;
var passNames = m_Asset.globalRenderingSettings.ShouldUseForwardRenderingOnly() ? m_ForwardAndForwardOnlyPassNames : m_ForwardOnlyPassNames;
// Forward opaque material always have a prepass (whether or not we use deferred, whether or not there is option like alpha test only) so we pass the right depth state here.
RenderOpaqueRenderList(cullResults, camera, renderContext, cmd, passNames, m_currentRendererConfigurationBakedLighting, null, m_DepthStateOpaqueWithPrepass);
}

m_CameraMotionVectorsMaterial.SetVector(HDShaderIDs._CameraPosDiff, hdcam.prevCameraPos - hdcam.cameraPos);
cmd.ReleaseTemporaryRT(m_VelocityBuffer);
cmd.GetTemporaryRT(m_VelocityBuffer, w, h, 0, FilterMode.Point, Builtin.GetVelocityBufferFormat(), Builtin.GetVelocityBufferReadWrite());
CoreUtils.DrawFullScreen(cmd, m_CameraMotionVectorsMaterial, m_VelocityBufferRT, null, 0);
cmd.SetRenderTarget(m_VelocityBufferRT, m_CameraDepthStencilBufferRT);

{
mipSize >>= 1;
cmd.ReleaseTemporaryRT(HDShaderIDs._GaussianPyramidColorMips[i + 1]);
cmd.GetTemporaryRT(HDShaderIDs._GaussianPyramidColorMips[i + 1], mipSize, mipSize, 0, FilterMode.Bilinear, RenderTextureFormat.ARGBHalf, RenderTextureReadWrite.Linear, 1, true);
cmd.SetComputeTextureParam(m_GaussianPyramidCS, m_GaussianPyramidKernel, "_Source", last);
cmd.SetComputeTextureParam(m_GaussianPyramidCS, m_GaussianPyramidKernel, "_Result", HDShaderIDs._GaussianPyramidColorMips[i + 1]);

cmd.SetGlobalVector(HDShaderIDs._DepthPyramidMipSize, new Vector4(size, size, lodCount, 0));
cmd.ReleaseTemporaryRT(HDShaderIDs._DepthPyramidMips[0]);
cmd.GetTemporaryRT(HDShaderIDs._DepthPyramidMips[0], size, size, 0, FilterMode.Bilinear, RenderTextureFormat.RFloat, RenderTextureReadWrite.Linear, 1, true);
m_GPUCopy.SampleCopyChannel_xyzw2x(cmd, GetDepthTexture(), HDShaderIDs._DepthPyramidMips[0], new Vector2(size, size));
cmd.CopyTexture(HDShaderIDs._DepthPyramidMips[0], 0, 0, m_DepthPyramidBuffer, 0, 0);

{
mipSize >>= 1;
cmd.ReleaseTemporaryRT(HDShaderIDs._DepthPyramidMips[i + 1]);
cmd.GetTemporaryRT(HDShaderIDs._DepthPyramidMips[i + 1], mipSize, mipSize, 0, FilterMode.Bilinear, RenderTextureFormat.RFloat, RenderTextureReadWrite.Linear, 1, true);
cmd.SetComputeTextureParam(m_DepthPyramidCS, m_DepthPyramidKernel, "_Source", HDShaderIDs._DepthPyramidMips[i]);
cmd.SetComputeTextureParam(m_DepthPyramidCS, m_DepthPyramidKernel, "_Result", HDShaderIDs._DepthPyramidMips[i + 1]);

if (debugMode == m_CurrentDebugDisplaySettings.fullScreenDebugMode)
{
m_FullScreenDebugPushed = true; // We need this flag because otherwise if no fullscreen debug is pushed, when we render the result in RenderDebug the temporary RT will not exist.
cb.ReleaseTemporaryRT(m_DebugFullScreenTempRT);
cb.GetTemporaryRT(m_DebugFullScreenTempRT, camera.pixelWidth, camera.pixelHeight, 0, FilterMode.Point, RenderTextureFormat.ARGBHalf, RenderTextureReadWrite.Linear);
cb.Blit(textureID, m_DebugFullScreenTempRT);
}

if (debugMode == m_CurrentDebugDisplaySettings.fullScreenDebugMode)
{
m_FullScreenDebugPushed = true; // We need this flag because otherwise if no fullscreen debug is pushed, when we render the result in RenderDebug the temporary RT will not exist.
cmd.ReleaseTemporaryRT(m_DebugFullScreenTempRT);
cmd.GetTemporaryRT(m_DebugFullScreenTempRT, width >> mipIndex, height >> mipIndex, 0, FilterMode.Point, RenderTextureFormat.ARGBHalf, RenderTextureReadWrite.Linear);
cmd.CopyTexture(textureID, 0, mipIndex, m_DebugFullScreenTempRT, 0, 0);
}

if (debugMode == m_CurrentDebugDisplaySettings.fullScreenDebugMode)
{
m_FullScreenDebugPushed = true; // We need this flag because otherwise if no fullscreen debug is pushed, when we render the result in RenderDebug the temporary RT will not exist.
cmd.ReleaseTemporaryRT(m_DebugFullScreenTempRT);
cmd.GetTemporaryRT(m_DebugFullScreenTempRT, width >> mipIndex, height >> mipIndex, 0, FilterMode.Point, RenderTextureFormat.RFloat, RenderTextureReadWrite.Linear);
cmd.CopyTexture(textureID, 0, mipIndex, m_DebugFullScreenTempRT, 0, 0);
}

int w = camera.camera.pixelWidth;
int h = camera.camera.pixelHeight;
cmd.ReleaseTemporaryRT(m_CameraColorBuffer);
cmd.ReleaseTemporaryRT(m_CameraSssDiffuseLightingBuffer);
cmd.GetTemporaryRT(m_CameraColorBuffer, w, h, 0, FilterMode.Point, RenderTextureFormat.ARGBHalf, RenderTextureReadWrite.Linear, 1, true); // Enable UAV
cmd.GetTemporaryRT(m_CameraSssDiffuseLightingBuffer, w, h, 0, FilterMode.Point, RenderTextureFormat.RGB111110Float, RenderTextureReadWrite.Linear, 1, true); // Enable UAV
if (NeedTemporarySubsurfaceBuffer())

int s = CalculatePyramidSize(w, h);
m_GaussianPyramidColorBufferDesc.width = s;
m_GaussianPyramidColorBufferDesc.height = s;
cmd.ReleaseTemporaryRT(m_GaussianPyramidColorBuffer);
cmd.ReleaseTemporaryRT(m_DepthPyramidBuffer);
if (!m_Asset.renderingSettings.ShouldUseForwardRenderingOnly())
if (!m_Asset.globalRenderingSettings.ShouldUseForwardRenderingOnly())
m_GbufferManager.InitGBuffers(w, h, m_DeferredMaterial, enableBakeShadowMask, cmd);
CoreUtils.SetRenderTarget(cmd, m_CameraColorBufferRT, m_CameraDepthStencilBufferRT, ClearFlag.Depth);

}
// Clear GBuffers
if (!m_Asset.renderingSettings.ShouldUseForwardRenderingOnly())
if (!m_Asset.globalRenderingSettings.ShouldUseForwardRenderingOnly())
{
using (new ProfilingSample(cmd, "Clear GBuffer", GetSampler(CustomSamplerId.ClearGBuffer)))
{

16
ScriptableRenderPipeline/HDRenderPipeline/HDRenderPipelineAsset.asset


m_EditorClassIdentifier:
m_RenderPipelineResources: {fileID: 11400000, guid: 42086e81f4f0c724f96f7f09cc995354,
type: 2}
renderingSettings:
globalRenderingSettings:
globalTextureSettings:
spotCookieSize: 128
pointCookieSize: 512
reflectionCubemapSize: 128
reflectionCacheCompressed: 0
sssSettings: {fileID: 11400000, guid: 873499ce7a6f749408981f512a9683f7, type: 2}
tileSettings:
enableTileAndCluster: 1

enableClustered: 1
enableFptlForOpaqueWhenClustered: 1
enableFptlForForwardOpaque: 1
diffuseGlobalDimmer: 1
specularGlobalDimmer: 1
textureSettings:
spotCookieSize: 128
pointCookieSize: 512
reflectionCubemapSize: 128
m_DefaultDiffuseMaterial: {fileID: 2100000, guid: 73c176f402d2c2f4d929aa5da7585d17,
type: 2}
m_DefaultShader: {fileID: 4800000, guid: 6e4ae4064600d784cac1e41a9e6f2e59, type: 3}

3
ScriptableRenderPipeline/HDRenderPipeline/HDRenderPipelineAsset.asset.meta


fileFormatVersion: 2
guid: 449281dd2b4fbee49b8397de0541ea3c
timeCreated: 1496931629
licenseType: Pro
externalObjects: {}
mainObjectFileID: 11400000
userData:
assetBundleName:

9
ScriptableRenderPipeline/HDRenderPipeline/HDRenderPipelineAsset.cs


// NOTE: All these properties are public because of how HDRenderPipelineInspector retrieves them via serialization/reflection
// Doing it this way allows changing parameter names while still retrieving the correct serialized values
// Renderer Settings
public RenderingSettings renderingSettings = new RenderingSettings();
// Global Renderer Settings
public GlobalRenderingSettings globalRenderingSettings = new GlobalRenderingSettings();
public GlobalTextureSettings globalTextureSettings = new GlobalTextureSettings();
public SubsurfaceScatteringSettings sssSettings;
public TileSettings tileSettings = new TileSettings();
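// Hedged illustration of the serialization/reflection lookup mentioned in the note above: the
// inspector finds these fields by name on the serialized asset, which is why the renames
// (renderingSettings -> globalRenderingSettings, textureSettings -> globalTextureSettings) must be
// mirrored wherever those names are looked up. "serializedObject" is the inspector's own
// SerializedObject; the property name string below is the new field name introduced by this change.
// var globalRenderingSettingsProp = serializedObject.FindProperty("globalRenderingSettings");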

// Texture Settings
public TextureSettings textureSettings = new TextureSettings();
[SerializeField]
Material m_DefaultDiffuseMaterial;
[SerializeField]

40
ScriptableRenderPipeline/HDRenderPipeline/HDUtils.cs


s_OverlayLineHeight = -1.0f;
}
}
public static Matrix4x4 ComputePixelCoordToWorldSpaceViewDirectionMatrix(float verticalFoV, Vector4 screenSize, Matrix4x4 worldToViewMatrix, bool renderToCubemap)
{
// Compose the view space version first.
// V = -(X, Y, Z), s.t. Z = 1,
// X = (2x / resX - 1) * tan(vFoV / 2) * ar = x * [(2 / resX) * tan(vFoV / 2) * ar] + [-tan(vFoV / 2) * ar] = x * [-m00] + [-m20]
// Y = (2y / resY - 1) * tan(vFoV / 2) = y * [(2 / resY) * tan(vFoV / 2)] + [-tan(vFoV / 2)] = y * [-m11] + [-m21]
float tanHalfVertFoV = Mathf.Tan(0.5f * verticalFoV);
float aspectRatio = screenSize.x * screenSize.w;
// Compose the matrix.
float m21 = tanHalfVertFoV;
float m20 = tanHalfVertFoV * aspectRatio;
float m00 = -2.0f * screenSize.z * m20;
float m11 = -2.0f * screenSize.w * m21;
float m33 = -1.0f;
if (renderToCubemap)
{
// Flip Y.
m11 = -m11;
m21 = -m21;
}
var viewSpaceRasterTransform = new Matrix4x4(new Vector4(m00, 0.0f, 0.0f, 0.0f),
new Vector4(0.0f, m11, 0.0f, 0.0f),
new Vector4(m20, m21, m33, 0.0f),
new Vector4(0.0f, 0.0f, 0.0f, 1.0f));
// Remove the translation component.
var homogeneousZero = new Vector4(0, 0, 0, 1);
worldToViewMatrix.SetColumn(3, homogeneousZero);
// Flip the Z to make the coordinate system left-handed.
worldToViewMatrix.SetRow(2, -worldToViewMatrix.GetRow(2));
// Transpose for HLSL.
return Matrix4x4.Transpose(worldToViewMatrix.transpose * viewSpaceRasterTransform);
}
}
}
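As a hedged, self-contained sketch of the per-pixel math the comments above describe (helper and parameter names are illustrative, not part of HDUtils; assumes UnityEngine.Mathf and Vector3):

static Vector3 ViewDirVS(float x, float y, float resX, float resY, float verticalFoV)
{
    // X = (2x / resX - 1) * tan(vFoV / 2) * aspect, Y = (2y / resY - 1) * tan(vFoV / 2), V = -(X, Y, Z) with Z = 1
    float tanHalfVertFoV = Mathf.Tan(0.5f * verticalFoV);
    float aspectRatio    = resX / resY;
    float X = (2.0f * x / resX - 1.0f) * tanHalfVertFoV * aspectRatio;
    float Y = (2.0f * y / resY - 1.0f) * tanHalfVertFoV;
    return -new Vector3(X, Y, 1.0f);
}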

6
ScriptableRenderPipeline/HDRenderPipeline/Lighting/Deferred.shader


// input.positionCS is SV_Position
PositionInputs posInput = GetPositionInput(input.positionCS.xy, _ScreenSize.zw, uint2(input.positionCS.xy) / GetTileSize());
float depth = LOAD_TEXTURE2D(_MainDepthTexture, posInput.unPositionSS).x;
float depth = LOAD_TEXTURE2D(_MainDepthTexture, posInput.positionSS).x;
DECODE_FROM_GBUFFER(posInput.unPositionSS, MATERIAL_FEATURE_MASK_FLAGS, bsdfData, bakeLightingData.bakeDiffuseLighting);
DECODE_FROM_GBUFFER(posInput.positionSS, MATERIAL_FEATURE_MASK_FLAGS, bsdfData, bakeLightingData.bakeDiffuseLighting);
DecodeShadowMask(LOAD_TEXTURE2D(_ShadowMaskTexture, posInput.unPositionSS), bakeLightingData.bakeShadowMask);
DecodeShadowMask(LOAD_TEXTURE2D(_ShadowMaskTexture, posInput.positionSS), bakeLightingData.bakeShadowMask);
#endif
PreLightData preLightData = GetPreLightData(V, posInput, bsdfData);

6
ScriptableRenderPipeline/HDRenderPipeline/Lighting/TilePass/Deferred.compute


PositionInputs posInput = GetPositionInput(pixelCoord.xy, _ScreenSize.zw, tileCoord);
float depth = LOAD_TEXTURE2D(_MainDepthTexture, posInput.unPositionSS).x;
float depth = LOAD_TEXTURE2D(_MainDepthTexture, posInput.positionSS).x;
// For indirect case: we can still overlap inside a tile with the sky/background, reject it
// Can't rely on stencil as we are in compute shader

BSDFData bsdfData;
BakeLightingData bakeLightingData;
DECODE_FROM_GBUFFER(posInput.unPositionSS, featureFlags, bsdfData, bakeLightingData.bakeDiffuseLighting);
DECODE_FROM_GBUFFER(posInput.positionSS, featureFlags, bsdfData, bakeLightingData.bakeDiffuseLighting);
DecodeShadowMask(LOAD_TEXTURE2D(_ShadowMaskTexture, posInput.unPositionSS), bakeLightingData.bakeShadowMask);
DecodeShadowMask(LOAD_TEXTURE2D(_ShadowMaskTexture, posInput.positionSS), bakeLightingData.bakeShadowMask);
#endif
PreLightData preLightData = GetPreLightData(V, posInput, bsdfData);

2
ScriptableRenderPipeline/HDRenderPipeline/Lighting/TilePass/DeferredDirectionalShadow.compute


PositionInputs posInput = GetPositionInput(pixelCoord.xy, _ScreenSize.zw, tileCoord);
float depth = LOAD_TEXTURE2D(_MainDepthTexture, posInput.unPositionSS).x;
float depth = LOAD_TEXTURE2D(_MainDepthTexture, posInput.positionSS).x;
UpdatePositionInput(depth, UNITY_MATRIX_I_VP, UNITY_MATRIX_VP, posInput);
ShadowContext shadowContext = InitShadowContext();

4
ScriptableRenderPipeline/HDRenderPipeline/Lighting/TilePass/Shadow.hlsl


return EvalShadow_CascadedDepth_Blend( shadowContext, algo, tex, compSamp, positionWS, normalWS, shadowDataIndex, L );
}
float GetDirectionalShadowAttenuation( ShadowContext shadowContext, float3 positionWS, float3 normalWS, int shadowDataIndex, float3 L, float2 unPositionSS )
float GetDirectionalShadowAttenuation( ShadowContext shadowContext, float3 positionWS, float3 normalWS, int shadowDataIndex, float3 L, float2 positionSS )
{
return GetDirectionalShadowAttenuation( shadowContext, positionWS, normalWS, shadowDataIndex, L );
}

return EvalShadow_PunctualDepth( shadowContext, algo, tex, compSamp, positionWS, normalWS, shadowDataIndex, L );
#endif
}
float GetPunctualShadowAttenuation( ShadowContext shadowContext, float3 positionWS, float3 normalWS, int shadowDataIndex, float4 L, float2 unPositionSS )
float GetPunctualShadowAttenuation( ShadowContext shadowContext, float3 positionWS, float3 normalWS, int shadowDataIndex, float4 L, float2 positionSS )
{
return GetPunctualShadowAttenuation( shadowContext, positionWS, normalWS, shadowDataIndex, L );
}

143
ScriptableRenderPipeline/HDRenderPipeline/Lighting/TilePass/TilePass.cs


using UnityEngine.Rendering;
using System.Collections.Generic;
using System;

RenderTargetIdentifier[] tex;
sc.GetTex2DArrays(out tex, out offset, out count);
// bind buffers
cb.SetGlobalBuffer(HDShaderIDs._ShadowDatasExp, s_ShadowDataBuffer);
cb.SetGlobalBuffer(HDShaderIDs._ShadowPayloads, s_ShadowPayloadBuffer);
// bind textures
cb.SetGlobalTexture(HDShaderIDs._ShadowmapExp_VSM_0, tex[0]);
cb.SetGlobalTexture(HDShaderIDs._ShadowmapExp_VSM_1, tex[1]);
cb.SetGlobalTexture(HDShaderIDs._ShadowmapExp_VSM_2, tex[2]);
cb.SetGlobalTexture(HDShaderIDs._ShadowmapExp_PCF, tex[3]);
// TODO: Currently samplers are hard coded in ShadowContext.hlsl, so we can't really set them here
};

// clustered light list specific buffers and data begin
public bool enableBigTilePrepass;
[Range(0.0f, 1.0f)]
public float diffuseGlobalDimmer = 1.0f;
[Range(0.0f, 1.0f)]
public float specularGlobalDimmer = 1.0f;
public enum TileClusterDebug : int
{
None,

enableFptlForForwardOpaque = true;
enableBigTilePrepass = true;
diffuseGlobalDimmer = 1.0f;
specularGlobalDimmer = 1.0f;
}
}

static Texture2DArray m_DefaultTexture2DArray;
static Cubemap m_DefaultTextureCube;
TextureCacheCubemap m_CubeReflTexArray;
int m_CubeReflTexArraySize = 128;
ReflectionProbeCache m_ReflectionProbeCache;
int m_ReflectionProbeCacheSize = 128;
LightingSettings m_LightingSettings = new LightingSettings();
public class LightList
{

}
public void Build( RenderPipelineResources renderPipelineResources,
RenderingSettings renderingSettings,
GlobalRenderingSettings renderingSettings,
TextureSettings textureSettings,
ShadowInitParameters shadowInit, ShadowSettings shadowSettings)
GlobalTextureSettings textureSettings,
ShadowInitParameters shadowInit, ShadowSettings shadowSettings, IBLFilterGGX iblFilterGGX)
{
// Deferred opaque are always using Fptl. Forward opaque can use Fptl or Cluster, transparent use cluster.
// When MSAA is enabled we disable Fptl as it become expensive compare to cluster

m_CookieTexArray.AllocTextureArray(m_CookieTexArraySize, textureSettings.spotCookieSize, textureSettings.spotCookieSize, TextureFormat.RGBA32, true);
m_CubeCookieTexArray = new TextureCacheCubemap();
m_CubeCookieTexArray.AllocTextureArray(m_CubeCookieTexArraySize, textureSettings.pointCookieSize, TextureFormat.RGBA32, true);
m_CubeReflTexArray = new TextureCacheCubemap();
m_CubeReflTexArray.AllocTextureArray(m_CubeReflTexArraySize, textureSettings.reflectionCubemapSize, TextureCache.GetPreferredHdrCompressedTextureFormat, true);
TextureFormat probeCacheFormat = textureSettings.reflectionCacheCompressed ? TextureFormat.BC6H : TextureFormat.RGBAHalf;
m_ReflectionProbeCache = new ReflectionProbeCache(iblFilterGGX, m_ReflectionProbeCacheSize, textureSettings.reflectionCubemapSize, probeCacheFormat, true);
s_GenAABBKernel = buildScreenAABBShader.FindKernel("ScreenBoundsAABB");

{
for (int shadowMask = 0; shadowMask < 2; ++shadowMask)
{
for (int debugDisplay = 0; debugDisplay < 2; ++debugDisplay)
{
int index = GetDeferredLightingMaterialIndex(outputSplitLighting, lightLoopTilePass, shadowMask, debugDisplay);
m_deferredLightingMaterial[index] = CoreUtils.CreateEngineMaterial(m_Resources.deferredShader);
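// Hedged sketch of how a four-flag variant index like the one above is commonly packed; the actual
// bit order used by GetDeferredLightingMaterialIndex is not visible in this diff, so treat this as
// an assumption rather than the real implementation.
static int VariantIndex(bool outputSplitLighting, bool tilePass, bool shadowMask, bool debugDisplay)
{
    return (outputSplitLighting ? 1 : 0) | (tilePass ? 2 : 0) | (shadowMask ? 4 : 0) | (debugDisplay ? 8 : 0);
}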

CoreUtils.SafeRelease(s_EnvLightDatas);
CoreUtils.SafeRelease(s_shadowDatas);
if (m_CubeReflTexArray != null)
if (m_ReflectionProbeCache != null)
m_CubeReflTexArray.Release();
m_CubeReflTexArray = null;
m_ReflectionProbeCache.Release();
m_ReflectionProbeCache = null;
}
if (m_CookieTexArray != null)
{

{
for (int shadowMask = 0; shadowMask < 2; ++shadowMask)
{
for (int debugDisplay = 0; debugDisplay < 2; ++debugDisplay)
{
}
}
CoreUtils.Destroy(m_DebugViewTilesMaterial);
}

m_CookieTexArray.NewFrame();
m_CubeCookieTexArray.NewFrame();
m_CubeReflTexArray.NewFrame();
m_ReflectionProbeCache.NewFrame();
}
public bool NeedResize()

return new Vector3(light.finalColor.r, light.finalColor.g, light.finalColor.b);
}
public bool GetDirectionalLightData(ShadowSettings shadowSettings, GPULightType gpuLightType, VisibleLight light, HDAdditionalLightData additionalData, AdditionalShadowData additionalShadowData, int lightIndex)
public bool GetDirectionalLightData(CommandBuffer cmd, ShadowSettings shadowSettings, GPULightType gpuLightType, VisibleLight light, HDAdditionalLightData additionalData, AdditionalShadowData additionalShadowData, int lightIndex)
float diffuseDimmer = m_TileSettings.diffuseGlobalDimmer * additionalData.lightDimmer;
float specularDimmer = m_TileSettings.specularGlobalDimmer * additionalData.lightDimmer;
float diffuseDimmer = m_LightingSettings.diffuseGlobalDimmer * additionalData.lightDimmer;
float specularDimmer = m_LightingSettings.specularGlobalDimmer * additionalData.lightDimmer;
if (diffuseDimmer <= 0.0f && specularDimmer <= 0.0f)
return false;

if (light.light.cookie != null)
{
directionalLightData.tileCookie = light.light.cookie.wrapMode == TextureWrapMode.Repeat;
directionalLightData.cookieIndex = m_CookieTexArray.FetchSlice(light.light.cookie);
directionalLightData.cookieIndex = m_CookieTexArray.FetchSlice(cmd, light.light.cookie);
}
// fix up shadow information
int shadowIdx;

return 1.0f - Mathf.Clamp01(distanceToCamera * scale + bias);
}
public bool GetLightData(ShadowSettings shadowSettings, Camera camera, GPULightType gpuLightType, VisibleLight light, HDAdditionalLightData additionalLightData, AdditionalShadowData additionalshadowData, int lightIndex)
public bool GetLightData(CommandBuffer cmd, ShadowSettings shadowSettings, Camera camera, GPULightType gpuLightType, VisibleLight light, HDAdditionalLightData additionalLightData, AdditionalShadowData additionalshadowData, int lightIndex)
{
var lightData = new LightData();

float distanceFade = ComputeLinearDistanceFade(distanceToCamera, additionalLightData.fadeDistance);
float lightScale = additionalLightData.lightDimmer * distanceFade;
lightData.diffuseScale = additionalLightData.affectDiffuse ? lightScale * m_TileSettings.diffuseGlobalDimmer : 0.0f;
lightData.specularScale = additionalLightData.affectSpecular ? lightScale * m_TileSettings.specularGlobalDimmer : 0.0f;
lightData.diffuseScale = additionalLightData.affectDiffuse ? lightScale * m_LightingSettings.diffuseGlobalDimmer : 0.0f;
lightData.specularScale = additionalLightData.affectSpecular ? lightScale * m_LightingSettings.specularGlobalDimmer : 0.0f;
if (lightData.diffuseScale <= 0.0f && lightData.specularScale <= 0.0f)
return false;

switch (light.lightType)
{
case LightType.Spot:
lightData.cookieIndex = m_CookieTexArray.FetchSlice(light.light.cookie);
lightData.cookieIndex = m_CookieTexArray.FetchSlice(cmd, light.light.cookie);
lightData.cookieIndex = m_CubeCookieTexArray.FetchSlice(light.light.cookie);
lightData.cookieIndex = m_CubeCookieTexArray.FetchSlice(cmd, light.light.cookie);
break;
}
}

lightData.cookieIndex = m_CookieTexArray.FetchSlice(Texture2D.whiteTexture);
lightData.cookieIndex = m_CookieTexArray.FetchSlice(cmd, Texture2D.whiteTexture);
}
if (additionalshadowData)

m_lightList.lightVolumes.Add(lightVolumeData);
}
public void GetEnvLightData(VisibleReflectionProbe probe)
public bool GetEnvLightData(CommandBuffer cmd, Camera camera, VisibleReflectionProbe probe)
// For now we won't display real time probe when rendering one.
// TODO: We may want to display last frame result but in this case we need to be careful not to update the atlas before all realtime probes are rendered (for frame coherency).
// Unfortunately we don't have this information at the moment.
if (probe.probe.mode == ReflectionProbeMode.Realtime && camera.cameraType == CameraType.Reflection)
return false;
int envIndex = m_ReflectionProbeCache.FetchSlice(cmd, probe.texture);
// -1 means that the texture is not ready yet (ie not convolved/compressed yet)
if (envIndex == -1)
return false;
var envLightData = new EnvLightData();
// CAUTION: localToWorld is the transform of the reflection probe's widget, i.e. the world position of the point used to do the cubemap capture (meaning it includes the local offset)

float maxBlendDist = Mathf.Min(probe.bounds.extents.x, Mathf.Min(probe.bounds.extents.y, probe.bounds.extents.z));
float blendDistance = Mathf.Min(maxBlendDist, probe.blendDistance);
envLightData.innerDistance = probe.bounds.extents - new Vector3(blendDistance, blendDistance, blendDistance);
envLightData.envIndex = m_CubeReflTexArray.FetchSlice(probe.texture);
envLightData.envIndex = envIndex;
return true;
}
public void GetEnvLightVolumeDataAndBound(VisibleReflectionProbe probe, LightVolumeType lightVolumeType, Matrix4x4 worldToView)

}
// Return true if BakedShadowMask are enabled
public bool PrepareLightsForGPU(ShadowSettings shadowSettings, CullResults cullResults, Camera camera)
public bool PrepareLightsForGPU(CommandBuffer cmd, ShadowSettings shadowSettings, CullResults cullResults, Camera camera)
{
using (new ProfilingSample(cmd, "Prepare Lights For GPU"))
// If any light requires it, we need to enable the baked shadow mask feature
m_enableBakeShadowMask = false;

}
}
float oldSpecularGlobalDimmer = m_TileSettings.specularGlobalDimmer;
float oldSpecularGlobalDimmer = m_LightingSettings.specularGlobalDimmer;
m_TileSettings.specularGlobalDimmer = 0.0f;
m_LightingSettings.specularGlobalDimmer = 0.0f;
}
// 1. Count the number of lights and sort all lights by category, type and volume - This is required for the fptl/cluster shader code

// Directional rendering side, it is separated as it is always visible so no volume to handle here
if (gpuLightType == GPULightType.Directional)
{
if (GetDirectionalLightData(shadowSettings, gpuLightType, light, additionalLightData, additionalShadowData, lightIndex))
if (GetDirectionalLightData(cmd, shadowSettings, gpuLightType, light, additionalLightData, additionalShadowData, lightIndex))
{
directionalLightcount++;

}
// Punctual, area, projector lights - the rendering side.
if (GetLightData(shadowSettings, camera, gpuLightType, light, additionalLightData, additionalShadowData, lightIndex))
if (GetLightData(cmd, shadowSettings, camera, gpuLightType, light, additionalLightData, additionalShadowData, lightIndex))
{
switch (lightCategory)
{

VisibleReflectionProbe probe = cullResults.visibleReflectionProbes[probeIndex];
GetEnvLightData(probe);
if (GetEnvLightData(cmd, camera, probe))
{
GetEnvLightVolumeDataAndBound(probe, lightVolumeType, worldToView);
// We make the light position camera-relative as late as possible in order

}
}
// Sanity check
Debug.Assert(m_lightList.envLights.Count == envLightCount);
}
m_TileSettings.specularGlobalDimmer = oldSpecularGlobalDimmer;
m_LightingSettings.specularGlobalDimmer = oldSpecularGlobalDimmer;
}
m_lightCount = m_lightList.lights.Count + m_lightList.envLights.Count;

m_maxShadowDistance = shadowSettings.maxShadowDistance;
return m_enableBakeShadowMask;
}
}
void VoxelLightListGeneration(CommandBuffer cmd, Camera camera, Matrix4x4 projscr, Matrix4x4 invProjscr, RenderTargetIdentifier cameraDepthBufferRT)

}
// Cluster
VoxelLightListGeneration(cmd, camera, projscr, invProjscr, cameraDepthBufferRT);
if (enableFeatureVariants)
{

cmd.SetGlobalTexture(HDShaderIDs._CookieTextures, m_CookieTexArray.GetTexCache());
cmd.SetGlobalTexture(HDShaderIDs._CookieCubeTextures, m_CubeCookieTexArray.GetTexCache());
cmd.SetGlobalTexture(HDShaderIDs._EnvTextures, m_CubeReflTexArray.GetTexCache());
cmd.SetGlobalTexture(HDShaderIDs._EnvTextures, m_ReflectionProbeCache.GetTexCache());
cmd.SetGlobalBuffer(HDShaderIDs._DirectionalLightDatas, s_DirectionalLightDatas);
cmd.SetGlobalInt(HDShaderIDs._DirectionalLightCount, m_lightList.directionalLights.Count);

cmd.SetGlobalInt(HDShaderIDs._NumTileClusteredX, GetNumTileClusteredX(camera));
cmd.SetGlobalInt(HDShaderIDs._NumTileClusteredY, GetNumTileClusteredY(camera));
if (m_TileSettings.enableBigTilePrepass)
{
cmd.SetGlobalFloat(HDShaderIDs.g_fClustScale, m_ClustScale);
cmd.SetGlobalFloat(HDShaderIDs.g_fClustBase, k_ClustLogBase);
cmd.SetGlobalFloat(HDShaderIDs.g_fNearPlane, camera.nearClipPlane);

cmd.SetGlobalBuffer(HDShaderIDs.g_vLayeredOffsetsBuffer, s_PerVoxelOffset);
if (k_UseDepthBuffer)
{
}
}
}
}
}
}

else // Pixel shader evaluation
{
int index = GetDeferredLightingMaterialIndex( options.outputSplitLighting ? 1 : 0,
m_TileSettings.enableTileAndCluster ? 1 : 0,
m_enableBakeShadowMask ? 1 : 0,
debugDisplaySettings.IsDebugDisplayEnabled() ? 1 : 0);
Material currentLightingMaterial = m_deferredLightingMaterial[index];

87
ScriptableRenderPipeline/HDRenderPipeline/Lighting/TilePass/TilePassLoop.hlsl


// Calculate the offset in global light index light for current light category
int GetTileOffset(PositionInputs posInput, uint lightCategory)
{
uint2 tileIndex = posInput.unTileCoord;
uint2 tileIndex = posInput.tileCoord;
return (tileIndex.y + lightCategory * _NumTileFtplY) * _NumTileFtplX + tileIndex.x;
}

void GetCountAndStartCluster(PositionInputs posInput, uint lightCategory, out uint start, out uint lightCount)
{
uint2 tileIndex = posInput.unTileCoord;
uint2 tileIndex = posInput.tileCoord;
float logBase = g_fClustBase;
if (g_isLogBaseBufferEnabled)

#endif // LIGHTLOOP_TILE_PASS
LightData FetchLight(uint start, uint i)
{
#ifdef LIGHTLOOP_TILE_PASS
int j = FetchIndex(start, i);
#else
int j = start + i;
#endif
return _LightDatas[j];
}
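// For clarity, a hedged C# analogue of the FetchIndex/FetchLight indirection above: with the
// tile/cluster path the i-th light of a tile is looked up through a per-tile index list, while the
// non-tiled path walks the global light array directly. Parameter names are illustrative only.
static LightData Fetch(LightData[] lights, int[] lightIndexList, bool useTileList, int start, int i)
{
    return useTileList ? lights[lightIndexList[start + i]] : lights[start + i];
}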
// bakeDiffuseLighting is part of the prototype so a user is able to implement a "base pass" with GI and multipass direct light (aka old unity rendering path)
void LightLoop( float3 V, PositionInputs posInput, PreLightData preLightData, BSDFData bsdfData, BakeLightingData bakeLightingData, uint featureFlags,
out float3 diffuseLighting,

if (featureFlags & LIGHTFEATUREFLAGS_PUNCTUAL)
{
#ifdef LIGHTLOOP_TILE_PASS
uint lightCount, lightStart;
// TODO: Convert the for loop below to a while on each type as we know we are sorted and compare performance.
uint punctualLightStart;
uint punctualLightCount;
GetCountAndStart(posInput, LIGHTCATEGORY_PUNCTUAL, punctualLightStart, punctualLightCount);
#ifdef LIGHTLOOP_TILE_PASS
GetCountAndStart(posInput, LIGHTCATEGORY_PUNCTUAL, lightStart, lightCount);
#else
lightCount = _PunctualLightCount;
lightStart = 0;
#endif
for (i = 0; i < punctualLightCount; ++i)
for (i = 0; i < lightCount; i++)
int punctualIndex = FetchIndex(punctualLightStart, i);
DirectLighting lighting = EvaluateBSDF_Punctual(context, V, posInput, preLightData, _LightDatas[punctualIndex], bsdfData, bakeLightingData);
AccumulateDirectLighting(lighting, aggregateLighting);
}
#else
LightData lightData = FetchLight(lightStart, i);
for (i = 0; i < _PunctualLightCount; ++i)
{
DirectLighting lighting = EvaluateBSDF_Punctual(context, V, posInput, preLightData, _LightDatas[i], bsdfData, bakeLightingData);
DirectLighting lighting = EvaluateBSDF_Punctual(context, V, posInput, preLightData, lightData, bsdfData, bakeLightingData);
#endif
#ifdef LIGHTLOOP_TILE_PASS
uint lightCount, lightStart;
uint areaLightStart;
uint areaLightCount;
GetCountAndStart(posInput, LIGHTCATEGORY_AREA, areaLightStart, areaLightCount);
#ifdef LIGHTLOOP_TILE_PASS
GetCountAndStart(posInput, LIGHTCATEGORY_AREA, lightStart, lightCount);
#else
lightCount = _AreaLightCount;
lightStart = _PunctualLightCount;
#endif
// COMPILER BEHAVIOR WARNING!
// If rectangle lights are before line lights, the compiler will duplicate light matrices in VGPR because they are used differently between the two types of lights.

i = 0;
if (areaLightCount > 0)
if (lightCount > 0)
uint areaIndex = FetchIndex(areaLightStart, 0);
uint lightType = _LightDatas[areaIndex].lightType;
i = 0;
uint last = lightCount - 1;
LightData lightData = FetchLight(lightStart, i);
while (i < areaLightCount && lightType == GPULIGHTTYPE_LINE)
while (i <= last && lightData.lightType == GPULIGHTTYPE_LINE)
DirectLighting lighting = EvaluateBSDF_Area(context, V, posInput, preLightData, _LightDatas[areaIndex], bsdfData, bakeLightingData, GPULIGHTTYPE_LINE);
DirectLighting lighting = EvaluateBSDF_Line(context, V, posInput, preLightData, lightData, bsdfData, bakeLightingData);
i++;
areaIndex = i < areaLightCount ? FetchIndex(areaLightStart, i) : 0;
lightType = i < areaLightCount ? _LightDatas[areaIndex].lightType : 0xFF;
lightData = FetchLight(lightStart, min(++i, last));
while (i < areaLightCount && lightType == GPULIGHTTYPE_RECTANGLE)
while (i <= last && lightData.lightType == GPULIGHTTYPE_RECTANGLE)
DirectLighting lighting = EvaluateBSDF_Area(context, V, posInput, preLightData, _LightDatas[areaIndex], bsdfData, bakeLightingData, GPULIGHTTYPE_RECTANGLE);
DirectLighting lighting = EvaluateBSDF_Rect(context, V, posInput, preLightData, lightData, bsdfData, bakeLightingData);
i++;
areaIndex = i < areaLightCount ? FetchIndex(areaLightStart, i) : 0;
lightType = i < areaLightCount ? _LightDatas[areaIndex].lightType : 0xFF;
lightData = FetchLight(lightStart, min(++i, last));
#else
for (i = _PunctualLightCount; i < _PunctualLightCount + _AreaLightCount; ++i)
{
DirectLighting lighting = EvaluateBSDF_Area(context, V, posInput, preLightData, _LightDatas[i], bsdfData, bakeLightingData, _LightDatas[i].lightType);
AccumulateDirectLighting(lighting, aggregateLighting);
}
#endif
}
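// Hedged C# analogue of the type-sorted area-light iteration above: because the per-tile list is
// sorted by type, all line lights can be consumed first, then all rectangle lights, without testing
// the type inside one mixed loop. EvaluateLine/EvaluateRect and the constants are illustrative,
// not the shader's GPULIGHTTYPE_* values.
static void EvaluateAreaLights(LightData[] lights, int start, int count)
{
    const uint kLine = 0, kRectangle = 1; // illustrative values
    int i = 0;
    while (i < count && lights[start + i].lightType == kLine)      { EvaluateLine(lights[start + i]); i++; }
    while (i < count && lights[start + i].lightType == kRectangle) { EvaluateRect(lights[start + i]); i++; }
}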
float reflectionHierarchyWeight = 0.0; // Max: 1.0

2
ScriptableRenderPipeline/HDRenderPipeline/Lighting/TilePass/materialflags.compute


if (UnpackByte(LOAD_TEXTURE2D(_StencilTexture, uCrd).r) != STENCILLIGHTINGUSAGE_NO_LIGHTING) // This tests whether we are the sky/background or not
{
PositionInputs posInput = GetPositionInput(uCrd, invScreenSize);
materialFeatureFlags |= MATERIAL_FEATURE_FLAGS_FROM_GBUFFER(posInput.unPositionSS);
materialFeatureFlags |= MATERIAL_FEATURE_FLAGS_FROM_GBUFFER(posInput.positionSS);
}
}

2
ScriptableRenderPipeline/HDRenderPipeline/Material/LayeredLit/LayeredLitData.hlsl


void GetSurfaceAndBuiltinData(FragInputs input, float3 V, inout PositionInputs posInput, out SurfaceData surfaceData, out BuiltinData builtinData)
{
#ifdef LOD_FADE_CROSSFADE // enable dithering LOD transition if user select CrossFade transition in LOD group
LODDitheringTransition(posInput.unPositionSS, unity_LODFade.x);
LODDitheringTransition(posInput.positionSS, unity_LODFade.x);
#endif
ApplyDoubleSidedFlipOrMirror(input); // Apply double sided flip on the vertex normal

48
ScriptableRenderPipeline/HDRenderPipeline/Material/Lit/Lit.hlsl


uint2 depthSize = uint2(_PyramidDepthMipSize.xy);
// Get the depth of the approximated back plane
float pyramidDepth = LOAD_TEXTURE2D_LOD(_PyramidDepthTexture, posInputs.positionSS * (depthSize >> 2), 2).r;
float pyramidDepth = LOAD_TEXTURE2D_LOD(_PyramidDepthTexture, posInputs.positionNDC * (depthSize >> 2), 2).r;
float depth = LinearEyeDepth(pyramidDepth, _ZBufferParams);
// Distance from point to the back plane

// Must be in sync with RT declared in HDRenderPipeline.cs ::Rebuild
void EncodeIntoGBuffer( SurfaceData surfaceData,
float3 bakeDiffuseLighting,
uint2 unPositionSS,
uint2 positionSS,
#if SHADEROPTIONS_PACK_GBUFFER_IN_U16
out GBufferType0 outGBufferU0,
out GBufferType1 outGBufferU1

}
void DecodeFromGBuffer(
uint2 unPositionSS,
uint2 positionSS,
GBufferType0 inGBufferU0 = LOAD_TEXTURE2D(_GBufferTexture0, unPositionSS);
GBufferType1 inGBufferU1 = LOAD_TEXTURE2D(_GBufferTexture1, unPositionSS);
GBufferType0 inGBufferU0 = LOAD_TEXTURE2D(_GBufferTexture0, positionSS);
GBufferType1 inGBufferU1 = LOAD_TEXTURE2D(_GBufferTexture1, positionSS);
GBufferType0 inGBuffer0 = LOAD_TEXTURE2D(_GBufferTexture0, unPositionSS);
GBufferType1 inGBuffer1 = LOAD_TEXTURE2D(_GBufferTexture1, unPositionSS);
GBufferType2 inGBuffer2 = LOAD_TEXTURE2D(_GBufferTexture2, unPositionSS);
GBufferType3 inGBuffer3 = LOAD_TEXTURE2D(_GBufferTexture3, unPositionSS);
GBufferType0 inGBuffer0 = LOAD_TEXTURE2D(_GBufferTexture0, positionSS);
GBufferType1 inGBuffer1 = LOAD_TEXTURE2D(_GBufferTexture1, positionSS);
GBufferType2 inGBuffer2 = LOAD_TEXTURE2D(_GBufferTexture2, positionSS);
GBufferType3 inGBuffer3 = LOAD_TEXTURE2D(_GBufferTexture3, positionSS);
#endif
ZERO_INITIALIZE(BSDFData, bsdfData);

// Function call from the material classification compute shader
// Note that as we store materialId on two buffer (for anisotropy case), the code need to load 2 RGBA8 buffer
uint MaterialFeatureFlagsFromGBuffer(uint2 unPositionSS)
uint MaterialFeatureFlagsFromGBuffer(uint2 positionSS)
unPositionSS,
positionSS,
UINT_MAX,
bsdfData,
unused

[branch] if (lightData.shadowIndex >= 0)
{
#ifdef _SURFACE_TYPE_TRANSPARENT
shadow = GetDirectionalShadowAttenuation(lightLoopContext.shadowContext, positionWS, N, lightData.shadowIndex, L, posInput.unPositionSS);
shadow = GetDirectionalShadowAttenuation(lightLoopContext.shadowContext, positionWS, N, lightData.shadowIndex, L, posInput.positionSS);
shadow = LOAD_TEXTURE2D(_DeferredShadowTexture, posInput.unPositionSS).x;
shadow = LOAD_TEXTURE2D(_DeferredShadowTexture, posInput.positionSS).x;
#endif
#ifdef SHADOWS_SHADOWMASK

// TODO: make projector lights cast shadows.
float3 offset = float3(0.0, 0.0, 0.0); // GetShadowPosOffset(nDotL, normal);
float4 L_dist = float4(L, sqrt(distSq));
shadow = GetPunctualShadowAttenuation(lightLoopContext.shadowContext, positionWS + offset, N, lightData.shadowIndex, L_dist, posInput.unPositionSS);
shadow = GetPunctualShadowAttenuation(lightLoopContext.shadowContext, positionWS + offset, N, lightData.shadowIndex, L_dist, posInput.positionSS);
#ifdef SHADOWS_SHADOWMASK
// Note: Legacy Unity has two shadow mask modes. ShadowMask (the ShadowMask contains static objects' shadows and the ShadowMap contains only dynamic objects' shadows; the final result is the minimum of both values)
// and ShadowMask_Distance (the ShadowMask contains static objects' shadows and the ShadowMap contains everything, blended with the ShadowMask based on distance (the global distance set up in QualitySettings)).
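// Hedged illustration of the two behaviours described above (variable names illustrative):
//   ShadowMask:          shadow = min(dynamicShadow, bakedShadowMask);
//   ShadowMask_Distance: shadow = lerp(dynamicShadow, bakedShadowMask, distanceFade);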

#endif
}
// Projector lights always have a cookies, so we can perform clipping inside the if().
// Projector lights always have cookies, so we can perform clipping inside the if().
[branch] if (lightData.cookieIndex >= 0)
{
float3 lightToSample = positionWS - lightData.positionWS;

return lighting;
}
DirectLighting EvaluateBSDF_Area( LightLoopContext lightLoopContext,
float3 V, PositionInputs posInput,
PreLightData preLightData, LightData lightData, BSDFData bsdfData, BakeLightingData bakeLightingData, int GPULightType)
{
if (GPULightType == GPULIGHTTYPE_LINE)
{
return EvaluateBSDF_Line(lightLoopContext, V, posInput, preLightData, lightData, bsdfData, bakeLightingData);
}
else
{
return EvaluateBSDF_Rect(lightLoopContext, V, posInput, preLightData, lightData, bsdfData, bakeLightingData);
}
}
//-----------------------------------------------------------------------------
// EvaluateBSDF_SSLighting for screen space lighting
// ----------------------------------------------------------------------------

// Calculate screen space coordinates of refracted point in back plane
float4 refractedBackPointCS = mul(UNITY_MATRIX_VP, float4(refractedBackPointWS, 1.0));
float2 refractedBackPointSS = ComputeScreenSpacePosition(refractedBackPointCS);
float2 refractedBackPointSS = ComputeNormalizedDeviceCoordinates(refractedBackPointCS);
uint2 depthSize = uint2(_PyramidDepthMipSize.xy);
float refractedBackPointDepth = LinearEyeDepth(LOAD_TEXTURE2D_LOD(_PyramidDepthTexture, refractedBackPointSS * depthSize, 0).r, _ZBufferParams);

// Ambient occlusion use for indirect lighting (reflection probe, baked diffuse lighting)
#ifndef _SURFACE_TYPE_TRANSPARENT
float indirectAmbientOcclusion = 1.0 - LOAD_TEXTURE2D(_AmbientOcclusionTexture, posInput.unPositionSS).x;
float indirectAmbientOcclusion = 1.0 - LOAD_TEXTURE2D(_AmbientOcclusionTexture, posInput.positionSS).x;
// Ambient occlusion use for direct lighting (directional, punctual, area)
float directAmbientOcclusion = lerp(1.0, indirectAmbientOcclusion, _AmbientOcclusionParam.w);
#else

2
ScriptableRenderPipeline/HDRenderPipeline/Material/Lit/LitData.hlsl


void GetSurfaceAndBuiltinData(FragInputs input, float3 V, inout PositionInputs posInput, out SurfaceData surfaceData, out BuiltinData builtinData)
{
#ifdef LOD_FADE_CROSSFADE // enable dithering LOD transition if user select CrossFade transition in LOD group
LODDitheringTransition(posInput.unPositionSS, unity_LODFade.x);
LODDitheringTransition(posInput.positionSS, unity_LODFade.x);
#endif
ApplyDoubleSidedFlipOrMirror(input); // Apply double sided flip on the vertex normal

4
ScriptableRenderPipeline/HDRenderPipeline/Material/Lit/Resources/CopyStencilBuffer.shader


[earlydepthstencil]
float4 Frag(Varyings input) : SV_Target // use SV_StencilRef in D3D 11.3+
{
uint2 positionSS = (uint2)input.positionCS.xy;
uint2 positionNDC = (uint2)input.positionCS.xy;
_HTile[positionSS / 8] = _StencilRef;
_HTile[positionNDC / 8] = _StencilRef;
#endif
return PackByte(_StencilRef);

6
ScriptableRenderPipeline/HDRenderPipeline/Material/Lit/Resources/SubsurfaceScattering.compute


#if SSS_USE_TANGENT_PLANE
float3 relPosVS = vec.x * tangentX + vec.y * tangentY;
float3 positionVS = centerPosVS + relPosVS;
float2 positionSS = ComputeScreenSpacePosition(positionCS, projMatrix);
float2 positionNDC = ComputeNormalizedDeviceCoordinates(positionCS, projMatrix);
position = (int2)(positionSS * _ScreenSize.xy);
position = (int2)(positionNDC * _ScreenSize.xy);
xy2 = dot(relPosVS.xy, relPosVS.xy);
#else
position = (int2)(centerCoord + vec * pixelsPerMm);

float maxDistance = _ShapeParams[profileID].a;
// Reconstruct the view-space position corresponding to the central sample.
float2 centerPosSS = posInput.positionSS;
float2 centerPosSS = posInput.positionNDC;
float2 cornerPosSS = centerPosSS + 0.5 * _ScreenSize.zw;
float3 centerPosVS = ComputeViewSpacePosition(centerPosSS, centerDepth, UNITY_MATRIX_I_P);
float3 cornerPosVS = ComputeViewSpacePosition(cornerPosSS, centerDepth, UNITY_MATRIX_I_P);

10
ScriptableRenderPipeline/HDRenderPipeline/Material/Lit/Resources/SubsurfaceScattering.shader


BSDFData bsdfData;
float3 unused;
DECODE_FROM_GBUFFER(posInput.unPositionSS, featureFlags, bsdfData, unused);
DECODE_FROM_GBUFFER(posInput.positionSS, featureFlags, bsdfData, unused);
int profileID = bsdfData.subsurfaceProfile;
float distScale = bsdfData.subsurfaceRadius;

// TODO: copy its neighborhood into LDS.
float2 centerPosition = posInput.unPositionSS;
float2 centerPosition = posInput.positionSS;
float2 centerPosSS = posInput.positionSS;
float2 centerPosSS = posInput.positionNDC;
float2 cornerPosSS = centerPosSS + 0.5 * _ScreenSize.zw;
float centerDepth = LOAD_TEXTURE2D(_MainDepthTexture, centerPosition).r;
float3 centerPosVS = ComputeViewSpacePosition(centerPosSS, centerDepth, UNITY_MATRIX_I_P);

#endif
// Take the first (central) sample.
float2 samplePosition = posInput.unPositionSS;
float2 samplePosition = posInput.positionSS;
float3 sampleWeight = _FilterKernelsBasic[profileID][0].rgb;
float3 sampleIrradiance = LOAD_TEXTURE2D(_IrradianceSource, samplePosition).rgb;

[unroll]
for (int i = 1; i < SSS_BASIC_N_SAMPLES; i++)
{
samplePosition = posInput.unPositionSS + rotatedDirection * _FilterKernelsBasic[profileID][i].a;
samplePosition = posInput.positionSS + rotatedDirection * _FilterKernelsBasic[profileID][i].a;
sampleWeight = _FilterKernelsBasic[profileID][i].rgb;
sampleIrradiance = LOAD_TEXTURE2D(_IrradianceSource, samplePosition).rgb;

2
ScriptableRenderPipeline/HDRenderPipeline/RenderPipelineResources/CameraMotionVectors.shader


float4 Frag(Varyings input) : SV_Target
{
PositionInputs posInput = GetPositionInput(input.positionCS.xy, _ScreenSize.zw);
float depth = LOAD_TEXTURE2D(_MainDepthTexture, posInput.unPositionSS).x;
float depth = LOAD_TEXTURE2D(_MainDepthTexture, posInput.positionSS).x;
UpdatePositionInput(depth, UNITY_MATRIX_I_VP, UNITY_MATRIX_VP, posInput);
float4 worldPos = float4(posInput.positionWS, 1.0);
float4 prevPos = worldPos;

29
ScriptableRenderPipeline/HDRenderPipeline/RenderPipelineResources/DefaultHDMaterial.mat


m_PrefabInternal: {fileID: 0}
m_Name: DefaultHDMaterial
m_Shader: {fileID: 4800000, guid: 6e4ae4064600d784cac1e41a9e6f2e59, type: 3}
m_ShaderKeywords: _BLENDMODE_PRESERVE_SPECULAR_LIGHTING _NORMALMAP_TANGENT_SPACE
m_ShaderKeywords: _ALBEDOAFFECTEMISSIVE_OFF _ALPHACUTOFFENABLE_OFF _BLENDMODE_ALPHA
_BLENDMODE_PRESERVE_SPECULAR_LIGHTING _DEPTHOFFSETENABLE_OFF _DISTORTIONDEPTHTEST_OFF
_DISTORTIONENABLE_OFF _DISTORTIONONLY_OFF _DOUBLESIDEDENABLE_OFF _ENABLESPECULAROCCLUSION_OFF
_ENABLEWIND_OFF _ENABLE_FOG_ON_TRANSPARENT _NORMALMAP_TANGENT_SPACE _PREREFRACTIONPASS_OFF
_SURFACE_TYPE_TRANSPARENT
m_CustomRenderQueue: -1
stringTagMap: {}
m_CustomRenderQueue: 3000
stringTagMap:
RenderType: Transparent
disabledShaderPasses:
- DistortionVectors
m_SavedProperties:

m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _BumpMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _ColorMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}

- _DoubleSidedMirrorEnable: 1
- _DoubleSidedNormalMode: 1
- _Drag: 1
- _DstBlend: 0
- _DstBlend: 10
- _EmissiveColorMode: 1
- _EmissiveIntensity: 0
- _EnableBlendModeAccurateLighting: 1

- _HeightMax: 1
- _HeightMin: -1
- _HorizonFade: 1
- _IOR: 1
- _IOR: 1.097
- _InitialBend: 1
- _InvTilingScale: 1
- _LinkDetailsWithBase: 1

- _NormalMapSpace: 0
- _NormalScale: 1
- _NormalScale: 1.088
- _OcclusionStrength: 1
- _PPDLodThreshold: 5
- _PPDMaxSamples: 15

- _RefractionMode: 0
- _ShiverDirectionality: 0.5
- _ShiverDrag: 0.2
- _Smoothness: 0.5
- _Smoothness: 0.712
- _SmoothnessRemapMax: 1
- _SmoothnessRemapMin: 0
- _SmoothnessTextureChannel: 0

- _Stiffness: 1
- _SubsurfaceProfile: 0
- _SubsurfaceRadius: 1
- _SurfaceType: 0
- _SurfaceType: 1
- _TexWorldScale: 1
- _Thickness: 1
- _ThicknessMultiplier: 1

- _ZTestMode: 8
- _ZWrite: 1
- _ZWrite: 0
- _Color: {r: 1, g: 1, b: 1, a: 1}
- _Color: {r: 0.33823532, g: 0.33823532, b: 0.33823532, a: 1}
- _DoubleSidedConstants: {r: 1, g: 1, b: -1, a: 0}
- _EmissionColor: {r: 0, g: 0, b: 0, a: 1}
- _EmissiveColor: {r: 0, g: 0, b: 0, a: 1}

7
ScriptableRenderPipeline/HDRenderPipeline/SceneSettings/SceneSettings.cs


void OnEnable()
{
SceneSettingsManager.instance.AddSceneSettings(this);
HDRenderPipeline hdPipeline = RenderPipelineManager.currentPipeline as HDRenderPipeline;
if (hdPipeline != null)
{
hdPipeline.OnSceneLoad();
}
}
void OnDisable()

2
ScriptableRenderPipeline/HDRenderPipeline/ShaderPass/FragInputs.hlsl


// Contains the value returned by SV_POSITION (named positionCS in PackedVaryings).
// xy: unnormalized screen position (offset by 0.5), z: device depth, w: depth in view space
// Note: SV_POSITION is the clip-space position provided by the vertex shader, transformed by the viewport
float4 unPositionSS; // In case depth offset is use, positionWS.w is equal to depth offset
float4 positionSS; // In case depth offset is use, positionWS.w is equal to depth offset
float3 positionWS;
float2 texCoord0;
float2 texCoord1;

6
ScriptableRenderPipeline/HDRenderPipeline/ShaderPass/ShaderPassDepthOnly.hlsl


{
FragInputs input = UnpackVaryingsMeshToFragInputs(packedInput.vmesh);
// input.unPositionSS is SV_Position
PositionInputs posInput = GetPositionInput(input.unPositionSS.xy, _ScreenSize.zw);
UpdatePositionInput(input.unPositionSS.z, input.unPositionSS.w, input.positionWS, posInput);
// input.positionSS is SV_Position
PositionInputs posInput = GetPositionInput(input.positionSS.xy, _ScreenSize.zw);
UpdatePositionInput(input.positionSS.z, input.positionSS.w, input.positionWS, posInput);
float3 V = GetWorldSpaceNormalizeViewDir(input.positionWS);
SurfaceData surfaceData;

6
ScriptableRenderPipeline/HDRenderPipeline/ShaderPass/ShaderPassDistortion.hlsl


{
FragInputs input = UnpackVaryingsMeshToFragInputs(packedInput.vmesh);
// input.unPositionSS is SV_Position
PositionInputs posInput = GetPositionInput(input.unPositionSS.xy, _ScreenSize.zw);
UpdatePositionInput(input.unPositionSS.z, input.unPositionSS.w, input.positionWS, posInput);
// input.positionSS is SV_Position
PositionInputs posInput = GetPositionInput(input.positionSS.xy, _ScreenSize.zw);
UpdatePositionInput(input.positionSS.z, input.positionSS.w, input.positionWS, posInput);
float3 V = GetWorldSpaceNormalizeViewDir(input.positionWS);
// Perform alpha testing + get distortion

6
ScriptableRenderPipeline/HDRenderPipeline/ShaderPass/ShaderPassForward.hlsl


{
FragInputs input = UnpackVaryingsMeshToFragInputs(packedInput.vmesh);
// input.unPositionSS is SV_Position
PositionInputs posInput = GetPositionInput(input.unPositionSS.xy, _ScreenSize.zw, uint2(input.unPositionSS.xy) / GetTileSize());
UpdatePositionInput(input.unPositionSS.z, input.unPositionSS.w, input.positionWS, posInput);
// input.positionSS is SV_Position
PositionInputs posInput = GetPositionInput(input.positionSS.xy, _ScreenSize.zw, uint2(input.positionSS.xy) / GetTileSize());
UpdatePositionInput(input.positionSS.z, input.positionSS.w, input.positionWS, posInput);
float3 V = GetWorldSpaceNormalizeViewDir(input.positionWS);
SurfaceData surfaceData;

6
ScriptableRenderPipeline/HDRenderPipeline/ShaderPass/ShaderPassForwardUnlit.hlsl


{
FragInputs input = UnpackVaryingsMeshToFragInputs(packedInput.vmesh);
// input.unPositionSS is SV_Position
PositionInputs posInput = GetPositionInput(input.unPositionSS.xy, _ScreenSize.zw);
UpdatePositionInput(input.unPositionSS.z, input.unPositionSS.w, input.positionWS, posInput);
// input.positionSS is SV_Position
PositionInputs posInput = GetPositionInput(input.positionSS.xy, _ScreenSize.zw);
UpdatePositionInput(input.positionSS.z, input.positionSS.w, input.positionWS, posInput);
float3 V = GetWorldSpaceNormalizeViewDir(input.positionWS);
SurfaceData surfaceData;

8
ScriptableRenderPipeline/HDRenderPipeline/ShaderPass/ShaderPassGBuffer.hlsl


{
FragInputs input = UnpackVaryingsMeshToFragInputs(packedInput.vmesh);
// input.unPositionSS is SV_Position
PositionInputs posInput = GetPositionInput(input.unPositionSS.xy, _ScreenSize.zw);
UpdatePositionInput(input.unPositionSS.z, input.unPositionSS.w, input.positionWS, posInput);
// input.positionSS is SV_Position
PositionInputs posInput = GetPositionInput(input.positionSS.xy, _ScreenSize.zw);
UpdatePositionInput(input.positionSS.z, input.positionSS.w, input.positionWS, posInput);
float3 V = GetWorldSpaceNormalizeViewDir(input.positionWS);
SurfaceData surfaceData;

float3 bakeDiffuseLighting = GetBakedDiffuseLigthing(surfaceData, builtinData, bsdfData, preLightData);
ENCODE_INTO_GBUFFER(surfaceData, bakeDiffuseLighting, posInput.unPositionSS, outGBuffer);
ENCODE_INTO_GBUFFER(surfaceData, bakeDiffuseLighting, posInput.positionSS, outGBuffer);
ENCODE_SHADOWMASK_INTO_GBUFFER(float4(builtinData.shadowMask0, builtinData.shadowMask1, builtinData.shadowMask2, builtinData.shadowMask3), outShadowMaskBuffer);
ENCODE_VELOCITY_INTO_GBUFFER(builtinData.velocity, outVelocityBuffer);

6
ScriptableRenderPipeline/HDRenderPipeline/ShaderPass/ShaderPassLightTransport.hlsl


{
FragInputs input = UnpackVaryingsMeshToFragInputs(packedInput.vmesh);
// input.unPositionSS is SV_Position
PositionInputs posInput = GetPositionInput(input.unPositionSS.xy, _ScreenSize.zw);
UpdatePositionInput(input.unPositionSS.z, input.unPositionSS.w, input.positionWS, posInput);
// input.positionSS is SV_Position
PositionInputs posInput = GetPositionInput(input.positionSS.xy, _ScreenSize.zw);
UpdatePositionInput(input.positionSS.z, input.positionSS.w, input.positionWS, posInput);
// No position and depth in case of light transport
float3 V = float3(0.0, 0.0, 1.0); // No vector view in case of light transport

8
ScriptableRenderPipeline/HDRenderPipeline/ShaderPass/ShaderPassVelocity.hlsl


struct VaryingsPassToPS
{
// Note: Z component is not used currently
// This is the clip space position. Warning: do not confuse it with the value of positionCS in PackedVarying, which is SV_POSITION and is stored in unPositionSS
// This is the clip space position. Warning: do not confuse it with the value of positionCS in PackedVarying, which is SV_POSITION and is stored in positionSS
float4 positionCS;
float4 previousPositionCS;
};

{
FragInputs input = UnpackVaryingsMeshToFragInputs(packedInput.vmesh);
// input.unPositionSS is SV_Position
PositionInputs posInput = GetPositionInput(input.unPositionSS.xy, _ScreenSize.zw);
UpdatePositionInput(input.unPositionSS.z, input.unPositionSS.w, input.positionWS, posInput);
// input.positionSS is SV_Position
PositionInputs posInput = GetPositionInput(input.positionSS.xy, _ScreenSize.zw);
UpdatePositionInput(input.positionSS.z, input.positionSS.w, input.positionWS, posInput);
float3 V = GetWorldSpaceNormalizeViewDir(input.positionWS);
// Perform alpha testing + get velocity

2
ScriptableRenderPipeline/HDRenderPipeline/ShaderPass/VaryingMesh.hlsl


{
FragInputs output = InitializeFragInputs();
output.unPositionSS = input.positionCS; // input.positionCS is SV_Position
output.positionSS = input.positionCS; // input.positionCS is SV_Position
#ifdef VARYINGS_NEED_POSITION_WS
output.positionWS.xyz = input.interpolators0.xyz;

2
ScriptableRenderPipeline/HDRenderPipeline/Sky/BlacksmithlSky/Resources/SkyBlacksmith.shader


#ifdef PERFORM_SKY_OCCLUSION_TEST
// Determine whether the sky is occluded by the scene geometry.
// Do not perform blending with the environment map if the sky is occluded.
float deviceDepth = max(_SkyDepth, LOAD_TEXTURE2D(_MainDepthTexture, posInput.unPositionSS).r);
float deviceDepth = max(_SkyDepth, LOAD_TEXTURE2D(_MainDepthTexture, posInput.positionSS).r);
float skyTexWeight = (deviceDepth > _SkyDepth) ? 0.0 : 1.0;
#else
float deviceDepth = _SkyDepth;

2
ScriptableRenderPipeline/HDRenderPipeline/Sky/OpaqueAtmosphericScattering.shader


float4 Frag(Varyings input) : SV_Target
{
PositionInputs posInput = GetPositionInput(input.positionCS.xy, _ScreenSize.zw);
float depth = LOAD_TEXTURE2D(_MainDepthTexture, posInput.unPositionSS).x;
float depth = LOAD_TEXTURE2D(_MainDepthTexture, posInput.positionSS).x;
UpdatePositionInput(depth, UNITY_MATRIX_I_VP, UNITY_MATRIX_VP, posInput);
return EvaluateAtmosphericScattering(posInput);
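UpdatePositionInput with the inverse view-projection matrix reconstructs the world-space position from the depth sampled above. As a math sketch (with M_VP the view-projection matrix, d the device depth and (x_ndc, y_ndc) the pixel's NDC coordinates), it amounts to:

$$ \tilde{p} = M_{VP}^{-1}\begin{pmatrix} x_{\mathrm{ndc}} \\ y_{\mathrm{ndc}} \\ d \\ 1 \end{pmatrix},\qquad p_{WS} = \frac{\tilde{p}_{xyz}}{\tilde{p}_{w}} $$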

58
ScriptableRenderPipeline/HDRenderPipeline/Sky/RuntimeFilterIBL.cs


Material m_GgxConvolveMaterial; // Convolves a cubemap with GGX
Matrix4x4[] m_faceWorldToViewMatrixMatrices = new Matrix4x4[6];
RenderPipelineResources m_RenderPipelinesResources;
public bool supportMis

cmd.DispatchCompute(m_ComputeGgxIblSampleDataCS, m_ComputeGgxIblSampleDataKernel, 1, 1, 1);
}
}
for (int i = 0; i < 6; ++i)
{
var lookAt = Matrix4x4.LookAt(Vector3.zero, CoreUtils.lookAtList[i], CoreUtils.upVectorList[i]);
m_faceWorldToViewMatrixMatrices[i] = lookAt * Matrix4x4.Scale(new Vector3(1.0f, 1.0f, -1.0f)); // Need to scale -1.0 on Z to match what is being done in the camera.worldToCameraMatrix API. ...
}
void FilterCubemapCommon(CommandBuffer cmd,
Texture source, RenderTexture target, int mipCount,
Matrix4x4[] worldToViewMatrices)
void FilterCubemapCommon( CommandBuffer cmd,
Texture source, RenderTexture target,
Matrix4x4[] worldToViewMatrices)
int mipCount = 1 + (int)Mathf.Log(source.width, 2.0f);
if (mipCount < ((int)EnvConstants.SpecCubeLodStep + 1))
{
Debug.LogWarning("RenderCubemapGGXConvolution: Cubemap size is too small for GGX convolution, needs at least " + ((int)EnvConstants.SpecCubeLodStep + 1) + " mip levels");
return;
}
// Copy the first mip
using (new ProfilingSample(cmd, "Copy Original Mip"))
{
for (int f = 0; f < 6; f++)
{
cmd.CopyTexture(source, f, 0, target, f, 0);
}
}
m_GgxConvolveMaterial.SetTexture("_MainTex", source);
m_GgxConvolveMaterial.SetFloat("_InvOmegaP", invOmegaP);
var props = new MaterialPropertyBlock();
props.SetTexture("_MainTex", source);
props.SetFloat("_InvOmegaP", invOmegaP);
props.SetFloat("_Level", mip);
var transform = SkyManager.ComputePixelCoordToWorldSpaceViewDirectionMatrix(0.5f * Mathf.PI, faceSize, worldToViewMatrices[face], true);
var transform = HDUtils.ComputePixelCoordToWorldSpaceViewDirectionMatrix(0.5f * Mathf.PI, faceSize, worldToViewMatrices[face], true);
var props = new MaterialPropertyBlock();
props.SetFloat("_Level", mip);
props.SetMatrix(HDShaderIDs._PixelCoordToViewDirWS, transform);
CoreUtils.SetRenderTarget(cmd, target, ClearFlag.None, mip, (CubemapFace)face);

}
// Filters MIP map levels (other than 0) with GGX using BRDF importance sampling.
public void FilterCubemap(CommandBuffer cmd,
Texture source, RenderTexture target, int mipCount,
Matrix4x4[] worldToViewMatrices)
public void FilterCubemap(CommandBuffer cmd, Texture source, RenderTexture target)
FilterCubemapCommon(cmd, source, target, mipCount, worldToViewMatrices);
FilterCubemapCommon(cmd, source, target, m_faceWorldToViewMatrixMatrices);
public void FilterCubemapMIS(CommandBuffer cmd,
Texture source, RenderTexture target, int mipCount,
RenderTexture conditionalCdf, RenderTexture marginalRowCdf,
Matrix4x4[] worldToViewMatrices)
public void FilterCubemapMIS( CommandBuffer cmd,
Texture source, RenderTexture target,
RenderTexture conditionalCdf, RenderTexture marginalRowCdf)
{
// Bind the input cubemap.
m_BuildProbabilityTablesCS.SetTexture(m_ConditionalDensitiesKernel, "envMap", source);

m_GgxConvolveMaterial.SetTexture("_ConditionalDensities", conditionalCdf);
m_GgxConvolveMaterial.SetTexture("_MarginalRowDensities", marginalRowCdf);
FilterCubemapCommon(cmd, source, target, mipCount, worldToViewMatrices);
FilterCubemapCommon(cmd, source, target, m_faceWorldToViewMatrixMatrices);
}
}
}
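The refactored FilterCubemapCommon above derives the mip count from the source size instead of taking it as a parameter. A minimal sketch of that check, assuming a power-of-two cubemap face size and reusing the source's own expression (the helper class is hypothetical):

using UnityEngine;

static class GgxConvolutionChecks   // hypothetical helper, for illustration only
{
    // A power-of-two cubemap of face size 'width' has 1 + log2(width) mip levels,
    // e.g. 128 -> 8 mips (128, 64, ..., 1); GGX convolution needs at least 'requiredMips' of them.
    public static bool HasEnoughMips(int width, int requiredMips)
    {
        int mipCount = 1 + (int)Mathf.Log(width, 2.0f);
        return mipCount >= requiredMips;
    }
}

// Usage, matching the source's warning condition:
// GgxConvolutionChecks.HasEnoughMips(source.width, (int)EnvConstants.SpecCubeLodStep + 1)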

125
ScriptableRenderPipeline/HDRenderPipeline/Sky/SkyManager.cs


IBLFilterGGX m_iblFilterGgx;
Vector4 m_CubemapScreenSize;
Matrix4x4[] m_faceWorldToViewMatrixMatrices = new Matrix4x4[6];
Matrix4x4[] m_facePixelCoordToViewDirMatrices = new Matrix4x4[6];
Matrix4x4[] m_faceCameraInvViewProjectionMatrix = new Matrix4x4[6];

bool m_NeedLowLevelUpdateEnvironment;
int m_UpdatedFramesRequired = 2; // The first frame after the scene load is currently not rendered correctly
float m_CurrentUpdateTime;
int m_LastFrameUpdated = -1;
// Ref: https://msdn.microsoft.com/en-us/library/windows/desktop/bb204881(v=vs.85).aspx
readonly Vector3[] m_LookAtList =
{
new Vector3(1.0f, 0.0f, 0.0f),
new Vector3(-1.0f, 0.0f, 0.0f),
new Vector3(0.0f, 1.0f, 0.0f),
new Vector3(0.0f, -1.0f, 0.0f),
new Vector3(0.0f, 0.0f, 1.0f),
new Vector3(0.0f, 0.0f, -1.0f),
};
readonly Vector3[] m_UpVectorList =
{
new Vector3(0.0f, 1.0f, 0.0f),
new Vector3(0.0f, 1.0f, 0.0f),
new Vector3(0.0f, 0.0f, -1.0f),
new Vector3(0.0f, 0.0f, 1.0f),
new Vector3(0.0f, 1.0f, 0.0f),
new Vector3(0.0f, 1.0f, 0.0f),
};
SkySettings m_SkySettings;
public SkySettings skySettings

}
m_UpdatedFramesRequired = 2; // Special case. Even if update mode is set to OnDemand, we need to regenerate the environment after destroying the texture.
m_LastFrameUpdated = -1;
}
m_CubemapScreenSize = new Vector4((float)resolution, (float)resolution, 1.0f / (float)resolution, 1.0f / (float)resolution);

for (int i = 0; i < 6; ++i)
{
var lookAt = Matrix4x4.LookAt(Vector3.zero, m_LookAtList[i], m_UpVectorList[i]);
var lookAt = Matrix4x4.LookAt(Vector3.zero, CoreUtils.lookAtList[i], CoreUtils.upVectorList[i]);
m_faceWorldToViewMatrixMatrices[i] = worldToView;
m_facePixelCoordToViewDirMatrices[i] = ComputePixelCoordToWorldSpaceViewDirectionMatrix(0.5f * Mathf.PI, screenSize, worldToView, true);
m_facePixelCoordToViewDirMatrices[i] = HDUtils.ComputePixelCoordToWorldSpaceViewDirectionMatrix(0.5f * Mathf.PI, screenSize, worldToView, true);
public static Matrix4x4 ComputePixelCoordToWorldSpaceViewDirectionMatrix(float verticalFoV, Vector4 screenSize, Matrix4x4 worldToViewMatrix, bool renderToCubemap)
{
// Compose the view space version first.
// V = -(X, Y, Z), s.t. Z = 1,
// X = (2x / resX - 1) * tan(vFoV / 2) * ar = x * [(2 / resX) * tan(vFoV / 2) * ar] + [-tan(vFoV / 2) * ar] = x * [-m00] + [-m20]
// Y = (2y / resY - 1) * tan(vFoV / 2) = y * [(2 / resY) * tan(vFoV / 2)] + [-tan(vFoV / 2)] = y * [-m11] + [-m21]
float tanHalfVertFoV = Mathf.Tan(0.5f * verticalFoV);
float aspectRatio = screenSize.x * screenSize.w;
// Compose the matrix.
float m21 = tanHalfVertFoV;
float m20 = tanHalfVertFoV * aspectRatio;
float m00 = -2.0f * screenSize.z * m20;
float m11 = -2.0f * screenSize.w * m21;
float m33 = -1.0f;
if (renderToCubemap)
{
// Flip Y.
m11 = -m11;
m21 = -m21;
}
var viewSpaceRasterTransform = new Matrix4x4(new Vector4( m00, 0.0f, 0.0f, 0.0f),
new Vector4(0.0f, m11, 0.0f, 0.0f),
new Vector4( m20, m21, m33, 0.0f),
new Vector4(0.0f, 0.0f, 0.0f, 1.0f));
// Remove the translation component.
var homogeneousZero = new Vector4(0, 0, 0, 1);
worldToViewMatrix.SetColumn(3, homogeneousZero);
// Flip the Z to make the coordinate system left-handed.
worldToViewMatrix.SetRow(2, -worldToViewMatrix.GetRow(2));
// Transpose for HLSL.
return Matrix4x4.Transpose(worldToViewMatrix.transpose * viewSpaceRasterTransform);
}
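Written out as math, the per-pixel view direction that this matrix encodes (following the comments above, with a the aspect ratio and theta_v the vertical field of view) is:

$$ X = \left(\frac{2x}{\mathrm{res}_X} - 1\right)\tan\frac{\theta_v}{2}\, a,\qquad Y = \left(\frac{2y}{\mathrm{res}_Y} - 1\right)\tan\frac{\theta_v}{2},\qquad \mathbf{V} = -(X,\, Y,\, 1) $$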
// Sets the global MIP-mapped cubemap '_SkyTexture' in the shader.
// The texture being set is the sky (environment) map pre-convolved with GGX.
public void SetGlobalSkyTexture(CommandBuffer cmd)

RebuildSkyMatrices(nearPlane, farPlane);
}
public void Build(RenderPipelineResources renderPipelinesResources)
public void Build(RenderPipelineResources renderPipelinesResources, IBLFilterGGX iblFilterGGX)
// Create uninitialized. Lazy initialization is performed later.
m_iblFilterGgx = new IBLFilterGGX(renderPipelinesResources);
m_iblFilterGgx = iblFilterGGX;
// TODO: We need to have an API to send our sky information to Enlighten. For now use a workaround through skybox/cubemap material...
m_StandardSkyboxMaterial = CoreUtils.CreateEngineMaterial(renderPipelinesResources.skyboxCubemap);

cmd.GenerateMips(dest);
}
void RenderCubemapGGXConvolution(CommandBuffer cmd, BuiltinSkyParameters builtinParams, SkySettings skyParams, Texture input, RenderTexture target)
void RenderCubemapGGXConvolution(CommandBuffer cmd, Texture input, RenderTexture target)
int mipCount = 1 + (int)Mathf.Log(input.width, 2.0f);
if (mipCount < ((int)EnvConstants.SpecCubeLodStep + 1))
{
Debug.LogWarning("RenderCubemapGGXConvolution: Cubemap size is too small for GGX convolution, needs at least " + ((int)EnvConstants.SpecCubeLodStep + 1) + " mip levels");
return;
}
if (!m_iblFilterGgx.IsInitialized())
m_iblFilterGgx.Initialize(cmd);
// Copy the first mip
using (new ProfilingSample(cmd, "Copy Original Mip"))
{
for (int f = 0; f < 6; f++)
{
cmd.CopyTexture(input, f, 0, target, f, 0);
}
}
using (new ProfilingSample(cmd, "GGX Convolution"))
{
if (m_useMIS && m_iblFilterGgx.supportMis)
m_iblFilterGgx.FilterCubemapMIS(cmd, input, target, mipCount, m_SkyboxConditionalCdfRT, m_SkyboxMarginalRowCdfRT, m_faceWorldToViewMatrixMatrices);
else
m_iblFilterGgx.FilterCubemap(cmd, input, target, mipCount, m_faceWorldToViewMatrixMatrices);
}
if (m_useMIS && m_iblFilterGgx.supportMis)
m_iblFilterGgx.FilterCubemapMIS(cmd, input, target, m_SkyboxConditionalCdfRT, m_SkyboxMarginalRowCdfRT);
else
m_iblFilterGgx.FilterCubemap(cmd, input, target);
}
}

public void UpdateEnvironment(HDCamera camera, Light sunLight, CommandBuffer cmd)
{
if (m_LastFrameUpdated == Time.frameCount)
return;
m_LastFrameUpdated = Time.frameCount;
// We need one frame of delay for this update to work, since DynamicGI.UpdateEnvironment is executed directly but the render loop is not (so we need to wait for the sky texture to be rendered first)
if (m_NeedLowLevelUpdateEnvironment)
{

m_BuiltinParameters.screenSize = m_CubemapScreenSize;
m_BuiltinParameters.cameraPosWS = camera.camera.transform.position;
int sunHash = 0;
if(sunLight != null)
sunHash = (sunLight.GetHashCode() * 23 + sunLight.transform.position.GetHashCode()) * 23 + sunLight.transform.rotation.GetHashCode();
int skyHash = sunHash * 23 + skySettings.GetHashCode();
(skySettings.updateMode == EnvironementUpdateMode.OnChanged && skySettings.GetHashCode() != m_SkyParametersHash) ||
(skySettings.updateMode == EnvironementUpdateMode.OnChanged && skyHash != m_SkyParametersHash) ||
(skySettings.updateMode == EnvironementUpdateMode.Realtime && m_CurrentUpdateTime > skySettings.updatePeriod))
{
using (new ProfilingSample(cmd, "Sky Environment Pass"))

}
// Convolve downsampled cubemap
RenderCubemapGGXConvolution(cmd, m_BuiltinParameters, skySettings, m_SkyboxCubemapRT, m_SkyboxGGXCubemapRT);
RenderCubemapGGXConvolution(cmd, m_SkyboxCubemapRT, m_SkyboxGGXCubemapRT);
m_SkyParametersHash = skySettings.GetHashCode();
m_SkyParametersHash = skyHash;
m_CurrentUpdateTime = 0.0f;
#if UNITY_EDITOR
// In the editor when we change the sky we want to make the GI dirty so when baking again the new sky is taken into account.

{
// Clear temp cubemap and redo GGX from black and then feed it to enlighten for default light probe.
CoreUtils.ClearCubemap(cmd, m_SkyboxCubemapRT, Color.black);
RenderCubemapGGXConvolution(cmd, m_BuiltinParameters, skySettings, m_SkyboxCubemapRT, m_SkyboxGGXCubemapRT);
RenderCubemapGGXConvolution(cmd, m_SkyboxCubemapRT, m_SkyboxGGXCubemapRT);
m_SkyParametersHash = 0;
m_NeedLowLevelUpdateEnvironment = true;

m_BuiltinParameters.commandBuffer = cmd;
m_BuiltinParameters.sunLight = sunLight;
m_BuiltinParameters.pixelCoordToViewDirMatrix = ComputePixelCoordToWorldSpaceViewDirectionMatrix(camera.camera.fieldOfView * Mathf.Deg2Rad, camera.screenSize, camera.viewMatrix, false);
m_BuiltinParameters.pixelCoordToViewDirMatrix = HDUtils.ComputePixelCoordToWorldSpaceViewDirectionMatrix(camera.camera.fieldOfView * Mathf.Deg2Rad, camera.screenSize, camera.viewMatrix, false);
m_BuiltinParameters.invViewProjMatrix = camera.viewProjMatrix.inverse;
m_BuiltinParameters.screenSize = camera.screenSize;
m_BuiltinParameters.cameraPosWS = camera.camera.transform.position;
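For reference, the sun-aware hash introduced above can be read as the following sketch; the variable names mirror the surrounding code and the helper class is hypothetical:

using UnityEngine;

static class SkyHashUtil   // hypothetical helper, for illustration only
{
    // The sun's identity and transform feed into the sky hash, so moving or rotating
    // the sun now triggers an OnChanged re-render of the environment.
    public static int ComputeSkyHash(Light sunLight, object skySettings)
    {
        int sunHash = 0;
        if (sunLight != null)
            sunHash = (sunLight.GetHashCode() * 23 + sunLight.transform.position.GetHashCode()) * 23
                      + sunLight.transform.rotation.GetHashCode();
        return sunHash * 23 + skySettings.GetHashCode();
    }
}

// Usage: bool needsRender = SkyHashUtil.ComputeSkyHash(sunLight, skySettings) != m_SkyParametersHash;
// (previously only skySettings.GetHashCode() was compared)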

15
TestbedPipelines/Fptl/FptlLighting.cs


m_CubeReflTexArray = new TextureCacheCubemap();
m_CookieTexArray.AllocTextureArray(8, m_TextureSettings.spotCookieSize, m_TextureSettings.spotCookieSize, TextureFormat.RGBA32, true);
m_CubeCookieTexArray.AllocTextureArray(4, m_TextureSettings.pointCookieSize, TextureFormat.RGBA32, true);
m_CubeReflTexArray.AllocTextureArray(64, m_TextureSettings.reflectionCubemapSize, TextureCache.GetPreferredHdrCompressedTextureFormat, true);
m_CubeReflTexArray.AllocTextureArray(64, m_TextureSettings.reflectionCubemapSize, TextureCache.GetPreferredHDRCompressedTextureFormat, true);
//m_DeferredMaterial.SetTexture("_spotCookieTextures", m_cookieTexArray.GetTexCache());
//m_DeferredMaterial.SetTexture("_pointCookieTextures", m_cubeCookieTexArray.GetTexCache());

return dirLightCount;
}
int GenerateSourceLightBuffers(Camera camera, CullResults inputs)
int GenerateSourceLightBuffers(CommandBuffer cmd, Camera camera, CullResults inputs)
{
// 0. deal with shadows
{

var isCircularSpot = !bHasCookie;
if (!isCircularSpot) // square spots always have cookie
{
light.sliceIndex = m_CookieTexArray.FetchSlice(cl.light.cookie);
light.sliceIndex = m_CookieTexArray.FetchSlice(cmd, cl.light.cookie);
}
Vector3 lightDir = lightToWorld.GetColumn(2); // Z axis in world space

{
if (bHasCookie)
{
light.sliceIndex = m_CubeCookieTexArray.FetchSlice(cl.light.cookie);
light.sliceIndex = m_CubeCookieTexArray.FetchSlice(cmd, cl.light.cookie);
}
var lightToView = worldToView * lightToWorld;

lgtData.lightIntensity = decodeVals.x;
lgtData.decodeExp = decodeVals.y;
lgtData.sliceIndex = m_CubeReflTexArray.FetchSlice(cubemap);
lgtData.sliceIndex = m_CubeReflTexArray.FetchSlice(cmd, cubemap);
var delta = combinedExtent - e;
lgtData.boxInnerDist = e;

var invProjscr = projscr.inverse;
// build per tile light lists
var numLights = GenerateSourceLightBuffers(camera, cullResults);
CommandBuffer cmdGenerateLightBuffers = CommandBufferPool.Get();
var numLights = GenerateSourceLightBuffers(cmdGenerateLightBuffers, camera, cullResults);
loop.ExecuteCommandBuffer(cmdGenerateLightBuffers);
CommandBufferPool.Release(cmdGenerateLightBuffers);
GPUFence postLightListFence;
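The pattern behind the changes above: texture-cache fetches now record their uploads and conversions into a command buffer, so the caller allocates a pooled buffer, generates the light data into it, then executes and releases it. A minimal sketch, assuming the surrounding class members shown in the diff (this is not the pipeline's actual method):

// Illustrative wrapper; field and method names mirror the source above.
void GenerateLightsAndExecute(ScriptableRenderContext loop, Camera camera, CullResults cullResults)
{
    // All texture-cache fetches now go through a command buffer, e.g.
    //   light.sliceIndex = m_CookieTexArray.FetchSlice(cmd, cl.light.cookie);
    // so texture updates are recorded instead of executed immediately.
    CommandBuffer cmd = CommandBufferPool.Get();
    var numLights = GenerateSourceLightBuffers(cmd, camera, cullResults);
    loop.ExecuteCommandBuffer(cmd);   // play back the recorded texture updates and buffer fills
    CommandBufferPool.Release(cmd);
}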

74
TestbedPipelines/OnTileDeferredPipeline/OnTileDeferredRenderPipeline.cs


using System;
using System;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.Rendering;

foreach (Material _material in _materials) {
if (_material == null)
continue;
if (_material.shader.name.Contains ("Standard (Specular setup)")) {
_material.shader = Shader.Find("Standard-SRP (Specular setup)");
} else if (_material.shader.name.Contains ("Standard")) {

{
return new OnTileDeferredRenderPipelineInstance(this);
}
[SerializeField] ShadowSettings m_ShadowSettings = new ShadowSettings();
ShadowSetup m_ShadowSetup;

// TODO: When graphics/renderpass lands, replace code that uses boolean below with SystemInfo.supportsReadOnlyDepth
#if UNITY_EDITOR || UNITY_STANDALONE
static bool s_SupportsReadOnlyDepth = true;
#else
#else
static bool s_SupportsReadOnlyDepth = false;
#endif

s_GBufferEmission = new RenderPassAttachment(RenderTextureFormat.ARGBHalf) { hideFlags = HideFlags.HideAndDontSave };
s_Depth = new RenderPassAttachment(RenderTextureFormat.Depth) { hideFlags = HideFlags.HideAndDontSave };
s_CameraTarget = s_GBufferAlbedo;
s_GBufferEmission.Clear(new Color(0.0f, 0.0f, 0.0f, 0.0f), 1.0f, 0);
s_Depth.Clear(new Color(), 1.0f, 0);

m_ReflectionNearAndFarClipMaterial.SetInt("_DstABlend", (int)BlendMode.Zero);
m_ReflectionNearAndFarClipMaterial.SetInt("_CullMode", (int)CullMode.Off);
m_ReflectionNearAndFarClipMaterial.SetInt("_CompareFunc", (int)CompareFunction.Always);
m_CubeReflTexArray.AllocTextureArray(64, m_TextureSettings.reflectionCubemapSize, TextureCache.GetPreferredHdrCompressedTextureFormat, true);
m_CubeReflTexArray.AllocTextureArray(64, m_TextureSettings.reflectionCubemapSize, TextureCache.GetPreferredHDRCompressedTextureFormat, true);
s_LightDataBuffer = new ComputeBuffer(k_MaxLights, System.Runtime.InteropServices.Marshal.SizeOf(typeof(SFiniteLightData)));

void ExecuteRenderLoop(Camera camera, CullResults cullResults, ScriptableRenderContext loop)
{
using (RenderPass rp = new RenderPass (loop, camera.pixelWidth, camera.pixelHeight, 1, s_SupportsReadOnlyDepth ?
using (RenderPass rp = new RenderPass (loop, camera.pixelWidth, camera.pixelHeight, 1, s_SupportsReadOnlyDepth ?
using (new RenderPass.SubPass (rp, s_SupportsReadOnlyDepth ?
using (new RenderPass.SubPass (rp, s_SupportsReadOnlyDepth ?
new[] { s_GBufferAlbedo, s_GBufferSpecRough, s_GBufferNormal, s_GBufferEmission } :
new[] { s_GBufferAlbedo, s_GBufferSpecRough, s_GBufferNormal, s_GBufferEmission, s_GBufferRedF32 }, null)) {
using (var cmd = new CommandBuffer { name = "Create G-Buffer" }) {

loop.ExecuteCommandBuffer (cmd);
// render opaque objects using Deferred pass
var drawSettings = new DrawRendererSettings (camera, new ShaderPassName ("Deferred")) {
sorting = { flags = SortFlags.CommonOpaque },

}
//Lighting Pass
using (new RenderPass.SubPass(rp, new[] { s_GBufferEmission },
using (new RenderPass.SubPass(rp, new[] { s_GBufferEmission },
new[] { s_GBufferAlbedo, s_GBufferSpecRough, s_GBufferNormal, s_SupportsReadOnlyDepth ? s_Depth : s_GBufferRedF32 }, true))
{
using (var cmd = new CommandBuffer { name = "Deferred Lighting and Reflections Pass"} )

}
}
}
// Utilities
static Matrix4x4 GetFlipMatrix()
{

{
return camera.projectionMatrix * GetFlipMatrix();
}
Matrix4x4 PerspectiveCotanMatrix(float cotangent, float zNear, float zFar )
{
float deltaZ = zNear - zFar;

Matrix4x4 scaled = Matrix4x4.Scale (combinedExtent * 2.0f);
mat = mat * Matrix4x4.Translate (boxOffset) * scaled;
var probeRadius = combinedExtent.magnitude;
var viewDistance = eyePlane.GetDistanceToPoint(boxOffset);
bool intersectsNear = viewDistance - probeRadius <= nearDistanceFudged;

// draw the base probe
// TODO: (cleanup) don't use builtins like unity_SpecCube0
{
{
var props = new MaterialPropertyBlock ();
props.SetFloat ("_LightAsQuad", 1.0f);

Matrix4x4 temp3 = PerspectiveCotanMatrix (chsa, 0.0f, range);
return temp2 * temp1 * temp3 * worldToLight;
}
void RenderSpotlight(VisibleLight light, CommandBuffer cmd, MaterialPropertyBlock properties, bool renderAsQuad, bool intersectsNear, bool deferred)
{
float range = light.range;

// Setup Spot Rendering mesh matrix
float sideLength = range / chsa;
// scalingFactor corresponds to the scale factor setting (and whether file scale is used) of the mesh in the Unity mesh inspector.
// A scale factor setting in Unity of 0.01 would require this to be set to 100. A scale factor setting of 1 is just 1 here.
// scalingFactor corresponds to the scale factor setting (and whether file scale is used) of the mesh in the Unity mesh inspector.
// A scale factor setting in Unity of 0.01 would require this to be set to 100. A scale factor setting of 1 is just 1 here.
lightToWorld = lightToWorld * Matrix4x4.Scale (new Vector3(sideLength*SpotLightMeshScaleFactor, sideLength*SpotLightMeshScaleFactor, range*SpotLightMeshScaleFactor));
// set default cookie for spot light if there wasn't one added to the light manually

Vector3 lightPos = light.localToWorld.GetColumn (3); //position
float range = light.range;
// scalingFactor corresponds to the scale factor setting (and whether file scale is used) of the mesh in the Unity mesh inspector.
// A scale factor setting in Unity of 0.01 would require this to be set to 100. A scale factor setting of 1 is just 1 here.
// scalingFactor corresponds to the scale factor setting (and whether file scale is used) of the mesh in the Unity mesh inspector.
// A scale factor setting in Unity of 0.01 would require this to be set to 100. A scale factor setting of 1 is just 1 here.
if (cookie != null)
if (cookie != null)
else
else
if (renderAsQuad)
if (renderAsQuad)
else if (intersectsNear)
else if (intersectsNear)
else
else
cmd.DrawMesh (m_PointLightMesh, matrix, m_FiniteDeferredLightingMaterial, 0, 0, properties);
}

void RenderLightsDeferred (Camera camera, CullResults inputs, CommandBuffer cmd, ScriptableRenderContext loop)
{
int lightCount = inputs.visibleLights.Count;
for (int lightNum = 0; lightNum < lightCount; lightNum++)
for (int lightNum = 0; lightNum < lightCount; lightNum++)
{
VisibleLight light = inputs.visibleLights[lightNum];

Vector3 lightPos = light.localToWorld.GetColumn (3); //position
Vector3 lightDir = light.localToWorld.GetColumn (2); //z axis
float range = light.range;

m_LightData[i].x = LightDefinitions.SPHERE_LIGHT;
if (light.light.cookie != null)
m_LightData[i].z = m_CubeCookieTexArray.FetchSlice(light.light.cookie);
m_LightData[i].z = m_CubeCookieTexArray.FetchSlice(cmd, light.light.cookie);
} else if (light.lightType == LightType.Spot) {
m_LightData[i].x = LightDefinitions.SPOT_LIGHT;

// Setup Light Matrix
m_LightMatrix[i] = SpotlightMatrix (light, worldToLight, range, chsa);
m_LightMatrix[i] = SpotlightMatrix (light, worldToLight, range, chsa);
m_LightData[i].z = m_CookieTexArray.FetchSlice (light.light.cookie);
m_LightData[i].z = m_CookieTexArray.FetchSlice (cmd, light.light.cookie);
m_LightData [i].z = m_CookieTexArray.FetchSlice (m_DefaultSpotCookie);
m_LightData [i].z = m_CookieTexArray.FetchSlice (cmd, m_DefaultSpotCookie);
} else if (light.lightType == LightType.Directional) {
m_LightData[i].x = LightDefinitions.DIRECTIONAL_LIGHT;

if (light.light.cookie != null)
m_LightData[i].z = m_CookieTexArray.FetchSlice (light.light.cookie);
m_LightData[i].z = m_CookieTexArray.FetchSlice (cmd, light.light.cookie);
}
}

var decodeVals = rl.hdr;
// C is reflection volume center in world space (NOT same as cube map capture point)
var e = bnds.extents;
var C = mat.MultiplyPoint(boxOffset);
var e = bnds.extents;
var C = mat.MultiplyPoint(boxOffset);
var combinedExtent = e + new Vector3(blendDistance, blendDistance, blendDistance);
Vector3 vx = mat.GetColumn(0);

lgtData.lightIntensity = decodeVals.x;
lgtData.decodeExp = decodeVals.y;
lgtData.sliceIndex = m_CubeReflTexArray.FetchSlice(cubemap);
lgtData.sliceIndex = m_CubeReflTexArray.FetchSlice(cmd, cubemap);
var delta = combinedExtent - e;
lgtData.boxInnerDist = e;

cmd.SetGlobalFloat ("_useLegacyCookies", UseLegacyCookies?1.0f:0.0f);
cmd.SetGlobalFloat ("_transparencyShadows", TransparencyShadows ? 1.0f : 0.0f);
}
}
}
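To summarize the attachment wiring used by the on-tile path above: when read-only depth is supported, the lighting sub-pass reads the real depth attachment; otherwise an extra full-precision color attachment (s_GBufferRedF32) carries depth for it. A sketch reusing the attachment fields visible in the diff (the local variable names are illustrative):

// G-buffer color attachments for the geometry sub-pass.
var gbufferAttachments = s_SupportsReadOnlyDepth
    ? new[] { s_GBufferAlbedo, s_GBufferSpecRough, s_GBufferNormal, s_GBufferEmission }
    : new[] { s_GBufferAlbedo, s_GBufferSpecRough, s_GBufferNormal, s_GBufferEmission, s_GBufferRedF32 };

// Inputs read by the lighting sub-pass: the depth attachment when it can be bound read-only,
// otherwise the F32 copy written during the G-buffer pass.
var lightingInputs = new[] { s_GBufferAlbedo, s_GBufferSpecRough, s_GBufferNormal,
                             s_SupportsReadOnlyDepth ? s_Depth : s_GBufferRedF32 };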

69
SampleScenes/HDTest/GraphicTest/Common/AnimationController/Props.controller


%YAML 1.1
%TAG !u! tag:unity3d.com,2011:
--- !u!91 &9100000
AnimatorController:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_Name: Props
serializedVersion: 5
m_AnimatorParameters: []
m_AnimatorLayers:
- serializedVersion: 5
m_Name: Base Layer
m_StateMachine: {fileID: 1107241851017037382}
m_Mask: {fileID: 0}
m_Motions: []
m_Behaviours: []
m_BlendingMode: 0
m_SyncedLayerIndex: -1
m_DefaultWeight: 0
m_IKPass: 0
m_SyncedLayerAffectsTiming: 0
m_Controller: {fileID: 9100000}
--- !u!1102 &1102050023641306020
AnimatorState:
serializedVersion: 5
m_ObjectHideFlags: 1
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_Name: RealtimeCubemap@main
m_Speed: 1
m_CycleOffset: 0
m_Transitions: []
m_StateMachineBehaviours: []
m_Position: {x: 50, y: 50, z: 0}
m_IKOnFeet: 0
m_WriteDefaultValues: 1
m_Mirror: 0
m_SpeedParameterActive: 0
m_MirrorParameterActive: 0
m_CycleOffsetParameterActive: 0
m_TimeParameterActive: 0
m_Motion: {fileID: 7400000, guid: 4c1d940fc475d5643b21c5a591ba2c89, type: 2}
m_Tag:
m_SpeedParameter:
m_MirrorParameter:
m_CycleOffsetParameter:
m_TimeParameter:
--- !u!1107 &1107241851017037382
AnimatorStateMachine:
serializedVersion: 5
m_ObjectHideFlags: 1
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_Name: Base Layer
m_ChildStates:
- serializedVersion: 1
m_State: {fileID: 1102050023641306020}
m_Position: {x: 200, y: 0, z: 0}
m_ChildStateMachines: []
m_AnyStateTransitions: []
m_EntryTransitions: []
m_StateMachineTransitions: {}
m_StateMachineBehaviours: []
m_AnyStatePosition: {x: 50, y: 20, z: 0}
m_EntryPosition: {x: 50, y: 120, z: 0}
m_ExitPosition: {x: 800, y: 120, z: 0}
m_ParentStateMachinePosition: {x: 800, y: 20, z: 0}
m_DefaultState: {fileID: 1102050023641306020}

8
SampleScenes/HDTest/GraphicTest/Common/AnimationController/Props.controller.meta


fileFormatVersion: 2
guid: 1a9531bcadbe98e4f9bfbcbf7f07c2c7
NativeFormatImporter:
externalObjects: {}
mainObjectFileID: 9100000
userData:
assetBundleName:
assetBundleVariant:

217
SampleScenes/HDTest/GraphicTest/Common/AnimationController/RealtimeCubemap@main.anim


%YAML 1.1
%TAG !u! tag:unity3d.com,2011:
--- !u!74 &7400000
AnimationClip:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_Name: RealtimeCubemap@main
serializedVersion: 6
m_Legacy: 0
m_Compressed: 0
m_UseHighQualityCurve: 1
m_RotationCurves: []
m_CompressedRotationCurves: []
m_EulerCurves: []
m_PositionCurves:
- curve:
serializedVersion: 2
m_Curve:
- serializedVersion: 2
time: 0
value: {x: -3.35, y: 0, z: -2.93}
inSlope: {x: 0.41249996, y: 0, z: 1.325}
outSlope: {x: 0.41249996, y: 0, z: 1.325}
tangentMode: 0
- serializedVersion: 2
time: 4
value: {x: -1.7, y: 0, z: 2.37}
inSlope: {x: 0.76625, y: 0, z: 0}
outSlope: {x: 0.76625, y: 0, z: 0}
tangentMode: 0
- serializedVersion: 2
time: 8
value: {x: 2.78, y: 0, z: 0.48}
inSlope: {x: 0, y: 0, z: -0.83428574}
outSlope: {x: 0, y: 0, z: -0.83428574}
tangentMode: 0
- serializedVersion: 2
time: 11
value: {x: -0.34, y: 0, z: -3.47}
inSlope: {x: -1.5325, y: 0, z: 0}
outSlope: {x: -1.5325, y: 0, z: 0}
tangentMode: 0
- serializedVersion: 2
time: 12
value: {x: -3.35, y: 0, z: -2.93}
inSlope: {x: -3.01, y: 0, z: 0.53999996}
outSlope: {x: -3.01, y: 0, z: 0.53999996}
tangentMode: 0
m_PreInfinity: 2
m_PostInfinity: 2
m_RotationOrder: 4
path:
m_ScaleCurves: []
m_FloatCurves: []
m_PPtrCurves: []
m_SampleRate: 60
m_WrapMode: 0
m_Bounds:
m_Center: {x: 0, y: 0, z: 0}
m_Extent: {x: 0, y: 0, z: 0}
m_ClipBindingConstant:
genericBindings:
- serializedVersion: 2
path: 0
attribute: 1
script: {fileID: 0}
typeID: 4
customType: 0
isPPtrCurve: 0
pptrCurveMapping: []
m_AnimationClipSettings:
serializedVersion: 2
m_AdditiveReferencePoseClip: {fileID: 0}
m_AdditiveReferencePoseTime: 0
m_StartTime: 0
m_StopTime: 12
m_OrientationOffsetY: 0
m_Level: 0
m_CycleOffset: 0
m_HasAdditiveReferencePose: 0
m_LoopTime: 1
m_LoopBlend: 0
m_LoopBlendOrientation: 0
m_LoopBlendPositionY: 0
m_LoopBlendPositionXZ: 0
m_KeepOriginalOrientation: 0
m_KeepOriginalPositionY: 1
m_KeepOriginalPositionXZ: 0
m_HeightFromFeet: 0
m_Mirror: 0
m_EditorCurves:
- curve:
serializedVersion: 2
m_Curve:
- serializedVersion: 2
time: 0
value: -3.35
inSlope: 0.41249996
outSlope: 0.41249996
tangentMode: 34
- serializedVersion: 2
time: 4
value: -1.7
inSlope: 0.76625
outSlope: 0.76625
tangentMode: 136
- serializedVersion: 2
time: 8
value: 2.78
inSlope: 0
outSlope: 0
tangentMode: 136
- serializedVersion: 2
time: 11
value: -0.34
inSlope: -1.5325
outSlope: -1.5325
tangentMode: 136
- serializedVersion: 2
time: 12
value: -3.35
inSlope: -3.01
outSlope: -3.01
tangentMode: 34
m_PreInfinity: 2
m_PostInfinity: 2
m_RotationOrder: 4
attribute: m_LocalPosition.x
path:
classID: 4
script: {fileID: 0}
- curve:
serializedVersion: 2
m_Curve:
- serializedVersion: 2
time: 0
value: 0
inSlope: 0
outSlope: 0
tangentMode: 136
- serializedVersion: 2
time: 4
value: 0
inSlope: 0
outSlope: 0
tangentMode: 136
- serializedVersion: 2
time: 8
value: 0
inSlope: 0
outSlope: 0
tangentMode: 136
- serializedVersion: 2
time: 11
value: 0
inSlope: 0
outSlope: 0
tangentMode: 136
- serializedVersion: 2
time: 12
value: 0
inSlope: 0
outSlope: 0
tangentMode: 136
m_PreInfinity: 2
m_PostInfinity: 2
m_RotationOrder: 4
attribute: m_LocalPosition.y
path:
classID: 4
script: {fileID: 0}
- curve:
serializedVersion: 2
m_Curve:
- serializedVersion: 2
time: 0
value: -2.93
inSlope: 1.325
outSlope: 1.325
tangentMode: 34
- serializedVersion: 2
time: 4
value: 2.37
inSlope: 0
outSlope: 0
tangentMode: 136
- serializedVersion: 2
time: 8
value: 0.48
inSlope: -0.83428574
outSlope: -0.83428574
tangentMode: 136
- serializedVersion: 2
time: 11
value: -3.47
inSlope: 0
outSlope: 0
tangentMode: 136
- serializedVersion: 2
time: 12
value: -2.93
inSlope: 0.53999996
outSlope: 0.53999996
tangentMode: 34
m_PreInfinity: 2
m_PostInfinity: 2
m_RotationOrder: 4
attribute: m_LocalPosition.z
path:
classID: 4
script: {fileID: 0}
m_EulerEditorCurves: []
m_HasGenericRootTransform: 1
m_HasMotionFloatCurves: 0
m_GenerateMotionCurves: 0
m_Events: []

8
SampleScenes/HDTest/GraphicTest/Common/AnimationController/RealtimeCubemap@main.anim.meta


fileFormatVersion: 2
guid: 4c1d940fc475d5643b21c5a591ba2c89
NativeFormatImporter:
externalObjects: {}
mainObjectFileID: 7400000
userData:
assetBundleName:
assetBundleVariant:

172
SampleScenes/HDTest/GraphicTest/Common/Material/Mat_Mirror.mat


%YAML 1.1
%TAG !u! tag:unity3d.com,2011:
--- !u!21 &2100000
Material:
serializedVersion: 6
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_Name: Mat_Mirror
m_Shader: {fileID: 4800000, guid: 6e4ae4064600d784cac1e41a9e6f2e59, type: 3}
m_ShaderKeywords: _ALBEDOAFFECTEMISSIVE_OFF _ALPHACUTOFFENABLE_OFF _DEPTHOFFSETENABLE_OFF
_DISTORTIONENABLE_OFF _DOUBLESIDEDENABLE_OFF _ENABLESPECULAROCCLUSION_OFF _ENABLEWIND_OFF
_PREREFRACTIONPASS_OFF
m_LightmapFlags: 4
m_EnableInstancingVariants: 0
m_DoubleSidedGI: 0
m_CustomRenderQueue: -1
stringTagMap: {}
disabledShaderPasses: []
m_SavedProperties:
serializedVersion: 3
m_TexEnvs:
- _AnisotropyMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _BaseColorMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _BentNormalMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _BentNormalMapOS:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _DetailMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _DistortionVectorMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _EmissiveColorMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _HeightMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _MaskMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _NormalMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _NormalMapOS:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _SpecularColorMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _SubsurfaceRadiusMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _TangentMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _TangentMapOS:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _ThicknessMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
m_Floats:
- _ATDistance: 1
- _AlbedoAffectEmissive: 0
- _AlphaCutoff: 0.5
- _AlphaCutoffEnable: 0
- _Anisotropy: 0
- _BlendMode: 0
- _CoatCoverage: 1
- _CoatIOR: 0.5
- _CullMode: 2
- _DepthOffsetEnable: 0
- _DetailAlbedoScale: 1
- _DetailNormalScale: 1
- _DetailSmoothnessScale: 1
- _DisplacementLockObjectScale: 1
- _DisplacementLockTilingScale: 1
- _DisplacementMode: 0
- _DistortionBlendMode: 0
- _DistortionBlurBlendMode: 0
- _DistortionBlurDstBlend: 0
- _DistortionBlurRemapMax: 1
- _DistortionBlurRemapMin: 0
- _DistortionBlurScale: 1
- _DistortionBlurSrcBlend: 0
- _DistortionDepthTest: 1
- _DistortionDstBlend: 0
- _DistortionEnable: 0
- _DistortionScale: 1
- _DistortionSrcBlend: 0
- _DoubleSidedEnable: 0
- _DoubleSidedNormalMode: 1
- _Drag: 1
- _DstBlend: 0
- _EmissiveColorMode: 1
- _EmissiveIntensity: 0
- _EnableBlendModePreserveSpecularLighting: 1
- _EnableFogOnTransparent: 1
- _EnableSpecularOcclusion: 0
- _EnableWind: 0
- _HeightAmplitude: 0.02
- _HeightCenter: 0.5
- _HeightMax: 1
- _HeightMin: -1
- _IOR: 1
- _InitialBend: 1
- _InvTilingScale: 1
- _LinkDetailsWithBase: 1
- _MaterialID: 1
- _Metallic: 0
- _NormalMapSpace: 0
- _NormalScale: 1
- _PPDLodThreshold: 5
- _PPDMaxSamples: 15
- _PPDMinSamples: 5
- _PPDPrimitiveLength: 1
- _PPDPrimitiveWidth: 1
- _PreRefractionPass: 0
- _RefractionMode: 0
- _ShiverDirectionality: 0.5
- _ShiverDrag: 0.2
- _Smoothness: 1
- _SmoothnessRemapMax: 1
- _SmoothnessRemapMin: 0
- _SrcBlend: 1
- _StencilRef: 2
- _Stiffness: 1
- _SubsurfaceProfile: 0
- _SubsurfaceRadius: 1
- _SurfaceType: 0
- _TexWorldScale: 1
- _Thickness: 1
- _ThicknessMultiplier: 1
- _UVBase: 0
- _UVDetail: 0
- _ZTestMode: 8
- _ZWrite: 1
m_Colors:
- _BaseColor: {r: 1, g: 1, b: 1, a: 1}
- _DoubleSidedConstants: {r: 1, g: 1, b: -1, a: 0}
- _EmissionColor: {r: 1, g: 1, b: 1, a: 1}
- _EmissiveColor: {r: 0, g: 0, b: 0, a: 1}
- _InvPrimScale: {r: 1, g: 1, b: 0, a: 0}
- _SpecularColor: {r: 1, g: 1, b: 1, a: 1}
- _TransmittanceColor: {r: 1, g: 1, b: 1, a: 1}
- _UVDetailsMappingMask: {r: 1, g: 0, b: 0, a: 0}
- _UVMappingMask: {r: 1, g: 0, b: 0, a: 0}

8
SampleScenes/HDTest/GraphicTest/Common/Material/Mat_Mirror.mat.meta


fileFormatVersion: 2
guid: d2f7f8b7fb883914fabf4d849b344e47
NativeFormatImporter:
externalObjects: {}
mainObjectFileID: 2100000
userData:
assetBundleName:
assetBundleVariant:

10
SampleScenes/HDTest/GraphicTest/RealtimeCubemaps.meta


fileFormatVersion: 2
guid: 50d80d2cb8fa29c499c5f0a6545805f5
folderAsset: yes
timeCreated: 1508945640
licenseType: Pro
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

1001
SampleScenes/HDTest/GraphicTest/RealtimeCubemaps.unity
The file diff is too large to display
View file

7
SampleScenes/HDTest/GraphicTest/RealtimeCubemaps.unity.meta


fileFormatVersion: 2
guid: 59771346590d7a245af131b3a6a1a241
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

38
ScriptableRenderPipeline/Core/Resources/BC6H.compute


#include "../ShaderLibrary/BC6H.hlsl"
Texture2D<float4> _Source;
RWTexture2D<uint4> _Target;
#pragma kernel KEncodeFast
[numthreads(4, 4, 1)]
void KEncodeFast(uint2 groupId : SV_GroupID, uint2 groupThreadId : SV_GroupThreadID, uint2 dispatchThreadId : SV_DispatchThreadID)
{
// Load 4x4 pixel block
float3 texels[16];
uint2 topLeftSourceID = dispatchThreadId << 2;
texels[0] = _Source.Load(uint3(topLeftSourceID , 0)).rgb;
texels[1] = _Source.Load(uint3(topLeftSourceID + uint2(1, 0), 0)).rgb;
texels[2] = _Source.Load(uint3(topLeftSourceID + uint2(2, 0), 0)).rgb;
texels[3] = _Source.Load(uint3(topLeftSourceID + uint2(3, 0), 0)).rgb;
texels[4] = _Source.Load(uint3(topLeftSourceID + uint2(0, 1), 0)).rgb;
texels[5] = _Source.Load(uint3(topLeftSourceID + uint2(1, 1), 0)).rgb;
texels[6] = _Source.Load(uint3(topLeftSourceID + uint2(2, 1), 0)).rgb;
texels[7] = _Source.Load(uint3(topLeftSourceID + uint2(3, 1), 0)).rgb;
texels[8] = _Source.Load(uint3(topLeftSourceID + uint2(0, 2), 0)).rgb;
texels[9] = _Source.Load(uint3(topLeftSourceID + uint2(1, 2), 0)).rgb;
texels[10] = _Source.Load(uint3(topLeftSourceID + uint2(2, 2), 0)).rgb;
texels[11] = _Source.Load(uint3(topLeftSourceID + uint2(3, 2), 0)).rgb;
texels[12] = _Source.Load(uint3(topLeftSourceID + uint2(0, 3), 0)).rgb;
texels[13] = _Source.Load(uint3(topLeftSourceID + uint2(1, 3), 0)).rgb;
texels[14] = _Source.Load(uint3(topLeftSourceID + uint2(2, 3), 0)).rgb;
texels[15] = _Source.Load(uint3(topLeftSourceID + uint2(3, 3), 0)).rgb;
uint4 block = uint4(0, 0, 0, 0);
float blockMSLE = 0;
EncodeMode11(block, blockMSLE, texels);
_Target[dispatchThreadId] = block;
}
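Since each thread of KEncodeFast encodes one 4x4 pixel block and a thread group is 4x4 threads, one group covers a 16x16 tile of the source texture. A hedged C# dispatch sketch; the compute shader reference, command buffer, and source/target textures are assumptions for illustration:

// Illustrative dispatch for the KEncodeFast kernel above.
int kernel = bc6hShader.FindKernel("KEncodeFast");
cmd.SetComputeTextureParam(bc6hShader, kernel, "_Source", source);
cmd.SetComputeTextureParam(bc6hShader, kernel, "_Target", target);   // UAV of packed BC6H blocks
// One thread group per 16x16 pixel tile (rounded up).
cmd.DispatchCompute(bc6hShader, kernel, (source.width + 15) / 16, (source.height + 15) / 16, 1);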

10
ScriptableRenderPipeline/Core/Resources/BC6H.compute.meta


fileFormatVersion: 2
guid: b69b95b3420fd904e8530b79f665a1f8
timeCreated: 1507123133
licenseType: Pro
ComputeShaderImporter:
externalObjects: {}
currentAPIMask: 4
userData:
assetBundleName:
assetBundleVariant:

66
ScriptableRenderPipeline/Core/Resources/BlitCubeTextureFace.shader


Shader "Hidden/SRP/BlitCubeTextureFace"
{
SubShader
{
// Cubemap blit. Takes a face index.
Pass
{
ZTest Always
ZWrite Off
Cull Off
HLSLPROGRAM
#include "../ShaderLibrary/Common.hlsl"
#pragma vertex vert
#pragma fragment frag
#pragma target 3.0
TEXTURECUBE(_InputTex);
SAMPLERCUBE(sampler_InputTex);
float _FaceIndex;
float _LoD;
struct Attributes
{
uint vertexID : SV_VertexID;
};
struct Varyings
{
float4 positionCS : SV_POSITION;
float3 texcoord : TEXCOORD0;
};
static const float3 faceU[6] = { float3(0, 0, -1), float3(0, 0, 1), float3(1, 0, 0), float3(1, 0, 0), float3(1, 0, 0), float3(-1, 0, 0) };
static const float3 faceV[6] = { float3(0, -1, 0), float3(0, -1, 0), float3(0, 0, 1), float3(0, 0, -1), float3(0, -1, 0), float3(0, -1, 0) };
Varyings vert (Attributes input)
{
Varyings output;
output.positionCS = GetFullScreenTriangleVertexPosition(input.vertexID);
float2 uv = GetFullScreenTriangleTexCoord(input.vertexID);
uv = uv * 2 - 1;
int idx = (int)_FaceIndex;
float3 transformU = faceU[idx];
float3 transformV = faceV[idx];
float3 n = cross(transformV, transformU);
output.texcoord = n + uv.x * transformU + uv.y * transformV;
return output;
}
float4 frag (Varyings input) : SV_Target
{
return SAMPLE_TEXTURECUBE_LOD(_InputTex, sampler_InputTex, input.texcoord, _LoD);
}
ENDHLSL
}
}
Fallback Off
}
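The vertex shader above derives the cubemap sampling direction from the per-face tangent frame. With u_f and v_f the faceU/faceV entries for face f, and (u, v) the fullscreen UV remapped to [-1, 1], it computes:

$$ \mathbf{n}_f = \mathbf{v}_f \times \mathbf{u}_f,\qquad \mathbf{d}(u, v) = \mathbf{n}_f + u\,\mathbf{u}_f + v\,\mathbf{v}_f,\qquad (u, v) \in [-1, 1]^2 $$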

8
ScriptableRenderPipeline/Core/Resources/BlitCubeTextureFace.shader.meta


fileFormatVersion: 2
guid: d850d0a2481878d4bbf17e5126b04163
ShaderImporter:
externalObjects: {}
defaultTextures: []
userData:
assetBundleName:
assetBundleVariant:

14
ScriptableRenderPipeline/HDRenderPipeline/FrameSettings.cs


using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using System;
namespace UnityEngine.Experimental.Rendering.HDPipeline
{
// The settings here are per-frame settings. They can be changed by the renderer based on its needs each frame.
public class LightingSettings
{
public float diffuseGlobalDimmer = 1.0f;
public float specularGlobalDimmer = 1.0f;
}
}

11
ScriptableRenderPipeline/HDRenderPipeline/FrameSettings.cs.meta


fileFormatVersion: 2
guid: 2fc90a1ac27c42f40bf2c56b45b4d374
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

38
ScriptableRenderPipeline/HDRenderPipeline/GlobalSettings.cs


using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using System;
namespace UnityEngine.Experimental.Rendering.HDPipeline
{
// All the structures here represent global engine settings.
// This means they are supposed to be set up once and not changed during the game.
// All of these will be serialized in the HDRenderPipelineInstance used for the project.
[Serializable]
public class GlobalTextureSettings
{
public const int kHDDefaultSpotCookieSize = 128;
public const int kHDDefaultPointCookieSize = 512;
public const int kHDDefaultReflectionCubemapSize = 128;
public int spotCookieSize = kHDDefaultSpotCookieSize;
public int pointCookieSize = kHDDefaultPointCookieSize;
public int reflectionCubemapSize = kHDDefaultReflectionCubemapSize;
public bool reflectionCacheCompressed = false;
}
[Serializable]
public class GlobalRenderingSettings
{
public bool useForwardRenderingOnly; // TODO: Currently there is no way to strip the extra forward shaders generated by the shader compiler, so we can still switch dynamically.
public bool useDepthPrepassWithDeferredRendering;
public bool renderAlphaTestOnlyInDeferredPrepass;
// We have to fall back to forward-only rendering when the scene view is using wireframe rendering mode,
// as rendering everything in wireframe + deferred does not play well together
public bool ShouldUseForwardRenderingOnly()
{
return useForwardRenderingOnly || GL.wireframe;
}
}
}

11
ScriptableRenderPipeline/HDRenderPipeline/GlobalSettings.cs.meta


fileFormatVersion: 2
guid: 0e59d5f3fb6a6744dbb195534816c029
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

209
ScriptableRenderPipeline/HDRenderPipeline/Lighting/ReflectionProbeCache.cs


using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.Rendering;
namespace UnityEngine.Experimental.Rendering.HDPipeline
{
public class ReflectionProbeCache
{
internal static readonly int s_InputTexID = Shader.PropertyToID("_InputTex");
internal static readonly int s_LoDID = Shader.PropertyToID("_LoD");
internal static readonly int s_FaceIndexID = Shader.PropertyToID("_FaceIndex");
enum ProbeFilteringState
{
Convolving,
Ready
}
int m_ProbeSize;
int m_CacheSize;
IBLFilterGGX m_IBLFilterGGX;
TextureCacheCubemap m_TextureCache;
RenderTexture m_TempRenderTexture;
RenderTexture m_ConvolutionTargetTexture;
ProbeFilteringState[] m_ProbeBakingState;
Material m_ConvertTextureMaterial;
MaterialPropertyBlock m_ConvertTextureMPB;
public ReflectionProbeCache(IBLFilterGGX iblFilter, int cacheSize, int probeSize, TextureFormat probeFormat, bool isMipmaped)
{
Debug.Assert(probeFormat == TextureFormat.BC6H || probeFormat == TextureFormat.RGBAHalf, "Reflection Probe Cache format for HDRP can only be BC6H or FP16.");
m_ProbeSize = probeSize;
m_CacheSize = cacheSize;
m_TextureCache = new TextureCacheCubemap();
m_TextureCache.AllocTextureArray(cacheSize, probeSize, probeFormat, isMipmaped);
m_IBLFilterGGX = iblFilter;
InitializeProbeBakingStates();
}
void Initialize()
{
if(m_TempRenderTexture == null)
{
// Temporary RT used for convolution and compression
m_TempRenderTexture = new RenderTexture(m_ProbeSize, m_ProbeSize, 1, RenderTextureFormat.ARGBHalf);
m_TempRenderTexture.dimension = TextureDimension.Cube;
m_TempRenderTexture.useMipMap = true;
m_TempRenderTexture.autoGenerateMips = false;
m_TempRenderTexture.Create();
m_ConvolutionTargetTexture = new RenderTexture(m_ProbeSize, m_ProbeSize, 1, RenderTextureFormat.ARGBHalf);
m_ConvolutionTargetTexture.dimension = TextureDimension.Cube;
m_ConvolutionTargetTexture.useMipMap = true;
m_ConvolutionTargetTexture.autoGenerateMips = false;
m_ConvolutionTargetTexture.Create();
m_ConvertTextureMaterial = CoreUtils.CreateEngineMaterial("Hidden/SRP/BlitCubeTextureFace");
m_ConvertTextureMPB = new MaterialPropertyBlock();
InitializeProbeBakingStates();
}
}
void InitializeProbeBakingStates()
{
m_ProbeBakingState = new ProbeFilteringState[m_CacheSize];
for (int i = 0; i < m_CacheSize; ++i)
m_ProbeBakingState[i] = ProbeFilteringState.Convolving;
}
public void Release()
{
if(m_TextureCache != null)
{
m_TextureCache.Release();
m_TextureCache = null;
}
if(m_TempRenderTexture != null)
{
m_TempRenderTexture.Release();
m_TempRenderTexture = null;
}
m_ProbeBakingState = null;
}
public void NewFrame()
{
Initialize();
m_TextureCache.NewFrame();
}
// This method is used to convert inputs that are either compressed or not of the right size.
// We can't use Graphics.ConvertTexture here because it does not work with a RenderTexture as destination.
void ConvertTexture(CommandBuffer cmd, Texture input, RenderTexture target)
{
m_ConvertTextureMPB.SetTexture(s_InputTexID, input);
m_ConvertTextureMPB.SetFloat(s_LoDID, 0.0f); // We want to convert mip 0 to whatever the size of the destination cache is.
for (int f = 0 ; f < 6 ; ++f)
{
m_ConvertTextureMPB.SetFloat(s_FaceIndexID, (float)f);
CoreUtils.SetRenderTarget(cmd, target, ClearFlag.None, Color.black, 0, (CubemapFace)f);
CoreUtils.DrawFullScreen(cmd, m_ConvertTextureMaterial, m_ConvertTextureMPB);
}
}
Texture ConvolveProbeTexture(CommandBuffer cmd, Texture texture)
{
// Probes can be either Cubemaps (for baked probes) or RenderTextures (for realtime probes)
Cubemap cubeTexture = texture as Cubemap;
RenderTexture renderTexture = texture as RenderTexture;
RenderTexture convolutionSourceTexture = null;
if (cubeTexture != null)
{
// if the size is different from the cache probe size, or if the input texture format is compressed, we need to convert it
// 1) to a format for which we can generate mip maps
// 2) to the proper reflection probe cache size
bool sizeMismatch = cubeTexture.width != m_ProbeSize || cubeTexture.height != m_ProbeSize;
bool formatMismatch = cubeTexture.format != TextureFormat.RGBAHalf; // Temporary RT for convolution is always FP16
if (formatMismatch || sizeMismatch)
{
if (sizeMismatch)
{
Debug.LogWarningFormat("Baked Reflection Probe {0} does not match HDRP Reflection Probe Cache size of {1}. Consider baking it at the same size for better loading performance.", texture.name, m_ProbeSize);
}
else if (cubeTexture.format == TextureFormat.BC6H)
{
Debug.LogWarningFormat("Baked Reflection Probe {0} is compressed but the HDRP Reflection Probe Cache is not. Consider removing compression from the input texture for better quality.", texture.name);
}
ConvertTexture(cmd, cubeTexture, m_TempRenderTexture);
}
else
{
for (int f = 0; f < 6; f++)
{
cmd.CopyTexture(cubeTexture, f, 0, m_TempRenderTexture, f, 0);
}
}
// Ideally if input is not compressed and has mipmaps, don't do anything here. Problem is, we can't know if mips have been already convolved offline...
cmd.GenerateMips(m_TempRenderTexture);
convolutionSourceTexture = m_TempRenderTexture;
}
else
{
Debug.Assert(renderTexture != null);
if (renderTexture.dimension != TextureDimension.Cube)
{
Debug.LogError("Realtime reflection probe should always be a Cube RenderTexture.");
return null;
}
// TODO: Handle downsizing as a separate case; instead of doing ConvertTexture, just use the relevant mipmaps.
bool sizeMismatch = renderTexture.width != m_ProbeSize || renderTexture.height != m_ProbeSize;
if (sizeMismatch)
{
ConvertTexture(cmd, renderTexture, m_TempRenderTexture);
convolutionSourceTexture = m_TempRenderTexture;
}
else
{
convolutionSourceTexture = renderTexture;
}
// Generate unfiltered mipmaps as a base for convolution
// TODO: Make sure that we don't first convolve everything on the GPU with the legacy code path executed after rendering the probe.
cmd.GenerateMips(convolutionSourceTexture);
}
m_IBLFilterGGX.FilterCubemap(cmd, convolutionSourceTexture, m_ConvolutionTargetTexture);
return m_ConvolutionTargetTexture;
}
public int FetchSlice(CommandBuffer cmd, Texture texture)
{
bool needUpdate;
var sliceIndex = m_TextureCache.ReserveSlice(texture, out needUpdate);
if (sliceIndex != -1)
{
if(needUpdate || m_ProbeBakingState[sliceIndex] != ProbeFilteringState.Ready)
{
using (new ProfilingSample(cmd, "Convolve Reflection Probe"))
{
// For now baking is done directly but will be time sliced in the future. Just preparing the code here.
m_ProbeBakingState[sliceIndex] = ProbeFilteringState.Convolving;
Texture result = ConvolveProbeTexture(cmd, texture);
if (result == null)
return -1;
m_TextureCache.UpdateSlice(cmd, sliceIndex, result, m_TextureCache.GetTextureUpdateCount(texture)); // Be careful to provide the update count from the input texture, not the temporary one used for convolving.
m_ProbeBakingState[sliceIndex] = ProbeFilteringState.Ready;
}
}
}
return sliceIndex;
}
public Texture GetTexCache()
{
return m_TextureCache.GetTexCache();
}
}
}
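A hedged usage sketch of the cache above as seen from a light loop; only NewFrame, FetchSlice and the -1 failure value come from the class itself, while the probe, command buffer and light-data variables are illustrative assumptions:

// Once per frame, before any fetches (also performs the lazy GPU allocation).
reflectionProbeCache.NewFrame();

// For each visible reflection probe: reserve a slice and convolve it if needed.
int sliceIndex = reflectionProbeCache.FetchSlice(cmd, probe.texture);
if (sliceIndex != -1)
    envLightData.envIndex = sliceIndex;   // hypothetical consumer of the slice index
// -1 means the cache is full or the probe texture could not be processed.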

11
ScriptableRenderPipeline/HDRenderPipeline/Lighting/ReflectionProbeCache.cs.meta


fileFormatVersion: 2
guid: be2e03c1701055f4c86bda65b083b809
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

1001
SampleScenes/HDTest/GraphicTest/RealtimeCubemaps/LightingData.asset
The file diff is too large to display
View file

10
SampleScenes/HDTest/GraphicTest/RealtimeCubemaps/LightingData.asset.meta


fileFormatVersion: 2
guid: 060c32dabfc0dc04f920367fe55abc46
timeCreated: 1508945643
licenseType: Pro
NativeFormatImporter:
externalObjects: {}
mainObjectFileID: 25800000
userData:
assetBundleName:
assetBundleVariant:

941
SampleScenes/HDTest/GraphicTest/RealtimeCubemaps/Lightmap-0_comp_dir.png

Before / After
Width: 1024  |  Height: 1024  |  Size: 273 KiB

77
SampleScenes/HDTest/GraphicTest/RealtimeCubemaps/Lightmap-0_comp_dir.png.meta


fileFormatVersion: 2
guid: 721b5cd83c0a7fd46b4e001ab805c728
timeCreated: 1508945642
licenseType: Pro
TextureImporter:
fileIDToRecycleName: {}
externalObjects: {}
serializedVersion: 4
mipmaps:
mipMapMode: 0
enableMipMap: 1
sRGBTexture: 0
linearTexture: 0
fadeOut: 0
borderMipMap: 0
mipMapsPreserveCoverage: 0
alphaTestReferenceValue: 0.5
mipMapFadeDistanceStart: 1
mipMapFadeDistanceEnd: 3
bumpmap:
convertToNormalMap: 0
externalNormalMap: 0
heightScale: 0.25
normalMapFilter: 0
isReadable: 0
grayScaleToAlpha: 0
generateCubemap: 6
cubemapConvolution: 0
seamlessCubemap: 0
textureFormat: 1
maxTextureSize: 2048
textureSettings:
serializedVersion: 2
filterMode: 1
aniso: 3
mipBias: 0
wrapU: 1
wrapV: 1
wrapW: 1
nPOTScale: 1
lightmap: 0
compressionQuality: 50
spriteMode: 0
spriteExtrude: 1
spriteMeshType: 1
alignment: 0
spritePivot: {x: 0.5, y: 0.5}
spriteBorder: {x: 0, y: 0, z: 0, w: 0}
spritePixelsToUnits: 100
alphaUsage: 1
alphaIsTransparency: 0
spriteTessellationDetail: -1
textureType: 0
textureShape: 1
maxTextureSizeSet: 0
compressionQualitySet: 0
textureFormatSet: 0
platformSettings:
- buildTarget: DefaultTexturePlatform
maxTextureSize: 2048
resizeAlgorithm: 0
textureFormat: -1
textureCompression: 2
compressionQuality: 50
crunchedCompression: 0
allowsAlphaSplitting: 0
overridden: 0
androidETC2FallbackOverride: 0
spriteSheet:
serializedVersion: 2
sprites: []
outline: []
physicsShape: []
spritePackingTag:
userData:
assetBundleName:
assetBundleVariant:

1001
SampleScenes/HDTest/GraphicTest/RealtimeCubemaps/Lightmap-0_comp_light.exr
The file diff is too large to display
View file

77
SampleScenes/HDTest/GraphicTest/RealtimeCubemaps/Lightmap-0_comp_light.exr.meta


fileFormatVersion: 2
guid: 06e763c4bed56e4488b02877bf360c47
timeCreated: 1508945641
licenseType: Pro
TextureImporter:
fileIDToRecycleName: {}
externalObjects: {}
serializedVersion: 4
mipmaps:
mipMapMode: 0
enableMipMap: 1
sRGBTexture: 1
linearTexture: 0
fadeOut: 0
borderMipMap: 0
mipMapsPreserveCoverage: 0
alphaTestReferenceValue: 0.5
mipMapFadeDistanceStart: 1
mipMapFadeDistanceEnd: 3
bumpmap:
convertToNormalMap: 0
externalNormalMap: 0
heightScale: 0.25
normalMapFilter: 0
isReadable: 0
grayScaleToAlpha: 0
generateCubemap: 6
cubemapConvolution: 0
seamlessCubemap: 0
textureFormat: 1
maxTextureSize: 2048
textureSettings:
serializedVersion: 2
filterMode: 1
aniso: 3
mipBias: 0
wrapU: 1
wrapV: 1
wrapW: 1
nPOTScale: 1
lightmap: 0
compressionQuality: 50
spriteMode: 0
spriteExtrude: 1
spriteMeshType: 1
alignment: 0
spritePivot: {x: 0.5, y: 0.5}
spriteBorder: {x: 0, y: 0, z: 0, w: 0}
spritePixelsToUnits: 100
alphaUsage: 0
alphaIsTransparency: 0
spriteTessellationDetail: -1
textureType: 6
textureShape: 1
maxTextureSizeSet: 0
compressionQualitySet: 0
textureFormatSet: 0
platformSettings:
- buildTarget: DefaultTexturePlatform
maxTextureSize: 2048
resizeAlgorithm: 0
textureFormat: -1
textureCompression: 2
compressionQuality: 50
crunchedCompression: 0
allowsAlphaSplitting: 0
overridden: 0
androidETC2FallbackOverride: 0
spriteSheet:
serializedVersion: 2
sprites: []
outline: []
physicsShape: []
spritePackingTag:
userData:
assetBundleName:
assetBundleVariant:

845
SampleScenes/HDTest/GraphicTest/RealtimeCubemaps/ReflectionProbe-0.exr
The file diff is too large to display
View file

78
SampleScenes/HDTest/GraphicTest/RealtimeCubemaps/ReflectionProbe-0.exr.meta


fileFormatVersion: 2
guid: 5496b95b55b0f374da2bbe8cfd1a28e3
timeCreated: 1508945642
licenseType: Pro
TextureImporter:
fileIDToRecycleName:
8900000: generatedCubemap
externalObjects: {}
serializedVersion: 4
mipmaps:
mipMapMode: 0
enableMipMap: 1
sRGBTexture: 1
linearTexture: 0
fadeOut: 0
borderMipMap: 0
mipMapsPreserveCoverage: 0
alphaTestReferenceValue: 0.5
mipMapFadeDistanceStart: 1
mipMapFadeDistanceEnd: 3
bumpmap:
convertToNormalMap: 0
externalNormalMap: 0
heightScale: 0.25
normalMapFilter: 0
isReadable: 0
grayScaleToAlpha: 0
generateCubemap: 6
cubemapConvolution: 1
seamlessCubemap: 1
textureFormat: 1
maxTextureSize: 2048
textureSettings:
serializedVersion: 2
filterMode: 2
aniso: 0
mipBias: 0
wrapU: 1
wrapV: 1
wrapW: 1
nPOTScale: 1
lightmap: 0
compressionQuality: 50
spriteMode: 0
spriteExtrude: 1
spriteMeshType: 1
alignment: 0
spritePivot: {x: 0.5, y: 0.5}
spriteBorder: {x: 0, y: 0, z: 0, w: 0}
spritePixelsToUnits: 100
alphaUsage: 1
alphaIsTransparency: 0
spriteTessellationDetail: -1
textureType: 0
textureShape: 2
maxTextureSizeSet: 0
compressionQualitySet: 0
textureFormatSet: 0
platformSettings:
- buildTarget: DefaultTexturePlatform
maxTextureSize: 2048
resizeAlgorithm: 0
textureFormat: -1
textureCompression: 1
compressionQuality: 100
crunchedCompression: 0
allowsAlphaSplitting: 0
overridden: 0
androidETC2FallbackOverride: 0
spriteSheet:
serializedVersion: 2
sprites: []
outline: []
physicsShape: []
spritePackingTag:
userData:
assetBundleName:
assetBundleVariant:
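ReflectionProbe-0.exr is imported as a specular-convolved cubemap (textureShape: 2, cubemapConvolution: 1, seamlessCubemap: 1, filterMode: 2), and fileIDToRecycleName keeps the generated Cubemap sub-asset stable across reimports. A hedged sketch of the same settings via the scripting API (the helper name is hypothetical, not part of this commit):

```csharp
using UnityEditor;
using UnityEngine;

// Hypothetical editor helper mirroring the ReflectionProbe-0.exr.meta fields above.
public static class ReflectionProbeImportSketch
{
    public static void ApplyCubemapSettings(string assetPath)
    {
        var importer = AssetImporter.GetAtPath(assetPath) as TextureImporter;
        if (importer == null)
            return;

        importer.textureShape       = TextureImporterShape.TextureCube;           // textureShape: 2
        importer.generateCubemap    = TextureImporterGenerateCubemap.AutoCubemap; // generateCubemap: 6
        importer.cubemapConvolution = TextureImporterCubemapConvolution.Specular; // cubemapConvolution: 1
        importer.seamlessCubemap    = true;                                       // seamlessCubemap: 1
        importer.filterMode         = FilterMode.Trilinear;                       // filterMode: 2
        importer.anisoLevel         = 0;                                          // aniso: 0
        importer.SaveAndReimport();
    }
}
```

Specular convolution plus trilinear filtering is what lets the probe's mip chain be used for roughness-dependent reflection lookups.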

192
SampleScenes/HDTest/GraphicTest/RealtimeCubemaps/Material RT1.mat


%YAML 1.1
%TAG !u! tag:unity3d.com,2011:
--- !u!21 &2100000
Material:
serializedVersion: 6
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_Name: Material RT1
m_Shader: {fileID: 4800000, guid: c4edd00ff2db5b24391a4fcb1762e459, type: 3}
m_ShaderKeywords: _ALBEDOAFFECTEMISSIVE_OFF _ALPHACUTOFFENABLE_OFF _DEPTHOFFSETENABLE_OFF
_DISTORTIONENABLE_OFF _DISTORTIONONLY_OFF _DOUBLESIDEDENABLE_OFF _ENABLESPECULAROCCLUSION_OFF
_ENABLEWIND_OFF _PREREFRACTIONPASS_OFF
m_LightmapFlags: 4
m_EnableInstancingVariants: 0
m_DoubleSidedGI: 0
m_CustomRenderQueue: -1
stringTagMap: {}
disabledShaderPasses:
- DistortionVectors
m_SavedProperties:
serializedVersion: 3
m_TexEnvs:
- _AnisotropyMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _BaseColorMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _BentNormalMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _BentNormalMapOS:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _DetailMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _DistortionVectorMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _EmissiveColorMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _HeightMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _MainTex:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _MaskMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _NormalMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _NormalMapOS:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _SpecularColorMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _SubsurfaceRadiusMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _TangentMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _TangentMapOS:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _ThicknessMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _TransmittanceColorMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _UnlitColorMap:
m_Texture: {fileID: 8400000, guid: 1511b1781db1047459b5bd86d4fcbe81, type: 2}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
m_Floats:
- _ATDistance: 1
- _AlbedoAffectEmissive: 0
- _AlphaCutoff: 0.5
- _AlphaCutoffEnable: 0
- _Anisotropy: 0
- _BlendMode: 0
- _CoatCoverage: 1
- _CoatIOR: 0.5
- _CullMode: 2
- _Cutoff: 0.5
- _DepthOffsetEnable: 0
- _DetailAlbedoScale: 1
- _DetailNormalScale: 1
- _DetailSmoothnessScale: 1
- _DisplacementLockObjectScale: 1
- _DisplacementLockTilingScale: 1
- _DisplacementMode: 0
- _DistortionBlendMode: 0
- _DistortionBlurBlendMode: 0
- _DistortionBlurDstBlend: 1
- _DistortionBlurRemapMax: 1
- _DistortionBlurRemapMin: 0
- _DistortionBlurScale: 1
- _DistortionBlurSrcBlend: 1
- _DistortionDepthTest: 1
- _DistortionDstBlend: 1
- _DistortionEnable: 0
- _DistortionOnly: 0
- _DistortionScale: 1
- _DistortionSrcBlend: 1
- _DistortionVectorBias: -1
- _DistortionVectorScale: 2
- _DoubleSidedEnable: 0
- _DoubleSidedNormalMode: 1
- _Drag: 1
- _DstBlend: 0
- _EmissiveColorMode: 1
- _EmissiveIntensity: 0
- _EnableBlendModePreserveSpecularLighting: 1
- _EnableFogOnTransparent: 1
- _EnableSpecularOcclusion: 0
- _EnableWind: 0
- _HeightAmplitude: 0.02
- _HeightCenter: 0.5
- _HeightMax: 1
- _HeightMin: -1
- _IOR: 1
- _InitialBend: 1
- _InvTilingScale: 1
- _LinkDetailsWithBase: 1
- _MaterialID: 1
- _Metallic: 0
- _NormalMapSpace: 0
- _NormalScale: 1
- _PPDLodThreshold: 5
- _PPDMaxSamples: 15
- _PPDMinSamples: 5
- _PPDPrimitiveLength: 1
- _PPDPrimitiveWidth: 1
- _PreRefractionPass: 0
- _RefractionMode: 0
- _ShiverDirectionality: 0.5
- _ShiverDrag: 0.2
- _Smoothness: 1
- _SmoothnessRemapMax: 1
- _SmoothnessRemapMin: 0
- _SrcBlend: 1
- _StencilRef: 2
- _Stiffness: 1
- _SubsurfaceProfile: 0
- _SubsurfaceRadius: 1
- _SurfaceType: 0
- _TexWorldScale: 1
- _Thickness: 1
- _ThicknessMultiplier: 1
- _UVBase: 0
- _UVDetail: 0
- _ZTestMode: 4
- _ZWrite: 1
m_Colors:
- _BaseColor: {r: 1, g: 1, b: 1, a: 1}
- _Color: {r: 1, g: 1, b: 1, a: 1}
- _DoubleSidedConstants: {r: 1, g: 1, b: -1, a: 0}
- _EmissionColor: {r: 1, g: 1, b: 1, a: 1}
- _EmissiveColor: {r: 1, g: 1, b: 1, a: 1}
- _InvPrimScale: {r: 1, g: 1, b: 0, a: 0}
- _SpecularColor: {r: 1, g: 1, b: 1, a: 1}
- _ThicknessRemap: {r: 0, g: 1, b: 0, a: 0}
- _TransmittanceColor: {r: 1, g: 1, b: 1, a: 1}
- _UVDetailsMappingMask: {r: 1, g: 0, b: 0, a: 0}
- _UVMappingMask: {r: 1, g: 0, b: 0, a: 0}
- _UnlitColor: {r: 1, g: 1, b: 1, a: 1}
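Material RT1 (and the near-identical Material RT2 below, which differs only in the render texture it references) is an HDRP unlit material whose only bound texture is _UnlitColorMap, pointing at a render-texture asset — presumably the output of the realtime cubemap capture in this test scene. A rough runtime sketch of the same setup; the shader name is an assumption, since the .mat references the shader only by GUID:

```csharp
using UnityEngine;

// Hypothetical runtime component; not part of this commit.
public class RealtimeCubemapDisplaySketch : MonoBehaviour
{
    public RenderTexture cubemapTarget; // the render-texture asset bound to _UnlitColorMap above

    void Start()
    {
        // Assumption: GUID c4edd00ff2db5b24391a4fcb1762e459 resolves to the HDRP Unlit shader.
        var shader = Shader.Find("HDRenderPipeline/Unlit");
        if (shader == null || cubemapTarget == null)
            return;

        var mat = new Material(shader) { name = "Material RT1 (runtime copy)" };
        mat.SetTexture("_UnlitColorMap", cubemapTarget); // the only non-null texture slot in the .mat
        mat.SetColor("_UnlitColor", Color.white);        // _UnlitColor: {r: 1, g: 1, b: 1, a: 1}
        GetComponent<Renderer>().material = mat;
    }
}
```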

8
SampleScenes/HDTest/GraphicTest/RealtimeCubemaps/Material RT1.mat.meta


fileFormatVersion: 2
guid: c28052c708ef8d14fb05a46fe6983c7b
NativeFormatImporter:
externalObjects: {}
mainObjectFileID: 2100000
userData:
assetBundleName:
assetBundleVariant:

192
SampleScenes/HDTest/GraphicTest/RealtimeCubemaps/Material RT2.mat


%YAML 1.1
%TAG !u! tag:unity3d.com,2011:
--- !u!21 &2100000
Material:
serializedVersion: 6
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_Name: Material RT2
m_Shader: {fileID: 4800000, guid: c4edd00ff2db5b24391a4fcb1762e459, type: 3}
m_ShaderKeywords: _ALBEDOAFFECTEMISSIVE_OFF _ALPHACUTOFFENABLE_OFF _DEPTHOFFSETENABLE_OFF
_DISTORTIONENABLE_OFF _DISTORTIONONLY_OFF _DOUBLESIDEDENABLE_OFF _ENABLESPECULAROCCLUSION_OFF
_ENABLEWIND_OFF _PREREFRACTIONPASS_OFF
m_LightmapFlags: 4
m_EnableInstancingVariants: 0
m_DoubleSidedGI: 0
m_CustomRenderQueue: -1
stringTagMap: {}
disabledShaderPasses:
- DistortionVectors
m_SavedProperties:
serializedVersion: 3
m_TexEnvs:
- _AnisotropyMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _BaseColorMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _BentNormalMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _BentNormalMapOS:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _DetailMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _DistortionVectorMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _EmissiveColorMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _HeightMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _MainTex:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _MaskMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _NormalMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _NormalMapOS:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _SpecularColorMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _SubsurfaceRadiusMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _TangentMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _TangentMapOS:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _ThicknessMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _TransmittanceColorMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _UnlitColorMap:
m_Texture: {fileID: 8400000, guid: 2f596f33da3ac0040a90ca8e13a615bb, type: 2}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
m_Floats:
- _ATDistance: 1
- _AlbedoAffectEmissive: 0
- _AlphaCutoff: 0.5
- _AlphaCutoffEnable: 0
- _Anisotropy: 0
- _BlendMode: 0
- _CoatCoverage: 1
- _CoatIOR: 0.5
- _CullMode: 2
- _Cutoff: 0.5
- _DepthOffsetEnable: 0
- _DetailAlbedoScale: 1
- _DetailNormalScale: 1
- _DetailSmoothnessScale: 1
- _DisplacementLockObjectScale: 1
- _DisplacementLockTilingScale: 1
- _DisplacementMode: 0
- _DistortionBlendMode: 0
- _DistortionBlurBlendMode: 0
- _DistortionBlurDstBlend: 1
- _DistortionBlurRemapMax: 1
- _DistortionBlurRemapMin: 0
- _DistortionBlurScale: 1
- _DistortionBlurSrcBlend: 1
- _DistortionDepthTest: 1
- _DistortionDstBlend: 1
- _DistortionEnable: 0
- _DistortionOnly: 0
- _DistortionScale: 1
- _DistortionSrcBlend: 1
- _DistortionVectorBias: -1
- _DistortionVectorScale: 2
- _DoubleSidedEnable: 0
- _DoubleSidedNormalMode: 1
- _Drag: 1
- _DstBlend: 0
- _EmissiveColorMode: 1
- _EmissiveIntensity: 0
- _EnableBlendModePreserveSpecularLighting: 1
- _EnableFogOnTransparent: 1
- _EnableSpecularOcclusion: 0
- _EnableWind: 0
- _HeightAmplitude: 0.02
- _HeightCenter: 0.5
- _HeightMax: 1
- _HeightMin: -1
- _IOR: 1
- _InitialBend: 1
- _InvTilingScale: 1
- _LinkDetailsWithBase: 1
- _MaterialID: 1
- _Metallic: 0
- _NormalMapSpace: 0
- _NormalScale: 1
- _PPDLodThreshold: 5
- _PPDMaxSamples: 15
- _PPDMinSamples: 5
- _PPDPrimitiveLength: 1
- _PPDPrimitiveWidth: 1
- _PreRefractionPass: 0
- _RefractionMode: 0
- _ShiverDirectionality: 0.5
- _ShiverDrag: 0.2
- _Smoothness: 1
- _SmoothnessRemapMax: 1
- _SmoothnessRemapMin: 0
- _SrcBlend: 1
- _StencilRef: 2
- _Stiffness: 1
- _SubsurfaceProfile: 0
- _SubsurfaceRadius: 1
- _SurfaceType: 0
- _TexWorldScale: 1
- _Thickness: 1
- _ThicknessMultiplier: 1
- _UVBase: 0
- _UVDetail: 0
- _ZTestMode: 4
- _ZWrite: 1
m_Colors:
- _BaseColor: {r: 1, g: 1, b: 1, a: 1}
- _Color: {r: 1, g: 1, b: 1, a: 1}
- _DoubleSidedConstants: {r: 1, g: 1, b: -1, a: 0}
- _EmissionColor: {r: 1, g: 1, b: 1, a: 1}
- _EmissiveColor: {r: 1, g: 1, b: 1, a: 1}
- _InvPrimScale: {r: 1, g: 1, b: 0, a: 0}
- _SpecularColor: {r: 1, g: 1, b: 1, a: 1}
- _ThicknessRemap: {r: 0, g: 1, b: 0, a: 0}
- _TransmittanceColor: {r: 1, g: 1, b: 1, a: 1}
- _UVDetailsMappingMask: {r: 1, g: 0, b: 0, a: 0}
- _UVMappingMask: {r: 1, g: 0, b: 0, a: 0}
- _UnlitColor: {r: 1, g: 1, b: 1, a: 1}

8
SampleScenes/HDTest/GraphicTest/RealtimeCubemaps/Material RT2.mat.meta


fileFormatVersion: 2
guid: 77c412dc0c40ab34c924551fa2eb085a
NativeFormatImporter:
externalObjects: {}
mainObjectFileID: 2100000
userData:
assetBundleName:
assetBundleVariant:

8
SampleScenes/HDTest/GraphicTest/RealtimeCubemaps/RealtimeCubeMapSky.asset.meta


fileFormatVersion: 2
guid: 339f284b3c64b9041ba357d66294f265
NativeFormatImporter:
externalObjects: {}
mainObjectFileID: 11400000
userData:
assetBundleName:
assetBundleVariant:

1001
SampleScenes/HDTest/GraphicTest/RealtimeCubemaps/ReflectionProbe-1.exr
File diff is too large to display

111
SampleScenes/HDTest/GraphicTest/RealtimeCubemaps/ReflectionProbe-1.exr.meta


fileFormatVersion: 2
guid: ff66033ed37fa6b4998a8c7abf6eac37
TextureImporter:
fileIDToRecycleName:
8900000: generatedCubemap
externalObjects: {}
serializedVersion: 5
mipmaps:
mipMapMode: 0
enableMipMap: 1
sRGBTexture: 1
linearTexture: 0
fadeOut: 0
borderMipMap: 0
mipMapsPreserveCoverage: 0
alphaTestReferenceValue: 0.5
mipMapFadeDistanceStart: 1
mipMapFadeDistanceEnd: 3
bumpmap:
convertToNormalMap: 0
externalNormalMap: 0
heightScale: 0.25
normalMapFilter: 0
isReadable: 0
grayScaleToAlpha: 0
generateCubemap: 6
cubemapConvolution: 1
seamlessCubemap: 1
textureFormat: 1
maxTextureSize: 2048
textureSettings:
serializedVersion: 2
filterMode: 2
aniso: 0
mipBias: 0
wrapU: 1
wrapV: 1
wrapW: 1
nPOTScale: 1
lightmap: 0
compressionQuality: 50
spriteMode: 0
spriteExtrude: 1
spriteMeshType: 1
alignment: 0
spritePivot: {x: 0.5, y: 0.5}
spriteBorder: {x: 0, y: 0, z: 0, w: 0}
spritePixelsToUnits: 100
alphaUsage: 1
alphaIsTransparency: 0
spriteTessellationDetail: -1
textureType: 0
textureShape: 2
singleChannelComponent: 0
maxTextureSizeSet: 0
compressionQualitySet: 0
textureFormatSet: 0
platformSettings:
- serializedVersion: 2
buildTarget: DefaultTexturePlatform
maxTextureSize: 2048
resizeAlgorithm: 0
textureFormat: -1
textureCompression: 1
compressionQuality: 100
crunchedCompression: 0
allowsAlphaSplitting: 0
overridden: 0
androidETC2FallbackOverride: 0
- serializedVersion: 2
buildTarget: Standalone
maxTextureSize: 2048
resizeAlgorithm: 0
textureFormat: -1
textureCompression: 1
compressionQuality: 100
crunchedCompression: 0
allowsAlphaSplitting: 0
overridden: 0
androidETC2FallbackOverride: 0
- serializedVersion: 2
buildTarget: iPhone
maxTextureSize: 2048
resizeAlgorithm: 0
textureFormat: -1
textureCompression: 1
compressionQuality: 100
crunchedCompression: 0
allowsAlphaSplitting: 0
overridden: 0
androidETC2FallbackOverride: 0
- serializedVersion: 2
buildTarget: PS4
maxTextureSize: 2048
resizeAlgorithm: 0
textureFormat: -1
textureCompression: 1
compressionQuality: 100
crunchedCompression: 0
allowsAlphaSplitting: 0
overridden: 0
androidETC2FallbackOverride: 0
spriteSheet:
serializedVersion: 2
sprites: []
outline: []
physicsShape: []
spritePackingTag:
userData:
assetBundleName:
assetBundleVariant:
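Unlike ReflectionProbe-0.exr.meta, this importer is serializedVersion: 5 and spells out one platformSettings block per platform (DefaultTexturePlatform, Standalone, iPhone, PS4), all left at overridden: 0. For reference, a hedged editor-side sketch (hypothetical helper, not part of this commit) of how those per-platform blocks map to TextureImporterPlatformSettings:

```csharp
using UnityEditor;

// Hypothetical editor helper; writes the same non-overriding per-platform entries
// that the serializedVersion: 5 importer above lists explicitly.
public static class ProbePlatformSettingsSketch
{
    public static void MirrorDefaults(TextureImporter importer)
    {
        string[] platforms = { "Standalone", "iPhone", "PS4" };
        foreach (var platform in platforms)
        {
            var settings = new TextureImporterPlatformSettings
            {
                name                = platform,
                overridden          = false,                                  // overridden: 0 — inherit defaults
                maxTextureSize      = 2048,                                   // maxTextureSize: 2048
                format              = TextureImporterFormat.Automatic,       // textureFormat: -1
                textureCompression  = TextureImporterCompression.Compressed, // textureCompression: 1
                compressionQuality  = 100,                                    // compressionQuality: 100
                crunchedCompression = false                                   // crunchedCompression: 0
            };
            importer.SetPlatformTextureSettings(settings);
        }
        importer.SaveAndReimport();
    }
}
```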

1001
SampleScenes/HDTest/GraphicTest/RealtimeCubemaps/ReflectionProbe-2.exr
File diff is too large to display

111
SampleScenes/HDTest/GraphicTest/RealtimeCubemaps/ReflectionProbe-2.exr.meta


fileFormatVersion: 2
guid: 85e9712356da6ea4d82e221c5c7bb03b
TextureImporter:
fileIDToRecycleName:
8900000: generatedCubemap
externalObjects: {}
serializedVersion: 5
mipmaps:
mipMapMode: 0
enableMipMap: 1
sRGBTexture: 1
linearTexture: 0
fadeOut: 0
borderMipMap: 0
mipMapsPreserveCoverage: 0
alphaTestReferenceValue: 0.5
mipMapFadeDistanceStart: 1
mipMapFadeDistanceEnd: 3
bumpmap:
convertToNormalMap: 0
externalNormalMap: 0
heightScale: 0.25
normalMapFilter: 0
isReadable: 0
grayScaleToAlpha: 0
generateCubemap: 6
cubemapConvolution: 1
seamlessCubemap: 1
textureFormat: 1
maxTextureSize: 2048
textureSettings:
serializedVersion: 2
filterMode: 2
aniso: 0
mipBias: 0
wrapU: 1
wrapV: 1
wrapW: 1
nPOTScale: 1
lightmap: 0
compressionQuality: 50
spriteMode: 0
spriteExtrude: 1
spriteMeshType: 1
alignment: 0
spritePivot: {x: 0.5, y: 0.5}
spriteBorder: {x: 0, y: 0, z: 0, w: 0}
spritePixelsToUnits: 100
alphaUsage: 1
alphaIsTransparency: 0
spriteTessellationDetail: -1
textureType: 0
textureShape: 2
singleChannelComponent: 0
maxTextureSizeSet: 0
compressionQualitySet: 0
textureFormatSet: 0
platformSettings:
- serializedVersion: 2
buildTarget: DefaultTexturePlatform
maxTextureSize: 2048
resizeAlgorithm: 0
textureFormat: -1
textureCompression: 1
compressionQuality: 100
crunchedCompression: 0
allowsAlphaSplitting: 0
overridden: 0
androidETC2FallbackOverride: 0
- serializedVersion: 2
buildTarget: Standalone
maxTextureSize: 2048
resizeAlgorithm: 0
textureFormat: -1
textureCompression: 1
compressionQuality: 100
crunchedCompression: 0
allowsAlphaSplitting: 0
overridden: 0
androidETC2FallbackOverride: 0
- serializedVersion: 2
buildTarget: iPhone
maxTextureSize: 2048
resizeAlgorithm: 0
textureFormat: -1
textureCompression: 1
compressionQuality: 100
crunchedCompression: 0
allowsAlphaSplitting: 0
overridden: 0
androidETC2FallbackOverride: 0
- serializedVersion: 2
buildTarget: PS4
maxTextureSize: 2048
resizeAlgorithm: 0
textureFormat: -1
textureCompression: 1
compressionQuality: 100
crunchedCompression: 0
allowsAlphaSplitting: 0
overridden: 0
androidETC2FallbackOverride: 0
spriteSheet:
serializedVersion: 2
sprites: []
outline: []
physicsShape: []
spritePackingTag:
userData:
assetBundleName:
assetBundleVariant:

Some files were not shown because too many files changed in this diff
