
Merge branch 'master' into feature/SSR

# Conflicts:
#	ScriptableRenderPipeline/Core/CoreRP/Textures/RTHandleSystem.cs
#	ScriptableRenderPipeline/HDRenderPipeline/HDRP/Camera/HDCamera.cs
#	ScriptableRenderPipeline/HDRenderPipeline/HDRP/Debug/DebugDisplay.cs
#	ScriptableRenderPipeline/HDRenderPipeline/HDRP/HDRenderPipeline.cs
#	ScriptableRenderPipeline/HDRenderPipeline/HDRP/HDUtils.cs
#	ScriptableRenderPipeline/HDRenderPipeline/HDRP/RenderPipelineResources/BufferPyramid.cs
#	ScriptableRenderPipeline/HDRenderPipeline/HDRP/Sky/SkyManager.cs
#	ScriptableRenderPipeline/HDRenderPipeline/HDRP/Sky/SkyRenderingContext.cs
Frédéric Vauchelles committed 7 years ago
Commit d9f5b4f5
67 files changed, 7367 insertions(+), 4965 deletions(-)
  1. CHANGELOG.md (10)
  2. ImageTemplates/HDRenderPipeline/Scenes/1xxx_Materials/1101_Unlit.unity.png (995)
  3. ImageTemplates/HDRenderPipeline/Scenes/1xxx_Materials/1301_SubSurfaceScattering.unity.png (999)
  4. ImageTemplates/HDRenderPipeline/Scenes/1xxx_Materials/1302_SSS_MaxRadius.unity.png (999)
  5. ImageTemplates/HDRenderPipeline/Scenes/1xxx_Materials/1302_SSS_MaxRadius.unity.png.meta (3)
  6. ImageTemplates/HDRenderPipeline/Scenes/1xxx_Materials/1303_SSS_Pre-Post.unity.png (999)
  7. ImageTemplates/HDRenderPipeline/Scenes/1xxx_Materials/1303_SSS_Pre-Post.unity.png.meta (3)
  8. ImageTemplates/HDRenderPipeline/Scenes/2xxx_Lighting/2101_GI_Metapass.unity.png (999)
  9. ImageTemplates/HDRenderPipeline/Scenes/2xxx_Lighting/2102_GI_Emission.unity.png (998)
  10. ImageTemplates/HDRenderPipeline/Scenes/2xxx_Lighting/2103_BakeMixed.unity.png (999)
  11. ImageTemplates/HDRenderPipeline/Scenes/2xxx_Lighting/2401_Light_on_Tesselation.unity.png (998)
  12. ImageTemplates/HDRenderPipeline/Scenes/9xxx_Other/9002_Deferred-and-Forward.unity.png (999)
  13. ScriptableRenderPipeline/Core/CoreRP/ShaderLibrary/BSDF.hlsl (125)
  14. ScriptableRenderPipeline/Core/CoreRP/ShaderLibrary/GeometricTools.hlsl (7)
  15. ScriptableRenderPipeline/Core/CoreRP/ShaderLibrary/ImageBasedLighting.hlsl (7)
  16. ScriptableRenderPipeline/Core/CoreRP/ShaderLibrary/Sampling/Sampling.hlsl (22)
  17. ScriptableRenderPipeline/Core/CoreRP/ShaderLibrary/SpaceFillingCurves.hlsl (6)
  18. ScriptableRenderPipeline/Core/CoreRP/ShaderLibrary/VolumeRendering.hlsl (11)
  19. ScriptableRenderPipeline/Core/CoreRP/Textures/RTHandleSystem.cs (15)
  20. ScriptableRenderPipeline/Core/CoreRP/Utilities/CoreUtils.cs (30)
  21. ScriptableRenderPipeline/Core/CoreRP/Volume/VolumeComponent.cs (21)
  22. ScriptableRenderPipeline/Core/CoreRP/Volume/VolumeManager.cs (16)
  23. ScriptableRenderPipeline/HDRenderPipeline/CHANGELOG.md (40)
  24. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Camera/HDCamera.cs (68)
  25. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Debug/DebugDisplay.cs (29)
  26. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Debug/LightingDebug.cs (7)
  27. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Decal/DecalSystem.cs (26)
  28. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/Material/StackLit/StackLitUI.cs (278)
  29. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/Sky/AtmosphericScattering/ExponentialFogEditor.cs (4)
  30. ScriptableRenderPipeline/HDRenderPipeline/HDRP/HDCustomSamplerId.cs (1)
  31. ScriptableRenderPipeline/HDRenderPipeline/HDRP/HDRenderPipeline.cs (35)
  32. ScriptableRenderPipeline/HDRenderPipeline/HDRP/HDStringConstants.cs (3)
  33. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightDefinition.cs (1)
  34. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightDefinition.cs.hlsl (5)
  35. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightLoop/LightLoop.cs (8)
  36. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/HomogeneousDensityVolume.cs (37)
  37. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/VBuffer.hlsl (187)
  38. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/VolumeVoxelization.compute (6)
  39. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/VolumetricLighting.compute (160)
  40. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/VolumetricLighting.cs (167)
  41. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/Lit/Lit.hlsl (38)
  42. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/PreIntegratedFGD/PreIntegratedFGD.hlsl (22)
  43. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/StackLit/StackLit.cs (82)
  44. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/StackLit/StackLit.cs.hlsl (92)
  45. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/StackLit/StackLit.hlsl (289)
  46. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/StackLit/StackLit.shader (32)
  47. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/StackLit/StackLitData.hlsl (242)
  48. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/StackLit/StackLitProperties.hlsl (23)
  49. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/SubsurfaceScattering/SubsurfaceScattering.compute (2)
  50. ScriptableRenderPipeline/HDRenderPipeline/HDRP/RenderPipeline/FrameSettings.cs (5)
  51. ScriptableRenderPipeline/HDRenderPipeline/HDRP/RenderPipelineResources/BufferPyramid.cs (30)
  52. ScriptableRenderPipeline/HDRenderPipeline/HDRP/RenderPipelineResources/BufferPyramidProcessor.cs (11)
  53. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Sky/AtmosphericScattering/AtmosphericScattering.cs (27)
  54. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Sky/AtmosphericScattering/AtmosphericScattering.cs.hlsl (1)
  55. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Sky/AtmosphericScattering/AtmosphericScattering.hlsl (59)
  56. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Sky/OpaqueAtmosphericScattering.shader (1)
  57. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Sky/SkyManager.cs (36)
  58. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Sky/SkyRenderingContext.cs (16)
  59. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Sky/VisualEnvironment.cs (14)
  60. Tests/GraphicsTests/RenderPipeline/HDRenderPipeline/Scenes/1xxx_Materials/1301_SubSurfaceScattering.unity (838)
  61. Tests/GraphicsTests/RenderPipeline/HDRenderPipeline/Scenes/1xxx_Materials/1301_SubSurfaceScattering/GroundLeaf_Transmission-None.mat (4)
  62. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Debug/DecalsDebug.cs (11)
  63. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Debug/DecalsDebug.cs.meta (11)
  64. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/Sky/AtmosphericScattering/VolumetricFogEditor.cs (34)
  65. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/Sky/AtmosphericScattering/VolumetricFogEditor.cs.meta (11)
  66. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Sky/AtmosphericScattering/VolumetricFog.cs (67)
  67. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Sky/AtmosphericScattering/VolumetricFog.cs.meta (11)

CHANGELOG.md (10)


and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html).
## [Unreleased]
### Added
- Planar Reflection Probes support roughness (Gaussian convolution of the captured probe)
- Screen Space Refraction projection model (Proxy raycasting, HiZ raymarching)
- Screen Space Refraction settings as volume component
### Changed
- Depth and color pyramids are properly computed and sampled when the camera renders inside a viewport of an RTHandle.
- Forced Planar Probe update modes to (Realtime, Every Update, Mirror Camera)
- Removed Planar Probe mirror plane position and normal fields in inspector, always display mirror plane and normal gizmos
- Screen Space Refraction proxy model uses the proxy of the first environment light (Reflection probe/Planar probe) or the sky
## [0.1.6] - 2018-xx-yy

ImageTemplates/HDRenderPipeline/Scenes/1xxx_Materials/1101_Unlit.unity.png (995)
File diff too large to display

ImageTemplates/HDRenderPipeline/Scenes/1xxx_Materials/1301_SubSurfaceScattering.unity.png (999)
File diff too large to display

ImageTemplates/HDRenderPipeline/Scenes/1xxx_Materials/1302_SSS_MaxRadius.unity.png (999)
File diff too large to display

ImageTemplates/HDRenderPipeline/Scenes/1xxx_Materials/1302_SSS_MaxRadius.unity.png.meta (3)


spriteMeshType: 1
alignment: 0
spritePivot: {x: 0.5, y: 0.5}
spritePixelsToUnits: 100
spriteGenerateFallbackPhysicsShape: 1
alphaUsage: 1
alphaIsTransparency: 0
spriteTessellationDetail: -1

ImageTemplates/HDRenderPipeline/Scenes/1xxx_Materials/1303_SSS_Pre-Post.unity.png (999)
File diff too large to display

ImageTemplates/HDRenderPipeline/Scenes/1xxx_Materials/1303_SSS_Pre-Post.unity.png.meta (3)


spriteMeshType: 1
alignment: 0
spritePivot: {x: 0.5, y: 0.5}
spritePixelsToUnits: 100
spriteGenerateFallbackPhysicsShape: 1
alphaUsage: 1
alphaIsTransparency: 0
spriteTessellationDetail: -1

ImageTemplates/HDRenderPipeline/Scenes/2xxx_Lighting/2101_GI_Metapass.unity.png (999)
File diff too large to display

ImageTemplates/HDRenderPipeline/Scenes/2xxx_Lighting/2102_GI_Emission.unity.png (998)
File diff too large to display

ImageTemplates/HDRenderPipeline/Scenes/2xxx_Lighting/2103_BakeMixed.unity.png (999)
File diff too large to display

ImageTemplates/HDRenderPipeline/Scenes/2xxx_Lighting/2401_Light_on_Tesselation.unity.png (998)
File diff too large to display

ImageTemplates/HDRenderPipeline/Scenes/9xxx_Other/9002_Deferred-and-Forward.unity.png (999)
File diff too large to display

ScriptableRenderPipeline/Core/CoreRP/ShaderLibrary/BSDF.hlsl (125)


real F_Schlick(real f0, real f90, real u)
{
real x = 1.0 - u;
real x2 = x * x;
real x5 = x * x2 * x2;
return (f90 - f0) * x5 + f0; // sub mul mul mul sub mad

real3 F_Schlick(real3 f0, real f90, real u)
{
real x = 1.0 - u;
real x2 = x * x;
real x5 = x * x2 * x2;
return f0 * (1.0 - x5) + (f90 * x5); // sub mul mul mul sub mul mad*3

// Does not handle TIR.
real F_Transm_Schlick(real f0, real f90, real u)
{
real x = 1.0 - u;
real x2 = x * x;
real x5 = x * x2 * x2;
return (1.0 - f90 * x5) - f0 * (1.0 - x5); // sub mul mul mul mad sub mad

// Does not handle TIR.
real3 F_Transm_Schlick(real3 f0, real f90, real u)
{
real x = 1.0 - u;
real x2 = x * x;
real x5 = x * x2 * x2;
return (1.0 - f90 * x5) - f0 * (1.0 - x5); // sub mul mul mul mad sub mad*3

// Ref: https://seblagarde.wordpress.com/2013/04/29/memo-on-fresnel-equations/
// Fresnel dieletric / dielectric
- real F_Fresnel(real ior, real u)
+ real F_FresnelDieletric(real ior, real u)
// Fresnel dieletric / conductor
// Note: etak2 = etak * etak (optimization for Artist Friendly Metallic Fresnel below)
// eta = eta_t / eta_i and etak = k_t / n_i
real3 F_FresnelConductor(real3 eta, real3 etak2, real cosTheta)
{
real cosTheta2 = cosTheta * cosTheta;
real sinTheta2 = 1.0 - cosTheta2;
real3 eta2 = eta * eta;
real3 t0 = eta2 - etak2 - sinTheta2;
real3 a2plusb2 = sqrt(t0 * t0 + 4.0 * eta2 * etak2);
real3 t1 = a2plusb2 + cosTheta2;
real3 a = sqrt(0.5 * (a2plusb2 + t0));
real3 t2 = 2.0 * a * cosTheta;
real3 Rs = (t1 - t2) / (t1 + t2);
real3 t3 = cosTheta2 * a2plusb2 + sinTheta2 * sinTheta2;
real3 t4 = t2 * sinTheta2;
real3 Rp = Rs * (t3 - t4) / (t3 + t4);
return 0.5 * (Rp + Rs);
}
// Conversion FO/IOR
// ior is a value between 1.0 and 3.0. 1.0 is air interface
real IorToFresnel0(real transmittedIor, real incidentIor = 1.0)
{
return Sq((transmittedIor - incidentIor) / (transmittedIor + incidentIor));
}
// Assume air interface for top
// Note: Don't handle the case fresnel0 == 1
real Fresnel0ToIor(real fresnel0)
{
real sqrtF0 = sqrt(fresnel0);
return (1.0 + sqrtF0) / (1.0 - sqrtF0);
}
// This function is a coarse approximation of computing fresnel0 for a different top than air (here clear coat of IOR 1.5) when we only have fresnel0 with air interface
// This function is equivalent to IorToFresnel0(Fresnel0ToIor(fresnel0), 1.5)
// mean
// real sqrtF0 = sqrt(fresnel0);
// return Sq(1.0 - 5.0 * sqrtF0) / Sq(5.0 - sqrtF0);
// Optimization: Fit of the function (3 mad) for range [0.04 (should return 0), 1 (should return 1)]
TEMPLATE_1_REAL(ConvertF0ForAirInterfaceToF0ForClearCoat15, fresnel0, return saturate(-0.0256868 + fresnel0 * (0.326846 + (0.978946 - 0.283835 * fresnel0) * fresnel0)))
// Artist Friendly Metallic Fresnel Ref: http://jcgt.org/published/0003/04/03/paper.pdf
real3 GetIorN(real3 f0, real3 edgeTint)
{
real3 sqrtF0 = sqrt(f0);
return lerp((1.0 - f0) / (1.0 + f0), (1.0 + sqrtF0) / (1.0 - sqrt(f0)), edgeTint);
}
real3 getIorK2(real3 f0, real3 n)
{
real3 nf0 = Sq(n + 1.0) * f0 - Sq(f0 - 1.0);
return nf0 / (1.0 - f0);
}
//-----------------------------------------------------------------------------
// Specular BRDF
//-----------------------------------------------------------------------------

real a2 = Sq(roughness);
real s = (NdotH * a2 - NdotH) * NdotH + 1.0;
return a2 / (s * s);
}

real DV_SmithJointGGX(real NdotH, real NdotL, real NdotV, real roughness, real partLambdaV)
{
real a2 = Sq(roughness);
real s = (NdotH * a2 - NdotH) * NdotH + 1.0;
real lambdaV = NdotL * partLambdaV;
real lambdaL = NdotV * sqrt((-NdotL * a2 + NdotL) * NdotL + a2);

// Inline D_GGXAniso() * V_SmithJointGGXAniso() together for better code generation.
real DV_SmithJointGGXAniso(real TdotH, real BdotH, real NdotH, real NdotV,
                           real TdotL, real BdotL, real NdotL,
                           real roughnessT, real roughnessB, real partLambdaV)
{
real a2 = roughnessT * roughnessB;
real3 v = real3(roughnessB * TdotH, roughnessT * BdotH, a2 * NdotH);

}
real DV_SmithJointGGXAniso(real TdotH, real BdotH, real NdotH,
                           real TdotV, real BdotV, real NdotV,
                           real TdotL, real BdotL, real NdotL,
                           real roughnessT, real roughnessB)
    roughnessT, roughnessB, partLambdaV);
}
//-----------------------------------------------------------------------------

real fd90 = 0.5 + (perceptualRoughness + perceptualRoughness * LdotV);
// Two schlick fresnel term
real lightScatter = F_Schlick(1.0, fd90, NdotL);
real viewScatter = F_Schlick(1.0, fd90, NdotV);
// Normalize the BRDF for polar view angles of up to (Pi/4).
// We use the worst case of (roughness = albedo = 1), and, for each view angle,

// Ref: Diffuse Lighting for GGX + Smith Microsurfaces, p. 113.
real3 DiffuseGGXNoPI(real3 albedo, real NdotV, real NdotL, real NdotH, real LdotV, real roughness)
{
real facing = 0.5 + 0.5 * LdotV; // (LdotH)^2
real rough = facing * (0.9 - 0.4 * facing) * (0.5 / NdotH + 1);
real smooth = transmitL * transmitV * 1.05; // Normalize F_t over the hemisphere
real single = lerp(smooth, rough, roughness); // Rescaled by PI
real multiple = roughness * (0.1159 * PI); // Rescaled by PI
return single + albedo * multiple;
}

// Note that we could save 2 cycles by inlining the multiplication by INV_PI.
return INV_PI * DiffuseGGXNoPI(albedo, NdotV, NdotL, NdotH, LdotV, roughness);
}
//-----------------------------------------------------------------------------
// Conversion FO/IOR
//-----------------------------------------------------------------------------
// ior is a value between 1.0 and 3.0. 1.0 is air interface
real IorToFresnel0(real transmittedIor, real incidentIor = 1.0)
{
return Sq((transmittedIor - incidentIor) / (transmittedIor + incidentIor));
}
// Assume air interface for top
// Note: Don't handle the case fresnel0 == 1
real Fresnel0ToIor(real fresnel0)
{
real sqrtF0 = sqrt(fresnel0);
return (1.0 + sqrtF0) / (1.0 - sqrtF0);
}
// This function is a coarse approximation of computing fresnel0 for a different top than air (here clear coat of IOR 1.5) when we only have fresnel0 with air interface
// This function is equivalent to IorToFresnel0(Fresnel0ToIor(fresnel0), 1.5)
// mean
// real sqrtF0 = sqrt(fresnel0);
// return Sq(1.0 - 5.0 * sqrtF0) / Sq(5.0 - sqrtF0);
// Optimization: Fit of the function (3 mad) for range [0.04 (should return 0), 1 (should return 1)]
TEMPLATE_1_REAL(ConvertF0ForAirInterfaceToF0ForClearCoat15, fresnel0, return saturate(-0.0256868 + fresnel0 * (0.326846 + (0.978946 - 0.283835 * fresnel0) * fresnel0)))
//-----------------------------------------------------------------------------
// Iridescence
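The F0/IOR conversion above is easy to sanity-check numerically. A minimal C# sketch of the two conversions, restating the formulas quoted in the diff (the 0.04 result for IOR 1.5 is the classic dielectric reflectance); this is an illustration, not part of the change:

using System;

static class FresnelConversions
{
    // F0 = ((n_t - n_i) / (n_t + n_i))^2, as in IorToFresnel0 above.
    static double IorToFresnel0(double transmittedIor, double incidentIor = 1.0)
    {
        double r = (transmittedIor - incidentIor) / (transmittedIor + incidentIor);
        return r * r;
    }

    // Inverse mapping, assuming an air interface; undefined for fresnel0 == 1.
    static double Fresnel0ToIor(double fresnel0)
    {
        double s = Math.Sqrt(fresnel0);
        return (1.0 + s) / (1.0 - s);
    }

    static void Main()
    {
        double f0 = IorToFresnel0(1.5);       // 0.04 for a typical dielectric
        Console.WriteLine(f0);                // 0.04
        Console.WriteLine(Fresnel0ToIor(f0)); // 1.5, round trip
    }
}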

ScriptableRenderPipeline/Core/CoreRP/ShaderLibrary/GeometricTools.hlsl (7)


// Intersection functions
//-----------------------------------------------------------------------------
// This implementation does not attempt to explicitly handle NaNs.
// Ref: https://tavianator.com/fast-branchless-raybounding-box-intersections-part-2-nans/
- float3 rayDirInv = rcp(rayDirection); // Could be precomputed
+ // Could be precomputed. Clamp to avoid INF. clamp() is a single ALU on GCN.
+ // rcp(FLT_EPS) = 16,777,216, which is large enough for our purposes,
+ // yet doesn't cause a lot of numerical issues associated with FLT_MAX.
+ float3 rayDirInv = clamp(rcp(rayDirection), -rcp(FLT_EPS), rcp(FLT_EPS));
// Perform ray-slab intersection (component-wise).
float3 t0 = boxMin * rayDirInv - (rayOrigin * rayDirInv);
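The clamp above can be reproduced outside HLSL. A small C# sketch of the same idea, taking FLT_EPS as 2^-24 so that rcp(FLT_EPS) = 16,777,216 matches the comment (helper name is ours, for illustration):

using UnityEngine;

static class RayUtils
{
    const float RcpFltEps = 16777216.0f; // rcp(FLT_EPS), with FLT_EPS = 2^-24

    // Component-wise reciprocal clamped to +/- 2^24, so a zero ray-direction
    // component yields a large finite value instead of INF.
    public static Vector3 SafeRayDirInv(Vector3 rayDirection)
    {
        return new Vector3(
            Mathf.Clamp(1.0f / rayDirection.x, -RcpFltEps, RcpFltEps),
            Mathf.Clamp(1.0f / rayDirection.y, -RcpFltEps, RcpFltEps),
            Mathf.Clamp(1.0f / rayDirection.z, -RcpFltEps, RcpFltEps));
    }
}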

ScriptableRenderPipeline/Core/CoreRP/ShaderLibrary/ImageBasedLighting.hlsl (7)


out real NdotL,
out real weightOverPdf)
{
#if 0
#else
real3 N = localToWorld[2];
L = SampleHemisphereCosine(u.x, u.y, N);
NdotL = saturate(dot(N, L));
#endif
// Importance sampling weight for each sample
// pdf = N.L / PI

ScriptableRenderPipeline/Core/CoreRP/ShaderLibrary/Sampling/Sampling.hlsl (22)


return r * real2(cosPhi, sinPhi);
}
real3 SampleSphereUniform(real u1, real u2)
{
real phi = TWO_PI * u2;
real cosTheta = 1.0 - 2.0 * u1;
return SphericalToCartesian(phi, cosTheta);
}
// Performs cosine-weighted sampling of the hemisphere.
// Ref: PBRT v3, p. 780.
real3 SampleHemisphereCosine(real u1, real u2)

return localL;
}
- real3 SampleHemisphereUniform(real u1, real u2)
+ // Cosine-weighted sampling without the tangent frame.
+ // Ref: http://www.amietia.com/lambertnotangent.html
+ real3 SampleHemisphereCosine(real u1, real u2, real3 normal)
- real phi = TWO_PI * u2;
- real cosTheta = 1.0 - u1;
- return SphericalToCartesian(phi, cosTheta);
+ real3 pointOnSphere = SampleSphereUniform(u1, u2);
+ return normalize(normal + pointOnSphere);
- real3 SampleSphereUniform(real u1, real u2)
+ real3 SampleHemisphereUniform(real u1, real u2)
- real cosTheta = 1.0 - 2.0 * u1;
+ real cosTheta = 1.0 - u1;
return SphericalToCartesian(phi, cosTheta);
}
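The tangent-frame-free trick added above (normalize the normal plus a uniform point on the sphere) translates directly. A C# sketch under the same math, for illustration only:

using UnityEngine;

static class CosineSampling
{
    // Uniform point on the unit sphere from two uniform random numbers,
    // mirroring SampleSphereUniform above.
    static Vector3 SampleSphereUniform(float u1, float u2)
    {
        float phi = 2.0f * Mathf.PI * u2;
        float cosTheta = 1.0f - 2.0f * u1;
        float sinTheta = Mathf.Sqrt(Mathf.Max(0.0f, 1.0f - cosTheta * cosTheta));
        return new Vector3(sinTheta * Mathf.Cos(phi), sinTheta * Mathf.Sin(phi), cosTheta);
    }

    // Cosine-weighted hemisphere sample around 'normal' without building a
    // tangent frame: offset the unit sphere by the normal and renormalize.
    public static Vector3 SampleHemisphereCosine(float u1, float u2, Vector3 normal)
    {
        return (normal + SampleSphereUniform(u1, u2)).normalized;
    }
}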

ScriptableRenderPipeline/Core/CoreRP/ShaderLibrary/SpaceFillingCurves.hlsl (6)


return x;
}
// "Insert" two 0 bits after each of the 10 low bits of x/
// "Insert" two 0 bits after each of the 10 low bits of x.
// Ref: https://fgiesen.wordpress.com/2009/12/13/decoding-morton-codes/
uint Part1By2(uint x)
{

return x;
}
// Inverse of Part1By1 - "delete" all odd-indexed bits.
// Ref: https://fgiesen.wordpress.com/2009/12/13/decoding-morton-codes/
uint Compact1By1(uint x)
{

return x;
}
// Inverse of Part1By2 - "delete" all bits not at positions divisible by 3.
// Ref: https://fgiesen.wordpress.com/2009/12/13/decoding-morton-codes/
uint Compact1By2(uint x)
{

ScriptableRenderPipeline/Core/CoreRP/ShaderLibrary/VolumeRendering.hlsl (11)


// Samples the interval of homogeneous participating medium using the closed-form tracking approach
// (proportionally to the transmittance).
// Returns the offset from the start of the interval and the weight = (transmittance / pdf).
// Ref: Production Volume Rendering, 3.6.1.
// Ref: Monte Carlo Methods for Volumetric Light Transport Simulation, p. 5.
- // pdf = extinction * exp(-extinction * t) / (1 - exp(-intervalLength * extinction))
+ // pdf = extinction * exp(extinction * (intervalLength - t)) / (exp(intervalLength * extinction) - 1)
// weight = (1 - exp(-extinction * intervalLength)) / extinction
- weight = x * rcp(extinction);
- offset = -log(1 - rndVal * x) * rcp(extinction);
+ real c = rcp(extinction);
+ weight = x * c;
+ offset = -log(1 - rndVal * x) * c;
}
// Implements equiangular light sampling.
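For reference, a C# restatement of the closed-form interval sampling above. A sketch only: 'x' is taken to be 1 - exp(-extinction * intervalLength), consistent with the weight comment in the diff.

using System;

static class ClosedFormTracking
{
    // Samples a distance inside [0, intervalLength] proportionally to the
    // transmittance exp(-extinction * t); returns the offset and the
    // weight = transmittance / pdf, matching the HLSL above.
    public static void Sample(float extinction, float intervalLength, float rndVal,
                              out float offset, out float weight)
    {
        float x = 1.0f - (float)Math.Exp(-extinction * intervalLength);
        float c = 1.0f / extinction;
        weight = x * c;
        offset = -(float)Math.Log(1.0f - rndVal * x) * c;
    }
}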

ScriptableRenderPipeline/Core/CoreRP/Textures/RTHandleSystem.cs (15)


public partial class RTHandleSystem : IDisposable
{
public enum ResizeMode
{
}
internal enum RTCategory
{

m_MaxWidths[i] = 1;
m_MaxHeights[i] = 1;
}
}
}
public void Dispose()
{

{
Assert.AreEqual(this, rth.m_Owner);
rth.Release();
}
}
}
public void SetReferenceSize(int width, int height, bool msaa, MSAASamples msaaSamples)

newRT.m_EnableRandomWrite = enableRandomWrite;
newRT.m_EnableMSAA = enableMSAA;
newRT.m_Name = name;
newRT.referenceSize = new Vector2Int(width, height);
return newRT;
}

memoryless,
name
);
rth.referenceSize = new Vector2Int(width, height);
rth.scaleFactor = scaleFactor;
return rth;

memoryless,
name
);
rth.referenceSize = new Vector2Int(width, height);
rth.scaleFunc = scaleFunc;
return rth;
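The two allocation paths above differ in how the render target tracks the reference size: a constant scale factor or a scale function. A hedged usage sketch, with parameter names assumed from the overloads in this file and namespaces as of this era of SRP Core:

using UnityEngine;
using UnityEngine.Experimental.Rendering;

static class RTHandleExample
{
    public static void AllocateBuffers()
    {
        // Scaled by a constant factor: always half of the reference size.
        var halfRes = RTHandles.Alloc(new Vector2(0.5f, 0.5f), name: "HalfResColor");

        // Scaled by a function of the reference size, re-evaluated on resize.
        var custom = RTHandles.Alloc(
            size => new Vector2Int(Mathf.Max(1, size.x / 2), Mathf.Max(1, size.y / 2)),
            name: "CustomScaledBuffer");
    }
}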

ScriptableRenderPipeline/Core/CoreRP/Utilities/CoreUtils.cs (30)


{
string msg = "AR/VR devices are not supported, no rendering will occur";
DisplayUnsupportedMessage(msg);
}
}
// Returns 'true' if "Animated Materials" are enabled for the view associated with the given camera.
public static bool AreAnimatedMaterialsEnabled(Camera camera)

}
}
}
// TODO: how to handle reflection views? We don't know the parent window they are being rendered into,
// so we don't know whether we can animate them...
//

#endif
return animateMaterials;
}
public static bool IsSceneViewFogEnabled(Camera camera)
{
bool fogEnable = true;
#if UNITY_EDITOR
fogEnable = Application.isPlaying;
if (camera.cameraType == CameraType.SceneView)
{
fogEnable = false;
// Determine whether the "Animated Materials" checkbox is checked for the current view.
foreach (UnityEditor.SceneView sv in Resources.FindObjectsOfTypeAll(typeof(UnityEditor.SceneView)))
{
if (sv.camera == camera && sv.sceneViewState.showFog)
{
fogEnable = true;
break;
}
}
}
#endif
return fogEnable;
}
}
}

ScriptableRenderPipeline/Core/CoreRP/Volume/VolumeComponent.cs (21)


parameter.OnDisable();
}
// You can override this to do your own blending. Either loop through the `parameters` list
// or reference direct fields (you'll need to cast `state` to your custom type and don't
// forget to use `SetValue` on parameters, do not assign directly to the state object - and
// of course you'll need to check for the `overrideState` manually).
public virtual void Override(VolumeComponent state, float interpFactor)
{
int count = parameters.Count;
for (int i = 0; i < count; i++)
{
var stateParam = state.parameters[i];
var toParam = parameters[i];
// Keep track of the override state for debugging purpose
stateParam.overrideState = toParam.overrideState;
if (toParam.overrideState)
stateParam.Interp(stateParam, toParam, interpFactor);
}
}
public void SetAllOverridesTo(bool state)
{
SetAllOverridesTo(parameters, state);
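A minimal sketch of the custom blending described in the comment above: a hypothetical component (type and field names are illustrative only) that mirrors the override state and only interpolates overridden parameters, following the same call shape as the base Override:

using UnityEngine.Experimental.Rendering;

public sealed class MyFogComponent : VolumeComponent
{
    public FloatParameter density = new FloatParameter(0f);

    // Custom blending: keep track of the override state for debugging, as the
    // base class does, and interpolate only when the parameter is overridden.
    public override void Override(VolumeComponent state, float interpFactor)
    {
        var target = (MyFogComponent)state;
        target.density.overrideState = density.overrideState;
        if (density.overrideState)
            target.density.Interp(target.density, density, interpFactor);
    }
}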

ScriptableRenderPipeline/Core/CoreRP/Volume/VolumeManager.cs (16)


if (!component.active)
continue;
- var target = stack.GetComponent(component.GetType());
- int count = component.parameters.Count;
- for (int i = 0; i < count; i++)
- {
-     var fromParam = target.parameters[i];
-     var toParam = component.parameters[i];
-     // Keep track of the override state for debugging purpose
-     fromParam.overrideState = toParam.overrideState;
-     if (toParam.overrideState)
-         fromParam.Interp(fromParam, toParam, interpFactor);
- }
+ var state = stack.GetComponent(component.GetType());
+ component.Override(state, interpFactor);
}
}

ScriptableRenderPipeline/HDRenderPipeline/CHANGELOG.md (40)


The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/)
and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html).
## [Unreleased]
### Added
### Changed
### Changelog starting
### New features and functionality
- Started Changelog
### Bug fixes
- Fix fog flags in scene view not being taken into account
- Fix sky disappearing in preview windows after loading a new level
- Fix numerical issues in IntersectRayAABB().
- Fix alpha blending of volumetric lighting with transparent objects.
- Fix the near plane of the V-Buffer causing out-of-bounds look-ups in the clustered data structure.
- Depth and color pyramids are properly computed and sampled when the camera renders inside a viewport of an RTHandle.
### Changed, Removals and deprecations
- EnableShadowMask in FrameSettings (but shadowMaskSupport is still disabled by default)
- Forced Planar Probe update modes to (Realtime, Every Update, Mirror Camera)
- Removed Planar Probe mirror plane position and normal fields in inspector, always display mirror plane and normal gizmos
- Screen Space Refraction proxy model uses the proxy of the first environment light (Reflection probe/Planar probe) or the sky
- Moved RTHandle static methods to RTHandles
- Renamed RTHandle to RTHandleSystem.RTHandle
### Improvements
- Port Global Density Volumes to the Interpolation Volume System.
- Optimize ImportanceSampleLambert() to not require the tangent frame.
- Generalize SampleVBuffer() to handle different sampling and reconstruction methods.
- Improve the quality of volumetric lighting reprojection.
- Optimize Morton Order code in the Subsurface Scattering pass.
- Planar Reflection Probes support roughness (Gaussian convolution of the captured probe)
- Only store decal textures in the atlas if the decal is visible; debounce the out-of-memory decal atlas warning
### Features
- Screen Space Refraction projection model (Proxy raycasting, HiZ raymarching)
- Screen Space Refraction settings as volume component
- Added buffered frame history per camera

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Camera/HDCamera.cs (68)


public Matrix4x4 nonJitteredProjMatrix;
public Vector4 worldSpaceCameraPos;
public float detViewMatrix;
public Vector4 screenSize;
public Frustum frustum;
public Camera camera;
public uint taaFrameIndex;
public Vector2 taaFrameRotation;
public Vector4 zBufferParams;
public Vector4 unity_OrthoParams;
public Vector4 projectionParams;

HDAdditionalCameraData m_AdditionalCameraData;
// Fallback used when the HDAdditionalCameraData is missing
- BufferedRTHandleSystem m_HistoryRTSystem = null;
- BufferedRTHandleSystem historyRTSystem
- {
-     get
-     {
-         if (m_HistoryRTSystem == null)
-             m_HistoryRTSystem = new BufferedRTHandleSystem();
-         return m_HistoryRTSystem;
-     }
- }
+ BufferedRTHandleSystem m_HistoryRTSystem = new BufferedRTHandleSystem();
public HDCamera(Camera cam)
{

// Unfortunately sometime (like in the HDCameraEditor) HDUtils.hdrpSettings can be null because of scripts that change the current pipeline...
m_msaaSamples = HDUtils.hdrpSettings != null ? HDUtils.hdrpSettings.msaaSampleCount : MSAASamples.None;
RTHandles.SetReferenceSize(m_ActualWidth, m_ActualHeight, frameSettings.enableMSAA, m_msaaSamples);
- if (m_HistoryRTSystem != null)
- {
-     m_HistoryRTSystem.SetReferenceSize(m_ActualWidth, m_ActualHeight, frameSettings.enableMSAA, m_msaaSamples);
-     m_HistoryRTSystem.Swap();
- }
+ m_HistoryRTSystem.SetReferenceSize(m_ActualWidth, m_ActualHeight, frameSettings.enableMSAA, m_msaaSamples);
+ m_HistoryRTSystem.Swap();
int maxWidth = RTHandles.maxWidth;
int maxHeight = RTHandles.maxHeight;

screenSize = new Vector4(screenWidth, screenHeight, 1.0f / screenWidth, 1.0f / screenHeight);
screenParams = new Vector4(screenSize.x, screenSize.y, 1 + screenSize.z, 1 + screenSize.w);
}

public static void ClearAll()
{
int frameCheck = Time.frameCount - 1;
- cam.Value.historyRTSystem.ReleaseAll();
+ cam.Value.ReleaseHistoryBuffer();
s_Cameras.Clear();
s_Cleanup.Clear();

foreach (var kvp in s_Cameras)
{
- if (kvp.Value.m_LastFrameActive != frameCheck)
+ if (kvp.Value.m_LastFrameActive < frameCheck)
s_Cleanup.Add(kvp.Key);
}

// Set up UnityPerView CBuffer.
public void SetupGlobalParams(CommandBuffer cmd, float time, float lastTime)
{
cmd.SetGlobalMatrix(HDShaderIDs._ViewMatrix, viewMatrix);
cmd.SetGlobalMatrix(HDShaderIDs._InvViewMatrix, viewMatrix.inverse);
cmd.SetGlobalMatrix(HDShaderIDs._ProjMatrix, projMatrix);
cmd.SetGlobalMatrix(HDShaderIDs._InvProjMatrix, projMatrix.inverse);
cmd.SetGlobalMatrix(HDShaderIDs._ViewProjMatrix, viewProjMatrix);
cmd.SetGlobalMatrix(HDShaderIDs._InvViewProjMatrix, viewProjMatrix.inverse);
cmd.SetGlobalVector(HDShaderIDs._ScreenSize, screenSize);
cmd.SetGlobalVector(HDShaderIDs._ScreenToTargetScale, scaleBias);
cmd.SetGlobalVectorArray(HDShaderIDs._FrustumPlanes, frustumPlaneEquations);
// Time is also a part of the UnityPerView CBuffer.
// Different views can have different values of the "Animated Materials" setting.

public RTHandleSystem.RTHandle GetPreviousFrameRT(int id)
{
- return historyRTSystem.GetFrameRT(id, 1);
+ return m_HistoryRTSystem.GetFrameRT(id, 1);
- return historyRTSystem.GetFrameRT(id, 0);
+ return m_HistoryRTSystem.GetFrameRT(id, 0);
- historyRTSystem.AllocBuffer(id, (rts, i) => allocator(camera.name, i, rts), 2);
- return historyRTSystem.GetFrameRT(id, 0);
+ m_HistoryRTSystem.AllocBuffer(id, (rts, i) => allocator(camera.name, i, rts), 2);
+ return m_HistoryRTSystem.GetFrameRT(id, 0);
}
void ReleaseHistoryBuffer()
{
m_HistoryRTSystem.ReleaseAll();
}
}
}
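Putting the per-camera history API above together: a hedged sketch of allocating and reading a buffered history target, assuming an AllocHistoryFrameRT entry point wrapping the AllocBuffer call shown, plus a GetCurrentFrameRT counterpart (exact signatures may differ in this revision):

using UnityEngine;
using UnityEngine.Experimental.Rendering;
using UnityEngine.Experimental.Rendering.HDPipeline;

static class HistoryExample
{
    // 'hdCamera' and 'historyId' are assumed to come from the pipeline.
    public static void UseHistory(HDCamera hdCamera, int historyId)
    {
        // Allocate the double-buffered history; the allocator receives the
        // camera name and frame index, as in the lambda shown above.
        hdCamera.AllocHistoryFrameRT(historyId,
            (string name, int frame, RTHandleSystem rts) =>
                rts.Alloc(Vector2.one, name: string.Format("{0}_History_{1}", name, frame)));

        var current  = hdCamera.GetCurrentFrameRT(historyId);  // frame N
        var previous = hdCamera.GetPreviousFrameRT(historyId); // frame N - 1
    }
}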

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Debug/DebugDisplay.cs (29)


public static string k_PanelRendering = "Rendering";
public static string k_PanelScreenSpaceTracing = "Screen Space Tracing";
public static string k_PanelDecals = "Decals";
//static readonly string[] k_HiZIntersectionKind = { "None", "Depth", "Cell" };

DebugUI.Widget[] m_DebugRenderingItems;
DebugUI.Widget[] m_DebugScreenSpaceTracingItems;
DebugUI.Widget[] m_DebugDecalsItems;
public float debugOverlayRatio = 0.33f;

public LightingDebugSettings lightingDebugSettings = new LightingDebugSettings();
public MipMapDebugSettings mipMapDebugSettings = new MipMapDebugSettings();
public ColorPickerDebugSettings colorPickerDebugSettings = new ColorPickerDebugSettings();
public DecalsDebugSettings decalsDebugSettings = new DecalsDebugSettings();
public static GUIContent[] lightingFullScreenDebugStrings = null;
public static int[] lightingFullScreenDebugValues = null;

public bool IsDebugDisplayEnabled()
{
return materialDebugSettings.IsDebugDisplayEnabled() || lightingDebugSettings.IsDebugDisplayEnabled() || mipMapDebugSettings.IsDebugDisplayEnabled() || IsDebugFullScreenEnabled();
}
public bool IsDebugDisplayRemovePostprocess()
{
// We want to keep post process when only the override modes are enabled and none of the others
return materialDebugSettings.IsDebugDisplayEnabled() || lightingDebugSettings.IsDebugDisplayRemovePostprocess() || mipMapDebugSettings.IsDebugDisplayEnabled() || IsDebugFullScreenEnabled();
}
public bool IsDebugMaterialDisplayEnabled()

{
UnregisterDebugItems(k_PanelScreenSpaceTracing, m_DebugScreenSpaceTracingItems);
RegisterScreenSpaceTracingDebug();
}
void RefreshDecalsDebug<T>(DebugUI.Field<T> field, T value)
{
UnregisterDebugItems(k_PanelDecals, m_DebugDecalsItems);
RegisterDecalsDebug();
}
public void RegisterLightingDebug()

panel.children.Add(m_DebugRenderingItems);
}
public void RegisterDecalsDebug()
{
m_DebugDecalsItems = new DebugUI.Widget[]
{
new DebugUI.BoolField { displayName = "Display atlas", getter = () => decalsDebugSettings.m_DisplayAtlas, setter = value => decalsDebugSettings.m_DisplayAtlas = value},
new DebugUI.UIntField { displayName = "Mip Level", getter = () => decalsDebugSettings.m_MipLevel, setter = value => decalsDebugSettings.m_MipLevel = value, min = () => 0u, max = () => (uint)(RenderPipelineManager.currentPipeline as HDRenderPipeline).GetDecalAtlasMipCount() }
};
var panel = DebugManager.instance.GetPanel(k_PanelDecals, true);
panel.children.Add(m_DebugDecalsItems);
}
RegisterDecalsDebug();
RegisterDisplayStatsDebug();
RegisterMaterialDebug();
RegisterLightingDebug();

public void UnregisterDebug()
{
UnregisterDebugItems(k_PanelDecals, m_DebugDecalsItems);
UnregisterDebugItems(k_PanelDisplayStats, m_DebugDisplayStatsItems);
UnregisterDebugItems(k_PanelMaterials, m_DebugMaterialItems);
UnregisterDebugItems(k_PanelLighting, m_DebugLightingItems);

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Debug/LightingDebug.cs (7)


{
public bool IsDebugDisplayEnabled()
{
- return debugLightingMode != DebugLightingMode.None || overrideSmoothness || overrideAlbedo || overrideNormal;
+ return debugLightingMode != DebugLightingMode.None || overrideSmoothness || overrideAlbedo || overrideNormal || overrideSpecularColor;
}
public bool IsDebugDisplayRemovePostprocess()
{
return debugLightingMode != DebugLightingMode.None;
}
public DebugLightingMode debugLightingMode = DebugLightingMode.None;

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Decal/DecalSystem.cs (26)


private Texture2DAtlas m_Atlas = null;
public bool m_AllocationSuccess = true;
public bool m_PrevAllocationSuccess = true;
public Texture2DAtlas Atlas
{

{
if (m_NumResults == 0)
return;
// only add if anything in this decal set is visible.
AddToTextureList(ref instance.m_TextureList);
int instanceCount = 0;
int batchCount = 0;
Matrix4x4[] decalToWorldBatch = null;

textureList.Add(m_Mask);
}
}
public void RenderIntoDBuffer(CommandBuffer cmd)
{

// updates textures, texture atlas indices and blend value
public void UpdateCachedMaterialData()
{
//instance.m_AllocationSuccess = true;
- pair.Value.InitializeMaterialValues();
- pair.Value.AddToTextureList(ref m_TextureList);
+ pair.Value.InitializeMaterialValues();
}
}

AddTexture(cmd, textureScaleBias);
}
- if(!m_AllocationSuccess) // still failed to allocate, decal atlas size needs to increase
+ if(!m_AllocationSuccess && m_PrevAllocationSuccess) // still failed to allocate, decal atlas size needs to increase, debounce so that we don't spam the console with warnings
m_PrevAllocationSuccess = m_AllocationSuccess;
}
public void CreateDrawData()

// set to null so that they get recreated
m_DecalMesh = null;
m_Atlas = null;
}
public void RenderDebugOverlay(HDCamera hdCamera, CommandBuffer cmd, DebugDisplaySettings debugDisplaySettings, ref float x, ref float y, float overlaySize, float width)
{
if(debugDisplaySettings.decalsDebugSettings.m_DisplayAtlas)
{
using (new ProfilingSample(cmd, "Display Decal Atlas", CustomSamplerId.DisplayDebugDecalsAtlas.GetSampler()))
{
HDUtils.BlitQuad(cmd, Atlas.AtlasTexture, new Vector4(1,1,0,0), new Vector4(width / hdCamera.actualWidth, overlaySize / hdCamera.actualHeight, x / hdCamera.actualWidth, y / hdCamera.actualHeight), (int)debugDisplaySettings.decalsDebugSettings.m_MipLevel, true);
HDUtils.NextOverlayCoord(ref x, ref y, overlaySize, overlaySize, hdCamera.actualWidth);
}
}
}
}
}
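The debounce added in UpdateTextureAtlas above is a small two-state pattern: warn only on the frame where allocation first fails. A generic C# sketch of the same idea (names hypothetical, for illustration):

using UnityEngine;

class AllocationWarningDebouncer
{
    bool m_PrevSuccess = true;

    // Logs one warning per failure streak instead of one per frame.
    public void Report(bool success, string message)
    {
        if (!success && m_PrevSuccess)
            Debug.LogWarning(message);
        m_PrevSuccess = success;
    }
}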

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/Material/StackLit/StackLitUI.cs (278)


protected static class Styles
{
public static string InputsText = "Inputs";
public static GUIContent doubleSidedNormalModeText = new GUIContent("Normal mode", "This will modify the normal based on the selected mode. Mirror: Mirror the normal with vertex normal plane, Flip: Flip the normal");
// Scalar scale factors for: metallic and the two lobe perceptual smoothness.
public static GUIContent metallicText = new GUIContent("Metallic", "Metallic scale factor");
public static GUIContent smoothnessAText = new GUIContent("Primary Lobe Smoothness", "Primary lobe smoothness scale factor");
public static GUIContent smoothnessBText = new GUIContent("Secondary Lobe Smoothness", "Secondary lobe smoothness scale factor");
public static GUIContent lobeMixText = new GUIContent("Lobe Mixing", "Lobe mixing factor");
public static GUIContent smoothnessARemappingText = new GUIContent("Primary Lobe Smoothness Remapping", "Primary lobe smoothness remapping");
public static GUIContent smoothnessBRemappingText = new GUIContent("Secondary Lobe Smoothness Remapping", "Secondary lobe smoothness remapping");
public static GUIContent maskMapASText = new GUIContent("Primary mask map - M(R), AO(G), D(B), S1(A)", "Primary mask map");
public static GUIContent maskMapBSText = new GUIContent("Secondary mask Map - (R), (G), (B), S2(A)", "Secondary mask map");
public static GUIContent normalMapText = new GUIContent("Normal Map", "Normal Map (BC7/BC5/DXT5(nm))");
public static GUIContent UVBaseMappingText = new GUIContent("UV mapping usage", "");
// Emissive
public static string emissiveLabelText = "Emissive Inputs";
public enum DoubleSidedNormalMode
{
Flip,
Mirror,
None
}
public enum UVBaseMapping
{
UV0,
UV1,
UV2,
UV3,
Planar,
Triplanar
}
protected MaterialProperty doubleSidedNormalMode = null;
protected const string kDoubleSidedNormalMode = "_DoubleSidedNormalMode";
// Example UV mapping mask, TODO: could have for multiple maps, and channel mask for scalars
protected MaterialProperty UVBase = null;
protected const string kUVBase = "_UVBase";
protected MaterialProperty UVMappingMask = null;
protected const string kUVMappingMask = "_UVMappingMask"; // hidden, see enum material property drawer in .shader
protected MaterialProperty baseColor = null;
protected const string kBaseColor = "_BaseColor";

protected const string kMetallic = "_Metallic";
protected MaterialProperty metallic = null;
// Primary lobe smoothness
protected MaterialProperty smoothnessA = null;
protected const string kSmoothnessA = "_SmoothnessA";
protected MaterialProperty smoothnessARemapMin = null;
protected const string kSmoothnessARemapMin = "_SmoothnessARemapMin";
protected MaterialProperty smoothnessARemapMax = null;
protected const string kSmoothnessARemapMax = "_SmoothnessARemapMax";
protected const string klobeMix = "_LobeMix";
protected MaterialProperty lobeMix = null;
// Secondary lobe smoothness
protected MaterialProperty smoothnessB = null;
protected const string kSmoothnessB = "_SmoothnessB";
protected MaterialProperty smoothnessBRemapMin = null;
protected const string kSmoothnessBRemapMin = "_SmoothnessBRemapMin";
protected MaterialProperty smoothnessBRemapMax = null;
protected const string kSmoothnessBRemapMax = "_SmoothnessBRemapMax";
// Two mask maps for the two smoothnesses
protected MaterialProperty maskMapA = null;
protected const string kMaskMapA = "_MaskMapA";
protected MaterialProperty maskMapB = null;
protected const string kMaskMapB = "_MaskMapB";
protected MaterialProperty normalScale = null;
protected const string kNormalScale = "_NormalScale";
protected MaterialProperty normalMap = null;
protected const string kNormalMap = "_NormalMap";
protected MaterialProperty emissiveColor = null;
protected const string kEmissiveColor = "_EmissiveColor";
protected MaterialProperty emissiveColorMap = null;

protected MaterialProperty albedoAffectEmissive = null;
protected const string kAlbedoAffectEmissive = "_AlbedoAffectEmissive";
protected override void FindBaseMaterialProperties(MaterialProperty[] props)
{
base.FindBaseMaterialProperties(props);
doubleSidedNormalMode = FindProperty(kDoubleSidedNormalMode, props);
}
UVBase = FindProperty(kUVBase, props);
UVMappingMask = FindProperty(kUVMappingMask, props);
metallic = FindProperty(kMetallic, props);
smoothnessA = FindProperty(kSmoothnessA, props);
smoothnessARemapMin = FindProperty(kSmoothnessARemapMin, props);
smoothnessARemapMax = FindProperty(kSmoothnessARemapMax, props);
smoothnessB = FindProperty(kSmoothnessB, props);
smoothnessBRemapMin = FindProperty(kSmoothnessBRemapMin, props);
smoothnessBRemapMax = FindProperty(kSmoothnessBRemapMax, props);
lobeMix = FindProperty(klobeMix, props);
maskMapA = FindProperty(kMaskMapA, props);
maskMapB = FindProperty(kMaskMapB, props);
normalMap = FindProperty(kNormalMap, props);
normalScale = FindProperty(kNormalScale, props);
emissiveColor = FindProperty(kEmissiveColor, props);
emissiveColorMap = FindProperty(kEmissiveColorMap, props);
emissiveIntensity = FindProperty(kEmissiveIntensity, props);

protected override void BaseMaterialPropertiesGUI()
{
base.BaseMaterialPropertiesGUI();
EditorGUI.indentLevel++;
// This follow double sided option, see BaseUnlitUI.BaseMaterialPropertiesGUI()
// Don't put anything between base.BaseMaterialPropertiesGUI(); above and this:
if (doubleSidedEnable.floatValue > 0.0f)
{
EditorGUI.indentLevel++;
m_MaterialEditor.ShaderProperty(doubleSidedNormalMode, Styles.doubleSidedNormalModeText);
EditorGUI.indentLevel--;
}
//TODO: m_MaterialEditor.ShaderProperty(enableMotionVectorForVertexAnimation, StylesBaseUnlit.enableMotionVectorForVertexAnimationText);
//refs to this ?
EditorGUI.indentLevel--;
}
EditorGUI.indentLevel++;
m_MaterialEditor.ShaderProperty(metallic, Styles.metallicText);
// maskMaps and smoothness rescaling controls:
if(maskMapA.textureValue == null)
{
m_MaterialEditor.ShaderProperty(smoothnessA, Styles.smoothnessAText);
}
else
{
float remapMin = smoothnessARemapMin.floatValue;
float remapMax = smoothnessARemapMax.floatValue;
EditorGUI.BeginChangeCheck();
EditorGUILayout.MinMaxSlider(Styles.smoothnessARemappingText, ref remapMin, ref remapMax, 0.0f, 1.0f);
if (EditorGUI.EndChangeCheck())
{
smoothnessARemapMin.floatValue = remapMin;
smoothnessARemapMax.floatValue = remapMax;
}
}
if(maskMapB.textureValue == null)
{
m_MaterialEditor.ShaderProperty(smoothnessB, Styles.smoothnessBText);
}
else
{
float remapMin = smoothnessBRemapMin.floatValue;
float remapMax = smoothnessBRemapMax.floatValue;
EditorGUI.BeginChangeCheck();
EditorGUILayout.MinMaxSlider(Styles.smoothnessBRemappingText, ref remapMin, ref remapMax, 0.0f, 1.0f);
if (EditorGUI.EndChangeCheck())
{
smoothnessBRemapMin.floatValue = remapMin;
smoothnessBRemapMax.floatValue = remapMax;
}
}
m_MaterialEditor.ShaderProperty(lobeMix, Styles.lobeMixText);
m_MaterialEditor.TexturePropertySingleLine(Styles.maskMapASText, maskMapA);
m_MaterialEditor.TexturePropertySingleLine(Styles.maskMapBSText, maskMapB);
// Normal map:
m_MaterialEditor.TexturePropertySingleLine(Styles.normalMapText, normalMap, normalScale);
// UV Mapping:
EditorGUILayout.Space();
EditorGUI.BeginChangeCheck(); // UV mapping selection
m_MaterialEditor.ShaderProperty(UVBase, Styles.UVBaseMappingText);
UVBaseMapping uvBaseMapping = (UVBaseMapping)UVBase.floatValue;
float X, Y, Z, W;
X = (uvBaseMapping == UVBaseMapping.UV0) ? 1.0f : 0.0f;
Y = (uvBaseMapping == UVBaseMapping.UV1) ? 1.0f : 0.0f;
Z = (uvBaseMapping == UVBaseMapping.UV2) ? 1.0f : 0.0f;
W = (uvBaseMapping == UVBaseMapping.UV3) ? 1.0f : 0.0f;
UVMappingMask.colorValue = new Color(X, Y, Z, W);
//TODO:
//if ((uvBaseMapping == UVBaseMapping.Planar) || (uvBaseMapping == UVBaseMapping.Triplanar))
//{
// m_MaterialEditor.ShaderProperty(TexWorldScale, Styles.texWorldScaleText);
//}
if (EditorGUI.EndChangeCheck()) // ...UV mapping selection
{
}
m_MaterialEditor.TexturePropertySingleLine(Styles.emissiveText, emissiveColorMap, emissiveColor);
m_MaterialEditor.TextureScaleOffsetProperty(emissiveColorMap);
m_MaterialEditor.ShaderProperty(emissiveIntensity, Styles.emissiveIntensityText);
m_MaterialEditor.ShaderProperty(albedoAffectEmissive, Styles.albedoAffectEmissiveText);
EditorGUI.indentLevel--; // inputs
EditorGUILayout.Space();
// Surface type:
var surfaceTypeValue = (SurfaceType)surfaceType.floatValue;
if (surfaceTypeValue == SurfaceType.Transparent)
{

--EditorGUI.indentLevel;
}
// TODO: see DoEmissiveGUI( ) in LitUI.cs: custom uvmapping for emissive
EditorGUILayout.Space();
EditorGUILayout.LabelField(Styles.emissiveLabelText, EditorStyles.boldLabel);
EditorGUI.indentLevel++;
m_MaterialEditor.TexturePropertySingleLine(Styles.emissiveText, emissiveColorMap, emissiveColor);
m_MaterialEditor.ShaderProperty(emissiveIntensity, Styles.emissiveIntensityText);
m_MaterialEditor.ShaderProperty(albedoAffectEmissive, Styles.albedoAffectEmissiveText);
EditorGUI.indentLevel--;
}
protected override void MaterialPropertiesAdvanceGUI(Material material)

// All Setup Keyword functions must be static. This allows scripts to automatically update the shaders when the code changes
static public void SetupMaterialKeywordsAndPass(Material material)
{
//TODO see BaseLitUI.cs:SetupBaseLitKeywords (stencil etc)
bool doubleSidedEnable = material.GetFloat(kDoubleSidedEnable) > 0.0f;
if (doubleSidedEnable)
{
DoubleSidedNormalMode doubleSidedNormalMode = (DoubleSidedNormalMode)material.GetFloat(kDoubleSidedNormalMode);
switch (doubleSidedNormalMode)
{
case DoubleSidedNormalMode.Mirror: // Mirror mode (in tangent space)
material.SetVector("_DoubleSidedConstants", new Vector4(1.0f, 1.0f, -1.0f, 0.0f));
break;
case DoubleSidedNormalMode.Flip: // Flip mode (in tangent space)
material.SetVector("_DoubleSidedConstants", new Vector4(-1.0f, -1.0f, -1.0f, 0.0f));
break;
case DoubleSidedNormalMode.None: // None mode (in tangent space)
material.SetVector("_DoubleSidedConstants", new Vector4(1.0f, 1.0f, 1.0f, 0.0f));
break;
}
}
//NOTE: For SSS in forward and split lighting, obviously we don't have a gbuffer pass,
// so no stencil tagging there, but velocity? To check...
//TODO: stencil state, displacement, wind, depthoffset, tesselation
SetupMainTexForAlphaTestGI("_BaseColorMap", "_BaseColor", material);
//TODO: disable DBUFFER
CoreUtils.SetKeyword(material, "_NORMALMAP_TANGENT_SPACE", true);
CoreUtils.SetKeyword(material, "_NORMALMAP", material.GetTexture(kNormalMap));
CoreUtils.SetKeyword(material, "_MASKMAPA", material.GetTexture(kMaskMapA));
CoreUtils.SetKeyword(material, "_MASKMAPB", material.GetTexture(kMaskMapB));
bool needUV2 = (UVBaseMapping)material.GetFloat(kUVBase) == UVBaseMapping.UV2;
bool needUV3 = (UVBaseMapping)material.GetFloat(kUVBase) == UVBaseMapping.UV3;
if (needUV3)
{
material.DisableKeyword("_REQUIRE_UV2");
material.EnableKeyword("_REQUIRE_UV3");
}
else if (needUV2)
{
material.EnableKeyword("_REQUIRE_UV2");
material.DisableKeyword("_REQUIRE_UV3");
}
else
{
material.DisableKeyword("_REQUIRE_UV2");
material.DisableKeyword("_REQUIRE_UV3");
}
CoreUtils.SetKeyword(material, "_EMISSIVE_COLOR_MAP", material.GetTexture(kEmissiveColorMap));
}

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/Sky/AtmosphericScattering/ExponentialFogEditor.cs (4)


using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEditor;

PropertyField(m_FogHeightAttenuation);
}
}
}
}

ScriptableRenderPipeline/HDRenderPipeline/HDRP/HDCustomSamplerId.cs (1)


GBuffer,
DBufferRender,
DBufferPrepareDrawData,
DisplayDebugDecalsAtlas,
DisplayDebugViewMaterial,
DebugViewMaterialGBuffer,
BlitDebugViewMaterialDebug,

ScriptableRenderPipeline/HDRenderPipeline/HDRP/HDRenderPipeline.cs (35)


int m_CurrentHeight;
// Use to detect frame changes
int m_FrameCount;
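// Worked example for GetDecalAtlasMipCount() below: a 4096x4096 decal atlas
// gives Math.Log(4096, 2) = 12 mip levels (illustrative numbers only).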
public int GetDecalAtlasMipCount()
{
int highestDim = Math.Max(renderPipelineSettings.decalSettings.atlasWidth, renderPipelineSettings.decalSettings.atlasHeight);
return (int)Math.Log(highestDim, 2);
}
public int GetShadowSliceCount(uint atlasIndex) { return m_LightLoop.GetShadowSliceCount(atlasIndex); }
readonly SkyManager m_SkyManager = new SkyManager();

Mathf.Log(Mathf.Min(previousDepthPyramidRT.rt.width, previousDepthPyramidRT.rt.height), 2),
0.0f
));
}
}
var previousColorPyramidRT = hdCamera.GetPreviousFrameRT((int)HDCameraFrameHistoryType.ColorPyramid);
if (previousColorPyramidRT != null)

Mathf.Log(Mathf.Min(previousColorPyramidRT.rt.width, previousColorPyramidRT.rt.height), 2),
0.0f
));
}
}
}
}

}
if (newFrame)
{
HDCamera.CleanUnused();
// Make sure both are never 0.
m_LastTime = (m_Time > 0) ? m_Time : t;

// Frame settings state was updated by previous render, we must recalculate it
FrameSettings.InitializeFrameSettings(camera, m_Asset.GetRenderPipelineSettings(), srcFrameSettings, ref m_FrameSettings);
}
// Init material if needed
// TODO: this should be moved outside of the camera loop but we have no command buffer; ask Tim or Julien for details

var postProcessLayer = camera.GetComponent<PostProcessLayer>();
// Disable post process if we enable debug mode or if the post process layer is disabled
- if (m_CurrentDebugDisplaySettings.IsDebugDisplayEnabled() || !CoreUtils.IsPostProcessingActive(postProcessLayer))
+ if (m_CurrentDebugDisplaySettings.IsDebugDisplayRemovePostprocess() || !CoreUtils.IsPostProcessingActive(postProcessLayer))
{
m_FrameSettings.enablePostprocess = false;
}

DecalSystem.instance.EndCull();
m_DbufferManager.vsibleDecalCount = DecalSystem.m_DecalsVisibleThisFrame;
DecalSystem.instance.UpdateCachedMaterialData(); // textures, alpha or fade distances could've changed
- DecalSystem.instance.UpdateTextureAtlas(cmd); // as this is only used for transparent pass, would've been nice not to have to do this if no transparent renderers are visible
+ DecalSystem.instance.UpdateTextureAtlas(cmd); // as this is only used for transparent pass, would've been nice not to have to do this if no transparent renderers are visible, needs to happen after CreateDrawData
}
}
renderContext.SetupCameraProperties(camera, m_FrameSettings.enableStereo);

// Caution: We require sun light here as some skies use the sun light to render, it means that UpdateSkyEnvironment must be called after PrepareLightsForGPU.
// TODO: Try to arrange code so we can trigger this call earlier and use async compute here to run sky convolution during other passes (once we move convolution shader to compute).
UpdateSkyEnvironment(hdCamera, cmd);
RenderDepthPyramid(hdCamera, cmd, renderContext, FullScreenDebugMode.DepthPyramid);
StopStereoRendering(renderContext, hdCamera.camera);

var debugSSTThisPass = debugScreenSpaceTracing && (m_CurrentDebugDisplaySettings.lightingDebugSettings.debugLightingMode == DebugLightingMode.ScreenSpaceTracingRefraction);
if (debugSSTThisPass)
}
var debugSSTThisPass = debugScreenSpaceTracing && (m_CurrentDebugDisplaySettings.lightingDebugSettings.debugLightingMode == DebugLightingMode.ScreenSpaceTracingRefraction);
if (debugSSTThisPass)
{
cmd.SetGlobalBuffer(HDShaderIDs._DebugScreenSpaceTracingData, m_DebugScreenSpaceTracingData);
cmd.SetRandomWriteTarget(7, m_DebugScreenSpaceTracingData);

}
}
}
}
// This is use to Display legacy shader with an error shader
[Conditional("DEVELOPMENT_BUILD"), Conditional("UNITY_EDITOR")]

m_BufferPyramid.RenderColorPyramid(hdCamera, cmd, renderContext, m_CameraColorBuffer, cameraRT);
Vector2 pyramidScale = m_BufferPyramid.GetPyramidToScreenScale(hdCamera, cameraRT);
- PushFullScreenDebugTextureMip(cmd, cameraRT, m_BufferPyramid.GetPyramidLodCount(hdCamera), new Vector4(pyramidScale.x, pyramidScale.y, 0.0f, 0.0f), hdCamera, isPreRefraction ? FullScreenDebugMode.PreRefractionColorPyramid : FullScreenDebugMode.FinalColorPyramid);
+ PushFullScreenDebugTextureMip(cmd, cameraRT, m_BufferPyramid.GetPyramidLodCount(new Vector2Int(hdCamera.actualWidth, hdCamera.actualHeight)), new Vector4(pyramidScale.x, pyramidScale.y, 0.0f, 0.0f), hdCamera, isPreRefraction ? FullScreenDebugMode.PreRefractionColorPyramid : FullScreenDebugMode.FinalColorPyramid);
}
void RenderDepthPyramid(HDCamera hdCamera, CommandBuffer cmd, ScriptableRenderContext renderContext, FullScreenDebugMode debugMode)

m_BufferPyramid.RenderDepthPyramid(hdCamera, cmd, renderContext, GetDepthTexture(), cameraRT);
Vector2 pyramidScale = m_BufferPyramid.GetPyramidToScreenScale(hdCamera, cameraRT);
- PushFullScreenDebugTextureMip(cmd, cameraRT, m_BufferPyramid.GetPyramidLodCount(hdCamera), new Vector4(pyramidScale.x, pyramidScale.y, 0.0f, 0.0f), hdCamera, debugMode);
+ PushFullScreenDebugTextureMip(cmd, cameraRT, m_BufferPyramid.GetPyramidLodCount(new Vector2Int(hdCamera.actualWidth, hdCamera.actualHeight)), new Vector4(pyramidScale.x, pyramidScale.y, 0.0f, 0.0f), hdCamera, debugMode);
}
void RenderPostProcess(HDCamera hdcamera, CommandBuffer cmd, PostProcessLayer layer)

}
m_LightLoop.RenderDebugOverlay(hdCamera, cmd, m_CurrentDebugDisplaySettings, ref x, ref y, overlaySize, hdCamera.actualWidth);
DecalSystem.instance.RenderDebugOverlay(hdCamera, cmd, m_CurrentDebugDisplaySettings, ref x, ref y, overlaySize, hdCamera.actualWidth);
if (m_CurrentDebugDisplaySettings.colorPickerDebugSettings.colorPickerMode != ColorPickerDebugMode.None)
{

ScriptableRenderPipeline/HDRenderPipeline/HDRP/HDStringConstants.cs (3)


public static readonly int _Source4 = Shader.PropertyToID("_Source4");
public static readonly int _Result1 = Shader.PropertyToID("_Result1");
public static readonly int _AtmosphericScatteringType = Shader.PropertyToID("_AtmosphericScatteringType");
public static readonly int _AmbientProbeCoeffs = Shader.PropertyToID("_AmbientProbeCoeffs");
public static readonly int _GlobalExtinction = Shader.PropertyToID("_GlobalExtinction");
public static readonly int _GlobalScattering = Shader.PropertyToID("_GlobalScattering");

public static readonly int _VBufferLightingFeedback = Shader.PropertyToID("_VBufferLightingFeedback");
public static readonly int _VBufferSampleOffset = Shader.PropertyToID("_VBufferSampleOffset");
public static readonly int _VolumeBounds = Shader.PropertyToID("_VolumeBounds");
public static readonly int _VolumeProperties = Shader.PropertyToID("_VolumeProperties");
public static readonly int _VolumeData = Shader.PropertyToID("_VolumeData");
public static readonly int _NumVisibleDensityVolumes = Shader.PropertyToID("_NumVisibleDensityVolumes");
}
}

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightDefinition.cs (1)


public float weight;
public float multiplier;
public Vector3 sampleDirectionDiscardWS;
// Sampling properties
public int envIndex;
};

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightDefinition.cs.hlsl (5)


float3 boxSideFadeNegative;
float weight;
float multiplier;
float3 sampleDirectionDiscardWS;
int envIndex;
};

float GetMultiplier(EnvLightData value)
{
return value.multiplier;
}
float3 GetSampleDirectionDiscardWS(EnvLightData value)
{
return value.sampleDirectionDiscardWS;
}
int GetEnvIndex(EnvLightData value)
{

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightLoop/LightLoop.cs (8)


var capturePosition = Vector3.zero;
var influenceToWorld = probe.influenceToWorld;
var sampleDirectionDiscardWS = Vector3.zero;
// 31 bits index, 1 bit cache type
var envIndex = -1;
if (probe.planarReflectionProbe != null)

// We transform it to object space by translating the capturePosition
var vp = gpuProj * gpuView * Matrix4x4.Translate(capturePosition);
m_Env2DCaptureVP[fetchIndex] = vp;
sampleDirectionDiscardWS = captureRotation * Vector3.forward;
}
else if (probe.reflectionProbe != null)
{

envLightData.blendDistanceNegative = probe.blendDistanceNegative;
envLightData.boxSideFadePositive = probe.boxSideFadePositive;
envLightData.boxSideFadeNegative = probe.boxSideFadeNegative;
envLightData.sampleDirectionDiscardWS = sampleDirectionDiscardWS;
envLightData.influenceRight = influenceToWorld.GetColumn(0).normalized;
envLightData.influenceUp = influenceToWorld.GetColumn(1).normalized;

m_lightList.bounds.AddRange(m_lightList.rightEyeBounds);
m_lightList.lightVolumes.AddRange(m_lightList.rightEyeLightVolumes);
}
UpdateDataBuffers();
return m_enableBakeShadowMask;

// XRTODO: If possible, we could generate a non-oblique stereo projection
// matrix. It's ok if it's not the exact same matrix, as long as it encompasses
// the same FOV as the original projection matrix (which would mean padding each half
// of the frustum with the max half-angle). We don't need the light information in
// real projection space. We just use screen space to figure out what is proximal
// to a cluster or tile.
// Once we generate this non-oblique projection matrix, it can be shared across both eyes (un-array)

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/HomogeneousDensityVolume.cs (37)


[AddComponentMenu("Rendering/Homogeneous Density Volume", 1100)]
public class HomogeneousDensityVolume : MonoBehaviour
{
public DensityVolumeParameters parameters;
public HomogeneousDensityVolume()
{
parameters.albedo = new Color(0.5f, 0.5f, 0.5f);
parameters.meanFreePath = 10.0f;
parameters.asymmetry = 0.0f;
}
private void Awake()
{

void OnDrawGizmos()
{
if (parameters.IsLocalVolume())
{
Gizmos.color = parameters.albedo;
Gizmos.matrix = transform.localToWorldMatrix;
Gizmos.DrawWireCube(Vector3.zero, Vector3.one);
}
}
// Returns NULL if a global fog component does not exist, or is not enabled.
public static HomogeneousDensityVolume GetGlobalHomogeneousDensityVolume()
{
HomogeneousDensityVolume globalVolume = null;
HomogeneousDensityVolume[] volumes = FindObjectsOfType(typeof(HomogeneousDensityVolume)) as HomogeneousDensityVolume[];
foreach (HomogeneousDensityVolume volume in volumes)
{
if (volume.enabled && !volume.parameters.IsLocalVolume())
{
globalVolume = volume;
break;
}
}
return globalVolume;
}
}
} // UnityEngine.Experimental.Rendering.HDPipeline
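A small usage sketch for the helper above, assuming the component as defined in this commit; note that GetGlobalHomogeneousDensityVolume() scans every loaded instance with FindObjectsOfType, so callers should fetch it at most once per frame (the consuming component below is hypothetical):

using UnityEngine;
using UnityEngine.Experimental.Rendering.HDPipeline;

// Hypothetical consumer of the helper above. The lookup walks all loaded
// instances via FindObjectsOfType, so cache the result per frame at most.
public class GlobalFogProbe : MonoBehaviour
{
    void Update()
    {
        HomogeneousDensityVolume global = HomogeneousDensityVolume.GetGlobalHomogeneousDensityVolume();
        if (global != null)
            Debug.Log("Global fog mean free path: " + global.parameters.meanFreePath + " m");
    }
}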

187
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/VBuffer.hlsl


// Interpolation in the log space is non-linear.
// Therefore, given 'logEncodedDepth', we compute a new depth value
// which allows us to perform HW interpolation which is linear in the view space.
float ComputeLerpPositionForLogEncoding(float linearDepth,
float logEncodedDepth,
float2 VBufferSliceCount,
float4 VBufferDepthDecodingParams)
{

float numSlices = VBufferSliceCount.x;
float rcpNumSlices = VBufferSliceCount.y;
float s = d * numSlices - 0.5;
float s0 = floor(s);
float s1 = ceil(s);
float d0 = saturate(s0 * rcpNumSlices + (0.5 * rcpNumSlices));
float d1 = saturate(s1 * rcpNumSlices + (0.5 * rcpNumSlices));
float z0 = DecodeLogarithmicDepthGeneralized(d0, VBufferDepthDecodingParams);

return d0 + t * rcpNumSlices;
}
// Performs trilinear reconstruction of the V-Buffer.
// if (correctLinearInterpolation), we use ComputeLerpPositionForLogEncoding() to correct weighting
// of both slices at the cost of extra ALUs.
//
// if (quadraticFilterXY), we perform biquadratic (3x3) reconstruction for each slice to reduce
// aliasing at the cost of extra ALUs and bandwidth.
// Warning: you MUST pass a linear sampler in order for the quadratic filter to work.
//
// Note: for correct filtering, the data has to be stored in the perceptual space.
// This means storing tone mapped radiance and transmittance instead of optical depth.
// See "A Fresh Look at Generalized Sampling", p. 51.
//
// if (clampToBorder), samples outside of the buffer return 0 (we perform a smooth fade).
// Otherwise, the sampler simply clamps the texture coordinate to the edge of the texture.
// Warning: clamping to border may not work as expected with the quadratic filter due to its extent.
float4 SampleVBuffer(TEXTURE3D_ARGS(VBuffer, clampSampler),
float2 positionNDC,
float linearDepth,
float4 VBufferResolution,
float2 VBufferSliceCount,
float4 VBufferDepthEncodingParams,
float4 VBufferDepthDecodingParams,
bool correctLinearInterpolation,
bool quadraticFilterXY,
bool clampToBorder)
{
float numSlices = VBufferSliceCount.x;
float rcpNumSlices = VBufferSliceCount.y;
float2 uv = positionNDC;
// The distance between slices is log-encoded.
float z = linearDepth;
float d = EncodeLogarithmicDepthGeneralized(z, VBufferDepthEncodingParams);
float w;
if (correctLinearInterpolation)
{
// Adjust the texture coordinate for HW linear filtering.
w = ComputeLerpPositionForLogEncoding(z, d, VBufferSliceCount, VBufferDepthDecodingParams);
}
else
{
w = d;
}
float fadeWeight = 1;
if (clampToBorder)
{
// Compute the distance to the edge, and remap it to the [0, 1] range.
// TODO: add support for the HW border clamp sampler.
float weightU = saturate((1 - 2 * abs(uv.x - 0.5)) * VBufferResolution.x);
float weightV = saturate((1 - 2 * abs(uv.y - 0.5)) * VBufferResolution.y);
float weightW = saturate((1 - 2 * abs(w - 0.5)) * VBufferSliceCount.x);
fadeWeight = weightU * weightV * weightW;
}
float4 result = 0;
if (fadeWeight > 0)
{
if (quadraticFilterXY)
{
float2 xy = uv * VBufferResolution.xy;
float2 ic = floor(xy);
float2 fc = frac(xy);
float2 weights[2], offsets[2];
BiquadraticFilter(1 - fc, weights, offsets); // Inverse-translate the filter centered around 0.5
result = (weights[0].x * weights[0].y) * SAMPLE_TEXTURE3D_LOD(VBuffer, clampSampler, float3((ic + float2(offsets[0].x, offsets[0].y)) * VBufferResolution.zw, w), 0) // Top left
+ (weights[1].x * weights[0].y) * SAMPLE_TEXTURE3D_LOD(VBuffer, clampSampler, float3((ic + float2(offsets[1].x, offsets[0].y)) * VBufferResolution.zw, w), 0) // Top right
+ (weights[0].x * weights[1].y) * SAMPLE_TEXTURE3D_LOD(VBuffer, clampSampler, float3((ic + float2(offsets[0].x, offsets[1].y)) * VBufferResolution.zw, w), 0) // Bottom left
+ (weights[1].x * weights[1].y) * SAMPLE_TEXTURE3D_LOD(VBuffer, clampSampler, float3((ic + float2(offsets[1].x, offsets[1].y)) * VBufferResolution.zw, w), 0); // Bottom right
}
else
{
result = SAMPLE_TEXTURE3D_LOD(VBuffer, clampSampler, float3(uv, w), 0);
}
result *= fadeWeight;
}
return result;
}
float4 SampleVBuffer(TEXTURE3D_ARGS(VBuffer, clampSampler),
float3 positionWS,
float4x4 viewProjMatrix,
float4 VBufferResolution,
float2 VBufferSliceCount,
float4 VBufferDepthEncodingParams,
float4 VBufferDepthDecodingParams,
bool correctLinearInterpolation,
bool quadraticFilterXY,
bool clampToBorder)
{
float2 positionNDC = ComputeNormalizedDeviceCoordinates(positionWS, viewProjMatrix);
float linearDepth = mul(viewProjMatrix, float4(positionWS, 1)).w;
return SampleVBuffer(TEXTURE3D_PARAM(VBuffer, clampSampler),
positionNDC,
linearDepth,
VBufferResolution,
VBufferSliceCount,
VBufferDepthEncodingParams,
VBufferDepthDecodingParams,
correctLinearInterpolation,
quadraticFilterXY,
clampToBorder);
}
// Returns interpolated {volumetric radiance, transmittance}.
float4 SampleVolumetricLighting(TEXTURE3D_ARGS(VBufferLighting, clampSampler),
float2 positionNDC,
float linearDepth,
float4 VBufferResolution,
float2 VBufferSliceCount,
float4 VBufferDepthEncodingParams,
float4 VBufferDepthDecodingParams,
bool correctLinearInterpolation,
bool quadraticFilterXY)
{
// TODO: add some slowly animated noise to the reconstructed value.
return FastTonemapInvert(SampleVBuffer(TEXTURE3D_PARAM(VBufferLighting, clampSampler),
positionNDC,
linearDepth,
VBufferResolution,
VBufferSliceCount,
VBufferDepthEncodingParams,
VBufferDepthDecodingParams,
correctLinearInterpolation,
quadraticFilterXY,
false));
}
#endif // UNITY_VBUFFER_INCLUDED
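For reference, the slice-correction idea in ComputeLerpPositionForLogEncoding above, transcribed to C#. The exact depth encoding is driven by _VBufferDepthDecodingParams; a plain log2 mapping between the default 0.5 m / 64 m near and far planes is substituted here as an assumption, so this is a sketch of the math, not the shipped shader code:

using UnityEngine;

// Sketch of ComputeLerpPositionForLogEncoding on the CPU. The log2 mapping
// between Near and Far stands in for the real encode/decode pair driven by
// _VBufferDepthDecodingParams (an assumption, kept simple on purpose).
static class LogSliceDemo
{
    const float Near = 0.5f, Far = 64.0f; // the VBuffer near/far plane defaults
    const int NumSlices = 128;            // VBUFFER_SLICE_COUNT

    static float Encode(float z) { return Mathf.Log(z / Near, 2.0f) / Mathf.Log(Far / Near, 2.0f); }
    static float Decode(float d) { return Near * Mathf.Pow(Far / Near, d); }

    // Returns a texture coordinate whose HW-linear interpolation between the
    // two bracketing slice centers is linear in view-space Z rather than in
    // the log-encoded space.
    public static float LerpPositionForLogEncoding(float z)
    {
        float d = Encode(z);
        float rcpNumSlices = 1.0f / NumSlices;
        float s  = d * NumSlices - 0.5f;
        float d0 = Mathf.Clamp01(Mathf.Floor(s) * rcpNumSlices + 0.5f * rcpNumSlices);
        float d1 = Mathf.Clamp01(Mathf.Ceil(s)  * rcpNumSlices + 0.5f * rcpNumSlices);
        float z0 = Decode(d0);
        float z1 = Decode(d1);
        float t  = Mathf.InverseLerp(z0, z1, z); // linear weight in view space (clamped)
        return d0 + t * rcpNumSlices;
    }
}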

6
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/VolumeVoxelization.compute


//--------------------------------------------------------------------------------------------------
StructuredBuffer<OrientedBBox> _VolumeBounds;
StructuredBuffer<DensityVolumeData> _VolumeData;
RW_TEXTURE3D(float4, _VBufferDensity); // RGB = sqrt(scattering), A = sqrt(extinction)

if (overlapFraction > 0)
{
// There is an overlap. Sample the 3D texture, or load the constant value.
voxelScattering += overlapFraction * _VolumeData[volumeIndex].scattering;
voxelExtinction += overlapFraction * _VolumeData[volumeIndex].extinction;
}
#ifndef USE_CLUSTERED_LIGHTLIST
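The voxelization hunk above accumulates each overlapping volume's scattering and extinction, weighted by its overlap fraction with the voxel. A CPU-side transcription of that accumulation (the struct and method are illustrative; in the compute shader the overlap fraction comes from a voxel/OBB intersection estimate):

using UnityEngine;

// CPU transcription of the accumulation above; 'overlapFraction' is taken
// as a parameter here instead of being estimated from a voxel/OBB test.
struct VoxelDensity
{
    public Vector3 scattering;
    public float extinction;

    public void Accumulate(Vector3 volumeScattering, float volumeExtinction, float overlapFraction)
    {
        if (overlapFraction > 0)
        {
            // There is an overlap: blend the volume's values in, weighted by coverage.
            scattering += overlapFraction * volumeScattering;
            extinction += overlapFraction * volumeExtinction;
        }
    }
}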

160
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/VolumetricLighting.compute


#define VBUFFER_SLICE_COUNT 128
#endif
#define SUPPORT_ASYMMETRY 1 // Support asymmetric phase functions
#define SUPPORT_PUNCTUAL_LIGHTS 1 // Punctual lights contribute to fog lighting
#define GROUP_SIZE_1D 8

//--------------------------------------------------------------------------------------------------
#include "CoreRP/ShaderLibrary/Common.hlsl"
#include "CoreRP/ShaderLibrary/Color.hlsl"
#include "CoreRP/ShaderLibrary/Filtering.hlsl"
#include "CoreRP/ShaderLibrary/VolumeRendering.hlsl"

// Implementation
//--------------------------------------------------------------------------------------------------
// A ray with a single origin and two directions:
// one pointing at the center of the voxel, and one jittered in screen space.
float3 jitterDirWS; // Normalized, voxel-jittered in the screen space
float3 centerDirWS; // Normalized, voxel-centered in the screen space
float jitterDirInvViewZ; // 1 / ViewSpace(jitterDirWS).z
float twoDirRatioViewZ; // ViewSpace(jitterDirWS).z / ViewSpace(centerDirWS).z
float ConvertLinearDepthToJitterRayDist(DualRay ray, float z)
{
return z * ray.jitterDirInvViewZ;
}
float ConvertJitterDistToCenterRayDist(DualRay ray, float t)
{
return t * ray.twoDirRatioViewZ;
}
// Returns a point along the jittered direction.
float3 GetPointAtDistance(DualRay ray, float t)
{
return ray.originWS + t * ray.jitterDirWS;
}
// Returns a point along the centered direction.
float3 GetCenterAtDistance(DualRay ray, float s)
{
return ray.originWS + s * ray.centerDirWS;
}
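A C# transcription of the DualRay bookkeeping above, for readers without HLSL at hand. The two cached factors let the shader convert a view-space depth to a distance along the jittered ray, and a jittered-ray distance to the centered-ray distance on the same view-space Z plane (a sketch; the field comments restate the definitions from the struct above):

using UnityEngine;

// Sketch of the DualRay conversions. Both scale factors are precomputed in
// the kernel entry point: jitterDirInvViewZ = 1 / ViewSpace(jitterDirWS).z
// and twoDirRatioViewZ = ViewSpace(jitterDirWS).z / ViewSpace(centerDirWS).z.
struct DualRaySketch
{
    public Vector3 originWS;
    public Vector3 jitterDirWS;   // normalized, voxel-jittered in screen space
    public Vector3 centerDirWS;   // normalized, voxel-centered in screen space
    public float jitterDirInvViewZ;
    public float twoDirRatioViewZ;

    public float LinearDepthToJitterDist(float z) { return z * jitterDirInvViewZ; }
    public float JitterDistToCenterDist(float t)  { return t * twoDirRatioViewZ; }

    // Points built from matching t and s = JitterDistToCenterDist(t) share
    // the same view-space Z plane.
    public Vector3 PointAtDistance(float t)  { return originWS + t * jitterDirWS; }
    public Vector3 CenterAtDistance(float s) { return originWS + s * centerDirWS; }
}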
struct VoxelLighting

color, attenuation);
// Important:
// Ideally, all scattering calculations should use the jittered versions
// of the sample position and the ray direction. However, correct reprojection
// of asymmetrically scattered lighting (affected by an anisotropic phase
// function) is not possible. We work around this issue by reprojecting

float3 coneAxisX = lenMul * light.right;
float3 coneAxisY = lenMul * light.up;
sampleLight = IntersectRayCone(ray.originWS, ray.jitterDirWS,
light.positionWS, light.forward,
coneAxisX, coneAxisY,
t0, t1, tEntr, tExit);

float t, distSq, rcpPdf;
ImportanceSamplePunctualLight(rndVal, light.positionWS,
ray.originWS, ray.jitterDirWS,
tEntr, tExit, t, distSq, rcpPdf,
hackMinDistSq);

float3 L = -lightToSample * distRcp;
float3 color; float attenuation;
EvaluateLight_Punctual(context, posInput, light, unused,
0, L, lightToSample, distances, color, attenuation);
// Ideally, all scattering calculations should use the jittered versions
// of the sample position and the ray direction. However, correct reprojection
// of asymmetrically scattered lighting (affected by an anisotropic phase
// function) is not possible. We work around this issue by reprojecting

float3x3 rotMat = float3x3(light.right, light.up, light.forward);
float3 o = mul(rotMat, ray.originWS - light.positionWS);
float3 d = mul(rotMat, ray.jitterDirWS);
float range = light.size.x;
float3 boxPt0 = float3(-1, -1, 0);

float4 distances = float4(1, 1, 1, distProj);
float3 color; float attenuation;
EvaluateLight_Punctual(context, posInput, light, unused,
0, L, lightToSample, distances, color, attenuation);
// Ideally, all scattering calculations should use the jittered versions
// of the sample position and the ray direction. However, correct reprojection
// of asymmetrically scattered lighting (affected by an anisotropic phase
// function) is not possible. We work around this issue by reprojecting

void FillVolumetricLightingBuffer(LightLoopContext context, uint featureFlags,
PositionInputs posInput, DualRay ray)
{
const float n = _VBufferDepthDecodingParams.x + _VBufferDepthDecodingParams.z;
const float z0 = n; // Start integration from the near plane
const float de = rcp(VBUFFER_SLICE_COUNT); // Log-encoded distance between slices
float t0 = ConvertLinearDepthToJitterRayDist(ray, z0);
// The contribution of the ambient probe does not depend on the position,
// only on the direction and the length of the interval.

uint3 voxelCoord = uint3(posInput.positionSS, slice);
float e1 = slice * de + de; // (slice + 1) / sliceCount
#if defined(SHADER_API_METAL)
// Warning: this compiles, but it's nonsense. Use DecodeLogarithmicDepthGeneralized().
float z1 = DecodeLogarithmicDepth(e1, _VBufferDepthDecodingParams);
#else
float z1 = DecodeLogarithmicDepthGeneralized(e1, _VBufferDepthDecodingParams);
#endif
float t1 = ConvertLinearDepthToJitterRayDist(ray, z1);
float dt = t1 - t0;
#ifdef USE_CLUSTERED_LIGHTLIST

// Compute the -exact- position of the center of the voxel.
// It's important since the accumulated value of the integral is stored at the center.
// We will use it for participating media sampling, asymmetric scattering and reprojection.
float s = ConvertJitterDistToCenterRayDist(ray, t0 + 0.5 * dt);
float3 centerWS = GetCenterAtDistance(ray, s);
// Sample the participating medium at the center of the voxel.
// We consider it to be constant along the interval [t0, t1] (within the voxel).
// TODO: piecewise linear.
float3 scattering = LOAD_TEXTURE3D(_VBufferDensity, voxelCoord).rgb;

);
#endif
#if ENABLE_REPROJECTION
float4 reprojValue = SampleVBuffer(TEXTURE3D_PARAM(_VBufferLightingHistory, s_linear_clamp_sampler),
centerWS,
_PrevViewProjMatrix,
_VBufferResolution,
_VBufferSliceCount.xy,
_VBufferDepthEncodingParams,
_VBufferDepthDecodingParams,
false, false, true);
// Compute the exponential moving average over 'n' frames:
// X = (1 - a) * ValueAtFrame[n] + a * AverageOverPreviousFrames.

// TODO: doesn't seem to be worth it, removed for now.
// Perform temporal blending.
// Both radiance values are obtained by integrating over line segments of different length,
// with potentially different participating media coverage.
// Blending only makes sense if the voxels are virtually identical.
// Therefore, we need to rescale the history to make it match the current configuration.
// In order to do that, we integrate transmittance over the length of the ray interval
// passing through the center of the voxel. The integral can be interpreted as the amount of
// isotropically in-scattered radiance from a directional light with unit intensity.
// We ignore jittering, as we want values from the same voxel to be reprojected without
// any rescaling.
float ds = ConvertJitterDistToCenterRayDist(ray, dt);
float centerTransmInt = TransmittanceIntegralHomogeneousMedium(extinction, ds);
float reprojScale = reprojSuccess ? (centerTransmInt * rcp(reprojValue.a)) : 0;
float3 blendedRadiance = (1 - blendFactor) * lighting.radianceNoPhase + blendFactor * reprojScale * reprojRadiance;
// Store the feedback for the voxel.
// TODO: dynamic lights (which update their position, rotation, cookie or shadow at runtime)

_VBufferLightingFeedback[voxelCoord] = float4(blendedRadiance, centerTransmInt);
#if SUPPORT_ASYMMETRY
#endif
#if SUPPORT_ASYMMETRY
#else
float3 blendedRadiance = lighting.radianceNoPhase;
#endif
#if SUPPORT_ASYMMETRY
float phase = _CornetteShanksConstant;
#else
float phase = IsotropicPhaseFunction();
#endif
// Integrate the contribution of the probe over the interval.
// Integral{a, b}{Transmittance(0, t) * L_s(t) dt} = Transmittance(0, a) * Integral{a, b}{Transmittance(0, t - a) * L_s(t) dt}.

opticalDepth += 0.5 * extinction * dt;
// Store the voxel data.
// Note: for correct filtering, the data has to be stored in the perceptual space.
// This means storing the tone mapped radiance and transmittance instead of optical depth.
// See "A Fresh Look at Generalized Sampling", p. 51.
_VBufferLightingIntegral[voxelCoord] = float4(FastTonemap(totalRadiance), Transmittance(opticalDepth));
t0 = t1;
#ifdef USE_CLUSTERED_LIGHTLIST

float2 centerCoord = voxelCoord + float2(0.5, 0.5);
#if ENABLE_REPROJECTION
float2 jitterCoord = centerCoord + _VBufferSampleOffset.xy;
#else
float2 jitterCoord = centerCoord;
#endif
// Compute the (voxel-centered in the screen space) ray direction s.t. its ViewSpace(rayDirWS).z = 1.
// Compute the (voxel-jittered in the screen space) ray direction s.t. its ViewSpace(rayDirWS).z = 1.
float3 jitterDirWS = mul(-float3(jitterCoord, 1), (float3x3)_VBufferCoordToViewDirWS);
float jitterDirLenSq = dot(jitterDirWS, jitterDirWS);
float jitterDirLenRcp = rsqrt(jitterDirLenSq);
float jitterDirLen = jitterDirLenSq * jitterDirLenRcp;
ray.jitterDirWS = jitterDirWS * jitterDirLenRcp; // Normalize
ray.jitterDirInvViewZ = jitterDirLen; // View space Z
ray.twoDirRatioViewZ = centerDirLen * jitterDirLenRcp; // View space Z ratio
// TODO
LightLoopContext context;
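The temporal blend in this file is an exponential moving average, X = (1 - a) * current + a * history, where the history sample is first rescaled by the ratio of transmittance integrals so that both terms describe the same voxel configuration. A CPU-side sketch of that blend (names are illustrative; as in the hunk above, the feedback buffer's alpha channel is assumed to hold the transmittance integral):

using UnityEngine;

// Sketch of the temporal blend. 'history.w' is assumed to hold the
// transmittance integral stored in the feedback buffer's alpha channel.
static class ReprojectionBlend
{
    public static Vector3 Blend(Vector3 currentRadianceNoPhase,
                                Vector4 history,            // rgb = radiance, a = transmittance integral
                                float centerTransmittanceIntegral,
                                float blendFactor,          // 'a' in the moving average
                                bool reprojSuccess)
    {
        // Rescale the history so it matches the current voxel configuration;
        // a failed reprojection contributes nothing.
        float reprojScale = reprojSuccess ? centerTransmittanceIntegral / history.w : 0.0f;
        Vector3 historyRadiance = new Vector3(history.x, history.y, history.z);
        return (1.0f - blendFactor) * currentRadianceNoPhase
             + blendFactor * reprojScale * historyRadiance;
    }
}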

167
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/VolumetricLighting.cs


namespace UnityEngine.Experimental.Rendering.HDPipeline
{
[GenerateHLSL]
public struct DensityVolumeData
public static DensityVolumeData GetNeutralValues()
DensityVolumeData data;
data.scattering = Vector3.zero;
data.extinction = 0;
return data;
public class VolumeRenderingUtils
public static float MeanFreePathFromExtinction(float extinction)
return 1.0f / extinction;
public static float ExtinctionFromMeanFreePath(float meanFreePath)
return 1.0f / meanFreePath;
public static Vector3 AbsorptionFromExtinctionAndScattering(float extinction, Vector3 scattering)
return new Vector3(extinction, extinction, extinction) - scattering;
public static Vector3 ScatteringFromExtinctionAndAlbedo(float extinction, Vector3 albedo)
return extinction * albedo;
public static Vector3 AlbedoFromMeanFreePathAndScattering(float meanFreePath, Vector3 scattering)
return meanFreePath * scattering;
}
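Worked numbers for the conversions above: a mean free path of 10 m yields an extinction of 0.1 m^-1, and a 50% grey albedo splits that evenly between scattering and absorption. A minimal sketch using the VolumeRenderingUtils methods introduced in this commit (the demo class itself is illustrative):

using UnityEngine;
using UnityEngine.Experimental.Rendering.HDPipeline;

// Illustrative numbers only; the demo class is not part of the commit.
static class VolumeMathDemo
{
    public static void Demo()
    {
        float extinction = VolumeRenderingUtils.ExtinctionFromMeanFreePath(10.0f); // 0.1 m^-1
        Vector3 scattering = VolumeRenderingUtils.ScatteringFromExtinctionAndAlbedo(
            extinction, new Vector3(0.5f, 0.5f, 0.5f));                            // (0.05, 0.05, 0.05)
        Vector3 absorption = VolumeRenderingUtils.AbsorptionFromExtinctionAndScattering(
            extinction, scattering);                                               // (0.05, 0.05, 0.05)
        Debug.Log(extinction + " " + scattering + " " + absorption);
    }
}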
[Serializable]
public struct DensityVolumeParameters
{
public Color albedo; // Single scattering albedo [0, 1]. Alpha is ignored
public float meanFreePath; // In meters [1, inf]. Should be chromatic - this is an optimization!
public float asymmetry; // Only used if (isLocal == false)
public void Constrain()
{

albedo.a = 1.0f;
meanFreePath = Mathf.Clamp(meanFreePath, 1.0f, float.MaxValue);
public DensityVolumeData GetData()
DensityVolumeData data = new DensityVolumeData();
data.extinction = VolumeRenderingUtils.ExtinctionFromMeanFreePath(meanFreePath);
data.scattering = VolumeRenderingUtils.ScatteringFromExtinctionAndAlbedo(data.extinction, (Vector3)(Vector4)albedo);
return data;
public List<OrientedBBox> bounds;
public List<DensityVolumeData> density;
}
public class VolumetricLightingSystem

ComputeShader m_VolumeVoxelizationCS = null;
ComputeShader m_VolumetricLightingCS = null;
List<VBuffer> m_VBuffers = null;
List<OrientedBBox> m_VisibleVolumeBounds = null;
List<DensityVolumeData> m_VisibleVolumeData = null;
public const int k_MaxVisibleVolumeCount = 512;
static ComputeBuffer s_VisibleVolumeDataBuffer = null;
float m_VBufferNearPlane = 0.5f; // Distance in meters; dynamic modifications not handled by reprojection
float m_VBufferFarPlane = 64.0f; // Distance in meters; dynamic modifications not handled by reprojection

{
if (preset == VolumetricLightingPreset.Off) return;
m_VolumeVoxelizationCS = asset.renderPipelineResources.volumeVoxelizationCS;
m_VolumetricLightingCS = asset.renderPipelineResources.volumetricLightingCS;
m_VBuffers = new List<VBuffer>();
m_VisibleVolumeBounds = new List<OrientedBBox>();
m_VisibleVolumeData = new List<DensityVolumeData>();
s_VisibleVolumeBoundsBuffer = new ComputeBuffer(k_MaxVisibleVolumeCount, System.Runtime.InteropServices.Marshal.SizeOf(typeof(OrientedBBox)));
s_VisibleVolumeDataBuffer = new ComputeBuffer(k_MaxVisibleVolumeCount, System.Runtime.InteropServices.Marshal.SizeOf(typeof(DensityVolumeData)));
}
public void Cleanup()

m_VBuffers[i].Destroy();
}
m_VBuffers = null;
m_VisibleVolumeBounds = null;
m_VisibleVolumeData = null;
CoreUtils.SafeRelease(s_VisibleVolumeDataBuffer);
}
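Build() above sizes each ComputeBuffer with Marshal.SizeOf of the element struct so the CPU-side stride matches the GPU StructuredBuffer stride. A minimal sketch of that allocation pattern (the struct below only mirrors the shape of DensityVolumeData and is hypothetical):

using System.Runtime.InteropServices;
using UnityEngine;

// Hypothetical element struct: it only mirrors the shape of DensityVolumeData.
struct ExampleVolumeData
{
    public Vector3 scattering;
    public float extinction;
}

static class VolumeBufferDemo
{
    // Marshal.SizeOf keeps the CPU stride in sync with the HLSL
    // StructuredBuffer<...> stride, exactly as Build() does above.
    public static ComputeBuffer Allocate(int maxVisibleVolumeCount)
    {
        return new ComputeBuffer(maxVisibleVolumeCount, Marshal.SizeOf(typeof(ExampleVolumeData)));
    }
}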
public void ResizeVBuffer(HDCamera camera, int screenWidth, int screenHeight)

{
if (preset == VolumetricLightingPreset.Off) return;
// Modify the near plane.
// Warning: it can screw up the reprojection. However, we have to do it in order for clustered lighting to work correctly.
m_VBufferNearPlane = camera.camera.nearClipPlane;
VBuffer vBuffer = FindVBuffer(camera.GetViewID());
Debug.Assert(vBuffer != null);

// Get the interpolated asymmetry value.
var fog = VolumeManager.instance.stack.GetComponent<VolumetricFog>();
SetPreconvolvedAmbientLightProbe(cmd, fog.asymmetry);
cmd.SetGlobalVector( HDShaderIDs._VBufferResolution, new Vector4(w, h, 1.0f / w, 1.0f / h));
cmd.SetGlobalVector( HDShaderIDs._VBufferSliceCount, new Vector4(d, 1.0f / d));
cmd.SetGlobalVector( HDShaderIDs._VBufferDepthEncodingParams, ComputeLogarithmicDepthEncodingParams(m_VBufferNearPlane, m_VBufferFarPlane, k_LogScale));

if (preset == VolumetricLightingPreset.Off) return densityVolumes;
var visualEnvironment = VolumeManager.instance.stack.GetComponent<VisualEnvironment>();
if (visualEnvironment.fogType != FogType.Volumetric) return densityVolumes;
using (new ProfilingSample(cmd, "Prepare Visible Density Volume List"))
{
Vector3 camPosition = camera.camera.transform.position;

}
m_VisibleVolumeBounds.Clear();
m_VisibleVolumeData.Clear();
// Collect all visible finite volume data, and upload it to the GPU.
HomogeneousDensityVolume[] volumes = Object.FindObjectsOfType(typeof(HomogeneousDensityVolume)) as HomogeneousDensityVolume[];

HomogeneousDensityVolume volume = volumes[i];
// Only test active volumes.
if (volume.enabled)
{
// TODO: cache these?
var obb = OrientedBBox.Create(volume.transform);

if (GeometryUtils.Overlap(obb, camera.frustum, 6, 8))
{
// TODO: cache these?
var data = volume.parameters.GetData();
m_VisibleVolumeData.Add(data);
s_VisibleVolumeDataBuffer.SetData(m_VisibleVolumeData);
densityVolumes.density = m_VisibleVolumeData;
return densityVolumes;
}

float vFoV = camera.camera.fieldOfView * Mathf.Deg2Rad;
Vector4 resolution = new Vector4(w, h, 1.0f / w, 1.0f / h);
Matrix4x4 transform = HDUtils.ComputePixelCoordToWorldSpaceViewDirectionMatrix(vFoV, resolution, camera.viewMatrix, false);
cmd.SetComputeTextureParam(m_VolumeVoxelizationCS, kernel, HDShaderIDs._VBufferDensity, vBuffer.GetDensityBuffer());
cmd.SetComputeBufferParam( m_VolumeVoxelizationCS, kernel, HDShaderIDs._VolumeBounds, s_VisibleVolumeBoundsBuffer);
cmd.SetComputeBufferParam( m_VolumeVoxelizationCS, kernel, HDShaderIDs._VolumeData, s_VisibleVolumeDataBuffer);
// TODO: set the constant buffer data only once.
cmd.SetComputeMatrixParam( m_VolumeVoxelizationCS, HDShaderIDs._VBufferCoordToViewDirWS, transform);

using (new ProfilingSample(cmd, "Volumetric Lighting"))
{
var visualEnvironment = VolumeManager.instance.stack.GetComponent<VisualEnvironment>();
if (visualEnvironment.fogType != FogType.Volumetric)
// CoreUtils.SetRenderTarget(cmd, vBuffer.GetLightingIntegralBuffer(), ClearFlag.Color, CoreUtils.clearColorAllBlack);
// CoreUtils.SetRenderTarget(cmd, vBuffer.GetLightingFeedbackBuffer(), ClearFlag.Color, CoreUtils.clearColorAllBlack);
// CoreUtils.SetRenderTarget(cmd, vBuffer.GetDensityBuffer(), ClearFlag.Color, CoreUtils.clearColorAllBlack);
VBuffer vBuffer = FindVBuffer(camera.GetViewID());
Debug.Assert(vBuffer != null);
// Only available in the Play Mode because all the frame counters in the Edit Mode are broken.
bool enableClustered = settings.lightLoopSettings.enableTileAndCluster;

int sampleIndex = rfc % 7;
Vector4 offset = new Vector4(xySeq[sampleIndex].x, xySeq[sampleIndex].y, zSeq[sampleIndex], rfc);
// Get the interpolated asymmetry value.
var fog = VolumeManager.instance.stack.GetComponent<VolumetricFog>();
cmd.SetComputeFloatParam( m_VolumetricLightingCS, HDShaderIDs._CornetteShanksConstant, CornetteShanksPhasePartConstant(fog.asymmetry));
cmd.SetComputeTextureParam(m_VolumetricLightingCS, kernel, HDShaderIDs._VBufferDensity, vBuffer.GetDensityBuffer()); // Read
cmd.SetComputeTextureParam(m_VolumetricLightingCS, kernel, HDShaderIDs._VBufferLightingIntegral, vBuffer.GetLightingIntegralBuffer()); // Write
if (enableReprojection)

38
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/Lit/Lit.hlsl


bsdfData.thickness = max(thickness, 0.0001);
}
// This function is used to help with debugging and must be implemented by any lit material
// The implementer must take into account the current override components and
// adjust the SurfaceData properties accordingly

// Handle IBL + multiscattering
float specularReflectivity;
GetPreIntegratedFGDGGXAndDisneyDiffuse(NdotV, preLightData.iblPerceptualRoughness, bsdfData.fresnel0, preLightData.specularFGD, preLightData.diffuseFGD, specularReflectivity);
#ifdef LIT_DIFFUSE_LAMBERT_BRDF
preLightData.diffuseFGD = 1.0;
#endif
iblR = reflect(-V, iblN);
// This is an ad-hoc tweak to better match the reference of anisotropic GGX.

// We deem the intensity difference of a couple of percent for high values of roughness
// to not be worth the cost of another precomputed table.
// Note: this formulation bakes the BSDF non-symmetric!
preLightData.energyCompensation = 1.0 / specularReflectivity - 1.0;
#else
preLightData.energyCompensation = 0.0;
#endif // LIT_USE_GGX_ENERGY_COMPENSATION

// Formula is empirical.
float roughness = PerceptualRoughnessToRoughness(preLightData.iblPerceptualRoughness);
R = lerp(R, preLightData.iblR, saturate(smoothstep(0, 1, roughness * roughness)));
float3 sampleDirectionDiscardWS = lightData.sampleDirectionDiscardWS;
if (dot(sampleDirectionDiscardWS, R) < 0) // Used by planar reflections to early-reject reflections from the opposite side of the plane; neutral for reflection probes
return lighting;
float3 F = preLightData.specularFGD;
float iblMipLevel = PerceptualRoughnessToMipmapLevel(preLightData.iblPerceptualRoughness);
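A CPU-side sketch of the early-discard test above: LightLoop.cs stores captureRotation * Vector3.forward in sampleDirectionDiscardWS for planar probes, so a reflection ray pointing against that direction cannot see the captured plane and the probe is skipped; reflection probes keep a zero vector, which makes the test always pass (the helper name is illustrative):

using UnityEngine;

// Illustrative helper; mirrors the dot-product test above.
static class PlanarDiscardDemo
{
    public static bool ShouldSampleProbe(Vector3 sampleDirectionDiscardWS, Vector3 reflectionDirWS)
    {
        // Planar probes: discard rays facing away from the capture direction.
        // Reflection probes: sampleDirectionDiscardWS is zero, so this always passes.
        return Vector3.Dot(sampleDirectionDiscardWS, reflectionDirWS) >= 0.0f;
    }
}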

22
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/PreIntegratedFGD/PreIntegratedFGD.hlsl


TEXTURE2D(_PreIntegratedFGD);
// For image based lighting, a part of the BSDF is pre-integrated.
// This is done both for specular GGX height-correlated and DisneyDiffuse
// reflectivity is Integral{(BSDF_GGX / F) * <N,L> dw} - used for multiscattering
void GetPreIntegratedFGDGGXAndDisneyDiffuse(float NdotV, float perceptualRoughness, float3 fresnel0, out float3 GGXSpecularFGD, out float disneyDiffuseFGD, out float reflectivity)
{
float3 preFGD = SAMPLE_TEXTURE2D_LOD(_PreIntegratedFGD, s_linear_clamp_sampler, float2(NdotV, perceptualRoughness), 0).xyz;
// Pre-integrate GGX FGD
// Integral{BSDF * <N,L> dw} =
// Integral{(F0 + (1 - F0) * (1 - <V,H>)^5) * (BSDF / F) * <N,L> dw} =
// (1 - F0) * Integral{(1 - <V,H>)^5 * (BSDF / F) * <N,L> dw} + F0 * Integral{(BSDF / F) * <N,L> dw}=
// (1 - F0) * x + F0 * y = lerp(x, y, F0)
GGXSpecularFGD = lerp(preFGD.xxx, preFGD.yyy, fresnel0);
// Pre-integrate DisneyDiffuse FGD:
// z = DisneyDiffuse
// Remap from the [0, 1] to the [0.5, 1.5] range.
disneyDiffuseFGD = preFGD.z + 0.5;
reflectivity = preFGD.y;
}
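A small C# check of the identity the pre-integration above relies on: (1 - F0) * x + F0 * y = lerp(x, y, F0), applied per channel with the two scalars fetched from the FGD texture (names are illustrative):

using UnityEngine;

// Illustrative: x and y are the two pre-integrated scalars (preFGD.x, preFGD.y).
static class FgdLerpDemo
{
    public static Vector3 SpecularFGD(float x, float y, Vector3 fresnel0)
    {
        // (1 - F0) * x + F0 * y == lerp(x, y, F0), per channel.
        return new Vector3(Mathf.Lerp(x, y, fresnel0.x),
                           Mathf.Lerp(x, y, fresnel0.y),
                           Mathf.Lerp(x, y, fresnel0.z));
    }
}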

82
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/StackLit/StackLit.cs


using UnityEngine;
using UnityEngine.Rendering;
//-----------------------------------------------------------------------------
// structure definition
//-----------------------------------------------------------------------------
[GenerateHLSL(PackingRules.Exact)]
public enum MaterialFeatureFlags
{
LitStandard = 1 << 0
};
//-----------------------------------------------------------------------------
// SurfaceData
//-----------------------------------------------------------------------------

public struct SurfaceData
{
[SurfaceDataAttributes("Material Features")]
public uint materialFeatures;
// Standard
[SurfaceDataAttributes("Base Color", false, true)]
public Vector3 baseColor;

[SurfaceDataAttributes("Smoothness A")]
public float perceptualSmoothnessA;
[SurfaceDataAttributes("Smoothness B")]
public float perceptualSmoothnessB;
[SurfaceDataAttributes("Lobe Mixing")]
public float lobeMix;
[SurfaceDataAttributes("Metallic")]
public float metallic;
};
//-----------------------------------------------------------------------------

[GenerateHLSL(PackingRules.Exact, false, true, 1400)]
public struct BSDFData
{
public uint materialFeatures;
public Vector3 fresnel0;
public float perceptualRoughnessA;
public float perceptualRoughnessB;
public float lobeMix;
public float roughnessAT;
public float roughnessAB;
public float roughnessBT;
public float roughnessBB;
public float anisotropy;
//public fixed float test[2];
//-----------------------------------------------------------------------------
// Init precomputed textures
//-----------------------------------------------------------------------------
bool m_isInit;
public StackLit() {}
public override void Build(HDRenderPipelineAsset hdAsset)
{
PreIntegratedFGD.instance.Build();
//LTCAreaLight.instance.Build();
m_isInit = false;
}
public override void Cleanup()
{
PreIntegratedFGD.instance.Cleanup();
//LTCAreaLight.instance.Cleanup();
m_isInit = false;
}
public override void RenderInit(CommandBuffer cmd)
{
if (m_isInit)
return;
PreIntegratedFGD.instance.RenderInit(cmd);
m_isInit = true;
}
public override void Bind()
{
PreIntegratedFGD.instance.Bind();
//LTCAreaLight.instance.Bind();
}
}
}

92
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/StackLit/StackLit.cs.hlsl


#ifndef STACKLIT_CS_HLSL
#define STACKLIT_CS_HLSL
//
// UnityEngine.Experimental.Rendering.HDPipeline.StackLit+MaterialFeatureFlags: static fields
//
#define MATERIALFEATUREFLAGS_LIT_STANDARD (1)
//
#define DEBUGVIEW_STACKLIT_SURFACEDATA_MATERIAL_FEATURES (1300)
#define DEBUGVIEW_STACKLIT_SURFACEDATA_BASE_COLOR (1301)
#define DEBUGVIEW_STACKLIT_SURFACEDATA_NORMAL (1302)
#define DEBUGVIEW_STACKLIT_SURFACEDATA_NORMAL_VIEW_SPACE (1303)
#define DEBUGVIEW_STACKLIT_SURFACEDATA_SMOOTHNESS_A (1304)
#define DEBUGVIEW_STACKLIT_SURFACEDATA_SMOOTHNESS_B (1305)
#define DEBUGVIEW_STACKLIT_SURFACEDATA_LOBE_MIXING (1306)
#define DEBUGVIEW_STACKLIT_SURFACEDATA_METALLIC (1307)
#define DEBUGVIEW_STACKLIT_BSDFDATA_MATERIAL_FEATURES (1400)
#define DEBUGVIEW_STACKLIT_BSDFDATA_DIFFUSE_COLOR (1401)
#define DEBUGVIEW_STACKLIT_BSDFDATA_FRESNEL0 (1402)
#define DEBUGVIEW_STACKLIT_BSDFDATA_NORMAL_WS (1403)
#define DEBUGVIEW_STACKLIT_BSDFDATA_NORMAL_VIEW_SPACE (1404)
#define DEBUGVIEW_STACKLIT_BSDFDATA_PERCEPTUAL_ROUGHNESS_A (1405)
#define DEBUGVIEW_STACKLIT_BSDFDATA_PERCEPTUAL_ROUGHNESS_B (1406)
#define DEBUGVIEW_STACKLIT_BSDFDATA_LOBE_MIXING (1407)
#define DEBUGVIEW_STACKLIT_BSDFDATA_ROUGHNESS_AT (1408)
#define DEBUGVIEW_STACKLIT_BSDFDATA_ROUGHNESS_AB (1409)
#define DEBUGVIEW_STACKLIT_BSDFDATA_ROUGHNESS_BT (1410)
#define DEBUGVIEW_STACKLIT_BSDFDATA_ROUGHNESS_BB (1411)
#define DEBUGVIEW_STACKLIT_BSDFDATA_ANISOTROPY (1412)
uint materialFeatures;
float perceptualSmoothnessA;
float perceptualSmoothnessB;
float lobeMix;
float metallic;
};
// Generated from UnityEngine.Experimental.Rendering.HDPipeline.StackLit+BSDFData

uint materialFeatures;
float3 fresnel0;
float perceptualRoughnessA;
float perceptualRoughnessB;
float lobeMix;
float roughnessAT;
float roughnessAB;
float roughnessBT;
float roughnessBB;
float anisotropy;
};
//

{
switch (paramId)
{
case DEBUGVIEW_STACKLIT_SURFACEDATA_MATERIAL_FEATURES:
result = GetIndexColor(surfacedata.materialFeatures);
break;
case DEBUGVIEW_STACKLIT_SURFACEDATA_BASE_COLOR:
result = surfacedata.baseColor;
needLinearToSRGB = true;

case DEBUGVIEW_STACKLIT_SURFACEDATA_NORMAL_VIEW_SPACE:
result = surfacedata.normalWS * 0.5 + 0.5;
break;
case DEBUGVIEW_STACKLIT_SURFACEDATA_SMOOTHNESS_A:
result = surfacedata.perceptualSmoothnessA.xxx;
break;
case DEBUGVIEW_STACKLIT_SURFACEDATA_SMOOTHNESS_B:
result = surfacedata.perceptualSmoothnessB.xxx;
break;
case DEBUGVIEW_STACKLIT_SURFACEDATA_LOBE_MIXING:
result = surfacedata.lobeMix.xxx;
break;
case DEBUGVIEW_STACKLIT_SURFACEDATA_METALLIC:
result = surfacedata.metallic.xxx;
break;
}
}

{
switch (paramId)
{
case DEBUGVIEW_STACKLIT_BSDFDATA_MATERIAL_FEATURES:
result = GetIndexColor(bsdfdata.materialFeatures);
break;
case DEBUGVIEW_STACKLIT_BSDFDATA_FRESNEL0:
result = bsdfdata.fresnel0;
break;
case DEBUGVIEW_STACKLIT_BSDFDATA_PERCEPTUAL_ROUGHNESS_A:
result = bsdfdata.perceptualRoughnessA.xxx;
break;
case DEBUGVIEW_STACKLIT_BSDFDATA_PERCEPTUAL_ROUGHNESS_B:
result = bsdfdata.perceptualRoughnessB.xxx;
break;
case DEBUGVIEW_STACKLIT_BSDFDATA_LOBE_MIXING:
result = bsdfdata.lobeMix.xxx;
break;
case DEBUGVIEW_STACKLIT_BSDFDATA_ROUGHNESS_AT:
result = bsdfdata.roughnessAT.xxx;
break;
case DEBUGVIEW_STACKLIT_BSDFDATA_ROUGHNESS_AB:
result = bsdfdata.roughnessAB.xxx;
break;
case DEBUGVIEW_STACKLIT_BSDFDATA_ROUGHNESS_BT:
result = bsdfdata.roughnessBT.xxx;
break;
case DEBUGVIEW_STACKLIT_BSDFDATA_ROUGHNESS_BB:
result = bsdfdata.roughnessBB.xxx;
break;
case DEBUGVIEW_STACKLIT_BSDFDATA_ANISOTROPY:
result = bsdfdata.anisotropy.xxx;
break;
}
}

289
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/StackLit/StackLit.hlsl


//
// Also add options at the top of this file, see Lit.hlsl.
//-----------------------------------------------------------------------------
// Texture and constant buffer declaration
//-----------------------------------------------------------------------------
// Declare the BSDF specific FGD property and its fetching function
#include "../PreIntegratedFGD/PreIntegratedFGD.hlsl"
//-----------------------------------------------------------------------------
// Definition
//-----------------------------------------------------------------------------
#define HAS_REFRACTION (defined(_REFRACTION_PLANE) || defined(_REFRACTION_SPHERE)) && (defined(_REFRACTION_SSRAY_PROXY) || defined(_REFRACTION_SSRAY_HIZ))
#define DEFAULT_SPECULAR_VALUE 0.04
//-----------------------------------------------------------------------------
// Configuration
//-----------------------------------------------------------------------------
#define LIT_DIFFUSE_LAMBERT_BRDF // TODO Disney Diffuse
#define LIT_USE_GGX_ENERGY_COMPENSATION
//-----------------------------------------------------------------------------
// Helper functions/variable specific to this material
//-----------------------------------------------------------------------------
// This method allows us to know at compile time what material features should be removed from the code when classification is done per tile (independently of the per-pixel value of the material feature flags).
// This is only useful for classification during lighting, so it's not needed in EncodeIntoGBuffer and ConvertSurfaceDataToBSDFData (where we always know exactly what the material features are)
bool HasFeatureFlag(uint featureFlags, uint flag)
{
return ((featureFlags & flag) != 0);
}
float3 ComputeDiffuseColor(float3 baseColor, float metallic)
{
return baseColor * (1.0 - metallic);
}
float3 ComputeFresnel0(float3 baseColor, float metallic, float dielectricF0)
{
return lerp(dielectricF0.xxx, baseColor, metallic);
}
// This function is use to help with debugging and must be implemented by any lit material
// Implementer must take into account what are the current override component and

BSDFData bsdfData;
ZERO_INITIALIZE(BSDFData, bsdfData);
// IMPORTANT: In our forward only case, all enable flags are statically known at compile time, so the compiler can do compile time optimization
bsdfData.materialFeatures = surfaceData.materialFeatures;
// Two lobe base material
bsdfData.perceptualRoughnessA = PerceptualSmoothnessToPerceptualRoughness(surfaceData.perceptualSmoothnessA);
bsdfData.perceptualRoughnessB = PerceptualSmoothnessToPerceptualRoughness(surfaceData.perceptualSmoothnessB);
bsdfData.lobeMix = surfaceData.lobeMix;
// There is no metallic with SSS and specular color mode
//todo: float metallic = HasFeatureFlag(surfaceData.materialFeatures, MATERIALFEATUREFLAGS_LIT_SPECULAR_COLOR | MATERIALFEATUREFLAGS_LIT_SUBSURFACE_SCATTERING | MATERIALFEATUREFLAGS_LIT_TRANSMISSION) ? 0.0 : surfaceData.metallic;
float metallic = surfaceData.metallic;
bsdfData.diffuseColor = ComputeDiffuseColor(surfaceData.baseColor, metallic);
bsdfData.fresnel0 = ComputeFresnel0(surfaceData.baseColor, surfaceData.metallic, DEFAULT_SPECULAR_VALUE);
// roughnessT and roughnessB are clamped, and are meant to be used with punctual and directional lights.
// perceptualRoughness is not clamped, and is meant to be used for IBL.
// TODO: add ui inputs, +tangent map, tangentws to use anisotropy; for now bsdfData.anisotropy = 0,
// so only bsdfData.roughnessT is used.
ConvertAnisotropyToClampRoughness(bsdfData.perceptualRoughnessA, bsdfData.anisotropy, bsdfData.roughnessAT, bsdfData.roughnessAB);
ConvertAnisotropyToClampRoughness(bsdfData.perceptualRoughnessB, bsdfData.anisotropy, bsdfData.roughnessBT, bsdfData.roughnessBB);
ApplyDebugToBSDFData(bsdfData);
return bsdfData;

// Override debug value output to be more readable
switch (paramId)
{
case DEBUGVIEW_STACKLIT_SURFACEDATA_NORMAL_VIEW_SPACE:
// Convert to view space
result = TransformWorldToViewDir(surfaceData.normalWS) * 0.5 + 0.5;
break;

// Override debug value output to be more readable
switch (paramId)
{
case DEBUGVIEW_STACKLIT_BSDFDATA_NORMAL_VIEW_SPACE:
// Convert to view space
result = TransformWorldToViewDir(bsdfData.normalWS) * 0.5 + 0.5;
break;

struct PreLightData
{
float NdotV; // Could be negative due to normal mapping, use ClampNdotV()
//NEWLITTODO
// IBL: we calculate and prefetch the pre-integrated split sum data for
// both lobes
float3 iblR[2]; // Dominant specular direction, used for IBL in EvaluateBSDF_Env()
float iblPerceptualRoughness[2];
float3 specularFGD[2]; // Store preconvolved BRDF for both specular and diffuse
float diffuseFGD[2];
// GGX
float partLambdaV[2]; // One for each lobe
float energyCompensation;
};
PreLightData GetPreLightData(float3 V, PositionInputs posInput, inout BSDFData bsdfData)

float3 N = bsdfData.normalWS;
preLightData.NdotV = dot(N, V);
preLightData.iblPerceptualRoughness[0] = bsdfData.perceptualRoughnessA;
preLightData.iblPerceptualRoughness[1] = bsdfData.perceptualRoughnessB;
float NdotV = ClampNdotV(preLightData.NdotV);
float3 iblN[2], iblR[2];
// We will need two hacked N for the stretch anisotropic hack later.
// (Could use a UNITY_UNROLL loop, but not much code to dupe)
preLightData.partLambdaV[0] = GetSmithJointGGXPartLambdaV(NdotV, bsdfData.roughnessAT);
preLightData.partLambdaV[1] = GetSmithJointGGXPartLambdaV(NdotV, bsdfData.roughnessBT);
iblN[0] = iblN[1] = N;
// IBL
// Handle IBL pre calculated data + GGX multiscattering energy loss compensation term
float specularReflectivity[2];
GetPreIntegratedFGDGGXAndDisneyDiffuse(NdotV, preLightData.iblPerceptualRoughness[0], bsdfData.fresnel0, preLightData.specularFGD[0], preLightData.diffuseFGD[0], specularReflectivity[0]);
GetPreIntegratedFGDGGXAndDisneyDiffuse(NdotV, preLightData.iblPerceptualRoughness[1], bsdfData.fresnel0, preLightData.specularFGD[1], preLightData.diffuseFGD[1], specularReflectivity[1]);
#ifdef LIT_DIFFUSE_LAMBERT_BRDF
preLightData.diffuseFGD[0] = preLightData.diffuseFGD[1] = 1.0;
#endif
iblR[0] = reflect(-V, iblN[0]);
iblR[1] = reflect(-V, iblN[1]);
// This is an ad-hoc tweak to better match the reference of anisotropic GGX.
// TODO: We need a better hack.
float fact = saturate(1.2 - abs(bsdfData.anisotropy));
preLightData.iblPerceptualRoughness[0] *= fact;
preLightData.iblPerceptualRoughness[1] *= fact;
// Correction of the reflected direction for better handling of rough materials
preLightData.iblR[0] = GetSpecularDominantDir(N, iblR[0], preLightData.iblPerceptualRoughness[0], NdotV);
preLightData.iblR[1] = GetSpecularDominantDir(N, iblR[1], preLightData.iblPerceptualRoughness[1], NdotV);
#ifdef LIT_USE_GGX_ENERGY_COMPENSATION
// Ref: Practical multiple scattering compensation for microfacet models.
// We only apply the formulation for metals.
// For dielectrics, the change of reflectance is negligible.
// We deem the intensity difference of a couple of percent for high values of roughness
// to not be worth the cost of another precomputed table.
// Note: this formulation bakes the BSDF non-symmetric!
// Note: that this also assumes all specular comes from GGX BSDFs.
// (That's the FGD we use above to get integral[BSDF/F (N.w) dw] )
// In our case, since this compensation term is already an average
// applied to a sum (akin to a "split sum" approximation) we will
// just lerp our two "specularReflectivities"
preLightData.energyCompensation = 1.0 / lerp(specularReflectivity[0], specularReflectivity[1], bsdfData.lobeMix) - 1.0;
#else
preLightData.energyCompensation = 0.0;
#endif // LIT_USE_GGX_ENERGY_COMPENSATION
return preLightData;
}
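A CPU-side sketch of the energy-compensation term computed at the end of GetPreLightData above: the single-scattering GGX integral loses roughly 1 - reflectivity of the energy, and the boost 1 + F0 * (1 / reflectivity - 1) restores it for metals; with two lobes the reflectivities are simply lerped by lobeMix (the helper is illustrative):

using UnityEngine;

// Illustrative helper following the shader math above.
static class EnergyCompensationDemo
{
    public static Vector3 SpecularBoost(Vector3 fresnel0,
                                        float reflectivityA, float reflectivityB, float lobeMix)
    {
        float reflectivity = Mathf.Lerp(reflectivityA, reflectivityB, lobeMix);
        float energyCompensation = 1.0f / reflectivity - 1.0f;
        // Applied later as: specularLighting *= 1 + fresnel0 * energyCompensation.
        return Vector3.one + fresnel0 * energyCompensation;
    }
}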

// BSDF shared between directional light, punctual light and area light (reference)
//-----------------------------------------------------------------------------
// NEWLITTODO
// This function applies the BSDF. Assumes that NdotL is positive.
void BSDF( float3 V, float3 L, float NdotL, float3 positionWS, PreLightData preLightData, BSDFData bsdfData,

float3 N = bsdfData.normalWS;
// Optimized math. Ref: PBR Diffuse Lighting for GGX + Smith Microsurfaces (slide 114).
float LdotV = dot(L, V);
float invLenLV = rsqrt(max(2.0 * LdotV + 2.0, FLT_EPS)); // invLenLV = rcp(length(L + V)), clamp to avoid rsqrt(0) = NaN
float NdotH = saturate((NdotL + preLightData.NdotV) * invLenLV); // Do not clamp NdotV here
float LdotH = saturate(invLenLV * LdotV + invLenLV);
float NdotV = ClampNdotV(preLightData.NdotV);
// TODO: Proper Fresnel
float3 F = F_Schlick(bsdfData.fresnel0, LdotH);
// TODO: with iridescence, will be per light sample.
float DV[2];
DV[0] = DV_SmithJointGGX(NdotH, NdotL, NdotV, bsdfData.roughnessAT, preLightData.partLambdaV[0]);
DV[1] = DV_SmithJointGGX(NdotH, NdotL, NdotV, bsdfData.roughnessBT, preLightData.partLambdaV[1]);
specularLighting = F * lerp(DV[0], DV[1], bsdfData.lobeMix);
// TODO: config option + diffuse GGX
// TODO: coat
}
//-----------------------------------------------------------------------------

IndirectLighting lighting;
ZERO_INITIALIZE(IndirectLighting, lighting);
//NEWLITTODO
// TODO: refraction
#if !HAS_REFRACTION
if (GPUImageBasedLightingType == GPUIMAGEBASEDLIGHTINGTYPE_REFRACTION)
return lighting;
#endif
float3 envLighting;
float3 positionWS = posInput.positionWS;
float weight;
float tempWeight[2];
#ifdef LIT_DISPLAY_REFERENCE_IBL
envLighting = IntegrateSpecularGGXIBLRef(lightLoopContext, V, preLightData, lightData, bsdfData);
// TODO: Do refraction reference (is it even possible ?)
// TODO: handle clear coat
// TODO: Handle two lobes in reference
// #ifdef LIT_DIFFUSE_LAMBERT_BRDF
// envLighting += IntegrateLambertIBLRef(lightData, V, bsdfData);
// #else
// envLighting += IntegrateDisneyDiffuseIBLRef(lightLoopContext, V, preLightData, lightData, bsdfData);
// #endif
#else
float3 R[2];
R[0] = preLightData.iblR[0];
R[1] = preLightData.iblR[1];
// TODO: Refraction
// We will sample 2 times (one for each lobe) the environment.
// Steps are:
// -Calculate influence weights from intersection with the proxies.
// Since the weights are influence blending weights, we can correctly
// use our lobe weight and mix them.
// -Fudge the sampling direction to dampen boundary artefacts.
// -Do early discard for planar reflections.
// -Fetch 2 samples of preintegrated environment lighting
// (see preLD, first part of the split-sum approx.)
// -Use the 2 BSDF preintegration terms we pre-fetched in preLightData
// (second part of the split-sum approx.,
// and common to all Env. Lights. using the same BSDF and
// we only have GGX thus only one FGD map for now)
// -Multiply the two split sum terms together for each lobe
// and linearly combine the results.
// Note: using influenceShapeType and projectionShapeType instead of (lightData|proxyData).shapeType allows the compiler to optimize in case the type is known (like for the sky)
tempWeight[0] = tempWeight[1] = 1.0;
EvaluateLight_EnvIntersection(positionWS, bsdfData.normalWS, lightData, influenceShapeType, R[0], tempWeight[0]);
EvaluateLight_EnvIntersection(positionWS, bsdfData.normalWS, lightData, influenceShapeType, R[1], tempWeight[1]);
weight = lerp(tempWeight[0], tempWeight[1], bsdfData.lobeMix);
// TODO: reflections + coating
// When we are rough, we tend to see an outward shift of the reflection at the boundary of the projection volume,
// and the reflection also appears sharper. To avoid these artifacts, and at the same time get a better match to the reference, we lerp back to the original unmodified reflection.
// The formula is empirical.
float roughness = PerceptualRoughnessToRoughness(preLightData.iblPerceptualRoughness[0]);
R[0] = lerp(R[0], preLightData.iblR[0], saturate(smoothstep(0, 1, roughness * roughness)));
roughness = PerceptualRoughnessToRoughness(preLightData.iblPerceptualRoughness[1]);
R[1] = lerp(R[1], preLightData.iblR[1], saturate(smoothstep(0, 1, roughness * roughness)));
float3 F[2];
F[0] = preLightData.specularFGD[0];
F[1] = preLightData.specularFGD[1];
float4 preLD[2];
float iblMipLevel = PerceptualRoughnessToMipmapLevel(preLightData.iblPerceptualRoughness[0]);
preLD[0] = SampleEnv(lightLoopContext, lightData.envIndex, R[0], iblMipLevel);
iblMipLevel = PerceptualRoughnessToMipmapLevel(preLightData.iblPerceptualRoughness[1]);
preLD[1] = SampleEnv(lightLoopContext, lightData.envIndex, R[1], iblMipLevel);
// Used by planar reflection to discard pixel:
weight *= lerp(preLD[0].a, preLD[1].a, bsdfData.lobeMix);
if (GPUImageBasedLightingType == GPUIMAGEBASEDLIGHTINGTYPE_REFLECTION)
{
envLighting = lerp(F[0] * preLD[0].rgb, F[1] * preLD[1].rgb, bsdfData.lobeMix);
// TODO: Clear Coat component if needed
}
else
{
// TODO: Refractions
// No clear coat support with refraction
// Specular transmitted lighting is the remainder of the reflection (let's use this approximation)
// With refraction, we don't care about the clear coat value, only about the Fresnel, which is why we use 'envLighting ='
//envLighting = (1.0 - F) * preLD.rgb * preLightData.transparentTransmittance;
}
#endif // LIT_DISPLAY_REFERENCE_IBL
UpdateLightingHierarchyWeights(hierarchyWeight, weight);
envLighting *= weight * lightData.multiplier;
if (GPUImageBasedLightingType == GPUIMAGEBASEDLIGHTINGTYPE_REFLECTION)
lighting.specularReflected = envLighting;
//TODO refraction:
//else
// lighting.specularTransmitted = envLighting * preLightData.transparentTransmittance;
return lighting;
}
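The "split-sum" approximation referenced in the steps above factorizes the specular environment integral, per lobe, into two prefiltered terms that are simply multiplied:

$$ L_{\text{env}} \;\approx\; \underbrace{\int_\Omega L_i(\omega)\, D(\omega, R)\, \mathrm{d}\omega}_{\texttt{preLD}\ \text{(mip selected by roughness)}} \;\times\; \underbrace{\int_\Omega f_r(\omega, v)\, \langle n \cdot \omega \rangle\, \mathrm{d}\omega}_{\texttt{specularFGD}} $$

so the two-lobe combine in the reflection branch is exactly lerp(F[0] * preLD[0].rgb, F[1] * preLD[1].rgb, lobeMix). And since the influence weights are blending weights, the same lerp with lobeMix is valid for them as well, which is what the weight computation above relies on.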

PreLightData preLightData, BSDFData bsdfData, BakeLightingData bakeLightingData, AggregateLighting lighting,
out float3 diffuseLighting, out float3 specularLighting)
{
// Apply the albedo to the direct diffuse lighting and that's about it.
// TODO: AO, SO, SSS, Refraction
specularLighting = lighting.direct.specular + lighting.indirect.specularReflected;
// Apply the fudge factor (boost) to compensate for multiple scattering not accounted for in the BSDF.
// This assumes all specular comes from a GGX BSDF.
// Note: multiplying by fresnel0 applies the boost only to the metallic part.
specularLighting *= 1.0 + bsdfData.fresnel0 * preLightData.energyCompensation;
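The shape of this boost matches the usual energy-compensation factor for GGX multiple scattering; assuming preLightData.energyCompensation is precomputed in the Kulla-Conty style (an assumption on our part, as the code here only consumes the value), the factor is:

$$ 1 + f_0 \left( \frac{1}{E_{ss}(\alpha)} - 1 \right) $$

where $E_{ss}$ is the directional albedo of the single-scattering GGX lobe. For smooth surfaces $E_{ss} \approx 1$ and the factor degenerates to 1; as roughness grows, the energy lost to single-scattering-only shading is re-injected, fully for metals ($f_0 \to 1$) and not at all when $f_0 = 0$.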
#ifdef DEBUG_DISPLAY

switch (_DebugLightingMode)
{
case DEBUGLIGHTINGMODE_LUX_METER:
diffuseLighting = lighting.direct.diffuse + bakeLightingData.bakeDiffuseLighting;
//diffuseLighting = indirectAmbientOcclusion;
//diffuseLighting = aoFactor.indirectAmbientOcclusion;
//diffuseLighting = specularOcclusion;
//diffuseLighting = aoFactor.indirectSpecularOcclusion;
break;
case DEBUGLIGHTINGMODE_SCREEN_SPACE_TRACING_REFRACTION:

32
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/StackLit/StackLit.shader


_BaseColor("BaseColor", Color) = (1,1,1,1)
_BaseColorMap("BaseColorMap", 2D) = "white" {}
_Metallic("Metallic", Range(0.0, 1.0)) = 0
_SmoothnessA("SmoothnessA", Range(0.0, 1.0)) = 1.0
_SmoothnessARemapMin("SmoothnessARemapMin", Float) = 0.0
_SmoothnessARemapMax("SmoothnessARemapMax", Float) = 1.0
_SmoothnessB("SmoothnessB", Range(0.0, 1.0)) = 1.0
_SmoothnessBRemapMin("SmoothnessBRemapMin", Float) = 0.0
_SmoothnessBRemapMax("SmoothnessBRemapMax", Float) = 1.0
_LobeMix("lobeMix", Range(0.0, 1.0)) = 0
_MaskMapA("MaskMapA", 2D) = "white" {}
_MaskMapB("MaskMapB", 2D) = "white" {}
_NormalMap("NormalMap", 2D) = "bump" {} // Tangent space normal map
_NormalScale("_NormalScale", Range(0.0, 2.0)) = 1
[Enum(UV0, 0, UV1, 1, UV2, 2, UV3, 3, Planar, 4, Triplanar, 5)] _UVBase("UV Set for base", Float) = 0
[HideInInspector] _UVMappingMask("_UVMappingMask", Color) = (1, 0, 0, 0)
_EmissiveColor("EmissiveColor", Color) = (1, 1, 1)
_EmissiveColorMap("EmissiveColorMap", 2D) = "white" {}
_EmissiveIntensity("EmissiveIntensity", Float) = 0

#pragma shader_feature _ALPHATEST_ON
#pragma shader_feature _DOUBLESIDED_ON
#pragma shader_feature _NORMALMAP_TANGENT_SPACE
#pragma shader_feature _ _REQUIRE_UV2 _REQUIRE_UV3
// ...TODO: for the surface gradient framework, e.g. see LitData.hlsl;
// but we need these right away to toggle the LayerTexCoord mapping, so we may need to get them
// from the Frag inputs right away. See also ShaderPass/StackLitSharePass.hlsl.
#pragma shader_feature _NORMALMAP
#pragma shader_feature _MASKMAPA
#pragma shader_feature _MASKMAPB
#pragma shader_feature _EMISSIVE_COLOR_MAP
// Keyword for transparent

242
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/StackLit/StackLitData.hlsl


//-------------------------------------------------------------------------------------
// Fill SurfaceData/Builtin data function
//-------------------------------------------------------------------------------------
#include "CoreRP/ShaderLibrary/Sampling/SampleUVMapping.hlsl"
//-----------------------------------------------------------------------------
// Texture Mapping (think of LayerTexCoord as simply TexCoordMappings,
// ie no more layers here - cf Lit materials)
//-----------------------------------------------------------------------------
//
// For easier copying of code for now use a LayerTexCoord wrapping struct.
// We don't have details yet.
//
// NEWLITTODO: Eventually, we could quickly share GetBuiltinData of LitBuiltinData.hlsl
// in our GetSurfaceAndBuiltinData( ) here, since we will use the LayerTexCoord identifier,
// and an identical ComputeLayerTexCoord( ) prototype
//
struct LayerTexCoord
{
UVMapping base;
UVMapping details;
// Store information that will be shared by all UVMapping
float3 vertexNormalWS; // TODO: store also object normal map for object triplanar
};
// We want to use only one sampler for the normal map / bent normal map, whether it is in object space (OS) or tangent space (TS), and whether we have a normal map, a bent normal map, or both.
//
// Note (compared to Lit shader):
//
// We don't have a layered material with which we are sharing code here like the LayeredLit shader, but we can also save a couple of
// samplers later if we use bentnormals.
//
// The _IDX suffix is meaningless here: we could use the name SAMPLER_NORMALMAP_ID instead of SAMPLER_NORMALMAP_IDX and replace all
// indirect #ifdef _NORMALMAP_TANGENT_SPACE_IDX and #ifdef _NORMALMAP_IDX tests with the more direct
// shader_feature keywords _NORMALMAP_TANGENT_SPACE and _NORMALMAP.
//
// (Originally in the LayeredLit shader, shader_feature keywords like _NORMALMAP become _NORMALMAP0 but since files are shared,
// LitDataIndividualLayer will use a generic _NORMALMAP_IDX defined before its inclusion by the client LitData or LayeredLitData.
// That way, LitDataIndividualLayer supports multiple inclusions)
//
//
#ifdef _NORMALMAP_TANGENT_SPACE
#if defined(_NORMALMAP)
#define SAMPLER_NORMALMAP_ID sampler_NormalMap
// TODO:
//#elif defined(_BENTNORMALMAP)
//#define SAMPLER_NORMALMAP_ID sampler_BentNormalMap
#endif
#else
// TODO:
//#error STACKLIT_USES_ONLY_TANGENT_SPACE_FOR_NOW
//#if defined(_NORMALMAP)
//#define SAMPLER_NORMALMAP_ID sampler_NormalMapOS
//#elif defined(_BENTNORMALMAP)
//#define SAMPLER_NORMALMAP_ID sampler_BentNormalMapOS
//#endif
#endif
void ComputeLayerTexCoord( // Uv related parameters
float2 texCoord0, float2 texCoord1, float2 texCoord2, float2 texCoord3, float4 uvMappingMask,
// scale and bias for base
float2 texScale, float2 texBias,
// mapping type and output
int mappingType, inout LayerTexCoord layerTexCoord)
{
//TODO: Planar, Triplanar, detail map, surface_gradient.
// Handle uv0, uv1, uv2, uv3 based on the _UVMappingMask weights (exclusive, 0..1)
float2 uvBase = uvMappingMask.x * texCoord0 +
uvMappingMask.y * texCoord1 +
uvMappingMask.z * texCoord2 +
uvMappingMask.w * texCoord3;
// Copy data in uvmapping fields: used by generic sampling code (see especially SampleUVMappingNormalInternal.hlsl)
layerTexCoord.base.mappingType = mappingType;
layerTexCoord.base.normalWS = layerTexCoord.vertexNormalWS;
// Apply tiling options
layerTexCoord.base.uv = uvBase * texScale + texBias;
}
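For illustration only, a minimal usage sketch of the mapping above (the values and the local names uv0..uv3 / vertexNormalWS are hypothetical; the real call site is GetLayerTexCoord further down). A mask of float4(0, 1, 0, 0) routes texCoord1 into the mapping, and scale/bias then applies the material tiling:

// Hypothetical example: select UV1 and tile it 2x with no offset.
LayerTexCoord ltc;
ZERO_INITIALIZE(LayerTexCoord, ltc);
ltc.vertexNormalWS = vertexNormalWS; // needed later by the planar/triplanar paths
ComputeLayerTexCoord(uv0, uv1, uv2, uv3,
                     float4(0.0, 1.0, 0.0, 0.0),         // uvMappingMask: picks texCoord1
                     float2(2.0, 2.0), float2(0.0, 0.0), // texScale, texBias
                     UV_MAPPING_UVSET, ltc);
// Result: ltc.base.uv == uv1 * 2.0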
float3 GetNormalTS(FragInputs input, LayerTexCoord layerTexCoord, float3 detailNormalTS, float detailMask)
{
// TODO: different spaces (eg #ifdef _NORMALMAP_TANGENT_SPACE #elif object space, SURFACE_GRADIENT, etc.)
// and use detail map
float3 normalTS;
// Note we don't use the _NORMALMAP_IDX mechanism of the Lit shader, since we don't have "layers", we can
// directly use the shader_feature keyword:
#ifdef _NORMALMAP
normalTS = SAMPLE_UVMAPPING_NORMALMAP(_NormalMap, SAMPLER_NORMALMAP_ID, layerTexCoord.base, _NormalScale);
#else
normalTS = float3(0.0, 0.0, 1.0);
#endif
return normalTS;
}
// This may be called directly by the tessellation (domain) shader; thus everything regarding the surface gradient must be done
// in the function taking FragInputs as a parameter.
// layerTexCoord must have been initialized to 0 outside of this function.
void GetLayerTexCoord(float2 texCoord0, float2 texCoord1, float2 texCoord2, float2 texCoord3,
float3 positionWS, float3 vertexNormalWS, inout LayerTexCoord layerTexCoord)
{
layerTexCoord.vertexNormalWS = vertexNormalWS;
// TODO:
//layerTexCoord.triplanarWeights = ComputeTriplanarWeights(vertexNormalWS);
int mappingType = UV_MAPPING_UVSET;
//TODO: _MAPPING_PLANAR, _MAPPING_TRIPLANAR
// Be sure that the compiler is aware that we don't use UV1 to UV3 for main layer so it can optimize code
ComputeLayerTexCoord( texCoord0, texCoord1, texCoord2, texCoord3, _UVMappingMask, /* TODO _UVDetailsMappingMask, */
_BaseColorMap_ST.xy, _BaseColorMap_ST.zw, /* TODO _DetailMap_ST.xy, _DetailMap_ST.zw, 1.0, _LinkDetailsWithBase, */
/* TODO positionWS, _TexWorldScale, */
mappingType, layerTexCoord);
}
// This is called only in this file.
// layerTexCoord must have been initialized to 0 outside of this function.
void GetLayerTexCoord(FragInputs input, inout LayerTexCoord layerTexCoord)
{
// TODO: SURFACE_GRADIENT
//#ifdef SURFACE_GRADIENT
//GenerateLayerTexCoordBasisTB(input, layerTexCoord);
//#endif
GetLayerTexCoord( input.texCoord0, input.texCoord1, input.texCoord2, input.texCoord3,
input.positionWS, input.worldToTangent[2].xyz, layerTexCoord);
}
//-----------------------------------------------------------------------------
// ...Texture Mapping
//-----------------------------------------------------------------------------
//
// cf with
// LitData.hlsl:GetSurfaceAndBuiltinData()
// LitDataIndividualLayer.hlsl:GetSurfaceData( )
// LitBuiltinData.hlsl:GetBuiltinData()
//
// Here we can combine them
//
// NEWLITTODO:
// Eventually, we want to share all the LitData LayerTexCoord (and surface gradient frame + uv, planar, triplanar, etc.) logic, also
// spread in LitDataIndividualLayer and LitDataMeshModification.
LayerTexCoord layerTexCoord;
ZERO_INITIALIZE(LayerTexCoord, layerTexCoord);
GetLayerTexCoord(input, layerTexCoord);
// -------------------------------------------------------------
// Surface Data:
// -------------------------------------------------------------
// We perform the conversion of normalTS to world space outside of GetSurfaceData,
// as it allows us to correctly deal with the detail normal map and optimize the code for the layered shaders.
float3 normalTS;
// TODO: Those are only needed once we handle specular occlusion and optionally bent normal maps.
// Also, for the builtinData part, use bentnormal to sample diffuse GI
//float3 bentNormalTS;
//float3 bentNormalWS;
//float alpha = SAMPLE_TEXTURE2D(_BaseColorMap, sampler_BaseColorMap, baseColorMapUv).a * _BaseColor.a;
float alpha = SAMPLE_UVMAPPING_TEXTURE2D(_BaseColorMap, sampler_BaseColorMap, layerTexCoord.base).a * _BaseColor.a;
#ifdef _ALPHATEST_ON
//NEWLITTODO: Once we include those passes in the main StackLit.shader, add handling of CUTOFF_TRANSPARENT_DEPTH_PREPASS and _POSTPASS
// and the related properties (in the .shader) and uniforms (in the StackLitProperties file) _AlphaCutoffPrepass, _AlphaCutoffPostpass

// TODO detail map:
float3 detailNormalTS = float3(0.0, 0.0, 0.0);
float detailMask = 0.0;
//TODO remove the following and use fetching macros that use uvmapping :
//float2 baseColorMapUv = TRANSFORM_TEX(input.texCoord0, _BaseColorMap);
//surfaceData.baseColor = SAMPLE_TEXTURE2D(_BaseColorMap, sampler_BaseColorMap, baseColorMapUv).rgb * _BaseColor.rgb;
surfaceData.baseColor = SAMPLE_UVMAPPING_TEXTURE2D(_BaseColorMap, sampler_BaseColorMap, layerTexCoord.base).rgb * _BaseColor.rgb;
//surfaceData.normalWS = float3(0.0, 0.0, 0.0);
normalTS = GetNormalTS(input, layerTexCoord, detailNormalTS, detailMask);
//TODO: bentNormalTS
#if defined(_MASKMAPA)
surfaceData.perceptualSmoothnessA = SAMPLE_UVMAPPING_TEXTURE2D(_MaskMapA, sampler_MaskMapA, layerTexCoord.base).a;
surfaceData.perceptualSmoothnessA = lerp(_SmoothnessARemapMin, _SmoothnessARemapMax, surfaceData.perceptualSmoothnessA);
#else
surfaceData.perceptualSmoothnessA = _SmoothnessA;
#endif
#if defined(_MASKMAPB)
surfaceData.perceptualSmoothnessB = SAMPLE_UVMAPPING_TEXTURE2D(_MaskMapB, sampler_MaskMapB, layerTexCoord.base).a;
surfaceData.perceptualSmoothnessB = lerp(_SmoothnessBRemapMin, _SmoothnessBRemapMax, surfaceData.perceptualSmoothnessB);
#else
surfaceData.perceptualSmoothnessB = _SmoothnessB;
#endif
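The remap above is a plain linear blend of the mask's alpha channel into [RemapMin, RemapMax]. With hypothetical values RemapMin = 0.2, RemapMax = 0.8 and a sampled alpha of 0.5:

$$ s = \operatorname{lerp}(0.2,\ 0.8,\ 0.5) = 0.2 + 0.5\,(0.8 - 0.2) = 0.5 $$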
// TODOSTACKLIT: lobe weighting
surfaceData.lobeMix = _LobeMix;
// MaskMapA is RGBA: Metallic, Ambient Occlusion (Optional), detail Mask (Optional), Smoothness
// TODO: Ambient occlusion, detail mask.
#ifdef _MASKMAPA
surfaceData.metallic = SAMPLE_UVMAPPING_TEXTURE2D(_MaskMapA, sampler_MaskMapA, layerTexCoord.base).r;
#else
surfaceData.metallic = 1.0;
#endif
surfaceData.metallic *= _Metallic;
// These static material features allow compile-time optimization.
// TODO: As we add features, or-in the flags, e.g. MATERIALFEATUREFLAGS_LIT_*, with #ifdef
// on the corresponding _MATERIAL_FEATURE_* shader_feature keywords (set by the UI) so the compiler
// knows the value of surfaceData.materialFeatures.
surfaceData.materialFeatures = MATERIALFEATUREFLAGS_LIT_STANDARD;
// -------------------------------------------------------------
// Surface Data Part 2 (outside GetSurfaceData( ) in the Lit shader):
// -------------------------------------------------------------
GetNormalWS(input, V, normalTS, surfaceData.normalWS); // MaterialUtilities.hlsl
// TODO: decal etc.
surfaceData.baseColor = GetTextureDataDebug(_DebugMipMapMode, layerTexCoord.base.uv, _BaseColorMap, _BaseColorMap_TexelSize, _BaseColorMap_MipInfo, surfaceData.baseColor);
surfaceData.metallic = 0;
// -------------------------------------------------------------
// Builtin Data:
// -------------------------------------------------------------
// NEWLITTODO: for all BuiltinData, we might need to just refactor and use a common function like the one
// contained in LitBuiltinData.hlsl.

// Emissive intensity is only used here, but is part of BuiltinData to enforce the UI parameters, as we want the users to fill in one color and one intensity.
builtinData.emissiveIntensity = _EmissiveIntensity; // We still store intensity here so we can reuse it with debug code
#ifdef _EMISSIVE_COLOR_MAP
builtinData.emissiveColor *= SAMPLE_TEXTURE2D(_EmissiveColorMap, sampler_EmissiveColorMap, TRANSFORM_TEX(input.texCoord0, _EmissiveColorMap)).rgb;

23
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/StackLit/StackLitProperties.hlsl


TEXTURE2D(_BaseColorMap);
SAMPLER(sampler_BaseColorMap);
TEXTURE2D(_MaskMapA);
SAMPLER(sampler_MaskMapA);
TEXTURE2D(_MaskMapB);
SAMPLER(sampler_MaskMapB);
TEXTURE2D(_NormalMap);
SAMPLER(sampler_NormalMap);
CBUFFER_START(UnityPerMaterial)

float4 _BaseColorMap_MipInfo;
float _Metallic;
float _SmoothnessA;
float _SmoothnessARemapMin;
float _SmoothnessARemapMax;
float _SmoothnessB;
float _SmoothnessBRemapMin;
float _SmoothnessBRemapMax;
float _LobeMix;
float _NormalScale;
float4 _UVMappingMask;
float3 _EmissiveColor;
float4 _EmissiveColorMap_ST;

2
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/SubsurfaceScattering/SubsurfaceScattering.compute


void SubsurfaceScattering(uint2 groupId : SV_GroupID,
uint groupThreadId : SV_GroupThreadID)
{
groupThreadId &= GROUP_SIZE_2D - 1; // Help the compiler
// Note: any factor of 64 is a suitable wave size for our algorithm.
uint waveIndex = WaveReadFirstLane(groupThreadId / 64);
uint laneIndex = groupThreadId % 64;
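The index arithmetic above splits the flattened group-thread index $t$ into a wave-uniform part and a per-lane part; as the comment notes, this is valid for any wave size that divides 64:

$$ \text{waveIndex} = \lfloor t / 64 \rfloor, \qquad \text{laneIndex} = t \bmod 64, \qquad t = 64\,\text{waveIndex} + \text{laneIndex} $$

Wrapping the quotient in WaveReadFirstLane broadcasts the first active lane's value, telling the compiler the result is uniform across the wave so it can keep it in scalar registers.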

5
ScriptableRenderPipeline/HDRenderPipeline/HDRP/RenderPipeline/FrameSettings.cs


public bool enableMSAA = false;
public MSAASamples msaaSampleCount { get; private set; }
public bool enableShadowMask = true;
public LightLoopSettings lightLoopSettings = new LightLoopSettings();

aggregate.enableObjectMotionVectors = camera.cameraType != CameraType.Reflection && srcFrameSettings.enableObjectMotionVectors && renderPipelineSettings.supportMotionVectors;
aggregate.enableDBuffer = srcFrameSettings.enableDBuffer && renderPipelineSettings.supportDBuffer;
aggregate.enableAtmosphericScattering = srcFrameSettings.enableAtmosphericScattering;
// We must take care of the scene view fog flags in the editor
if (!CoreUtils.IsSceneViewFogEnabled(camera))
aggregate.enableAtmosphericScattering = false;
aggregate.enableRoughRefraction = srcFrameSettings.enableRoughRefraction;
aggregate.enableTransparentPostpass = srcFrameSettings.enableTransparentPostpass;
aggregate.enableDistortion = camera.cameraType != CameraType.Reflection && srcFrameSettings.enableDistortion;

30
ScriptableRenderPipeline/HDRenderPipeline/HDRP/RenderPipelineResources/BufferPyramid.cs


foreach (var rth in m_DepthPyramidMips)
RTHandles.Release(rth);
}
}
public int GetPyramidLodCount(Vector2Int size)
{
var minSize = Mathf.Min(size.x, size.y);
return Mathf.Max(0, Mathf.FloorToInt(Mathf.Log(minSize, 2f)));
}
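Spelled out, the mip count is:

$$ \text{lodCount} = \max\big(0,\ \lfloor \log_2 \min(w, h) \rfloor \big), \qquad \text{e.g. } 1920 \times 1080 \ \Rightarrow\ \lfloor \log_2 1080 \rfloor = 10 $$

The max(0, ...) guards degenerate sizes, which the callers below then skip with a warning.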
Vector2Int CalculatePyramidMipSize(Vector2Int baseMipSize, int mipIndex)

RTHandleSystem.RTHandle sourceDepthTexture,
RTHandleSystem.RTHandle targetDepthTexture)
{
int lodCount = Mathf.Min(
GetPyramidLodCount(targetDepthTexture.referenceSize),
GetPyramidLodCount(new Vector2Int(hdCamera.actualWidth, hdCamera.actualHeight))
);
if (lodCount == 0)
{
Debug.LogWarning("The target for the pyramid buffer has an invalid size. Skipping DepthPyramid calculation.");
return;
}
UpdatePyramidMips(hdCamera, targetDepthTexture.rt.format, m_DepthPyramidMips, lodCount);
Vector2 scale = GetPyramidToScreenScale(hdCamera, targetDepthTexture);

RTHandleSystem.RTHandle sourceColorTexture,
RTHandleSystem.RTHandle targetColorTexture)
{
int lodCount = Mathf.Min(
GetPyramidLodCount(targetColorTexture.referenceSize),
GetPyramidLodCount(new Vector2Int(hdCamera.actualWidth, hdCamera.actualHeight))
);
if (lodCount == 0)
{
Debug.LogWarning("The target for the pyramid buffer has an invalid size. Skipping ColorPyramid calculation.");
return;
}
UpdatePyramidMips(hdCamera, targetColorTexture.rt.format, m_ColorPyramidMips, lodCount);
Vector2 scale = GetPyramidToScreenScale(hdCamera, targetColorTexture);

11
ScriptableRenderPipeline/HDRenderPipeline/HDRP/RenderPipelineResources/BufferPyramidProcessor.cs


cmd.SetComputeTextureParam(m_DepthPyramidCS, kernel, _Result, dest);
cmd.SetComputeVectorParam(m_DepthPyramidCS, _SrcSize, new Vector4(
srcWorkMip.width, srcWorkMip.height,
(1.0f / srcMip.width) * scale.x, (1.0f / srcMip.height) * scale.y)
);
cmd.DispatchCompute(

1
);
var dstMipWidthToCopy = Mathf.Min(targetTexture.rt.width >> (i + 1), dstWorkMip.width);
var dstMipHeightToCopy = Mathf.Min(targetTexture.rt.height >> (i + 1), dstWorkMip.height);
// If we could bind texture mips as UAV we could avoid this copy...(which moreover copies more than the needed viewport if not fullscreen)
cmd.CopyTexture(mips[i], 0, 0, 0, 0, dstMipWidthToCopy, dstMipHeightToCopy, targetTexture, 0, i + 1, 0, 0);

var dest = mips[i];
var srcMip = new RectInt(0, 0, srcRect.width >> i, srcRect.height >> i);
//var dstMip = new RectInt(0, 0, srcMip.width >> 1, srcMip.height >> 1);
var srcWorkMip = new RectInt(
0,
0,

1
);
var dstMipWidthToCopy = Mathf.Min(targetTexture.width >> (i + 1), dstWorkMip.width);
var dstMipHeightToCopy = Mathf.Min(targetTexture.height >> (i + 1), dstWorkMip.height);
// If we could bind texture mips as UAV we could avoid this copy...(which moreover copies more than the needed viewport if not fullscreen)
cmd.CopyTexture(

27
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Sky/AtmosphericScattering/AtmosphericScattering.cs


using System;
using System.Diagnostics;
using UnityEngine.Rendering;

public abstract class AtmosphericScattering : VolumeComponent
{
// Fog Color
static readonly int m_ColorModeParam = Shader.PropertyToID("_FogColorMode");
static readonly int m_FogColorDensityParam = Shader.PropertyToID("_FogColorDensity");

public static void PushNeutralShaderParameters(CommandBuffer cmd)
{
cmd.SetGlobalInt(HDShaderIDs._AtmosphericScatteringType, (int)FogType.None);
// In case volumetric lighting is enabled, we need to make sure that all rendering passes
// (not just the atmospheric scattering one) receive neutral parameters.
if (ShaderConfig.s_VolumetricLightingPreset != 0)
{
var data = DensityVolumeData.GetNeutralValues();
cmd.SetGlobalVector(HDShaderIDs._GlobalScattering, data.scattering);
cmd.SetGlobalFloat( HDShaderIDs._GlobalExtinction, data.extinction);
cmd.SetGlobalFloat( HDShaderIDs._GlobalAsymmetry, 0.0f);
}
// Not used by the volumetric fog.
Debug.Assert(frameSettings.enableAtmosphericScattering);
cmd.SetGlobalInt(HDShaderIDs._AtmosphericScatteringType, (int)type);
// Fog Color
cmd.SetGlobalFloat(m_ColorModeParam, (float)colorMode.value);

{
None,
Linear,
Exponential,
Volumetric
}
[GenerateHLSL]

1
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Sky/AtmosphericScattering/AtmosphericScattering.cs.hlsl


#define FOGTYPE_NONE (0)
#define FOGTYPE_LINEAR (1)
#define FOGTYPE_EXPONENTIAL (2)
#define FOGTYPE_VOLUMETRIC (3)
//
// UnityEngine.Experimental.Rendering.HDPipeline.FogColorMode: static fields

59
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Sky/AtmosphericScattering/AtmosphericScattering.hlsl


#endif
CBUFFER_START(AtmosphericScattering)
int _AtmosphericScatteringType;
// Common
float _FogColorMode;
float4 _FogColorDensity; // color in rgb, density in alpha

float3 fogColor = 0;
float fogFactor = 0;

switch (_AtmosphericScatteringType)
{
    case FOGTYPE_LINEAR:
    {
        fogColor = GetFogColor(posInput);
        fogFactor = _FogDensity * saturate((posInput.linearDepth - _LinearFogStart) * _LinearFogOneOverRange) * saturate((_LinearFogHeightEnd - GetAbsolutePositionWS(posInput.positionWS).y) * _LinearFogHeightOneOverRange);
        break;
    }
    case FOGTYPE_EXPONENTIAL:
    {
        fogColor = GetFogColor(posInput);
        float distance = length(GetWorldSpaceViewDir(posInput.positionWS));
        float fogHeight = max(0.0, GetAbsolutePositionWS(posInput.positionWS).y - _ExpFogBaseHeight);
        fogFactor = _FogDensity * TransmittanceHomogeneousMedium(_ExpFogHeightAttenuation, fogHeight) * (1.0f - TransmittanceHomogeneousMedium(1.0f / _ExpFogDistance, distance));
        break;
    }
    case FOGTYPE_VOLUMETRIC:
    {
#if (SHADEROPTIONS_VOLUMETRIC_LIGHTING_PRESET != 0)
        float4 volFog = SampleVolumetricLighting(TEXTURE3D_PARAM(_VBufferLighting, s_linear_clamp_sampler),
                                                 posInput.positionNDC,
                                                 posInput.linearDepth,
                                                 _VBufferResolution,
                                                 _VBufferSliceCount.xy,
                                                 _VBufferDepthEncodingParams,
                                                 _VBufferDepthDecodingParams,
                                                 true, true);
        fogFactor = 1 - volFog.a; // Opacity from transmittance
        fogColor = volFog.rgb * min(rcp(fogFactor), FLT_MAX); // Un-premultiply, clamp to avoid (0 * INF = NaN)
#endif
        break;
    }
}
return float4(fogColor, fogFactor);
}
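For the exponential branch, with TransmittanceHomogeneousMedium(k, d) = e^{-kd} as defined in CoreRP's VolumeRendering.hlsl, the factor works out to:

$$ \text{fogFactor} = \rho\ e^{-k_h h} \left(1 - e^{-d / D}\right) $$

where $\rho$ is _FogDensity, $k_h$ is _ExpFogHeightAttenuation, $h$ the height above _ExpFogBaseHeight, $d$ the view distance and $D$ = _ExpFogDistance: density decays exponentially with height, and opacity saturates exponentially with distance.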

1
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Sky/OpaqueAtmosphericScattering.shader


// #pragma enable_d3d11_debug_symbols
#include "CoreRP/ShaderLibrary/Common.hlsl"
#include "CoreRP/ShaderLibrary/Color.hlsl"
#include "../ShaderVariables.hlsl"
#include "AtmosphericScattering/AtmosphericScattering.hlsl"

36
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Sky/SkyManager.cs


public class BuiltinSkyParameters
{
public Matrix4x4 pixelCoordToViewDirMatrix;
public Matrix4x4 invViewProjMatrix;
public Vector3 cameraPosWS;
public Vector4 screenSize;
public CommandBuffer commandBuffer;
public Light sunLight;
public HDCamera hdCamera;
public DebugDisplaySettings debugSettings;

#if UNITY_EDITOR
if (camera.camera.cameraType == CameraType.Preview)
{
m_VisualSky.skySettings = GetDefaultPreviewSkyInstance();
}
#endif

cmd.SetGlobalFloat(HDShaderIDs._SkyTextureMipCount, mipCount);
}
#if UNITY_EDITOR
ProceduralSky GetDefaultPreviewSkyInstance()
{
if (m_DefaultPreviewSky == null)
{
m_DefaultPreviewSky = ScriptableObject.CreateInstance<ProceduralSky>();
}
return m_DefaultPreviewSky;
}
#endif
public void Build(HDRenderPipelineAsset hdAsset, IBLFilterGGX iblFilterGGX)
{
m_BakingSkyRenderingContext = new SkyRenderingContext(iblFilterGGX, (int)hdAsset.renderPipelineSettings.lightLoopSettings.skyReflectionSize, false);

m_LightingOverrideVolumeStack = VolumeManager.instance.CreateStack();
m_LightingOverrideLayerMask = hdAsset.renderPipelineSettings.lightLoopSettings.skyLightingOverrideLayerMask;
#if UNITY_EDITOR
CoreUtils.Destroy(m_DefaultPreviewSky);
#endif
CoreUtils.Destroy(m_StandardSkyboxMaterial);
CoreUtils.Destroy(m_BlitCubemapMaterial);
CoreUtils.Destroy(m_OpaqueAtmScatteringMaterial);

16
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Sky/SkyRenderingContext.cs


{
internal class SkyRenderingContext
{
IBLFilterGGX m_IBLFilterGGX;
Vector4 m_CubemapScreenSize;
Matrix4x4[] m_facePixelCoordToViewDirMatrices = new Matrix4x4[6];
Matrix4x4[] m_faceCameraInvViewProjectionMatrix = new Matrix4x4[6];
bool m_SupportsConvolution = false;
bool m_SupportsMIS = false;
BuiltinSkyParameters m_BuiltinParameters = new BuiltinSkyParameters();
bool m_NeedUpdate = true;
public RenderTexture cubemapRT { get { return m_SkyboxCubemapRT; } }
public Texture reflectionTexture { get { return m_SkyboxGGXCubemapRT; } }

14
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Sky/VisualEnvironment.cs


using System;
using System.Collections;
using System.Collections.Generic;
using UnityEngine;

public void PushFogShaderParameters(CommandBuffer cmd, FrameSettings frameSettings)
{
if (!frameSettings.enableAtmosphericScattering)
{
AtmosphericScattering.PushNeutralShaderParameters(cmd);
return;
}
switch (fogType.value)
{
case FogType.None:

case FogType.Exponential:
{
var fogSettings = VolumeManager.instance.stack.GetComponent<ExponentialFog>();
fogSettings.PushShaderParameters(cmd, frameSettings);
break;
}
case FogType.Volumetric:
{
var fogSettings = VolumeManager.instance.stack.GetComponent<VolumetricFog>();
fogSettings.PushShaderParameters(cmd, frameSettings);
break;
}

838
Tests/GraphicsTests/RenderPipeline/HDRenderPipeline/Scenes/1xxx_Materials/1301_SubSurfaceScattering.unity
File diff too large to display.

4
Tests/GraphicsTests/RenderPipeline/HDRenderPipeline/Scenes/1xxx_Materials/1301_SubSurfaceScattering/GroundLeaf_Transmission-None.mat


m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _MaskMap:
m_Texture: {fileID: 2800000, guid: bb365ecc385707e4393520c6e7893cc8, type: 3}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _NormalMap:

- _PPDPrimitiveWidth: 1
- _PreRefractionPass: 0
- _RefractionModel: 0
- _RefractionSSRayModel: 0
- _SSSAndTransmissionType: 0
- _ShiverDirectionality: 0.5
- _ShiverDrag: 0.2

11
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Debug/DecalsDebug.cs


using System;
namespace UnityEngine.Experimental.Rendering.HDPipeline
{
[Serializable]
public class DecalsDebugSettings
{
public bool m_DisplayAtlas = false;
public UInt32 m_MipLevel = 0;
}
}

11
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Debug/DecalsDebug.cs.meta


fileFormatVersion: 2
guid: 300a1c22817829f48a156cef57b55a57
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

34
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/Sky/AtmosphericScattering/VolumetricFogEditor.cs


using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEditor;
using UnityEngine.Experimental.Rendering.HDPipeline;
using UnityEditor.Experimental.Rendering;
namespace UnityEditor.Experimental.Rendering.HDPipeline
{
[VolumeComponentEditor(typeof(VolumetricFog))]
public class VolumetricFogEditor : AtmosphericScatteringEditor
{
private SerializedDataParameter m_Albedo;
private SerializedDataParameter m_MeanFreePath;
private SerializedDataParameter m_Asymmetry;
public override void OnEnable()
{
base.OnEnable();
var o = new PropertyFetcher<VolumetricFog>(serializedObject);
m_Albedo = Unpack(o.Find(x => x.albedo));
m_MeanFreePath = Unpack(o.Find(x => x.meanFreePath));
m_Asymmetry = Unpack(o.Find(x => x.asymmetry));
}
public override void OnInspectorGUI()
{
PropertyField(m_Albedo);
PropertyField(m_MeanFreePath);
PropertyField(m_Asymmetry);
}
}
}

11
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/Sky/AtmosphericScattering/VolumetricFogEditor.cs.meta


fileFormatVersion: 2
guid: a530ef8d0c07494409d34294d8e04a89
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

67
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Sky/AtmosphericScattering/VolumetricFog.cs


using System;
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.Rendering;
namespace UnityEngine.Experimental.Rendering.HDPipeline
{
public class VolumetricFog : AtmosphericScattering
{
public ColorParameter albedo = new ColorParameter(new Color(0.5f, 0.5f, 0.5f));
public MinFloatParameter meanFreePath = new MinFloatParameter(float.MaxValue, 1.0f);
public ClampedFloatParameter asymmetry = new ClampedFloatParameter(0.0f, -1.0f, 1.0f);
// Override the volume blending function.
public override void Override(VolumeComponent state, float interpFactor)
{
VolumetricFog other = state as VolumetricFog;
float thisExtinction = VolumeRenderingUtils.ExtinctionFromMeanFreePath(meanFreePath);
Vector3 thisScattering = VolumeRenderingUtils.ScatteringFromExtinctionAndAlbedo(thisExtinction, (Vector3)(Vector4)albedo.value);
float otherExtinction = VolumeRenderingUtils.ExtinctionFromMeanFreePath(other.meanFreePath);
Vector3 otherScattering = VolumeRenderingUtils.ScatteringFromExtinctionAndAlbedo(otherExtinction, (Vector3)(Vector4)other.albedo.value);
float blendExtinction = Mathf.Lerp(otherExtinction, thisExtinction, interpFactor);
Vector3 blendScattering = Vector3.Lerp(otherScattering, thisScattering, interpFactor);
float blendAsymmetry = Mathf.Lerp(other.asymmetry, asymmetry, interpFactor);
float blendMeanFreePath = VolumeRenderingUtils.MeanFreePathFromExtinction(blendExtinction);
Color blendAlbedo = (Color)(Vector4)VolumeRenderingUtils.AlbedoFromMeanFreePathAndScattering(blendMeanFreePath, blendScattering);
blendAlbedo.a = 1.0f;
if (meanFreePath.overrideState)
{
other.meanFreePath.value = blendMeanFreePath;
}
if (albedo.overrideState)
{
other.albedo.value = blendAlbedo;
}
if (asymmetry.overrideState)
{
other.asymmetry.value = blendAsymmetry;
}
}
public override void PushShaderParameters(CommandBuffer cmd, FrameSettings frameSettings)
{
DensityVolumeParameters param;
param.albedo = albedo;
param.meanFreePath = meanFreePath;
param.asymmetry = asymmetry;
DensityVolumeData data = param.GetData();
cmd.SetGlobalInt(HDShaderIDs._AtmosphericScatteringType, (int)FogType.Volumetric);
cmd.SetGlobalVector(HDShaderIDs._GlobalScattering, data.scattering);
cmd.SetGlobalFloat( HDShaderIDs._GlobalExtinction, data.extinction);
cmd.SetGlobalFloat( HDShaderIDs._GlobalAsymmetry, asymmetry);
}
}
}
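Note how Override blends in physical units rather than on the raw parameters: with extinction $\sigma_t = 1 / \text{MFP}$ and scattering $\sigma_s = \text{albedo} \cdot \sigma_t$, it lerps $\sigma_t$ and $\sigma_s$ linearly and only then maps back:

$$ \sigma_t = \operatorname{lerp}(\sigma_t^{\text{other}}, \sigma_t^{\text{this}}, k), \quad \sigma_s = \operatorname{lerp}(\sigma_s^{\text{other}}, \sigma_s^{\text{this}}, k), \quad \text{MFP} = 1 / \sigma_t, \quad \text{albedo} = \sigma_s / \sigma_t $$

Blending in $\sigma$ space keeps intermediate states physically meaningful (transmittance is exponential in $\sigma_t$), which a direct lerp of mean free path and albedo would not.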

11
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Sky/AtmosphericScattering/VolumetricFog.cs.meta


fileFormatVersion: 2
guid: 29b00527f85bb3346a4d2cb710971587
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant: