浏览代码

Merge pull request #755 from EvgeniiG/volumetrics

Add the initial implementation of volumetric lighting
/main
GitHub 7 年前
当前提交
1461533c
共有 35 个文件被更改,包括 1618 次插入71 次删除
  1. 24
      ScriptableRenderPipeline/Core/CoreRP/ShaderLibrary/Common.hlsl
  2. 116
      ScriptableRenderPipeline/Core/CoreRP/ShaderLibrary/GeometricTools.hlsl
  3. 6
      ScriptableRenderPipeline/Core/CoreRP/ShaderLibrary/Random.hlsl
  4. 90
      ScriptableRenderPipeline/Core/CoreRP/ShaderLibrary/VolumeRendering.hlsl
  5. 1
      ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/HDAssetFactory.cs
  6. 26
      ScriptableRenderPipeline/HDRenderPipeline/HDRP/HDRenderPipeline.cs
  7. 17
      ScriptableRenderPipeline/HDRenderPipeline/HDRP/HDStringConstants.cs
  8. 5
      ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting.meta
  9. 4
      ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Deferred.shader
  10. 10
      ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightEvaluation.hlsl
  11. 3
      ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightLoop/LightLoop.cs
  12. 28
      ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightLoop/LightLoopDef.hlsl
  13. 9
      ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/Lit/Lit.hlsl
  14. 2
      ScriptableRenderPipeline/HDRenderPipeline/HDRP/RenderPipelineResources/HDRenderPipelineResources.asset
  15. 1
      ScriptableRenderPipeline/HDRenderPipeline/HDRP/RenderPipelineResources/RenderPipelineResources.cs
  16. 10
      ScriptableRenderPipeline/HDRenderPipeline/HDRP/ShaderConfig.cs
  17. 1
      ScriptableRenderPipeline/HDRenderPipeline/HDRP/ShaderConfig.cs.hlsl
  18. 7
      ScriptableRenderPipeline/HDRenderPipeline/HDRP/ShaderVariables.hlsl
  19. 34
      ScriptableRenderPipeline/HDRenderPipeline/HDRP/Sky/AtmosphericScattering/AtmosphericScattering.hlsl
  20. 40
      ScriptableRenderPipeline/Core/CoreRP/ShaderLibrary/Filtering.hlsl
  21. 9
      ScriptableRenderPipeline/Core/CoreRP/ShaderLibrary/Filtering.hlsl.meta
  22. 113
      ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/VBuffer.hlsl
  23. 9
      ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/VBuffer.hlsl.meta
  24. 10
      ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics.meta
  25. 13
      ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/HomogeneousFog.cs.meta
  26. 10
      ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/Resources.meta
  27. 10
      ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/VolumetricLighting.cs.hlsl.meta
  28. 13
      ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/VolumetricLighting.cs.meta
  29. 28
      ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/VolumetricLighting.cs.hlsl
  30. 10
      ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/Resources/VolumetricLighting.compute.meta
  31. 493
      ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/Resources/VolumetricLighting.compute
  32. 38
      ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/HomogeneousFog.cs
  33. 499
      ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/VolumetricLighting.cs

24
ScriptableRenderPipeline/Core/CoreRP/ShaderLibrary/Common.hlsl


TEMPLATE_1_REAL(Sq, x, return x * x)
TEMPLATE_1_INT(Sq, x, return x * x)
bool IsPower2(uint x)
{
return (x & (x - 1)) == 0;
}
// Input [0, 1] and output [0, PI/2]
// 9 VALU
real FastACosPos(real inX)

// Z buffer to linear 0..1 depth (0 at near plane, 1 at far plane).
// Does not correctly handle oblique view frustums.
// zBufferParam = { (f-n)/n, 1, (f-n)/n*f, 1/f }
float Linear01DepthFromNear(float depth, float4 zBufferParam)
{
return 1.0 / (zBufferParam.x + zBufferParam.y / depth);

// Does not correctly handle oblique view frustums.
// zBufferParam = { (f-n)/n, 1, (f-n)/n*f, 1/f }
float Linear01Depth(float depth, float4 zBufferParam)
{
return 1.0 / (zBufferParam.x * depth + zBufferParam.y);

// Does not correctly handle oblique view frustums.
// zBufferParam = { (f-n)/n, 1, (f-n)/n*f, 1/f }
float LinearEyeDepth(float depth, float4 zBufferParam)
{
return 1.0 / (zBufferParam.z * depth + zBufferParam.w);

float LinearEyeDepth(float3 positionWS, float4x4 viewProjMatrix)
{
return mul(viewProjMatrix, float4(positionWS, 1.0)).w;
}
// 'z' is the view-space Z position (linear depth).
// saturate() the output of the function to clamp them to the [0, 1] range.
// encodingParams = { n, log2(f/n), 1/n, 1/log2(f/n) }
float EncodeLogarithmicDepth(float z, float4 encodingParams)
{
return log2(max(0, z * encodingParams.z)) * encodingParams.w;
}
// 'd' is the logarithmically encoded depth value.
// saturate(d) to clamp the output of the function to the [n, f] range.
// encodingParams = { n, log2(f/n), 1/n, 1/log2(f/n) }
float DecodeLogarithmicDepth(float d, float4 encodingParams)
{
return encodingParams.x * exp2(d * encodingParams.y);
}
// ----------------------------------------------------------------------------

116
ScriptableRenderPipeline/Core/CoreRP/ShaderLibrary/GeometricTools.hlsl


#ifndef UNITY_GEOMETRICTOOLS_INCLUDED
#define UNITY_GEOMETRICTOOLS_INCLUDED
// Solves the quadratic equation of the form: a*t^2 + b*t + c = 0.
// Returns 'false' if there are no real roots, 'true' otherwise.
// Numerically stable.
// Ref: Numerical Recipes in C++ (3rd Edition)
bool SolveQuadraticEquation(float a, float b, float c, out float2 roots)
{
float d = b * b - 4 * a * c;
float q = -0.5 * (b + FastMulBySignOf(b, sqrt(d)));
roots = float2(q / a, c / q);
return (d >= 0);
}
// return furthest near intersection in x and closest far intersection in y
// if (intersections.y > intersections.x) the ray hit the box, else it miss it
// Assume dir is normalize
float2 BoxRayIntersect(float3 start, float3 dir, float3 boxMin, float3 boxMax)
// This implementation does not attempt to explicitly handle NaNs.
// Ref: https://tavianator.com/fast-branchless-raybounding-box-intersections-part-2-nans/
bool IntersectRayAABB(float3 rayOrigin, float3 rayDirection,
float3 boxMin, float3 boxMax,
float tMin, float tMax,
out float tEntr, out float tExit)
float3 invDir = 1.0 / dir;
float3 rayDirInv = rcp(rayDirection); // Could be precomputed
// Find the ray intersection with box plane
float3 firstPlaneIntersect = (boxMin - start) * invDir;
float3 secondPlaneIntersect = (boxMax - start) * invDir;
// Perform ray-slab intersection (component-wise).
float3 t0 = boxMin * rayDirInv - (rayOrigin * rayDirInv);
float3 t1 = boxMax * rayDirInv - (rayOrigin * rayDirInv);
// Get the closest/furthest of these intersections along the ray (Ok because x/0 give +inf and -x/0 give �inf )
float3 closestPlane = min(firstPlaneIntersect, secondPlaneIntersect);
float3 furthestPlane = max(firstPlaneIntersect, secondPlaneIntersect);
// Find the closest/farthest distance (component-wise).
float3 tSlabEntr = min(t0, t1);
float3 tSlabExit = max(t0, t1);
// Find the farthest entry and the nearest exit.
tEntr = Max3(tSlabEntr.x, tSlabEntr.y, tSlabEntr.z);
tExit = Min3(tSlabExit.x, tSlabExit.y, tSlabExit.z);
float2 intersections;
// Find the furthest near intersection
intersections.x = max(closestPlane.x, max(closestPlane.y, closestPlane.z));
// Find the closest far intersection
intersections.y = min(min(furthestPlane.x, furthestPlane.y), furthestPlane.z);
// Clamp to the range.
tEntr = max(tEntr, tMin);
tExit = min(tExit, tMax);
return intersections;
return tEntr < tExit;
// Assume dir is normalize
float BoxRayIntersectSimple(float3 start, float3 dir, float3 boxMin, float3 boxMax)
float IntersectRayAABBSimple(float3 start, float3 dir, float3 boxMin, float3 boxMax)
{
float3 invDir = 1.0 / dir;

}
// Assume Sphere is at the origin (i.e start = position - spherePosition)
float2 SphereRayIntersect(float3 start, float3 dir, float radius, out bool intersect)
bool IntersectRaySphere(float3 start, float3 dir, float radius, out float2 intersections)
{
float a = dot(dir, dir);
float b = dot(dir, start) * 2.0;

float2 intersections = float2(0.0, 0.0);
intersect = false;
bool intersect = false;
intersections = float2(0.0, 0.0);
if (discriminant < 0.0 || a == 0.0)
{
intersections.x = 0.0;

intersect = true;
}
return intersections;
return intersect;
float SphereRayIntersectSimple(float3 start, float3 dir, float radius)
float IntersectRaySphereSimple(float3 start, float3 dir, float radius)
{
float b = dot(dir, start) * 2.0;
float c = dot(start, start) - radius * radius;

}
float3 RayPlaneIntersect(in float3 rayOrigin, in float3 rayDirection, in float3 planeOrigin, in float3 planeNormal)
float3 IntersectRayPlane(float3 rayOrigin, float3 rayDirection, float3 planeOrigin, float3 planeNormal)
}
// Can support cones with an elliptic base: pre-scale 'coneAxisX' and 'coneAxisY' by (h/r_x) and (h/r_y).
// Returns parametric distances 'tEntr' and 'tExit' along the ray,
// subject to constraints 'tMin' and 'tMax'.
bool IntersectRayCone(float3 rayOrigin, float3 rayDirection,
float3 coneOrigin, float3 coneDirection,
float3 coneAxisX, float3 coneAxisY,
float tMin, float tMax,
out float tEntr, out float tExit)
{
// Inverse transform the ray into a coordinate system with the cone at the origin facing along the Z axis.
float3x3 rotMat = float3x3(coneAxisX, coneAxisY, coneDirection);
float3 o = mul(rotMat, rayOrigin - coneOrigin);
float3 d = mul(rotMat, rayDirection);
// Cone equation (facing along Z): (h/r*x)^2 + (h/r*y)^2 - z^2 = 0.
// Cone axes are premultiplied with (h/r).
// Set up the quadratic equation: a*t^2 + b*t + c = 0.
float a = d.x * d.x + d.y * d.y - d.z * d.z;
float b = o.x * d.x + o.y * d.y - o.z * d.z;
float c = o.x * o.x + o.y * o.y - o.z * o.z;
float2 roots;
// Check whether we have at least 1 root.
bool hit = SolveQuadraticEquation(a, 2 * b, c, roots);
tEntr = min(roots.x, roots.y);
tExit = max(roots.x, roots.y);
float3 pEntr = o + tEntr * d;
float3 pExit = o + tExit * d;
// Clip the negative cone.
bool pEntrNeg = pEntr.z < 0;
bool pExitNeg = pExit.z < 0;
if (pEntrNeg && pExitNeg) { hit = false; }
if (pEntrNeg) { tEntr = tExit; tExit = tMax; }
if (pExitNeg) { tExit = tEntr; tEntr = tMin; }
// Clamp using the values passed into the function.
tEntr = clamp(tEntr, tMin, tMax);
tExit = clamp(tExit, tMin, tMax);
// Check for grazing intersections.
if (tEntr == tExit) { hit = false; }
return hit;
}
//-----------------------------------------------------------------------------

6
ScriptableRenderPipeline/Core/CoreRP/ShaderLibrary/Random.hlsl


#ifndef UNITY_NOISE_INCLUDED
#define UNITY_NOISE_INCLUDED
#ifndef UNITY_RANDOM_INCLUDED
#define UNITY_RANDOM_INCLUDED
#if !defined(SHADER_API_GLES)

#endif // SHADER_API_GLES
#endif // UNITY_NOISE_INCLUDED
#endif // UNITY_RANDOM_INCLUDED

90
ScriptableRenderPipeline/Core/CoreRP/ShaderLibrary/VolumeRendering.hlsl


#ifndef UNITY_VOLUME_RENDERING_INCLUDED
#define UNITY_VOLUME_RENDERING_INCLUDED
real OpticalDepthHomogeneous(real extinction, real intervalLength)
// Reminder:
// Optical_Depth(x, y) = Integral{x, y}{Extinction(t) dt}
// Transmittance(x, y) = Exp(-Optical_Depth(x, y))
// Transmittance(x, z) = Transmittance(x, y) * Transmittance(y, z)
// Integral{a, b}{Transmittance(0, t) * L_s(t) dt} = Transmittance(0, a) * Integral{a, b}{Transmittance(0, t - a) * L_s(t) dt}.
real OpticalDepthHomogeneousMedium(real extinction, real intervalLength)
{
return extinction * intervalLength;
}
real3 OpticalDepthHomogeneousMedium(real3 extinction, real intervalLength)
{
return extinction * intervalLength;
}

return exp(-opticalDepth);
}
real TransmittanceIntegralOverHomogeneousInterval(real extinction, real start, real end)
real3 Transmittance(real3 opticalDepth)
{
return exp(-opticalDepth);
}
real TransmittanceHomogeneousMedium(real extinction, real intervalLength)
return (exp(-extinction * start) - exp(-extinction * end)) / extinction;
return Transmittance(OpticalDepthHomogeneousMedium(extinction, intervalLength));
real3 OpticalDepthHomogeneous(real3 extinction, real intervalLength)
real3 TransmittanceHomogeneousMedium(real3 extinction, real intervalLength)
return extinction * intervalLength;
return Transmittance(OpticalDepthHomogeneousMedium(extinction, intervalLength));
real3 Transmittance(real3 opticalDepth)
// Integral{a, b}{Transmittance(0, t - a) dt}.
real TransmittanceIntegralHomogeneousMedium(real extinction, real intervalLength)
return exp(-opticalDepth);
return rcp(extinction) - rcp(extinction) * exp(-extinction * intervalLength);
real3 TransmittanceIntegralOverHomogeneousInterval(real3 extinction, real start, real end)
// Integral{a, b}{Transmittance(0, t - a) dt}.
real3 TransmittanceIntegralHomogeneousMedium(real3 extinction, real intervalLength)
return (exp(-extinction * start) - exp(-extinction * end)) / extinction;
return rcp(extinction) - rcp(extinction) * exp(-extinction * intervalLength);
}
real IsotropicPhaseFunction()

HenyeyGreensteinPhasePartVarying(asymmetry, LdotD);
}
// Samples the interval of homogeneous participating medium using the closed-form tracking approach
// (proportionally to the transmittance).
// Returns the offset from the start of the interval and the weight = (transmittance / pdf).
// Ref: Production Volume Rendering, 3.6.1.
void ImportanceSampleHomogeneousMedium(real rndVal, real extinction, real intervalLength,
out real offset, out real weight)
{
// pdf = extinction * exp(-extinction * t) / (1 - exp(-intervalLength * extinction))
// weight = exp(-extinction * t) / pdf
// weight = (1 - exp(-extinction * intervalLength)) / extinction;
real x = 1 - exp(-extinction * intervalLength);
// Avoid division by 0.
real rcpExt = extinction != 0 ? rcp(extinction) : 0;
weight = x * rcpExt;
offset = -log(1 - rndVal * x) * rcpExt;
}
// Implements equiangular light sampling.
// Returns the distance from the origin of the ray, the squared (radial) distance from the light,
// and the reciprocal of the PDF.
// Ref: Importance Sampling of Area Lights in Participating Medium.
void ImportanceSamplePunctualLight(real rndVal, real3 lightPosition,
real3 rayOrigin, real3 rayDirection,
real tMin, real tMax,
out real dist, out real rSq, out real rcpPdf,
real minDistSq = FLT_EPS)
{
real3 originToLight = lightPosition - rayOrigin;
real originToLightProj = dot(originToLight, rayDirection);
real originToLightDistSq = dot(originToLight, originToLight);
real rayToLightDistSq = max(originToLightDistSq - originToLightProj * originToLightProj, minDistSq);
real a = tMin - originToLightProj;
real b = tMax - originToLightProj;
real dSq = rayToLightDistSq;
real dRcp = rsqrt(dSq);
real d = dSq * dRcp;
// TODO: optimize me. :-(
real theta0 = FastATan(a * dRcp);
real theta1 = FastATan(b * dRcp);
real gamma = theta1 - theta0;
real theta = lerp(theta0, theta1, rndVal);
real t = d * tan(theta);
dist = originToLightProj + t;
rSq = dSq + t * t;
rcpPdf = gamma * rSq * dRcp;
}
#endif // UNITY_VOLUME_RENDERING_INCLUDED

1
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/HDAssetFactory.cs


newAsset.deferredComputeShader = Load<ComputeShader>(HDRenderPipelinePath + "Lighting/LightLoop/Deferred.compute");
newAsset.deferredDirectionalShadowComputeShader = Load<ComputeShader>(HDRenderPipelinePath + "Lighting/DeferredDirectionalShadow.compute");
newAsset.volumetricLightingCS = Load<ComputeShader>(HDRenderPipelinePath + "Lighting/Volumetrics/Resources/VolumetricLighting.compute");
newAsset.subsurfaceScatteringCS = Load<ComputeShader>(HDRenderPipelinePath + "Material/SubsurfaceScattering/SubsurfaceScattering.compute");
newAsset.subsurfaceScattering = Load<Shader>(HDRenderPipelinePath + "Material/SubsurfaceScattering/SubsurfaceScattering.shader");

26
ScriptableRenderPipeline/HDRenderPipeline/HDRP/HDRenderPipeline.cs


[Flags]
public enum StencilBitMask
{
Clear = 0, // 0x0
LightingMask = 7, // 0x7 - 3 bit
ObjectVelocity = 128, // 1 bit
All = 255 // 0xFF - 8 bit
Clear = 0, // 0x0
LightingMask = 7, // 0x7 - 3 bit
ObjectVelocity = 128, // 0x80 - 1 bit
All = 255 // 0xFF - 8 bit
}
RenderStateBlock m_DepthStateOpaque;

m_LightLoop.AllocResolutionDependentBuffers(texWidth, texHeight);
}
int viewId = hdCamera.camera.GetInstanceID(); // Warning: different views can use the same camera
// Warning: (resolutionChanged == false) if you open a new Editor tab of the same size!
if (m_VolumetricLightingPreset != VolumetricLightingPreset.Off)
ResizeVBuffer(viewId, texWidth, texHeight);
// update recorded window resolution
m_CurrentWidth = texWidth;
m_CurrentHeight = texHeight;

m_SSSBufferManager.PushGlobalParams(cmd, sssParameters, m_FrameSettings);
m_DbufferManager.PushGlobalParams(cmd);
if (m_VolumetricLightingPreset != VolumetricLightingPreset.Off)
{
SetVolumetricLightingData(hdCamera, cmd);
}
}
}

m_LightLoop.BuildGPULightLists(camera, cmd, m_CameraDepthStencilBufferRT, m_CameraStencilBufferCopyRT, m_SkyManager.IsSkyValid());
}
}
// Render the volumetric lighting.
// The pass requires the volume properties, the light list and the shadows, and can run async.
VolumetricLightingPass(hdCamera, cmd);
RenderDeferredLighting(hdCamera, cmd);

visualEnv.PushFogShaderParameters(cmd, m_FrameSettings);
m_SkyManager.RenderSky(hdCamera, m_LightLoop.GetCurrentSunLight(), m_CameraColorBufferRT, m_CameraDepthStencilBufferRT, cmd);
if (visualEnv.fogType != FogType.None)
if (visualEnv.fogType != FogType.None || m_VolumetricLightingPreset != VolumetricLightingPreset.Off)
m_SkyManager.RenderOpaqueAtmosphericScattering(cmd);
}

17
ScriptableRenderPipeline/HDRenderPipeline/HDRP/HDStringConstants.cs


public static readonly int _SkyParam = Shader.PropertyToID("_SkyParam");
public static readonly int _PixelCoordToViewDirWS = Shader.PropertyToID("_PixelCoordToViewDirWS");
public static readonly int _GlobalFog_Extinction = Shader.PropertyToID("_GlobalFog_Extinction");
public static readonly int _GlobalFog_Asymmetry = Shader.PropertyToID("_GlobalFog_Asymmetry");
public static readonly int _GlobalFog_Scattering = Shader.PropertyToID("_GlobalFog_Scattering");
public static readonly int _GlobalFog_Extinction = Shader.PropertyToID("_GlobalFog_Extinction");
public static readonly int _GlobalFog_Scattering = Shader.PropertyToID("_GlobalFog_Scattering");
public static readonly int _VBufferResolution = Shader.PropertyToID("_VBufferResolution");
public static readonly int _VBufferScaleAndSliceCount = Shader.PropertyToID("_VBufferScaleAndSliceCount");
public static readonly int _VBufferDepthEncodingParams = Shader.PropertyToID("_VBufferDepthEncodingParams");
public static readonly int _VBufferCoordToViewDirWS = Shader.PropertyToID("_VBufferCoordToViewDirWS");
public static readonly int _VBufferDensity = Shader.PropertyToID("_VBufferDensity");
public static readonly int _VBufferLighting = Shader.PropertyToID("_VBufferLighting");
public static readonly int _VBufferLightingIntegral = Shader.PropertyToID("_VBufferLightingIntegral");
public static readonly int _VBufferLightingHistory = Shader.PropertyToID("_VBufferLightingHistory");
public static readonly int _VBufferLightingFeedback = Shader.PropertyToID("_VBufferLightingFeedback");
public static readonly int _VBufferSampleOffset = Shader.PropertyToID("_VBufferSampleOffset");
}
}

5
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting.meta


fileFormatVersion: 2
guid: 3002976b0b09954499dd1f6e00169b06
guid: 5e0fe6d5bfdaab148b33c776c94a500f
timeCreated: 1474297943
licenseType: Pro
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

4
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Deferred.shader


// variable declaration
//-------------------------------------------------------------------------------------
#ifdef SHADOWS_SHADOWMASK
#ifdef SHADOWS_SHADOWMASK
#endif
#endif
struct Attributes
{

10
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightEvaluation.hlsl


#endif
}
// Note: no volumetric attenuation along shadow rays for directional lights.
attenuation *= shadow;
[branch] if (lightData.cookieIndex >= 0)

shadow = lerp(1.0, shadow, lightData.shadowDimmer);
#endif
}
#if (SHADEROPTIONS_VOLUMETRIC_LIGHTING_PRESET != 0)
[flatten] if (lightData.lightType == GPULIGHTTYPE_PROJECTOR_BOX)
{
float3 lightToSample = positionWS - lightData.positionWS;
dist = dot(-lightToSample, L);
}
shadow *= TransmittanceHomogeneousMedium(_GlobalFog_Extinction, dist);
#endif
attenuation *= shadow;

3
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightLoop/LightLoop.cs


{
cmd.SetGlobalBuffer(HDShaderIDs.g_logBaseBuffer, s_PerTileLogBaseTweak);
}
// Set up clustered lighting for volumetrics.
cmd.SetGlobalBuffer(HDShaderIDs.g_vLightListGlobal, s_PerVoxelLightLists);
}
}
}

28
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightLoop/LightLoopDef.hlsl


return TILE_SIZE_CLUSTERED;
}
void GetCountAndStartCluster(PositionInputs posInput, uint lightCategory, out uint start, out uint lightCount)
float GetLightClusterMinLinearDepth(uint2 tileIndex, uint clusterIndex)
uint2 tileIndex = posInput.tileCoord;
float logBase = g_fClustBase;
if (g_isLogBaseBufferEnabled)
{
logBase = g_logBaseBuffer[tileIndex.y * _NumTileClusteredX + tileIndex.x];
}
return ClusterIdxToZFlex(clusterIndex, logBase, g_isLogBaseBufferEnabled != 0);
}
uint GetLightClusterIndex(uint2 tileIndex, float linearDepth)
{
float logBase = g_fClustBase;
if (g_isLogBaseBufferEnabled)
{

int clustIdx = SnapToClusterIdxFlex(posInput.linearDepth, logBase, g_isLogBaseBufferEnabled != 0);
return SnapToClusterIdxFlex(linearDepth, logBase, g_isLogBaseBufferEnabled != 0);
}
void GetCountAndStartCluster(uint2 tileIndex, uint clusterIndex, uint lightCategory, out uint start, out uint lightCount)
{
const int idx = ((lightCategory * nrClusters + clustIdx) * _NumTileClusteredY + tileIndex.y) * _NumTileClusteredX + tileIndex.x;
const int idx = ((lightCategory * nrClusters + clusterIndex) * _NumTileClusteredY + tileIndex.y) * _NumTileClusteredX + tileIndex.x;
}
void GetCountAndStartCluster(PositionInputs posInput, uint lightCategory, out uint start, out uint lightCount)
{
uint2 tileIndex = posInput.tileCoord;
uint clusterIndex = GetLightClusterIndex(tileIndex, posInput.linearDepth);
GetCountAndStartCluster(tileIndex, clusterIndex, lightCategory, start, lightCount);
}
void GetCountAndStart(PositionInputs posInput, uint lightCategory, out uint start, out uint lightCount)

9
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/Lit/Lit.hlsl


// SurfaceData is define in Lit.cs which generate Lit.cs.hlsl
#include "Lit.cs.hlsl"
#include "../SubsurfaceScattering/SubsurfaceScattering.hlsl"
#include "CoreRP/ShaderLibrary/VolumeRendering.hlsl"
//-----------------------------------------------------------------------------
// Texture and constant buffer declaration

float3 dirLS = mul(R, worldToLocal);
float sphereOuterDistance = lightData.influenceExtents.x;
float projectionDistance = SphereRayIntersectSimple(positionLS, dirLS, sphereOuterDistance);
float projectionDistance = IntersectRaySphereSimple(positionLS, dirLS, sphereOuterDistance);
projectionDistance = max(projectionDistance, lightData.minProjectionDistance); // Setup projection to infinite if requested (mean no projection shape)
// We can reuse dist calculate in LS directly in WS as there is no scaling. Also the offset is already include in lightData.positionWS
R = (positionWS + projectionDistance * R) - lightData.positionWS;

{
dirLS = mul(coatR, worldToLocal);
projectionDistance = SphereRayIntersectSimple(positionLS, dirLS, sphereOuterDistance);
projectionDistance = IntersectRaySphereSimple(positionLS, dirLS, sphereOuterDistance);
projectionDistance = max(projectionDistance, lightData.minProjectionDistance); // Setup projection to infinite if requested (mean no projection shape)
coatR = (positionWS + projectionDistance * coatR) - lightData.positionWS;
}

// 1. First process the projection
float3 dirLS = mul(R, worldToLocal);
float3 boxOuterDistance = lightData.influenceExtents;
float projectionDistance = BoxRayIntersectSimple(positionLS, dirLS, -boxOuterDistance, boxOuterDistance);
float projectionDistance = IntersectRayAABBSimple(positionLS, dirLS, -boxOuterDistance, boxOuterDistance);
projectionDistance = max(projectionDistance, lightData.minProjectionDistance); // Setup projection to infinite if requested (mean no projection shape)
// No need to normalize for fetching cubemap

if (bsdfData.materialId == MATERIALID_LIT_CLEAR_COAT && HasMaterialFeatureFlag(MATERIALFEATUREFLAGS_LIT_CLEAR_COAT))
{
dirLS = mul(coatR, worldToLocal);
projectionDistance = BoxRayIntersectSimple(positionLS, dirLS, -boxOuterDistance, boxOuterDistance);
projectionDistance = IntersectRayAABBSimple(positionLS, dirLS, -boxOuterDistance, boxOuterDistance);
projectionDistance = max(projectionDistance, lightData.minProjectionDistance); // Setup projection to infinite if requested (mean no projection shape)
coatR = (positionWS + projectionDistance * coatR) - lightData.positionWS;
}

2
ScriptableRenderPipeline/HDRenderPipeline/HDRP/RenderPipelineResources/HDRenderPipelineResources.asset


type: 3}
deferredDirectionalShadowComputeShader: {fileID: 7200000, guid: fbde6fae193b2a94e9fd97c163c204f4,
type: 3}
volumetricLightingCS: {fileID: 7200000, guid: 799166e2ee6a4b041bba9e74f6942097,
type: 3}
subsurfaceScatteringCS: {fileID: 7200000, guid: b06a7993621def248addd55d0fe931b1,
type: 3}
subsurfaceScattering: {fileID: 4800000, guid: 867b36db983aa0548889a66f8d685ff6,

1
ScriptableRenderPipeline/HDRenderPipeline/HDRP/RenderPipelineResources/RenderPipelineResources.cs


public ComputeShader buildMaterialFlagsShader;
public ComputeShader deferredComputeShader;
public ComputeShader deferredDirectionalShadowComputeShader;
public ComputeShader volumetricLightingCS;
public ComputeShader subsurfaceScatteringCS; // Disney SSS
public Shader subsurfaceScattering; // Jimenez SSS

10
ScriptableRenderPipeline/HDRenderPipeline/HDRP/ShaderConfig.cs


public enum ShaderOptions
{
CameraRelativeRendering = 1, // Rendering sets the origin of the world to the position of the primary (scene view) camera
UseDisneySSS = 1 // Allow to chose between Burley Normalized Diffusion (Multi + Fix direction single scattering) or Jimenez diffusion approximation (Multiscattering only - More blurry) for Subsurface scattering
UseDisneySSS = 1, // Allow to chose between Burley Normalized Diffusion (Multi + Fix direction single scattering) or Jimenez diffusion approximation (Multiscattering only - More blurry) for Subsurface scattering
VolumetricLightingPreset = 0 // 0 = disabled, 1 = normal, 2 = ultra
};
// Note: #define can't be use in include file in C# so we chose this way to configure both C# and hlsl

public const int k_CameraRelativeRendering = (int)ShaderOptions.CameraRelativeRendering;
public const int k_CameraRelativeRendering = (int)ShaderOptions.CameraRelativeRendering;
public const int k_UseDisneySSS = (int)ShaderOptions.UseDisneySSS;
public const int k_UseDisneySSS = (int)ShaderOptions.UseDisneySSS;
public const int k_VolumetricLightingPreset = (int)ShaderOptions.VolumetricLightingPreset;
public static int s_VolumetricLightingPreset = (int)ShaderOptions.VolumetricLightingPreset;
}
}

1
ScriptableRenderPipeline/HDRenderPipeline/HDRP/ShaderConfig.cs.hlsl


//
#define SHADEROPTIONS_CAMERA_RELATIVE_RENDERING (1)
#define SHADEROPTIONS_USE_DISNEY_SSS (1)
#define SHADEROPTIONS_VOLUMETRIC_LIGHTING_PRESET (0)
#endif

7
ScriptableRenderPipeline/HDRenderPipeline/HDRP/ShaderVariables.hlsl


float3 unity_ShadowColor;
uint _TaaFrameIndex; // [0, 7]
// Volumetric lighting. Should be a struct in 'UnityPerFrame'.
// Unfortunately, structures inside constant buffers are not supported by Unity.
float3 _GlobalFog_Scattering;
float _GlobalFog_Extinction;
float4 _VBufferResolution; // { w, h, 1/w, 1/h }
float4 _VBufferScaleAndSliceCount; // { fracVisW, fracVisH, count, 1/count }
float4 _VBufferDepthEncodingParams; // { n, log2(f/n), 1/n, 1/log2(f/n) }
CBUFFER_END
// ----------------------------------------------------------------------------

34
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Sky/AtmosphericScattering/AtmosphericScattering.hlsl


#define UNITY_ATMOSPHERIC_SCATTERING_INCLUDED
#include "CoreRP/ShaderLibrary/VolumeRendering.hlsl"
#include "CoreRP/ShaderLibrary/Filtering.hlsl"
#include "../../Lighting/VBuffer.hlsl"
#if (SHADEROPTIONS_VOLUMETRIC_LIGHTING_PRESET != 0)
TEXTURE3D(_VBufferLighting);
#endif
CBUFFER_START(AtmosphericScattering)
float _AtmosphericScatteringType;

// Returns fog color in rgb and fog factor in alpha.
float4 EvaluateAtmosphericScattering(PositionInputs posInput)
{
float3 fogColor = 0;
float fogFactor = 0;
#if (SHADEROPTIONS_VOLUMETRIC_LIGHTING_PRESET != 0)
float4 volFog = SampleInScatteredRadianceAndTransmittance(TEXTURE3D_PARAM(_VBufferLighting, s_trilinear_clamp_sampler),
posInput.positionNDC, posInput.linearDepth,
_VBufferResolution, _VBufferScaleAndSliceCount,
_VBufferDepthEncodingParams);
fogColor = volFog.rgb;
fogFactor = 1 - volFog.a;
#else
float3 fogColor = GetFogColor(posInput);
float fogFactor = _FogDensity * (1.0f - Transmittance(OpticalDepthHomogeneous(1.0f / _ExpFogDistance, posInput.linearDepth)));
return float4(fogColor, fogFactor);
fogColor = GetFogColor(posInput);
fogFactor = _FogDensity * (1.0f - TransmittanceHomogeneousMedium(1.0f / _ExpFogDistance, posInput.linearDepth));
float3 fogColor = GetFogColor(posInput);
float fogFactor = _FogDensity * saturate((posInput.linearDepth - _LinearFogStart) * _LinearFogOneOverRange);
return float4(fogColor, fogFactor);
fogColor = GetFogColor(posInput);
fogFactor = _FogDensity * saturate((posInput.linearDepth - _LinearFogStart) * _LinearFogOneOverRange);
return float4(0.0, 0.0, 0.0, 0.0);
#endif
return float4(fogColor, fogFactor);
#endif

40
ScriptableRenderPipeline/Core/CoreRP/ShaderLibrary/Filtering.hlsl


#ifndef UNITY_FILTERING_INCLUDED
#define UNITY_FILTERING_INCLUDED
// Cardinal (interpolating) B-Spline of the 2nd degree (3rd order). Support = 3x3.
// The fractional coordinate of each part is assumed to be in the [0, 1] range (centered on 0.5).
// https://www.desmos.com/calculator/47j9r9lolm
real2 BSpline2IntLeft(real2 x)
{
return 0.5 * x * x;
}
real2 BSpline2IntMiddle(real2 x)
{
return (1 - x) * x + 0.5;
}
real2 BSpline2IntRight(real2 x)
{
return (0.5 * x - 1) * x + 0.5;
}
// Compute weights & offsets for 4x bilinear taps for the biquadratic B-Spline filter.
// The fractional coordinate should be in the [0, 1] range (centered on 0.5).
// Inspired by: http://vec3.ca/bicubic-filtering-in-fewer-taps/
void BiquadraticFilter(real2 fracCoord, out real2 weights[2], out real2 offsets[2])
{
real2 l = BSpline2IntLeft(fracCoord);
real2 m = BSpline2IntMiddle(fracCoord);
real2 r = 1 - l - m;
// Compute offsets for 4x bilinear taps for the quadratic B-Spline reconstruction kernel.
// 0: lerp between left and middle
// 1: lerp between middle and right
weights[0] = l + 0.5 * m;
weights[1] = r + 0.5 * m;
offsets[0] = -0.5 + 0.5 * m * rcp(weights[0]);
offsets[1] = 0.5 + r * rcp(weights[1]);
}
#endif // UNITY_FILTERING_INCLUDED

9
ScriptableRenderPipeline/Core/CoreRP/ShaderLibrary/Filtering.hlsl.meta


fileFormatVersion: 2
guid: 54ca0b3b7814f804aac1450b38477c74
ShaderImporter:
externalObjects: {}
defaultTextures: []
nonModifiableTextures: []
userData:
assetBundleName:
assetBundleVariant:

113
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/VBuffer.hlsl


#ifndef UNITY_VBUFFER_INCLUDED
#define UNITY_VBUFFER_INCLUDED
// Interpolation in the log space is non-linear.
// Therefore, given 'logEncodedDepth', we compute a new depth value
// which allows us to perform HW interpolation which is linear in the view space.
float ComputeLerpPositionForLogEncoding(float linearDepth, float logEncodedDepth,
float4 VBufferScaleAndSliceCount,
float4 VBufferDepthEncodingParams)
{
float z = linearDepth;
float d = logEncodedDepth;
float numSlices = VBufferScaleAndSliceCount.z;
float rcpNumSlices = VBufferScaleAndSliceCount.w;
// Indices of the two slices whose centers bracket 'd' along Z.
float s0 = floor(d * numSlices - 0.5);
float s1 = ceil(d * numSlices - 0.5);
// Log-encoded depths at the centers of those two slices.
float d0 = saturate(s0 * rcpNumSlices + (0.5 * rcpNumSlices));
float d1 = saturate(s1 * rcpNumSlices + (0.5 * rcpNumSlices));
// View-space depths at the slice centers.
float z0 = DecodeLogarithmicDepth(d0, VBufferDepthEncodingParams);
float z1 = DecodeLogarithmicDepth(d1, VBufferDepthEncodingParams);
// Compute the linear interpolation weight.
// NOTE(review): if s0 == s1 (d exactly at a slice center), (z1 - z0) is 0 and the
// division produces NaN; saturate() does not clamp NaN - confirm this cannot occur.
float t = saturate((z - z0) / (z1 - z0));
// Remap so that HW lerp between the two slice centers is linear in view-space Z.
return d0 + t * rcpNumSlices;
}
// Performs trilinear reconstruction of the V-Buffer.
// If (clampToEdge == false), out-of-bounds loads return 0.
float4 SampleVBuffer(TEXTURE3D_ARGS(VBufferLighting, trilinearSampler), bool clampToEdge,
float2 positionNDC, float linearDepth,
float4 VBufferScaleAndSliceCount,
float4 VBufferDepthEncodingParams)
{
// Removed the unused 'numSlices'/'rcpNumSlices' locals (they were never referenced).
// Account for the visible area of the V-Buffer.
float2 uv = positionNDC * VBufferScaleAndSliceCount.xy;
// The distance between slices is log-encoded.
float z = linearDepth;
float d = EncodeLogarithmicDepth(z, VBufferDepthEncodingParams);
// Unity doesn't support samplers clamping to border, so we have to do it ourselves.
// TODO: add the proper sampler support.
bool isInBounds = Min3(uv.x, uv.y, d) > 0 && Max3(uv.x, uv.y, d) < 1;
[branch] if (clampToEdge || isInBounds)
{
#if 0
// We could ignore non-linearity at the cost of accuracy.
// TODO: visually test this option (especially in motion).
float w = d;
#else
// Adjust the texture coordinate for HW trilinear sampling.
float w = ComputeLerpPositionForLogEncoding(z, d, VBufferScaleAndSliceCount, VBufferDepthEncodingParams);
#endif
return SAMPLE_TEXTURE3D_LOD(VBufferLighting, trilinearSampler, float3(uv, w), 0);
}
else
{
// Simulate a border color of 0 for out-of-bounds loads.
return 0;
}
}
// Returns interpolated {volumetric radiance, transmittance}. The sampler clamps to edge.
float4 SampleInScatteredRadianceAndTransmittance(TEXTURE3D_ARGS(VBufferLighting, trilinearSampler),
float2 positionNDC, float linearDepth,
float4 VBufferResolution,
float4 VBufferScaleAndSliceCount,
float4 VBufferDepthEncodingParams)
{
#ifdef RECONSTRUCTION_FILTER_TRILINEAR
// Cheap path: plain HW trilinear tap.
float4 L = SampleVBuffer(TEXTURE3D_PARAM(VBufferLighting, trilinearSampler), true,
positionNDC, linearDepth,
VBufferScaleAndSliceCount, VBufferDepthEncodingParams);
#else
// Perform biquadratic reconstruction in XY, linear in Z, using 4x trilinear taps.
// Account for the visible area of the V-Buffer.
float2 xy = positionNDC * (VBufferResolution.xy * VBufferScaleAndSliceCount.xy);
float2 ic = floor(xy);
float2 fc = frac(xy);
// The distance between slices is log-encoded.
float z = linearDepth;
float d = EncodeLogarithmicDepth(z, VBufferDepthEncodingParams);
// Adjust the texture coordinate for HW trilinear sampling.
float w = ComputeLerpPositionForLogEncoding(z, d, VBufferScaleAndSliceCount, VBufferDepthEncodingParams);
float2 weights[2], offsets[2];
BiquadraticFilter(1 - fc, weights, offsets); // Inverse-translate the filter centered around 0.5
float2 rcpRes = VBufferResolution.zw;
// TODO: reconstruction should be performed in the perceptual space (i.e., after tone mapping).
// But our VBuffer is linear. How to achieve that?
// See "A Fresh Look at Generalized Sampling", p. 51.
float4 L = (weights[0].x * weights[0].y) * SAMPLE_TEXTURE3D_LOD(VBufferLighting, trilinearSampler, float3((ic + float2(offsets[0].x, offsets[0].y)) * rcpRes, w), 0) // Top left
+ (weights[1].x * weights[0].y) * SAMPLE_TEXTURE3D_LOD(VBufferLighting, trilinearSampler, float3((ic + float2(offsets[1].x, offsets[0].y)) * rcpRes, w), 0) // Top right
+ (weights[0].x * weights[1].y) * SAMPLE_TEXTURE3D_LOD(VBufferLighting, trilinearSampler, float3((ic + float2(offsets[0].x, offsets[1].y)) * rcpRes, w), 0) // Bottom left
+ (weights[1].x * weights[1].y) * SAMPLE_TEXTURE3D_LOD(VBufferLighting, trilinearSampler, float3((ic + float2(offsets[1].x, offsets[1].y)) * rcpRes, w), 0); // Bottom right
#endif
// TODO: add some animated noise to the reconstructed radiance.
// The alpha channel stores the optical depth; convert it to a transmittance here.
return float4(L.rgb, Transmittance(L.a));
}
#endif // UNITY_VBUFFER_INCLUDED

9
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/VBuffer.hlsl.meta


fileFormatVersion: 2
guid: a9a6d43965a406e43b8b3c3851981e2e
ShaderImporter:
externalObjects: {}
defaultTextures: []
nonModifiableTextures: []
userData:
assetBundleName:
assetBundleVariant:

10
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics.meta


fileFormatVersion: 2
guid: 1fe4fc72895e4bb4f90ff44b47e76051
folderAsset: yes
timeCreated: 1503411233
licenseType: Pro
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

13
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/HomogeneousFog.cs.meta


fileFormatVersion: 2
guid: 8f608e240d5376341bcef2478d231457
timeCreated: 1503411233
licenseType: Pro
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

10
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/Resources.meta


fileFormatVersion: 2
guid: 333b470add5766f44a744f476efc19a8
folderAsset: yes
timeCreated: 1503591964
licenseType: Pro
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

10
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/VolumetricLighting.cs.hlsl.meta


fileFormatVersion: 2
guid: d5c6a48928753954e8b1a84bbde280ec
timeCreated: 1504275050
licenseType: Pro
ShaderImporter:
externalObjects: {}
defaultTextures: []
userData:
assetBundleName:
assetBundleVariant:

13
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/VolumetricLighting.cs.meta


fileFormatVersion: 2
guid: 3090aceb9ee51fc4f9d9830cfef9684c
timeCreated: 1504273866
licenseType: Pro
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

28
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/VolumetricLighting.cs.hlsl


//
// This file was automatically generated. Please don't edit by hand.
//
#ifndef VOLUMETRICLIGHTING_CS_HLSL
#define VOLUMETRICLIGHTING_CS_HLSL
// Generated from UnityEngine.Experimental.Rendering.HDPipeline.VolumeProperties
// PackingRules = Exact
// NOTE: this file is auto-generated from VolumetricLighting.cs; edits will be overwritten.
struct VolumeProperties
{
float3 scattering;
float extinction;
};
//
// Accessors for UnityEngine.Experimental.Rendering.HDPipeline.VolumeProperties
//
float3 GetScattering(VolumeProperties value)
{
return value.scattering;
}
float GetExtinction(VolumeProperties value)
{
return value.extinction;
}
#endif

10
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/Resources/VolumetricLighting.compute.meta


fileFormatVersion: 2
guid: 799166e2ee6a4b041bba9e74f6942097
timeCreated: 1503570390
licenseType: Pro
ComputeShaderImporter:
externalObjects: {}
currentAPIMask: 4
userData:
assetBundleName:
assetBundleVariant:

493
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/Resources/VolumetricLighting.compute


//--------------------------------------------------------------------------------------------------
// Definitions
//--------------------------------------------------------------------------------------------------
#pragma kernel VolumetricLightingAllLights VolumetricLighting=VolumetricLightingAllLights ENABLE_REPROJECTION=0 LIGHTLOOP_SINGLE_PASS
#pragma kernel VolumetricLightingAllLightsReproj VolumetricLighting=VolumetricLightingAllLightsReproj ENABLE_REPROJECTION=1 LIGHTLOOP_SINGLE_PASS
#pragma kernel VolumetricLightingClustered VolumetricLighting=VolumetricLightingClustered ENABLE_REPROJECTION=0 LIGHTLOOP_TILE_PASS USE_CLUSTERED_LIGHTLIST
#pragma kernel VolumetricLightingClusteredReproj VolumetricLighting=VolumetricLightingClusteredReproj ENABLE_REPROJECTION=1 LIGHTLOOP_TILE_PASS USE_CLUSTERED_LIGHTLIST
#pragma enable_d3d11_debug_symbols
#define DEBUG_REPROJECTION 0
#include "../../../ShaderPass/ShaderPass.cs.hlsl"
#define SHADERPASS SHADERPASS_VOLUMETRIC_LIGHTING
#include "../../../ShaderConfig.cs.hlsl"
#if (SHADEROPTIONS_VOLUMETRIC_LIGHTING_PRESET == 1)
// E.g. for 1080p: (1920/8)x(1080/8)x(128) = 4,147,200 voxels
#define VBUFFER_TILE_SIZE 8
#define VBUFFER_SLICE_COUNT 128
#else
// E.g. for 1080p: (1920/4)x(1080/4)x(256) = 33,177,600 voxels
#define VBUFFER_TILE_SIZE 4
#define VBUFFER_SLICE_COUNT 256
#endif
#define GROUP_SIZE_1D 16
#define GROUP_SIZE_2D (GROUP_SIZE_1D * GROUP_SIZE_1D)
//--------------------------------------------------------------------------------------------------
// Included headers
//--------------------------------------------------------------------------------------------------
#include "CoreRP/ShaderLibrary/Common.hlsl"
#include "CoreRP/ShaderLibrary/Filtering.hlsl"
#include "CoreRP/ShaderLibrary/VolumeRendering.hlsl"
#include "CoreRP/ShaderLibrary/SpaceFillingCurves.hlsl"
#include "../VolumetricLighting.cs.hlsl"
#include "../../../ShaderVariables.hlsl"
#define UNITY_MATERIAL_VOLUMETRIC // Define before including Lighting.hlsl and Material.hlsl
#include "../../../Lighting/Lighting.hlsl" // Includes Material.hlsl
#include "../../../Lighting/LightEvaluation.hlsl"
#include "../../../Lighting/VBuffer.hlsl"
//--------------------------------------------------------------------------------------------------
// Inputs & outputs
//--------------------------------------------------------------------------------------------------
RW_TEXTURE3D(float4, _VBufferLightingIntegral); // RGB = radiance, A = optical depth
RW_TEXTURE3D(float4, _VBufferLightingFeedback); // RGB = radiance, A = interval length
TEXTURE3D(_VBufferLightingHistory); // RGB = radiance, A = interval length
// TODO: avoid creating another Constant Buffer...
CBUFFER_START(UnityVolumetricLighting)
float4 _VBufferSampleOffset; // {x, y, z}, w = rendered frame count
float4x4 _VBufferCoordToViewDirWS; // Actually just 3x3, but Unity can only set 4x4
CBUFFER_END
//--------------------------------------------------------------------------------------------------
// Implementation
//--------------------------------------------------------------------------------------------------
struct Ray
{
float3 originWS;
float3 directionWS; // Normalized, stratified
float ratioLenToZ; // 1 / ViewSpaceZ
float3 centerDirWS; // Not normalized, centered
};
// Returns the world-space position at distance 't' along the (normalized) stratified ray direction.
float3 GetPointAtDistance(Ray ray, float t)
{
return ray.originWS + t * ray.directionWS;
}
// Returns the world-space position at parameter 't' along the (non-normalized) centered direction.
float3 GetCenterAtDistance(Ray ray, float t)
{
return ray.originWS + t * ray.centerDirWS;
}
// Computes the light integral (in-scattered radiance) within the voxel.
// Multiplication by the scattering coefficient and the phase function is performed outside.
float3 EvaluateVoxelLighting(LightLoopContext context, uint featureFlags, PositionInputs posInput,
Ray ray, float t0, float t1, float dt, float rndVal, float extinction
#ifdef LIGHTLOOP_TILE_PASS
, uint clusterIndices[2], float clusterDepths[2])
#else
)
#endif
{
float3 voxelRadiance = 0;
BakeLightingData unused; // Unused for now, so define once
if (featureFlags & LIGHTFEATUREFLAGS_DIRECTIONAL)
{
// A single stochastic sample position is shared by all directional lights.
float tOffset, weight;
ImportanceSampleHomogeneousMedium(rndVal, extinction, dt, tOffset, weight);
float t = t0 + tOffset;
posInput.positionWS = GetPointAtDistance(ray, t);
for (uint i = 0; i < _DirectionalLightCount; ++i)
{
// Fetch the light.
DirectionalLightData light = _DirectionalLightDatas[i];
float3 L = -light.forward; // Lights point backwards in Unity
float3 color; float attenuation;
EvaluateLight_Directional(context, posInput, light, unused, 0, L,
color, attenuation);
// Note: the 'weight' accounts for transmittance from 't0' to 't'.
float intensity = attenuation * weight;
// Compute the amount of in-scattered radiance.
voxelRadiance += intensity * color;
}
}
#ifdef LIGHTLOOP_TILE_PASS
// Loop over 1 or 2 light clusters.
int cluster = 0;
do
{
float tMin = max(t0, ray.ratioLenToZ * clusterDepths[cluster]);
float tMax = t1;
if (cluster == 0 && (clusterIndices[0] != clusterIndices[1]))
{
tMax = min(t1, ray.ratioLenToZ * clusterDepths[1]);
}
#else
float tMin = t0;
float tMax = t1;
#endif // LIGHTLOOP_TILE_PASS
if (featureFlags & LIGHTFEATUREFLAGS_PUNCTUAL)
{
uint lightCount, lightStart;
#ifdef LIGHTLOOP_TILE_PASS
GetCountAndStartCluster(posInput.tileCoord, clusterIndices[cluster], LIGHTCATEGORY_PUNCTUAL,
lightStart, lightCount);
#else
lightCount = _PunctualLightCount;
lightStart = 0;
#endif // LIGHTLOOP_TILE_PASS
if (lightCount > 0)
{
LightData light = FetchLight(lightStart, 0);
uint i = 0, last = lightCount - 1;
// Box lights require special handling (see the next while loop).
while (i <= last && light.lightType != GPULIGHTTYPE_PROJECTOR_BOX)
{
float tEntr = tMin;
float tExit = tMax;
bool sampleLight = true;
// Perform ray-cone intersection for pyramid and spot lights.
if (light.lightType != GPULIGHTTYPE_POINT)
{
float lenMul = 1;
if (light.lightType == GPULIGHTTYPE_PROJECTOR_PYRAMID)
{
// 'light.right' and 'light.up' vectors are pre-scaled on the CPU
// s.t. if you were to place them at the distance of 1 directly in front
// of the light, they would give you the "footprint" of the light.
// For spot lights, the cone fit is exact.
// For pyramid lights, however, this is the "inscribed" cone
// (contained within the pyramid), and we want to intersect
// the "escribed" cone (which contains the pyramid).
// Therefore, we have to scale the radii by the sqrt(2).
lenMul = rsqrt(2);
}
float3 coneAxisX = lenMul * light.right;
float3 coneAxisY = lenMul * light.up;
sampleLight = IntersectRayCone(ray.originWS, ray.directionWS,
light.positionWS, light.forward,
coneAxisX, coneAxisY,
tMin, tMax, tEntr, tExit);
}
if (sampleLight)
{
// We are unable to adequately sample features larger
// than the half of the length of the integration interval
// divided by the number of temporal samples (7).
// Therefore, we apply this hack to reduce flickering.
float hackMinDistSq = Sq(dt * (0.5 / 7));
float t, distSq, rcpPdf;
ImportanceSamplePunctualLight(rndVal, light.positionWS,
ray.originWS, ray.directionWS,
tEntr, tExit, t, distSq, rcpPdf,
hackMinDistSq);
posInput.positionWS = GetPointAtDistance(ray, t);
float3 lightToSample = posInput.positionWS - light.positionWS;
float dist = sqrt(distSq);
float3 L = -lightToSample * rsqrt(distSq);
float3 color; float attenuation;
EvaluateLight_Punctual(context, posInput, light, unused, 0, L, dist, distSq,
color, attenuation);
float intensity = attenuation * rcpPdf;
// Compute transmittance from 't0' to 't'.
intensity *= TransmittanceHomogeneousMedium(extinction, t - t0);
// Compute the amount of in-scattered radiance.
voxelRadiance += color * intensity;
}
light = FetchLight(lightStart, min(++i, last));
}
while (i <= last) // GPULIGHTTYPE_PROJECTOR_BOX
{
// Bug fix: evaluate the light we currently hold *before* fetching the next one.
// The previous version fetched first, which skipped the first box light (already
// loaded by the loop above) and evaluated the last box light twice.
// Convert the box light from OBB to AABB.
// 'light.right' and 'light.up' vectors are pre-scaled on the CPU by (2/w) and (2/h).
float3x3 rotMat = float3x3(light.right, light.up, light.forward);
float3 o = mul(rotMat, ray.originWS - light.positionWS);
float3 d = mul(rotMat, ray.directionWS);
float range = light.size.x;
float3 boxPt0 = float3(-1, -1, 0);
float3 boxPt1 = float3( 1, 1, range);
float tEntr, tExit;
if (IntersectRayAABB(o, d, boxPt0, boxPt1, tMin, tMax, tEntr, tExit))
{
float tOffset, weight;
ImportanceSampleHomogeneousMedium(rndVal, extinction, tExit - tEntr, tOffset, weight);
float t = tEntr + tOffset;
posInput.positionWS = GetPointAtDistance(ray, t);
float3 L = -light.forward;
float3 color; float attenuation;
EvaluateLight_Punctual(context, posInput, light, unused, 0, L, 1, 1,
color, attenuation);
// Note: the 'weight' accounts for transmittance from 'tEntr' to 't'.
float intensity = attenuation * weight;
// Compute transmittance from 't0' to 'tEntr'.
intensity *= TransmittanceHomogeneousMedium(extinction, tEntr - t0);
// Compute the amount of in-scattered radiance.
voxelRadiance += intensity * color;
}
// Advance to the next light. The index is clamped, so the last light may be
// re-fetched, but the loop condition terminates before it is evaluated again.
light = FetchLight(lightStart, min(++i, last));
light.lightType = GPULIGHTTYPE_PROJECTOR_BOX;
}
}
}
#ifdef LIGHTLOOP_TILE_PASS
cluster++;
// Check whether the voxel is completely inside the light cluster.
} while ((cluster < 2) && (clusterIndices[0] != clusterIndices[1]));
#endif // LIGHTLOOP_TILE_PASS
return voxelRadiance;
}
// Computes the in-scattered radiance along the ray and writes one voxel per Z-slice
// into _VBufferLightingIntegral (and, with reprojection, _VBufferLightingFeedback).
void FillVolumetricLightingBuffer(LightLoopContext context, uint featureFlags,
PositionInputs posInput, Ray ray)
{
float z0 = _VBufferDepthEncodingParams.x; // Start integration from the near plane
float t0 = ray.ratioLenToZ * z0;
float de = rcp(VBUFFER_SLICE_COUNT); // Log-encoded distance between slices
float3 totalRadiance = 0;
float opticalDepth = 0;
uint sliceCountHack = max(VBUFFER_SLICE_COUNT, (uint)_VBufferDepthEncodingParams.x); // Prevent unrolling...
#ifdef LIGHTLOOP_TILE_PASS
// Our voxel is not necessarily completely inside a single light cluster.
// Note that Z-binning can solve this problem, as we can iterate over all Z-bins
// to compute min/max light indices, and then use this range for the entire slice.
uint clusterIndices[2];
float clusterDepths[2];
clusterIndices[0] = GetLightClusterIndex(posInput.tileCoord, z0);
clusterDepths[0] = GetLightClusterMinLinearDepth(posInput.tileCoord, clusterIndices[0]);
#endif // LIGHTLOOP_TILE_PASS
// TODO: replace 'sliceCountHack' with VBUFFER_SLICE_COUNT when the shader compiler bug is fixed.
for (uint slice = 0; slice < sliceCountHack; slice++)
{
float e1 = slice * de + de; // (slice + 1) / sliceCount
float z1 = DecodeLogarithmicDepth(e1, _VBufferDepthEncodingParams);
float t1 = ray.ratioLenToZ * z1;
float dt = t1 - t0;
#ifdef LIGHTLOOP_TILE_PASS
clusterIndices[1] = GetLightClusterIndex(posInput.tileCoord, z1);
clusterDepths[1] = GetLightClusterMinLinearDepth(posInput.tileCoord, clusterIndices[1]);
#endif
// Compute the -exact- position of the center of the voxel.
// It's important since the accumulated value of the integral is stored at the center.
// We will use it for participating media sampling and reprojection.
float tc = t0 + 0.5 * dt;
float3 centerWS = GetCenterAtDistance(ray, tc);
// Sample the participating medium at 'tc' (or 'centerWS').
// We consider it to be constant along the interval [t0, t1] (within the voxel).
// TODO: piecewise linear.
float3 scattering = _GlobalFog_Scattering;
float extinction = _GlobalFog_Extinction;
#if ENABLE_REPROJECTION
// This is a sequence of 7 equidistant numbers from 1/14 to 13/14.
// Each of them is the centroid of the interval of length 2/14.
float rndVal = _VBufferSampleOffset.z;
#else
float rndVal = 0.5;
#endif
float3 voxelRadiance = EvaluateVoxelLighting(context, featureFlags, posInput,
ray, t0, t1, dt, rndVal, extinction
#ifdef LIGHTLOOP_TILE_PASS
, clusterIndices, clusterDepths);
#else
);
#endif
#if ENABLE_REPROJECTION
// Reproject the history at 'centerWS'.
float2 reprojPosNDC = ComputeNormalizedDeviceCoordinates(centerWS, _PrevViewProjMatrix);
float reprojZ = mul(_PrevViewProjMatrix, float4(centerWS, 1)).w;
float4 reprojValue = SampleVBuffer(TEXTURE3D_PARAM(_VBufferLightingHistory, s_trilinear_clamp_sampler),
false, reprojPosNDC, reprojZ,
_VBufferScaleAndSliceCount,
_VBufferDepthEncodingParams);
// Compute the exponential moving average over 'n' frames:
// X = (1 - a) * ValueAtFrame[n] + a * AverageOverPreviousFrames.
// We want each sample to be uniformly weighted by (1 / n):
// X = (1 / n) * Sum{i from 1 to n}{ValueAtFrame[i]}.
// Therefore, we get:
// (1 - a) = (1 / n) => a = (1 - 1 / n) = (n - 1) / n,
// X = (1 / n) * ValueAtFrame[n] + (1 - 1 / n) * AverageOverPreviousFrames.
// Why does it work? We need to make the following assumption:
// AverageOverPreviousFrames ≈ AverageOverFrames[n - 1].
// AverageOverFrames[n - 1] = (1 / (n - 1)) * Sum{i from 1 to n - 1}{ValueAtFrame[i]}.
// This implies that the reprojected (accumulated) value has mostly converged.
// X = (1 / n) * ValueAtFrame[n] + ((n - 1) / n) * (1 / (n - 1)) * Sum{i from 1 to n - 1}{ValueAtFrame[i]}.
// X = (1 / n) * ValueAtFrame[n] + (1 / n) * Sum{i from 1 to n - 1}{ValueAtFrame[i]}.
// X = Sum{i from 1 to n}{ValueAtFrame[i] / n}.
float numFrames = 7;
float frameWeight = 1 / numFrames;
float historyWeight = 1 - frameWeight;
// The accuracy of the integral linearly decreases with the length of the interval.
// Therefore, reprojecting longer intervals should result in a lower confidence.
// TODO: doesn't seem to be worth it, removed for now.
// Perform temporal blending.
// Both radiance values are obtained by integrating over line segments of different length.
// Blending only makes sense if the length of both intervals is the same.
// Therefore, the reprojected radiance needs to be rescaled by (frame_dt / reproj_dt).
bool reprojSuccess = reprojValue.a != 0;
float blendFactor = reprojSuccess ? historyWeight : 0;
float reprojRcpLen = reprojSuccess ? rcp(reprojValue.a) : 0;
float lengthScale = dt * reprojRcpLen;
float3 reprojRadiance = reprojValue.rgb;
float3 blendedRadiance = (1 - blendFactor) * voxelRadiance + blendFactor * lengthScale * reprojRadiance;
// Store the feedback for the voxel.
// TODO: dynamic lights (which update their position, rotation, cookie or shadow at runtime)
// do not support reprojection and should neither read nor write to the history buffer.
// to the history buffer. This will cause them to alias, but it is the only way
// to prevent ghosting.
_VBufferLightingFeedback[uint3(posInput.positionSS, slice)] = float4(blendedRadiance, dt);
#else
float3 blendedRadiance = voxelRadiance;
#endif
// Bug fix: 'reprojValue' only exists when ENABLE_REPROJECTION is 1, so the debug
// visualization must be compiled out for the non-reprojecting kernel variants.
#if DEBUG_REPROJECTION && ENABLE_REPROJECTION
if (distance(voxelRadiance, reprojValue.rgb) > 0.1) blendedRadiance = float3(1000, 0, 0);
#endif
// Compute the transmittance from the camera to 't0'.
float transmittance = Transmittance(opticalDepth);
// Integral{a, b}{Transmittance(0, t) * L_s(t) dt} = Transmittance(0, a) * Integral{a, b}{Transmittance(0, t - a) * L_s(t) dt}.
totalRadiance += (transmittance * IsotropicPhaseFunction()) * scattering * blendedRadiance;
// Compute the optical depth up to the center of the interval.
opticalDepth += 0.5 * extinction * dt;
// Store the voxel data.
_VBufferLightingIntegral[uint3(posInput.positionSS, slice)] = float4(totalRadiance, opticalDepth);
// Compute the optical depth up to the end of the interval.
opticalDepth += 0.5 * extinction * dt;
t0 = t1;
#ifdef LIGHTLOOP_TILE_PASS
clusterIndices[0] = clusterIndices[1];
clusterDepths[0] = clusterDepths[1];
#endif // LIGHTLOOP_TILE_PASS
}
}
// Compute kernel entry point: one thread per (x, y) voxel column of the V-Buffer.
[numthreads(GROUP_SIZE_2D, 1, 1)]
void VolumetricLighting(uint2 groupId : SV_GroupID,
uint groupThreadId : SV_GroupThreadID)
{
// Perform compile-time checks.
if (!IsPower2(VBUFFER_TILE_SIZE) || !IsPower2(TILE_SIZE_CLUSTERED)) return;
// Note: any factor of 64 is a suitable wave size for our algorithm.
uint waveIndex = WaveReadFirstLane(groupThreadId / 64);
uint laneIndex = groupThreadId % 64;
uint quadIndex = laneIndex / 4;
// NOTE(review): 'waveIndex' and 'quadIndex' are computed but never used below;
// presumably kept for future use - confirm or remove.
// Arrange threads in the Morton order to optimally match the memory layout of GCN tiles.
uint2 groupCoord = DecodeMorton2D(groupThreadId);
uint2 groupOffset = groupId * GROUP_SIZE_1D;
uint2 voxelCoord = groupOffset + groupCoord;
// Map the voxel onto the (possibly coarser) clustered-lighting tile containing it.
uint2 tileCoord = voxelCoord * VBUFFER_TILE_SIZE / TILE_SIZE_CLUSTERED;
uint voxelsPerClusterTile = Sq((uint)(TILE_SIZE_CLUSTERED / VBUFFER_TILE_SIZE));
if (voxelsPerClusterTile >= 64)
{
// TODO: this is a compile-time test, make sure the compiler actually scalarizes.
tileCoord = WaveReadFirstLane(tileCoord);
}
// Skip voxels outside of the viewport.
[branch] if (voxelCoord.x >= (uint)_VBufferResolution.x ||
voxelCoord.y >= (uint)_VBufferResolution.y)
{
return;
}
float2 centerCoord = voxelCoord + 0.5;
#if ENABLE_REPROJECTION
// Jitter the sample position for temporal super-sampling.
float2 sampleCoord = centerCoord + _VBufferSampleOffset.xy;
#else
float2 sampleCoord = centerCoord;
#endif
// Compute the (stratified) ray direction s.t. its ViewSpaceZ = 1.
float3 rayDir = mul(-float3(sampleCoord, 1), (float3x3)_VBufferCoordToViewDirWS);
float lenSq = dot(rayDir, rayDir);
float lenRcp = rsqrt(lenSq);
float len = lenSq * lenRcp;
#if ENABLE_REPROJECTION
// Compute the ray direction which passes through the center of the voxel s.t. its ViewSpaceZ = 1.
float3 rayCenterDir = mul(-float3(centerCoord, 1), (float3x3)_VBufferCoordToViewDirWS);
#else
float3 rayCenterDir = rayDir;
#endif
Ray ray;
ray.originWS = GetCurrentViewPosition();
ray.ratioLenToZ = len; // len / viewSpaceZ, with viewSpaceZ = 1 by construction
ray.directionWS = rayDir * lenRcp;
ray.centerDirWS = rayCenterDir * lenRcp;
// TODO
LightLoopContext context;
context.shadowContext = InitShadowContext();
uint featureFlags = 0xFFFFFFFF;
PositionInputs posInput = GetPositionInput(voxelCoord, _VBufferResolution.zw, tileCoord);
FillVolumetricLightingBuffer(context, featureFlags, posInput, ray);
}

38
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/HomogeneousFog.cs


namespace UnityEngine.Experimental.Rendering.HDPipeline
{
// Scene component describing a homogeneous participating medium.
// A volume with infinite bounds acts as the global fog (see GetGlobalFogComponent).
[ExecuteInEditMode]
[AddComponentMenu("RenderPipeline/High Definition/Homogenous Fog", -1)] // NOTE(review): "Homogenous" is a typo of "Homogeneous"; fixing it changes the user-facing menu path.
public class HomogeneousFog : MonoBehaviour
{
// Scattering parameters of the medium; clamped to valid ranges in OnValidate().
public VolumeParameters volumeParameters = new VolumeParameters();
// The empty Unity event methods below appear intentional - presumably kept so the
// component exposes the 'enabled' checkbox in the Inspector (GetGlobalFogComponent
// checks 'enabled') - TODO confirm before removing.
private void Awake()
{
}
private void OnEnable()
{
}
private void OnDisable()
{
}
private void Update()
{
}
private void OnValidate()
{
// Keep the serialized parameters within their documented ranges.
volumeParameters.Constrain();
}
void OnDrawGizmos()
{
// Only bounded volumes have a meaningful gizmo to draw.
if (volumeParameters != null && !volumeParameters.IsVolumeUnbounded())
{
Gizmos.DrawWireCube(volumeParameters.bounds.center, volumeParameters.bounds.size);
}
}
}
} // UnityEngine.Experimental.Rendering.HDPipeline

499
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/VolumetricLighting.cs


using System;
using UnityEngine.Rendering;
namespace UnityEngine.Experimental.Rendering.HDPipeline
{
// GPU-facing representation of a participating medium (mirrored to HLSL via [GenerateHLSL]).
[GenerateHLSL]
public struct VolumeProperties
{
public Vector3 scattering; // Scattering coefficient in m^-1: albedo * extinction (see VolumeParameters.GetScatteringCoefficient)
public float extinction; // Extinction coefficient in m^-1: 1 / meanFreePath (see VolumeParameters.GetExtinctionCoefficient)
// Returns the properties of a vacuum: no scattering, no absorption.
public static VolumeProperties GetNeutralVolumeProperties()
{
VolumeProperties properties = new VolumeProperties();
properties.scattering = Vector3.zero;
properties.extinction = 0;
return properties;
}
} // struct VolumeProperties
// Artist-facing description of a participating medium; converted to coefficients
// for the GPU via GetProperties().
[Serializable]
public class VolumeParameters
{
public Bounds bounds; // Position and dimensions in meters
public Color albedo; // Single scattering albedo [0, 1]
public float meanFreePath; // In meters [1, inf]. Should be chromatic - this is an optimization!
public VolumeParameters()
{
// Default: an unbounded, mid-gray medium with a 10 m mean free path.
bounds = new Bounds(Vector3.zero, Vector3.positiveInfinity);
albedo = new Color(0.5f, 0.5f, 0.5f);
meanFreePath = 10.0f;
}
// An unbounded volume (infinite on all three axes) acts as global fog.
public bool IsVolumeUnbounded()
{
Vector3 size = bounds.size;
return float.IsPositiveInfinity(size.x) &&
float.IsPositiveInfinity(size.y) &&
float.IsPositiveInfinity(size.z);
}
// Absorption = max(extinction - scattering, 0), per channel, in m^-1.
public Vector3 GetAbsorptionCoefficient()
{
Vector3 extinction3 = GetExtinctionCoefficient() * Vector3.one;
return Vector3.Max(extinction3 - GetScatteringCoefficient(), Vector3.zero);
}
// Scattering = albedo * extinction, per channel, in m^-1.
public Vector3 GetScatteringCoefficient()
{
float k = GetExtinctionCoefficient();
return new Vector3(albedo.r * k, albedo.g * k, albedo.b * k);
}
// Extinction = 1 / meanFreePath, in m^-1.
public float GetExtinctionCoefficient()
{
return 1.0f / meanFreePath;
}
// Clamps all serialized parameters to their documented ranges.
public void Constrain()
{
bounds.size = Vector3.Max(bounds.size, Vector3.zero);
albedo.r = Mathf.Clamp01(albedo.r);
albedo.g = Mathf.Clamp01(albedo.g);
albedo.b = Mathf.Clamp01(albedo.b);
meanFreePath = Mathf.Max(meanFreePath, 1.0f);
}
// Packs the derived coefficients into the GPU-facing struct.
public VolumeProperties GetProperties()
{
VolumeProperties properties = new VolumeProperties();
properties.scattering = GetScatteringCoefficient();
properties.extinction = GetExtinctionCoefficient();
return properties;
}
} // class VolumeParameters
public partial class HDRenderPipeline : RenderPipeline
{
// Quality presets; values mirror SHADEROPTIONS_VOLUMETRIC_LIGHTING_PRESET in the shader config.
public enum VolumetricLightingPreset
{
Off,
Normal, // 8x8 pixel tiles, 128 depth slices (see ComputeVBufferTileSize/SliceCount)
Ultra, // 4x4 pixel tiles, 256 depth slices
Count
};
// Clamps the shader-config value so out-of-range settings map to 'Count'.
VolumetricLightingPreset m_VolumetricLightingPreset
{ get { return (VolumetricLightingPreset)Math.Min(ShaderConfig.s_VolumetricLightingPreset, (int)VolumetricLightingPreset.Count); } }
ComputeShader m_VolumetricLightingCS { get { return m_Asset.renderPipelineResources.volumetricLightingCS; } }
float m_VBufferNearPlane = 0.5f; // Distance in meters; dynamic modifications not handled by reprojection
float m_VBufferFarPlane = 64.0f; // Distance in meters; dynamic modifications not handled by reprojection
const int k_VBufferCount = 3; // 0 and 1 - history (prev) and feedback (next), 2 - integral (curr)
RenderTexture[] m_VBufferLighting = null;
RenderTargetIdentifier[] m_VBufferLightingRT = null; // Parallel array of RT identifiers for m_VBufferLighting
int m_ViewCount = 0; // Number of views with live VBuffers; indexes m_ViewIdArray
int[] m_ViewIdArray = new int[8]; // NOTE(review): hard capacity of 8 views; CreateVBuffer only Debug.Asserts before writing past it - confirm the limit.
// Returns the index of 'viewId' within m_ViewIdArray, or -1 if the view is unknown.
int ViewOffsetFromViewId(int viewId)
{
Debug.Assert(m_ViewCount == 0 || m_ViewIdArray != null);
// Linear search is fine: the array holds at most 8 views.
for (int i = 0; i < m_ViewCount; i++)
{
if (m_ViewIdArray[i] == viewId)
{
// View ids are unique (CreateVBuffer only appends ids that are not present),
// so we can return the first match instead of scanning the whole array.
return i;
}
}
return -1; // Not found.
}
// Returns the edge length (in pixels) of the square screen tile covered by one voxel.
public static int ComputeVBufferTileSize(VolumetricLightingPreset preset)
{
if (preset == VolumetricLightingPreset.Normal)
return 8;
if (preset == VolumetricLightingPreset.Ultra)
return 4;
if (preset == VolumetricLightingPreset.Off)
return 0;
Debug.Assert(false, "Encountered an unexpected VolumetricLightingPreset.");
return 0;
}
// Returns the number of depth slices of the V-Buffer for the given preset.
public static int ComputeVBufferSliceCount(VolumetricLightingPreset preset)
{
if (preset == VolumetricLightingPreset.Normal)
return 128;
if (preset == VolumetricLightingPreset.Ultra)
return 256;
if (preset == VolumetricLightingPreset.Off)
return 0;
Debug.Assert(false, "Encountered an unexpected VolumetricLightingPreset.");
return 0;
}
// Since a single voxel corresponds to a tile (e.g. 8x8) of pixels,
// the VBuffer can potentially extend past the boundaries of the viewport.
// The function returns the fraction of the {width, height} of the VBuffer visible on screen.
// Outputs (via ref): w, h = VBuffer width/height in voxels; d = slice count.
Vector2 ComputeVBufferResolutionAndScale(float screenWidth, float screenHeight,
ref int w, ref int h, ref int d)
{
int t = ComputeVBufferTileSize(m_VolumetricLightingPreset);
// NOTE(review): for VolumetricLightingPreset.Off, 't' is 0 and the divisions below
// throw DivideByZeroException - confirm this path is unreachable with the Off preset.
// Ceil(ScreenSize / TileSize).
w = ((int)screenWidth + t - 1) / t;
h = ((int)screenHeight + t - 1) / t;
d = ComputeVBufferSliceCount(m_VolumetricLightingPreset);
return new Vector2(screenWidth / (w * t), screenHeight / (h * t));
}
// Ensures the view's VBuffer set exists and matches the current screen resolution;
// (re)creates it otherwise.
void ResizeVBuffer(int viewId, int screenWidth, int screenHeight)
{
int viewOffset = ViewOffsetFromViewId(viewId);
if (viewOffset >= 0)
{
// Found, check resolution.
int w = 0, h = 0, d = 0;
ComputeVBufferResolutionAndScale(screenWidth, screenHeight, ref w, ref h, ref d);
Debug.Assert(m_VBufferLighting != null);
Debug.Assert(m_VBufferLighting.Length >= (viewOffset + 1) * k_VBufferCount);
Debug.Assert(m_VBufferLighting[viewOffset * k_VBufferCount] != null);
// All buffers of a view share one resolution, so checking the first is sufficient.
if (w == m_VBufferLighting[viewOffset * k_VBufferCount].width &&
h == m_VBufferLighting[viewOffset * k_VBufferCount].height &&
d == m_VBufferLighting[viewOffset * k_VBufferCount].volumeDepth)
{
// Everything matches, nothing to do here.
return;
}
}
// Otherwise, we have to recreate the VBuffer.
CreateVBuffer(viewId, screenWidth, screenHeight);
}
// Allocates the k_VBufferCount 3D render textures for the view, registering the view
// id if it is new (growing the backing arrays as needed).
void CreateVBuffer(int viewId, int screenWidth, int screenHeight)
{
// Clean up first.
DestroyVBuffer(viewId);
int viewOffset = ViewOffsetFromViewId(viewId);
if (viewOffset < 0)
{
// Not found. Push back.
viewOffset = m_ViewCount++;
// NOTE(review): this assert is compiled out in release builds; exceeding 8 views
// would then throw IndexOutOfRangeException on the next line - confirm the limit.
Debug.Assert(viewOffset < 8);
m_ViewIdArray[viewOffset] = viewId;
if (m_VBufferLighting == null)
{
// Lazy initialize.
m_VBufferLighting = new RenderTexture[k_VBufferCount];
m_VBufferLightingRT = new RenderTargetIdentifier[k_VBufferCount];
}
else if (m_VBufferLighting.Length < m_ViewCount * k_VBufferCount)
{
// Grow by reallocation and copy.
RenderTexture[] newArray = new RenderTexture[m_ViewCount * k_VBufferCount];
RenderTargetIdentifier[] newArrayRT = new RenderTargetIdentifier[m_ViewCount * k_VBufferCount];
for (int i = 0, n = m_VBufferLighting.Length; i < n; i++)
{
newArray[i] = m_VBufferLighting[i];
newArrayRT[i] = m_VBufferLightingRT[i];
}
// Reassign and release memory.
m_VBufferLighting = newArray;
m_VBufferLightingRT = newArrayRT;
}
}
Debug.Assert(m_VBufferLighting != null);
int w = 0, h = 0, d = 0;
ComputeVBufferResolutionAndScale(screenWidth, screenHeight, ref w, ref h, ref d);
// Allocate all k_VBufferCount buffers of this view with identical settings.
for (int i = viewOffset * k_VBufferCount,
n = viewOffset * k_VBufferCount + k_VBufferCount; i < n; i++)
{
m_VBufferLighting[i] = new RenderTexture(w, h, 0, RenderTextureFormat.ARGBHalf, RenderTextureReadWrite.Linear);
m_VBufferLighting[i].filterMode = FilterMode.Trilinear; // Custom
m_VBufferLighting[i].dimension = TextureDimension.Tex3D; // TODO: request the thick 3D tiling layout
m_VBufferLighting[i].volumeDepth = d;
m_VBufferLighting[i].enableRandomWrite = true;
m_VBufferLighting[i].Create();
m_VBufferLightingRT[i] = new RenderTargetIdentifier(m_VBufferLighting[i]);
}
}
// Releases the VBuffers of the given view and unregisters the view.
// Uses swap-with-last removal to keep the view arrays densely packed.
// A no-op if the view is not currently registered.
void DestroyVBuffer(int viewId)
{
int viewOffset = ViewOffsetFromViewId(viewId);
if (viewOffset < 0)
{
// Not found.
return;
}
int lastOffset = m_ViewCount - 1;
Debug.Assert(lastOffset >= 0);
if (m_VBufferLighting != null)
{
Debug.Assert(m_VBufferLighting.Length >= m_ViewCount * k_VBufferCount);
for (int i = 0; i < k_VBufferCount; i++)
{
int viewBuffer = viewOffset * k_VBufferCount + i;
int lastBuffer = lastOffset * k_VBufferCount + i;
// Release the memory.
// NOTE(review): Release() frees the GPU resource, but the RenderTexture
// object itself is not destroyed here.
if (m_VBufferLighting[viewBuffer] != null)
{
m_VBufferLighting[viewBuffer].Release();
}
// Swap with the last element.
// (If this view IS the last one, this re-assigns the released texture to
// its own slot; the slot becomes unreachable once m_ViewCount shrinks.)
m_VBufferLighting[viewBuffer] = m_VBufferLighting[lastBuffer];
m_VBufferLightingRT[viewBuffer] = m_VBufferLightingRT[lastBuffer];
}
}
// Swap with the last element and shrink the array.
m_ViewIdArray[viewOffset] = m_ViewIdArray[lastOffset];
m_ViewCount--;
}
// Uses a logarithmic depth encoding.
// Near plane: depth = 0; far plane: depth = 1.
// x = n, y = log2(f/n), z = 1/n, w = 1/log2(f/n).
public static Vector4 ComputeLogarithmicDepthEncodingParams(float nearPlane, float farPlane)
{
    // Precompute the ratio term shared by the y and w components.
    float n        = nearPlane;
    float logRatio = Mathf.Log(farPlane / nearPlane, 2);

    return new Vector4(n, logRatio, 1.0f / n, 1.0f / logRatio);
}
// Returns NULL if a global fog component does not exist, or is not enabled.
// The "global" fog is the first enabled HomogeneousFog whose volume is unbounded.
// NOTE: scans the whole scene via FindObjectsOfType; TODO: cache the result or
// have fog components register themselves instead of scanning every call.
public static HomogeneousFog GetGlobalFogComponent()
{
    // Use the generic overload: the non-generic version + 'as' cast could yield
    // a null array and NRE in the loop; the generic overload is typed and never null.
    foreach (HomogeneousFog fogComponent in Object.FindObjectsOfType<HomogeneousFog>())
    {
        if (fogComponent.enabled && fogComponent.volumeParameters.IsVolumeUnbounded())
        {
            // Return directly instead of accumulate-and-break.
            return fogComponent;
        }
    }

    return null;
}
// Returns the ping-pong VBuffer written on the PREVIOUS frame (read for reprojection).
RenderTargetIdentifier GetVBufferLightingHistory(int viewOffset) // From the previous frame
{
    // Frame parity selects between the two ping-pong slots (0 and 1).
    int bufferIndex = (Time.renderedFrameCount + 0) & 1; // Does not work in the Scene view
    return m_VBufferLightingRT[viewOffset * k_VBufferCount + bufferIndex];
}
// Returns the ping-pong VBuffer to be written this frame (history for the NEXT frame).
RenderTargetIdentifier GetVBufferLightingFeedback(int viewOffset) // For the next frame
{
    // Opposite parity from GetVBufferLightingHistory: the other ping-pong slot.
    int bufferIndex = (Time.renderedFrameCount + 1) & 1; // Does not work in the Scene view
    return m_VBufferLightingRT[viewOffset * k_VBufferCount + bufferIndex];
}
// Returns the VBuffer holding the CURRENT frame's integrated in-scattered lighting.
RenderTargetIdentifier GetVBufferLightingIntegral(int viewOffset) // Of the current frame
{
    const int integralSlot = 2; // Slots 0/1 ping-pong; slot 2 holds the integral.
    return m_VBufferLightingRT[viewOffset * k_VBufferCount + integralSlot];
}
// Uploads the global volumetric-lighting shader constants for the given camera:
// global fog scattering/extinction, VBuffer resolution and scale, the logarithmic
// depth encoding parameters, and the integrated in-scattered lighting texture.
public void SetVolumetricLightingData(HDCamera camera, CommandBuffer cmd)
{
HomogeneousFog globalFogComponent = GetGlobalFogComponent();
// TODO: may want to cache these results somewhere.
// Fall back to neutral (no-op) fog properties when no global fog volume exists.
VolumeProperties globalFogProperties = (globalFogComponent != null) ? globalFogComponent.volumeParameters.GetProperties()
: VolumeProperties.GetNeutralVolumeProperties();
cmd.SetGlobalVector(HDShaderIDs._GlobalFog_Scattering, globalFogProperties.scattering);
cmd.SetGlobalFloat( HDShaderIDs._GlobalFog_Extinction, globalFogProperties.extinction);
int w = 0, h = 0, d = 0;
Vector2 scale = ComputeVBufferResolutionAndScale(camera.screenSize.x, camera.screenSize.y, ref w, ref h, ref d);
// Look up this camera's slot in the per-view VBuffer arrays.
int viewId = camera.camera.GetInstanceID();
int viewOffset = ViewOffsetFromViewId(viewId);
Debug.Assert(viewOffset >= 0 && viewOffset < 8);
// {w, h, 1/w, 1/h} and {scaleX, scaleY, sliceCount, 1/sliceCount}.
cmd.SetGlobalVector( HDShaderIDs._VBufferResolution, new Vector4(w, h, 1.0f / w, 1.0f / h));
cmd.SetGlobalVector( HDShaderIDs._VBufferScaleAndSliceCount, new Vector4(scale.x, scale.y, d, 1.0f / d));
cmd.SetGlobalVector( HDShaderIDs._VBufferDepthEncodingParams, ComputeLogarithmicDepthEncodingParams(m_VBufferNearPlane, m_VBufferFarPlane));
cmd.SetGlobalTexture(HDShaderIDs._VBufferLighting, GetVBufferLightingIntegral(viewOffset));
}
// Ref: https://en.wikipedia.org/wiki/Close-packing_of_equal_spheres
// The returned {x, y} coordinates (and all spheres) are all within the (-0.5, 0.5)^2 range.
// The pattern has been rotated by 15 degrees to maximize the resolution along X and Y:
// https://www.desmos.com/calculator/kcpfvltz7c
Vector2[] GetHexagonalClosePackedSpheres7()
{
    // Radius chosen so 7 close-packed spheres fit within the (-0.5, 0.5)^2 square.
    float radius = 0.17054068870105443882f;
    float diam   = 2 * radius;              // horizontal neighbor spacing
    float vert   = radius * Mathf.Sqrt(3);  // vertical neighbor spacing

    // Try to keep the weighted average as close to the center (0.5) as possible.
    // (7)(5)    ( )( )    ( )( )    ( )( )    ( )( )    ( )(o)    ( )(x)    (o)(x)    (x)(x)
    // (2)(1)(3) ( )(o)( ) (o)(x)( ) (x)(x)(o) (x)(x)(x) (x)(x)(x) (x)(x)(x) (x)(x)(x) (x)(x)(x)
    // (4)(6)    ( )( )    ( )( )    ( )( )    (o)( )    (x)( )    (x)(o)    (x)(x)    (x)(x)
    // NOTE: the order is behavioral — it is consumed as a temporal sampling sequence.
    Vector2[] samples =
    {
        new Vector2(      0,     0),
        new Vector2(  -diam,     0),
        new Vector2(   diam,     0),
        new Vector2(-radius, -vert),
        new Vector2( radius,  vert),
        new Vector2( radius, -vert),
        new Vector2(-radius,  vert),
    };

    // Rotate the sampling pattern by 15 degrees.
    const float cos15 = 0.96592582628906828675f;
    const float sin15 = 0.25881904510252076235f;

    for (int i = 0; i < samples.Length; i++)
    {
        float x = samples[i].x;
        float y = samples[i].y;
        samples[i] = new Vector2(x * cos15 - y * sin15,
                                 x * sin15 + y * cos15);
    }

    return samples;
}
// Runs the volumetric lighting compute pass for the given camera.
// Writes in-scattered lighting into the per-view VBuffer 3D textures, with
// optional temporal reprojection (Play Mode, Game cameras only) and a 7-sample
// temporal jitter (hexagonal close packing in XY, equidistant offsets in Z).
void VolumetricLightingPass(HDCamera camera, CommandBuffer cmd)
{
if (m_VolumetricLightingPreset == VolumetricLightingPreset.Off) return;
using (new ProfilingSample(cmd, "Volumetric Lighting"))
{
int viewId = camera.camera.GetInstanceID(); // Warning: different views can use the same camera
int viewOffset = ViewOffsetFromViewId(viewId);
Debug.Assert(viewOffset >= 0 && viewOffset < 8);
if (GetGlobalFogComponent() == null)
{
// Clear the render target instead of running the shader.
// CoreUtils.SetRenderTarget(cmd, GetVBufferLightingIntegral(viewOffset), ClearFlag.Color, CoreUtils.clearColorAllBlack);
// return;
// Clearing 3D textures does not seem to work!
// Use the workaround by running the full shader with no volume.
}
// Kernel selection: clustered vs. all-lights, with or without reprojection.
bool enableClustered = m_FrameSettings.lightLoopSettings.enableTileAndCluster;
bool enableReprojection = Application.isPlaying && camera.camera.cameraType == CameraType.Game;
int kernel;
if (enableReprojection)
{
// Only available in the Play Mode because all the frame counters in the Edit Mode are broken.
kernel = m_VolumetricLightingCS.FindKernel(enableClustered ? "VolumetricLightingClusteredReproj"
: "VolumetricLightingAllLightsReproj");
}
else
{
kernel = m_VolumetricLightingCS.FindKernel(enableClustered ? "VolumetricLightingClustered"
: "VolumetricLightingAllLights");
}
int w = 0, h = 0, d = 0;
Vector2 scale = ComputeVBufferResolutionAndScale(camera.screenSize.x, camera.screenSize.y, ref w, ref h, ref d);
float vFoV = camera.camera.fieldOfView * Mathf.Deg2Rad;
// Compose the matrix which allows us to compute the world space view direction.
// Compute it using the scaled resolution to account for the visible area of the VBuffer.
Vector4 scaledRes = new Vector4(w * scale.x, h * scale.y, 1.0f / (w * scale.x), 1.0f / (h * scale.y));
Matrix4x4 transform = HDUtils.ComputePixelCoordToWorldSpaceViewDirectionMatrix(vFoV, scaledRes, camera.viewMatrix, false);
camera.SetupComputeShader(m_VolumetricLightingCS, cmd);
// Per-frame XY jitter offsets (7-point hexagonal close packing).
Vector2[] xySeq = GetHexagonalClosePackedSpheres7();
// This is a sequence of 7 equidistant numbers from 1/14 to 13/14.
// Each of them is the centroid of the interval of length 2/14.
// They've been rearranged in a sequence of pairs {small, large}, s.t. (small + large) = 1.
// That way, the running average position is close to 0.5.
// | 6 | 2 | 4 | 1 | 5 | 3 | 7 |
// |   |   |   | o |   |   |   |
// |   | o |   | x |   |   |   |
// |   | x |   | x |   | o |   |
// |   | x | o | x |   | x |   |
// |   | x | x | x | o | x |   |
// | o | x | x | x | x | x |   |
// | x | x | x | x | x | x | o |
// | x | x | x | x | x | x | x |
float[] zSeq = {7.0f/14.0f, 3.0f/14.0f, 11.0f/14.0f, 5.0f/14.0f, 9.0f/14.0f, 1.0f/14.0f, 13.0f/14.0f};
// Cycle through the 7 jitter samples using the rendered frame counter.
int rfc = Time.renderedFrameCount;
int sampleIndex = rfc % 7;
// {xOffset, yOffset, zOffset, frameCount}, consumed by the compute shader.
Vector4 offset = new Vector4(xySeq[sampleIndex].x, xySeq[sampleIndex].y, zSeq[sampleIndex], rfc);
// TODO: set 'm_VolumetricLightingPreset'.
cmd.SetComputeVectorParam( m_VolumetricLightingCS, HDShaderIDs._VBufferSampleOffset, offset);
cmd.SetComputeMatrixParam( m_VolumetricLightingCS, HDShaderIDs._VBufferCoordToViewDirWS, transform);
cmd.SetComputeTextureParam(m_VolumetricLightingCS, kernel, HDShaderIDs._VBufferLightingHistory, GetVBufferLightingHistory(viewOffset)); // Read
cmd.SetComputeTextureParam(m_VolumetricLightingCS, kernel, HDShaderIDs._VBufferLightingFeedback, GetVBufferLightingFeedback(viewOffset)); // Write
cmd.SetComputeTextureParam(m_VolumetricLightingCS, kernel, HDShaderIDs._VBufferLightingIntegral, GetVBufferLightingIntegral(viewOffset)); // Write
// The shader defines GROUP_SIZE_1D = 16.
// Dispatch ceil(w/16) x ceil(h/16) groups over the XY plane; a single group
// layer in Z — presumably the shader iterates the depth slices internally
// (TODO: confirm against the compute shader).
cmd.DispatchCompute(m_VolumetricLightingCS, kernel, (w + 15) / 16, (h + 15) / 16, 1);
}
}
} // class HDRenderPipeline
} // namespace UnityEngine.Experimental.Rendering.HDPipeline
正在加载...
取消
保存