
Merge remote-tracking branch 'mainrepo/master' into stacklit

Stephane Laroche, 7 years ago
Current commit: 8cf60948
24 changed files, with 548 additions and 333 deletions
  1. ScriptableRenderPipeline/Core/CoreRP/ShaderLibrary/GeometricTools.hlsl (7 changes)
  2. ScriptableRenderPipeline/Core/CoreRP/ShaderLibrary/ImageBasedLighting.hlsl (7 changes)
  3. ScriptableRenderPipeline/Core/CoreRP/ShaderLibrary/Sampling/Sampling.hlsl (22 changes)
  4. ScriptableRenderPipeline/Core/CoreRP/ShaderLibrary/SpaceFillingCurves.hlsl (6 changes)
  5. ScriptableRenderPipeline/Core/CoreRP/ShaderLibrary/VolumeRendering.hlsl (11 changes)
  6. ScriptableRenderPipeline/Core/CoreRP/Volume/VolumeComponent.cs (21 changes)
  7. ScriptableRenderPipeline/Core/CoreRP/Volume/VolumeManager.cs (16 changes)
  8. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/Sky/AtmosphericScattering/ExponentialFogEditor.cs (4 changes)
  9. ScriptableRenderPipeline/HDRenderPipeline/HDRP/HDStringConstants.cs (3 changes)
  10. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/HomogeneousDensityVolume.cs (37 changes)
  11. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/VBuffer.hlsl (187 changes)
  12. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/VolumeVoxelization.compute (6 changes)
  13. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/VolumetricLighting.compute (160 changes)
  14. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/VolumetricLighting.cs (167 changes)
  15. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/SubsurfaceScattering/SubsurfaceScattering.compute (2 changes)
  16. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Sky/AtmosphericScattering/AtmosphericScattering.cs (27 changes)
  17. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Sky/AtmosphericScattering/AtmosphericScattering.cs.hlsl (1 change)
  18. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Sky/AtmosphericScattering/AtmosphericScattering.hlsl (59 changes)
  19. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Sky/OpaqueAtmosphericScattering.shader (1 change)
  20. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Sky/VisualEnvironment.cs (14 changes)
  21. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/Sky/AtmosphericScattering/VolumetricFogEditor.cs (34 changes, new file)
  22. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/Sky/AtmosphericScattering/VolumetricFogEditor.cs.meta (11 changes, new file)
  23. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Sky/AtmosphericScattering/VolumetricFog.cs (67 changes, new file)
  24. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Sky/AtmosphericScattering/VolumetricFog.cs.meta (11 changes, new file)

ScriptableRenderPipeline/Core/CoreRP/ShaderLibrary/GeometricTools.hlsl (7 changes)


// Intersection functions
//-----------------------------------------------------------------------------
// This implementation does not attempt to explicitly handle NaNs.
// Ref: https://tavianator.com/fast-branchless-raybounding-box-intersections-part-2-nans/
-float3 rayDirInv = rcp(rayDirection); // Could be precomputed
+// Could be precomputed. Clamp to avoid INF. clamp() is a single ALU on GCN.
+// rcp(FLT_EPS) = 16,777,216, which is large enough for our purposes,
+// yet doesn't cause a lot of numerical issues associated with FLT_MAX.
+float3 rayDirInv = clamp(rcp(rayDirection), -rcp(FLT_EPS), rcp(FLT_EPS));
// Perform ray-slab intersection (component-wise).
float3 t0 = boxMin * rayDirInv - (rayOrigin * rayDirInv);
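For orientation, a minimal C# sketch of the slab test this hunk belongs to, using the same clamped reciprocal so that zero direction components cannot produce INF or NaN (the function name and signature are illustrative, not this file's actual interface):

using UnityEngine;

static class RayBoxIntersection
{
    const float FLT_EPS = 5.960464478e-8f; // 2^-24, mirrors the shader library's FLT_EPS

    // Branchless ray vs. axis-aligned box slab test (illustrative C# port).
    public static bool IntersectRayAABB(Vector3 rayOrigin, Vector3 rayDirection,
                                        Vector3 boxMin, Vector3 boxMax,
                                        out float tEntry, out float tExit)
    {
        // Clamp to avoid INF, as in the hunk above; rcp(FLT_EPS) = 16,777,216.
        float limit = 1.0f / FLT_EPS;
        Vector3 rayDirInv = new Vector3(
            Mathf.Clamp(1.0f / rayDirection.x, -limit, limit),
            Mathf.Clamp(1.0f / rayDirection.y, -limit, limit),
            Mathf.Clamp(1.0f / rayDirection.z, -limit, limit));

        // Perform ray-slab intersection (component-wise).
        Vector3 t0 = Vector3.Scale(boxMin - rayOrigin, rayDirInv);
        Vector3 t1 = Vector3.Scale(boxMax - rayOrigin, rayDirInv);

        // Entry is the largest per-axis minimum; exit is the smallest per-axis maximum.
        Vector3 tMin = Vector3.Min(t0, t1);
        Vector3 tMax = Vector3.Max(t0, t1);
        tEntry = Mathf.Max(tMin.x, Mathf.Max(tMin.y, tMin.z));
        tExit  = Mathf.Min(tMax.x, Mathf.Min(tMax.y, tMax.z));
        return tEntry <= tExit && tExit >= 0;
    }
}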

ScriptableRenderPipeline/Core/CoreRP/ShaderLibrary/ImageBasedLighting.hlsl (7 changes)


out real NdotL,
out real weightOverPdf)
{
+#if 0
+#else
+    real3 N = localToWorld[2];
+    L     = SampleHemisphereCosine(u.x, u.y, N);
+    NdotL = saturate(dot(N, L));
+#endif
// Importance sampling weight for each sample
// pdf = N.L / PI
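The "pdf = N.L / PI" comment is the whole story for Lambert: the cosine-weighted pdf cancels against the BRDF-times-cosine term, so the weight over pdf is constant. A hedged C# restatement (hypothetical helper, not repository code):

static float LambertWeightOverPdf(float NdotL)
{
    float pdf = NdotL / Mathf.PI;            // cosine-weighted hemisphere pdf
    float brdfTimesNdotL = NdotL / Mathf.PI; // (1 / PI) * N.L, with albedo factored out
    return brdfTimesNdotL / pdf;             // always 1
}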

ScriptableRenderPipeline/Core/CoreRP/ShaderLibrary/Sampling/Sampling.hlsl (22 changes)


return r * real2(cosPhi, sinPhi);
}
+real3 SampleSphereUniform(real u1, real u2)
+{
+    real phi      = TWO_PI * u2;
+    real cosTheta = 1.0 - 2.0 * u1;
+    return SphericalToCartesian(phi, cosTheta);
+}
+// Performs cosine-weighted sampling of the hemisphere.
+// Ref: PBRT v3, p. 780.
real3 SampleHemisphereCosine(real u1, real u2)

return localL;
}
-real3 SampleHemisphereUniform(real u1, real u2)
+// Cosine-weighted sampling without the tangent frame.
+// Ref: http://www.amietia.com/lambertnotangent.html
+real3 SampleHemisphereCosine(real u1, real u2, real3 normal)
    real phi = TWO_PI * u2;
-    real cosTheta = 1.0 - u1;
-    return SphericalToCartesian(phi, cosTheta);
+    real3 pointOnSphere = SampleSphereUniform(u1, u2);
+    return normalize(normal + pointOnSphere);
-real3 SampleSphereUniform(real u1, real u2)
+real3 SampleHemisphereUniform(real u1, real u2)
-    real cosTheta = 1.0 - 2.0 * u1;
+    real cosTheta = 1.0 - u1;
return SphericalToCartesian(phi, cosTheta);
}
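A compact C# restatement of the "lambertnotangent" trick the new overload uses, with SphericalToCartesian expanded inline (an illustrative port under the same z-up convention, not the shader library source):

using UnityEngine;

static class HemisphereSampling
{
    // Uniform sample on the unit sphere from two uniform random numbers in [0, 1).
    public static Vector3 SampleSphereUniform(float u1, float u2)
    {
        float phi      = 2.0f * Mathf.PI * u2;
        float cosTheta = 1.0f - 2.0f * u1;
        float sinTheta = Mathf.Sqrt(Mathf.Max(0.0f, 1.0f - cosTheta * cosTheta));
        return new Vector3(sinTheta * Mathf.Cos(phi), sinTheta * Mathf.Sin(phi), cosTheta);
    }

    // Cosine-weighted hemisphere sample without a tangent frame: a uniform point on
    // the unit sphere, offset by the normal and renormalized, is cosine-distributed
    // around that normal.
    public static Vector3 SampleHemisphereCosine(float u1, float u2, Vector3 normal)
    {
        return (normal + SampleSphereUniform(u1, u2)).normalized;
    }
}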

ScriptableRenderPipeline/Core/CoreRP/ShaderLibrary/SpaceFillingCurves.hlsl (6 changes)


return x;
}
-// "Insert" two 0 bits after each of the 10 low bits of x/
+// "Insert" two 0 bits after each of the 10 low bits of x.
// Ref: https://fgiesen.wordpress.com/2009/12/13/decoding-morton-codes/
uint Part1By2(uint x)
{

return x;
}
-// Inverse of Part1By1 - "delete" all odd-indexed bits/
+// Inverse of Part1By1 - "delete" all odd-indexed bits.
// Ref: https://fgiesen.wordpress.com/2009/12/13/decoding-morton-codes/
uint Compact1By1(uint x)
{

return x;
}
-// Inverse of Part1By2 - "delete" all bits not at positions divisible by 3/
+// Inverse of Part1By2 - "delete" all bits not at positions divisible by 3.
// Ref: https://fgiesen.wordpress.com/2009/12/13/decoding-morton-codes/
uint Compact1By2(uint x)
{
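For reference, the 1-bit sibling pair that Compact1By1's comment alludes to follows the standard pattern from the referenced fgiesen post; a C# transcription (textbook bit tricks, not copied from this file):

// "Insert" one 0 bit after each of the 16 low bits of x.
static uint Part1By1(uint x)
{
    x &= 0x0000ffff;
    x = (x ^ (x << 8)) & 0x00ff00ff;
    x = (x ^ (x << 4)) & 0x0f0f0f0f;
    x = (x ^ (x << 2)) & 0x33333333;
    x = (x ^ (x << 1)) & 0x55555555;
    return x;
}

// Inverse of Part1By1 - "delete" all odd-indexed bits.
static uint Compact1By1(uint x)
{
    x &= 0x55555555;
    x = (x ^ (x >> 1)) & 0x33333333;
    x = (x ^ (x >> 2)) & 0x0f0f0f0f;
    x = (x ^ (x >> 4)) & 0x00ff00ff;
    x = (x ^ (x >> 8)) & 0x0000ffff;
    return x;
}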

ScriptableRenderPipeline/Core/CoreRP/ShaderLibrary/VolumeRendering.hlsl (11 changes)


// Samples the interval of homogeneous participating medium using the closed-form tracking approach
// (proportionally to the transmittance).
// Returns the offset from the start of the interval and the weight = (transmittance / pdf).
// Ref: Production Volume Rendering, 3.6.1.
+// Ref: Monte Carlo Methods for Volumetric Light Transport Simulation, p. 5.
-// pdf    = extinction * exp(-extinction * t) / (1 - exp(-intervalLength * extinction))
+// pdf    = extinction * exp(extinction * (intervalLength - t)) / (exp(intervalLength * extinction) - 1)
-// weight = (1 - exp(-extinction * intervalLength)) / extinction;
+// weight = (1 - exp(-extinction * intervalLength)) / extinction
+    real c = rcp(extinction);
-    weight = x * rcp(extinction);
-    offset = -log(1 - rndVal * x) * rcp(extinction);
+    weight = x * c;
+    offset = -log(1 - rndVal * x) * c;
}
// Implements equiangular light sampling.
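Assembled from the formulas above, a C# sketch of the closed-form tracking sampler; the line computing x is context the hunk does not show and is assumed from the weight formula:

// Sample a distance within [0, intervalLength] of a homogeneous medium proportionally
// to transmittance exp(-extinction * t); returns offset and weight = transmittance / pdf.
static void ImportanceSampleHomogeneousMedium(float rndVal, float extinction, float intervalLength,
                                              out float offset, out float weight)
{
    float x = 1.0f - Mathf.Exp(-extinction * intervalLength); // total sampled mass (assumed context)
    float c = 1.0f / extinction;

    weight = x * c;                              // (1 - exp(-sigma * T)) / sigma
    offset = -Mathf.Log(1.0f - rndVal * x) * c;  // inverse-CDF sampling of the pdf above
}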

ScriptableRenderPipeline/Core/CoreRP/Volume/VolumeComponent.cs (21 changes)


parameter.OnDisable();
}
+    // You can override this to do your own blending. Either loop through the `parameters` list
+    // or reference direct fields (you'll need to cast `state` to your custom type and don't
+    // forget to use `SetValue` on parameters, do not assign directly to the state object - and
+    // of course you'll need to check for the `overrideState` manually).
+    public virtual void Override(VolumeComponent state, float interpFactor)
+    {
+        int count = parameters.Count;
+        for (int i = 0; i < count; i++)
+        {
+            var stateParam = state.parameters[i];
+            var toParam    = parameters[i];
+            // Keep track of the override state for debugging purpose
+            stateParam.overrideState = toParam.overrideState;
+            if (toParam.overrideState)
+                stateParam.Interp(stateParam, toParam, interpFactor);
+        }
+    }
public void SetAllOverridesTo(bool state)
{
SetAllOverridesTo(parameters, state);
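To make the comment above concrete, a hypothetical component that overrides the blend; MyFogComponent and its density parameter are invented for this sketch, and only the pattern mirrors the API:

using UnityEngine;
using UnityEngine.Experimental.Rendering;

public class MyFogComponent : VolumeComponent
{
    public FloatParameter density = new FloatParameter(1.0f);

    public override void Override(VolumeComponent state, float interpFactor)
    {
        var other = state as MyFogComponent;
        // Keep track of the override state for debugging purpose.
        other.density.overrideState = density.overrideState;
        if (density.overrideState)
        {
            // Blend in log space instead of the default linear Interp, and assign
            // through the parameter instead of replacing the state object.
            other.density.value = Mathf.Exp(Mathf.Lerp(Mathf.Log(other.density.value),
                                                       Mathf.Log(density.value), interpFactor));
        }
    }
}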

ScriptableRenderPipeline/Core/CoreRP/Volume/VolumeManager.cs (16 changes)


if (!component.active)
continue;
-var target = stack.GetComponent(component.GetType());
-int count = component.parameters.Count;
-for (int i = 0; i < count; i++)
-{
-    var fromParam = target.parameters[i];
-    var toParam = component.parameters[i];
-    // Keep track of the override state for debugging purpose
-    fromParam.overrideState = toParam.overrideState;
-    if (toParam.overrideState)
-        fromParam.Interp(fromParam, toParam, interpFactor);
-}
+var state = stack.GetComponent(component.GetType());
+component.Override(state, interpFactor);
}
}

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/Sky/AtmosphericScattering/ExponentialFogEditor.cs (4 changes)


-using System.Collections;
+using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEditor;

PropertyField(m_FogHeightAttenuation);
}
}
}
}

ScriptableRenderPipeline/HDRenderPipeline/HDRP/HDStringConstants.cs (3 changes)


public static readonly int _Source4 = Shader.PropertyToID("_Source4");
public static readonly int _Result1 = Shader.PropertyToID("_Result1");
+public static readonly int _AtmosphericScatteringType = Shader.PropertyToID("_AtmosphericScatteringType");
public static readonly int _AmbientProbeCoeffs = Shader.PropertyToID("_AmbientProbeCoeffs");
public static readonly int _GlobalExtinction = Shader.PropertyToID("_GlobalExtinction");
public static readonly int _GlobalScattering = Shader.PropertyToID("_GlobalScattering");

public static readonly int _VBufferLightingFeedback = Shader.PropertyToID("_VBufferLightingFeedback");
public static readonly int _VBufferSampleOffset = Shader.PropertyToID("_VBufferSampleOffset");
public static readonly int _VolumeBounds = Shader.PropertyToID("_VolumeBounds");
-public static readonly int _VolumeProperties = Shader.PropertyToID("_VolumeProperties");
+public static readonly int _VolumeData = Shader.PropertyToID("_VolumeData");
public static readonly int _NumVisibleDensityVolumes = Shader.PropertyToID("_NumVisibleDensityVolumes");
}
}

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/HomogeneousDensityVolume.cs (37 changes)


[AddComponentMenu("Rendering/Homogeneous Density Volume", 1100)]
public class HomogeneousDensityVolume : MonoBehaviour
{
-    public DensityVolumeParameters parameters = new DensityVolumeParameters();
+    public DensityVolumeParameters parameters;
+    public HomogeneousDensityVolume()
+    {
+        parameters.albedo       = new Color(0.5f, 0.5f, 0.5f);
+        parameters.meanFreePath = 10.0f;
+        parameters.asymmetry    = 0.0f;
+    }
private void Awake()
{

void OnDrawGizmos()
{
-    if (parameters.IsLocalVolume())
-    {
-        Gizmos.color  = parameters.albedo;
-        Gizmos.matrix = transform.localToWorldMatrix;
-        Gizmos.DrawWireCube(Vector3.zero, Vector3.one);
-    }
-}
-// Returns NULL if a global fog component does not exist, or is not enabled.
-public static HomogeneousDensityVolume GetGlobalHomogeneousDensityVolume()
-{
-    HomogeneousDensityVolume globalVolume = null;
-    HomogeneousDensityVolume[] volumes = FindObjectsOfType(typeof(HomogeneousDensityVolume)) as HomogeneousDensityVolume[];
-    foreach (HomogeneousDensityVolume volume in volumes)
-    {
-        if (volume.enabled && !volume.parameters.IsLocalVolume())
-        {
-            globalVolume = volume;
-            break;
-        }
-    }
-    return globalVolume;
+    Gizmos.color  = parameters.albedo;
+    Gizmos.matrix = transform.localToWorldMatrix;
+    Gizmos.DrawWireCube(Vector3.zero, Vector3.one);
}
}
} // UnityEngine.Experimental.Rendering.HDPipeline

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/VBuffer.hlsl (187 changes)


// Interpolation in the log space is non-linear.
// Therefore, given 'logEncodedDepth', we compute a new depth value
// which allows us to perform HW interpolation which is linear in the view space.
-float ComputeLerpPositionForLogEncoding(float linearDepth, float logEncodedDepth,
+float ComputeLerpPositionForLogEncoding(float linearDepth,
+                                        float logEncodedDepth,
float2 VBufferSliceCount,
float4 VBufferDepthDecodingParams)
{

float numSlices = VBufferSliceCount.x;
float rcpNumSlices = VBufferSliceCount.y;
-float s0 = floor(d * numSlices - 0.5);
-float s1 = ceil(d * numSlices - 0.5);
+float s  = d * numSlices - 0.5;
+float s0 = floor(s);
+float s1 = ceil(s);
float d0 = saturate(s0 * rcpNumSlices + (0.5 * rcpNumSlices));
float d1 = saturate(s1 * rcpNumSlices + (0.5 * rcpNumSlices));
float z0 = DecodeLogarithmicDepthGeneralized(d0, VBufferDepthDecodingParams);

return d0 + t * rcpNumSlices;
}
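The function above in C# form, to make the correction explicit: find the fraction t between the two nearest slice depths (decoded to view space) such that hardware filtering along w becomes linear in view-space Z; decodeSliceDepth stands in for DecodeLogarithmicDepthGeneralized and is a hypothetical delegate:

static float ComputeLerpPositionForLogEncoding(float linearDepth, float d, int numSlices,
                                               System.Func<float, float> decodeSliceDepth)
{
    float rcpNumSlices = 1.0f / numSlices;
    float s  = d * numSlices - 0.5f;  // fractional slice position
    float s0 = Mathf.Floor(s);
    float s1 = Mathf.Ceil(s);
    float d0 = Mathf.Clamp01(s0 * rcpNumSlices + 0.5f * rcpNumSlices);
    float d1 = Mathf.Clamp01(s1 * rcpNumSlices + 0.5f * rcpNumSlices);
    float z0 = decodeSliceDepth(d0);  // view-space depth of the lower slice
    float z1 = decodeSliceDepth(d1);  // view-space depth of the upper slice
    float t  = (linearDepth - z0) / (z1 - z0); // assumes s is not exactly an integer (z1 != z0)
    return d0 + t * rcpNumSlices;     // coordinate at which HW lerp is linear in Z
}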
-// Performs trilinear reconstruction of the V-Buffer.
-// If (clampToEdge == false), out-of-bounds loads return 0.
-float4 SampleVBuffer(TEXTURE3D_ARGS(VBufferLighting, trilinearSampler), bool clampToEdge,
-                     float2 positionNDC, float linearDepth,
+// if (correctLinearInterpolation), we use ComputeLerpPositionForLogEncoding() to correct weighting
+// of both slices at the cost of extra ALUs.
+//
+// if (quadraticFilterXY), we perform biquadratic (3x3) reconstruction for each slice to reduce
+// aliasing at the cost of extra ALUs and bandwidth.
+// Warning: you MUST pass a linear sampler in order for the quadratic filter to work.
+//
+// Note: for correct filtering, the data has to be stored in the perceptual space.
+// This means storing tone mapped radiance and transmittance instead of optical depth.
+// See "A Fresh Look at Generalized Sampling", p. 51.
+//
+// if (clampToBorder), samples outside of the buffer return 0 (we perform a smooth fade).
+// Otherwise, the sampler simply clamps the texture coordinate to the edge of the texture.
+// Warning: clamping to border may not work as expected with the quadratic filter due to its extent.
+float4 SampleVBuffer(TEXTURE3D_ARGS(VBuffer, clampSampler),
+                     float2 positionNDC,
+                     float linearDepth,
+                     float4 VBufferResolution,
-                     float4 VBufferDepthDecodingParams)
+                     float4 VBufferDepthDecodingParams,
+                     bool   correctLinearInterpolation,
+                     bool   quadraticFilterXY,
+                     bool   clampToBorder)
-    float numSlices    = VBufferSliceCount.x;
-    float rcpNumSlices = VBufferSliceCount.y;
+    float w;
-    // Unity doesn't support samplers clamping to border, so we have to do it ourselves.
-    // TODO: add the proper sampler support.
-    bool isInBounds = Min3(uv.x, uv.y, d) > 0 && Max3(uv.x, uv.y, d) < 1;
-    UNITY_BRANCH if (clampToEdge || isInBounds)
+    if (correctLinearInterpolation)
-#if 1
+    {
+        // Adjust the texture coordinate for HW linear filtering.
+        w = ComputeLerpPositionForLogEncoding(z, d, VBufferSliceCount, VBufferDepthDecodingParams);
+    }
+    else
+    {
-        float w = d;
-#else
-        // Adjust the texture coordinate for HW trilinear sampling.
-        float w = ComputeLerpPositionForLogEncoding(z, d, VBufferSliceCount, VBufferDepthDecodingParams);
-#endif
+        w = d;
+    }
-    return SAMPLE_TEXTURE3D_LOD(VBufferLighting, trilinearSampler, float3(uv, w), 0);
+    float fadeWeight = 1;
+    if (clampToBorder)
+    {
+        // Compute the distance to the edge, and remap it to the [0, 1] range.
+        // TODO: add support for the HW border clamp sampler.
+        float weightU = saturate((1 - 2 * abs(uv.x - 0.5)) * VBufferResolution.x);
+        float weightV = saturate((1 - 2 * abs(uv.y - 0.5)) * VBufferResolution.y);
+        float weightW = saturate((1 - 2 * abs(w - 0.5)) * VBufferSliceCount.x);
+        fadeWeight = weightU * weightV * weightW;
+    }
-    else
+    float4 result = 0;
+    if (fadeWeight > 0)
-    {
-        return 0;
-    }
+        if (quadraticFilterXY)
+        {
+            float2 xy = uv * VBufferResolution.xy;
+            float2 ic = floor(xy);
+            float2 fc = frac(xy);
-// Returns interpolated {volumetric radiance, transmittance}. The sampler clamps to edge.
-float4 SampleInScatteredRadianceAndTransmittance(TEXTURE3D_ARGS(VBufferLighting, trilinearSampler),
-                                                 float2 positionNDC, float linearDepth,
-                                                 float4 VBufferResolution,
-                                                 float2 VBufferSliceCount,
-                                                 float4 VBufferDepthEncodingParams,
-                                                 float4 VBufferDepthDecodingParams)
-{
-#ifdef RECONSTRUCTION_FILTER_TRILINEAR
-    float4 L = SampleVBuffer(TEXTURE3D_PARAM(VBufferLighting, trilinearSampler), true,
-                             positionNDC, linearDepth,
-                             VBufferSliceCount,
-                             VBufferDepthEncodingParams,
-                             VBufferDepthDecodingParams);
-#else // Perform biquadratic reconstruction in XY, linear in Z, using 4x trilinear taps.
-    float2 uv = positionNDC;
-    float2 xy = uv * VBufferResolution.xy;
-    float2 ic = floor(xy);
-    float2 fc = frac(xy);
-    float2 weights[2], offsets[2];
-    BiquadraticFilter(1 - fc, weights, offsets); // Inverse-translate the filter centered around 0.5
-    // The distance between slices is log-encoded.
-    float z = linearDepth;
-    float d = EncodeLogarithmicDepthGeneralized(z, VBufferDepthEncodingParams);
+            result = (weights[0].x * weights[0].y) * SAMPLE_TEXTURE3D_LOD(VBuffer, clampSampler, float3((ic + float2(offsets[0].x, offsets[0].y)) * VBufferResolution.zw, w), 0)  // Top left
+                   + (weights[1].x * weights[0].y) * SAMPLE_TEXTURE3D_LOD(VBuffer, clampSampler, float3((ic + float2(offsets[1].x, offsets[0].y)) * VBufferResolution.zw, w), 0)  // Top right
+                   + (weights[0].x * weights[1].y) * SAMPLE_TEXTURE3D_LOD(VBuffer, clampSampler, float3((ic + float2(offsets[0].x, offsets[1].y)) * VBufferResolution.zw, w), 0)  // Bottom left
+                   + (weights[1].x * weights[1].y) * SAMPLE_TEXTURE3D_LOD(VBuffer, clampSampler, float3((ic + float2(offsets[1].x, offsets[1].y)) * VBufferResolution.zw, w), 0); // Bottom right
+        }
+        else
+        {
+            result = SAMPLE_TEXTURE3D_LOD(VBuffer, clampSampler, float3(uv, w), 0);
+        }
-#if 0
-    // Ignore non-linearity (for performance reasons) at the cost of accuracy.
-    // The results are exact for a stationary camera, but can potentially cause some judder in motion.
-    float w = d;
-#else
-    // Adjust the texture coordinate for HW trilinear sampling.
-    float w = ComputeLerpPositionForLogEncoding(z, d, VBufferSliceCount, VBufferDepthDecodingParams);
-#endif
+        result *= fadeWeight;
+    }
+        float2 weights[2], offsets[2];
+        BiquadraticFilter(1 - fc, weights, offsets); // Inverse-translate the filter centered around 0.5
+    return result;
+}
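The BiquadraticFilter call folds a 3-tap quadratic B-spline into two linear taps per axis; here is the standard one-axis version of that trick in C# (my conventions, not necessarily those of Filtering.hlsl):

static void QuadraticFilter(float f, out float w0, out float w1, out float o0, out float o1)
{
    float l = 0.5f * (1.0f - f) * (1.0f - f); // weight of texel -1
    float c = 0.5f + f * (1.0f - f);          // weight of texel  0
    float r = 0.5f * f * f;                   // weight of texel +1 (l + c + r == 1)
    w0 = l + c;       // taps -1 and 0 merged into a single linear fetch
    w1 = r;           // tap +1 fetched directly
    o0 = -l / w0;     // offset of the merged tap, measured from texel 0
    o1 = 1.0f;        // offset of the right tap
}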
-    float2 rcpRes = VBufferResolution.zw;
+float4 SampleVBuffer(TEXTURE3D_ARGS(VBuffer, clampSampler),
+                     float3 positionWS,
+                     float4x4 viewProjMatrix,
+                     float4 VBufferResolution,
+                     float2 VBufferSliceCount,
+                     float4 VBufferDepthEncodingParams,
+                     float4 VBufferDepthDecodingParams,
+                     bool   correctLinearInterpolation,
+                     bool   quadraticFilterXY,
+                     bool   clampToBorder)
+{
+    float2 positionNDC = ComputeNormalizedDeviceCoordinates(positionWS, viewProjMatrix);
+    float  linearDepth = mul(viewProjMatrix, float4(positionWS, 1)).w;
-    // TODO: reconstruction should be performed in the perceptual space (e.i., after tone mapping).
-    // But our VBuffer is linear. How to achieve that?
-    // See "A Fresh Look at Generalized Sampling", p. 51.
-    float4 L = (weights[0].x * weights[0].y) * SAMPLE_TEXTURE3D_LOD(VBufferLighting, trilinearSampler, float3((ic + float2(offsets[0].x, offsets[0].y)) * rcpRes, w), 0)  // Top left
-             + (weights[1].x * weights[0].y) * SAMPLE_TEXTURE3D_LOD(VBufferLighting, trilinearSampler, float3((ic + float2(offsets[1].x, offsets[0].y)) * rcpRes, w), 0)  // Top right
-             + (weights[0].x * weights[1].y) * SAMPLE_TEXTURE3D_LOD(VBufferLighting, trilinearSampler, float3((ic + float2(offsets[0].x, offsets[1].y)) * rcpRes, w), 0)  // Bottom left
-             + (weights[1].x * weights[1].y) * SAMPLE_TEXTURE3D_LOD(VBufferLighting, trilinearSampler, float3((ic + float2(offsets[1].x, offsets[1].y)) * rcpRes, w), 0); // Bottom right
-#endif
+    return SampleVBuffer(TEXTURE3D_PARAM(VBuffer, clampSampler),
+                         positionNDC,
+                         linearDepth,
+                         VBufferResolution,
+                         VBufferSliceCount,
+                         VBufferDepthEncodingParams,
+                         VBufferDepthDecodingParams,
+                         correctLinearInterpolation,
+                         quadraticFilterXY,
+                         clampToBorder);
+}
-    // TODO: add some animated noise to the reconstructed radiance.
-    return float4(L.rgb, Transmittance(L.a));
+// Returns interpolated {volumetric radiance, transmittance}.
+float4 SampleVolumetricLighting(TEXTURE3D_ARGS(VBufferLighting, clampSampler),
+                                float2 positionNDC,
+                                float linearDepth,
+                                float4 VBufferResolution,
+                                float2 VBufferSliceCount,
+                                float4 VBufferDepthEncodingParams,
+                                float4 VBufferDepthDecodingParams,
+                                bool   correctLinearInterpolation,
+                                bool   quadraticFilterXY)
+{
+    // TODO: add some slowly animated noise to the reconstructed value.
+    return FastTonemapInvert(SampleVBuffer(TEXTURE3D_PARAM(VBufferLighting, clampSampler),
+                                           positionNDC,
+                                           linearDepth,
+                                           VBufferResolution,
+                                           VBufferSliceCount,
+                                           VBufferDepthEncodingParams,
+                                           VBufferDepthDecodingParams,
+                                           correctLinearInterpolation,
+                                           quadraticFilterXY,
+                                           false));
+}
#endif // UNITY_VBUFFER_INCLUDED
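The perceptual-space pair this file now relies on, restated in C#; the exact formulas (a max-based Reinhard curve and Beer-Lambert transmittance) are my assumption about the Core library's FastTonemap/Transmittance, so treat them as illustrative:

static Vector3 FastTonemap(Vector3 c)
{
    float m = Mathf.Max(c.x, Mathf.Max(c.y, c.z));
    return c / (1.0f + m); // maps [0, inf) into [0, 1)
}

static Vector3 FastTonemapInvert(Vector3 c)
{
    float m = Mathf.Max(c.x, Mathf.Max(c.y, c.z));
    return c / (1.0f - m); // valid for tonemapped input, m < 1
}

static float Transmittance(float opticalDepth)
{
    return Mathf.Exp(-opticalDepth); // Beer-Lambert law
}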

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/VolumeVoxelization.compute (6 changes)


//--------------------------------------------------------------------------------------------------
StructuredBuffer<OrientedBBox> _VolumeBounds;
-StructuredBuffer<DensityVolumeProperties> _VolumeProperties;
+StructuredBuffer<DensityVolumeProperties> _VolumeData;
RW_TEXTURE3D(float4, _VBufferDensity); // RGB = sqrt(scattering), A = sqrt(extinction)

if (overlapFraction > 0)
{
// There is an overlap. Sample the 3D texture, or load the constant value.
-    voxelScattering += overlapFraction * _VolumeProperties[volumeIndex].scattering;
-    voxelExtinction += overlapFraction * _VolumeProperties[volumeIndex].extinction;
+    voxelScattering += overlapFraction * _VolumeData[volumeIndex].scattering;
+    voxelExtinction += overlapFraction * _VolumeData[volumeIndex].extinction;
}
#ifndef USE_CLUSTERED_LIGHTLIST

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/VolumetricLighting.compute (160 changes)


#define VBUFFER_SLICE_COUNT 128
#endif
#define SUPPORT_ASYMMETRY 1 // Support asymmetric phase functions
#define SUPPORT_PUNCTUAL_LIGHTS 1 // Punctual lights contribute to fog lighting
#define GROUP_SIZE_1D 8

//--------------------------------------------------------------------------------------------------
#include "CoreRP/ShaderLibrary/Common.hlsl"
#include "CoreRP/ShaderLibrary/Color.hlsl"
#include "CoreRP/ShaderLibrary/Filtering.hlsl"
#include "CoreRP/ShaderLibrary/VolumeRendering.hlsl"

// Implementation
//--------------------------------------------------------------------------------------------------
// A ray with a single origin and two directions:
// one pointing at the center of the voxel, and one jittered in screen space.
-    float3 strataDirWS;       // Normalized, tile-stratified
-    float3 centerDirWS;       // Normalized, tile-centered
-    float  strataDirInvViewZ; // 1 / ViewSpace(strataDirWS).z
-    float  twoDirRatioViewZ;  // ViewSpace(strataDirWS).z / ViewSpace(centerDirWS).z
+    float3 jitterDirWS;       // Normalized, voxel-jittered in the screen space
+    float3 centerDirWS;       // Normalized, voxel-centered in the screen space
+    float  jitterDirInvViewZ; // 1 / ViewSpace(jitterDirWS).z
+    float  twoDirRatioViewZ;  // ViewSpace(jitterDirWS).z / ViewSpace(centerDirWS).z
-// Returns a point along the stratified direction.
+float ConvertLinearDepthToJitterRayDist(DualRay ray, float z)
+{
+    return z * ray.jitterDirInvViewZ;
+}
+float ConvertJitterDistToCenterRayDist(DualRay ray, float t)
+{
+    return t * ray.twoDirRatioViewZ;
+}
+// Returns a point along the jittered direction.
-    return ray.originWS + t * ray.strataDirWS;
+    return ray.originWS + t * ray.jitterDirWS;
-// Returns a point along the centered direction. It has a special property:
-// ViewSpace(GetPointAtDistance(ray, t)).z = ViewSpace(GetCenterAtDistance(ray, t)).z,
-// e.i. both points computed from the same value of 't' reside on the same Z-plane in the view space.
-float3 GetCenterAtDistance(DualRay ray, float t)
+// Returns a point along the centered direction.
+float3 GetCenterAtDistance(DualRay ray, float s)
-    t *= ray.twoDirRatioViewZ; // Perform the Z-coordinate conversion
-    return ray.originWS + t * ray.centerDirWS;
+    return ray.originWS + s * ray.centerDirWS;
}
struct VoxelLighting

color, attenuation);
// Important:
-// Ideally, all scattering calculations should use the stratified versions
+// Ideally, all scattering calculations should use the jittered versions
// of the sample position and the ray direction. However, correct reprojection
// of asymmetrically scattered lighting (affected by an anisotropic phase
// function) is not possible. We work around this issue by reprojecting

float3 coneAxisX = lenMul * light.right;
float3 coneAxisY = lenMul * light.up;
-sampleLight = IntersectRayCone(ray.originWS, ray.strataDirWS,
+sampleLight = IntersectRayCone(ray.originWS, ray.jitterDirWS,
light.positionWS, light.forward,
coneAxisX, coneAxisY,
t0, t1, tEntr, tExit);

float t, distSq, rcpPdf;
ImportanceSamplePunctualLight(rndVal, light.positionWS,
-                              ray.originWS, ray.strataDirWS,
+                              ray.originWS, ray.jitterDirWS,
tEntr, tExit, t, distSq, rcpPdf,
hackMinDistSq);

float3 L = -lightToSample * distRcp;
float3 color; float attenuation;
-EvaluateLight_Punctual(context, posInput, light, unused, 0, L, lightToSample,
-                       distances, color, attenuation);
+EvaluateLight_Punctual(context, posInput, light, unused,
+                       0, L, lightToSample, distances, color, attenuation);
-// Ideally, all scattering calculations should use the stratified versions
+// Ideally, all scattering calculations should use the jittered versions
// of the sample position and the ray direction. However, correct reprojection
// of asymmetrically scattered lighting (affected by an anisotropic phase
// function) is not possible. We work around this issue by reprojecting

float3x3 rotMat = float3x3(light.right, light.up, light.forward);
float3 o = mul(rotMat, ray.originWS - light.positionWS);
-float3 d = mul(rotMat, ray.strataDirWS);
+float3 d = mul(rotMat, ray.jitterDirWS);
float range = light.size.x;
float3 boxPt0 = float3(-1, -1, 0);

float4 distances = float4(1, 1, 1, distProj);
float3 color; float attenuation;
-EvaluateLight_Punctual(context, posInput, light, unused, 0, L, lightToSample,
-                       distances, color, attenuation);
+EvaluateLight_Punctual(context, posInput, light, unused,
+                       0, L, lightToSample, distances, color, attenuation);
-// Ideally, all scattering calculations should use the stratified versions
+// Ideally, all scattering calculations should use the jittered versions
// of the sample position and the ray direction. However, correct reprojection
// of asymmetrically scattered lighting (affected by an anisotropic phase
// function) is not possible. We work around this issue by reprojecting

void FillVolumetricLightingBuffer(LightLoopContext context, uint featureFlags,
PositionInputs posInput, DualRay ray)
{
-    float n  = _VBufferDepthDecodingParams.x + _VBufferDepthDecodingParams.z;
-    float z0 = n;                          // Start integration from the near plane
-    float t0 = ray.strataDirInvViewZ * z0; // Convert view space Z to distance along the stratified ray
-    float de = rcp(VBUFFER_SLICE_COUNT);   // Log-encoded distance between slices
+    const float n  = _VBufferDepthDecodingParams.x + _VBufferDepthDecodingParams.z;
+    const float z0 = n;                        // Start integration from the near plane
+    const float de = rcp(VBUFFER_SLICE_COUNT); // Log-encoded distance between slices
+    float t0 = ConvertLinearDepthToJitterRayDist(ray, z0);
// The contribution of the ambient probe does not depend on the position,
// only on the direction and the length of the interval.

uint3 voxelCoord = uint3(posInput.positionSS, slice);
float e1 = slice * de + de; // (slice + 1) / sliceCount
#if defined(SHADER_API_METAL)
// Warning: this compiles, but it's nonsense. Use DecodeLogarithmicDepthGeneralized().
float z1 = DecodeLogarithmicDepth(e1, _VBufferDepthDecodingParams);
#else
#endif
-float t1 = ray.strataDirInvViewZ * z1; // Convert view space Z to distance along the stratified ray
+float t1 = ConvertLinearDepthToJitterRayDist(ray, z1);
float dt = t1 - t0;
#ifdef USE_CLUSTERED_LIGHTLIST

// Compute the -exact- position of the center of the voxel.
// It's important since the accumulated value of the integral is stored at the center.
// We will use it for participating media sampling, asymmetric scattering and reprojection.
-float tc = t0 + 0.5 * dt;
-float3 centerWS = GetCenterAtDistance(ray, tc);
+float s = ConvertJitterDistToCenterRayDist(ray, t0 + 0.5 * dt);
+float3 centerWS = GetCenterAtDistance(ray, s);
-// Sample the participating medium at 'tc' (or 'centerWS').
+// Sample the participating medium at the center of the voxel.
// We consider it to be constant along the interval [t0, t1] (within the voxel).
// TODO: piecewise linear.
float3 scattering = LOAD_TEXTURE3D(_VBufferDensity, voxelCoord).rgb;

);
#endif
#if ENABLE_REPROJECTION
-float2 reprojPosNDC = ComputeNormalizedDeviceCoordinates(centerWS, _PrevViewProjMatrix);
-float  reprojZ      = mul(_PrevViewProjMatrix, float4(centerWS, 1)).w;
-float4 reprojValue  = SampleVBuffer(TEXTURE3D_PARAM(_VBufferLightingHistory, s_trilinear_clamp_sampler),
-                                    false, reprojPosNDC, reprojZ,
-                                    _VBufferSliceCount.xy,
-                                    _VBufferDepthEncodingParams,
-                                    _VBufferDepthDecodingParams);
+float4 reprojValue = SampleVBuffer(TEXTURE3D_PARAM(_VBufferLightingHistory, s_linear_clamp_sampler),
+                                   centerWS,
+                                   _PrevViewProjMatrix,
+                                   _VBufferResolution,
+                                   _VBufferSliceCount.xy,
+                                   _VBufferDepthEncodingParams,
+                                   _VBufferDepthDecodingParams,
+                                   false, false, true);
// Compute the exponential moving average over 'n' frames:
// X = (1 - a) * ValueAtFrame[n] + a * AverageOverPreviousFrames.

// TODO: doesn't seem to be worth it, removed for now.
// Perform temporal blending.
-// Both radiance values are obtained by integrating over line segments of different length.
-// Blending only makes sense if the length of both intervals is the same.
-// Therefore, the reprojected radiance needs to be rescaled by (frame_dt / reproj_dt).
+// Both radiance values are obtained by integrating over line segments of different length,
+// with potentially different participating media coverage.
+// Blending only makes sense if the voxels are virtually identical.
+// Therefore, we need to rescale the history to make it match the current configuration.
+// In order to do that, we integrate transmittance over the length of the ray interval
+// passing through the center of the voxel. The integral can be interpreted as the amount of
+// isotropically in-scattered radiance from a directional light with unit intensity.
+// We ignore jittering, as we want values from the same voxel to be reprojected without
+// any rescaling.
+float ds = ConvertJitterDistToCenterRayDist(ray, dt);
+float centerTransmInt = TransmittanceIntegralHomogeneousMedium(extinction, ds);
-float reprojRcpLen = reprojSuccess ? rcp(reprojValue.a) : 0;
-float lengthScale  = dt * reprojRcpLen;
+float reprojScale = reprojSuccess ? (centerTransmInt * rcp(reprojValue.a)) : 0;
-float3 blendedRadiance = (1 - blendFactor) * lighting.radianceNoPhase + blendFactor * lengthScale * reprojRadiance;
+float3 blendedRadiance = (1 - blendFactor) * lighting.radianceNoPhase + blendFactor * reprojScale * reprojRadiance;
// Store the feedback for the voxel.
// TODO: dynamic lights (which update their position, rotation, cookie or shadow at runtime)

-_VBufferLightingFeedback[voxelCoord] = float4(blendedRadiance, dt);
+_VBufferLightingFeedback[voxelCoord] = float4(blendedRadiance, centerTransmInt);
+#if SUPPORT_ASYMMETRY
#endif
+#if SUPPORT_ASYMMETRY
+#else
+    float3 blendedRadiance = lighting.radianceNoPhase;
+#endif
+#if SUPPORT_ASYMMETRY
+    float phase = _CornetteShanksConstant;
+#else
+    float phase = IsotropicPhaseFunction();
+#endif
-    float4 integral = float4(totalRadiance, opticalDepth);
-    float phase = _CornetteShanksConstant;
// Integrate the contribution of the probe over the interval.
// Integral{a, b}{Transmittance(0, t) * L_s(t) dt} = Transmittance(0, a) * Integral{a, b}{Transmittance(0, t - a) * L_s(t) dt}.

opticalDepth += 0.5 * extinction * dt;
// Store the voxel data.
-_VBufferLightingIntegral[voxelCoord] = float4(totalRadiance, opticalDepth);
+// Note: for correct filtering, the data has to be stored in the perceptual space.
+// This means storing the tone mapped radiance and transmittance instead of optical depth.
+// See "A Fresh Look at Generalized Sampling", p. 51.
+_VBufferLightingIntegral[voxelCoord] = float4(FastTonemap(totalRadiance), Transmittance(opticalDepth));
-z0 = z1;
t0 = t1;
#ifdef USE_CLUSTERED_LIGHTLIST

float2 centerCoord = voxelCoord + float2(0.5, 0.5);
#if ENABLE_REPROJECTION
-float2 strataCoord = centerCoord + _VBufferSampleOffset.xy;
+float2 jitterCoord = centerCoord + _VBufferSampleOffset.xy;
-float2 strataCoord = centerCoord;
+float2 jitterCoord = centerCoord;
-// Compute the (tile-centered) ray direction s.t. its ViewSpace(rayDirWS).z = 1.
+// Compute the (voxel-centered in the screen space) ray direction s.t. its ViewSpace(rayDirWS).z = 1.
-// Compute the (tile-stratified) ray direction s.t. its ViewSpace(rayDirWS).z = 1.
-float3 strataDirWS    = mul(-float3(strataCoord, 1), (float3x3)_VBufferCoordToViewDirWS);
-float strataDirLenSq  = dot(strataDirWS, strataDirWS);
-float strataDirLenRcp = rsqrt(strataDirLenSq);
-float strataDirLen    = strataDirLenSq * strataDirLenRcp;
+// Compute the (voxel-jittered in the screen space) ray direction s.t. its ViewSpace(rayDirWS).z = 1.
+float3 jitterDirWS    = mul(-float3(jitterCoord, 1), (float3x3)_VBufferCoordToViewDirWS);
+float jitterDirLenSq  = dot(jitterDirWS, jitterDirWS);
+float jitterDirLenRcp = rsqrt(jitterDirLenSq);
+float jitterDirLen    = jitterDirLenSq * jitterDirLenRcp;
-ray.strataDirWS = strataDirWS * strataDirLenRcp; // Normalize
+ray.jitterDirWS = jitterDirWS * jitterDirLenRcp; // Normalize
-ray.strataDirInvViewZ = strataDirLen;                   // View space Z
-ray.twoDirRatioViewZ  = centerDirLen * strataDirLenRcp; // View space Z ratio
+ray.jitterDirInvViewZ = jitterDirLen;                   // View space Z
+ray.twoDirRatioViewZ  = centerDirLen * jitterDirLenRcp; // View space Z ratio
// TODO
LightLoopContext context;
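Two pieces of math from this file, restated in C#: the geometric slice spacing behind the log-encoded coordinate e1 (an illustrative parameterization, not HDRP's exact encoding), and the transmittance integral used above to rescale the reprojected history:

static float SliceToLinearDepth(int slice, int sliceCount, float near, float far)
{
    float e1 = (slice + 1) / (float)sliceCount;  // matches e1 = slice * de + de
    return near * Mathf.Pow(far / near, e1);     // consecutive slices keep a constant depth ratio
}

static float TransmittanceIntegralHomogeneousMedium(float extinction, float intervalLength)
{
    // Integral over [0, T] of exp(-extinction * t) dt = (1 - exp(-extinction * T)) / extinction.
    return (1.0f - Mathf.Exp(-extinction * intervalLength)) / extinction;
}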

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/VolumetricLighting.cs (167 changes)


namespace UnityEngine.Experimental.Rendering.HDPipeline
{
[GenerateHLSL]
-public struct DensityVolumeProperties
+public struct DensityVolumeData
-    public static DensityVolumeProperties GetNeutralProperties()
+    public static DensityVolumeData GetNeutralValues()
-        DensityVolumeProperties properties = new DensityVolumeProperties();
+        DensityVolumeData data;
-        properties.scattering = Vector3.zero;
-        properties.extinction = 0;
+        data.scattering = Vector3.zero;
+        data.extinction = 0;
-        return properties;
+        return data;
-[Serializable]
-public class DensityVolumeParameters
+public class VolumeRenderingUtils
-    public bool  isLocal;      // Enables voxelization
-    public Color albedo;       // Single scattering albedo [0, 1]
-    public float meanFreePath; // In meters [1, inf]. Should be chromatic - this is an optimization!
-    public float asymmetry;    // Only used if (isLocal == false)
-    public DensityVolumeParameters()
+    public static float MeanFreePathFromExtinction(float extinction)
-        isLocal = true;
-        albedo = new Color(0.5f, 0.5f, 0.5f);
-        meanFreePath = 10.0f;
-        asymmetry = 0.0f;
+        return 1.0f / extinction;
-    public bool IsLocalVolume()
+    public static float ExtinctionFromMeanFreePath(float meanFreePath)
-        return isLocal;
+        return 1.0f / meanFreePath;
-    public Vector3 GetAbsorptionCoefficient()
+    public static Vector3 AbsorptionFromExtinctionAndScattering(float extinction, Vector3 scattering)
-        float extinction = GetExtinctionCoefficient();
-        Vector3 scattering = GetScatteringCoefficient();
-        return Vector3.Max(new Vector3(extinction, extinction, extinction) - scattering, Vector3.zero);
+        return new Vector3(extinction, extinction, extinction) - scattering;
-    public Vector3 GetScatteringCoefficient()
+    public static Vector3 ScatteringFromExtinctionAndAlbedo(float extinction, Vector3 albedo)
-        float extinction = GetExtinctionCoefficient();
-        return new Vector3(albedo.r * extinction, albedo.g * extinction, albedo.b * extinction);
+        return extinction * albedo;
-    public float GetExtinctionCoefficient()
+    public static Vector3 AlbedoFromMeanFreePathAndScattering(float meanFreePath, Vector3 scattering)
-        return 1.0f / meanFreePath;
+        return meanFreePath * scattering;
}
+[Serializable]
+public struct DensityVolumeParameters
+{
+    public Color albedo;       // Single scattering albedo [0, 1]. Alpha is ignored
+    public float meanFreePath; // In meters [1, inf]. Should be chromatic - this is an optimization!
+    public float asymmetry;    // Only used if (isLocal == false)
+    public void Constrain()
+    {

    albedo.a = 1.0f;
-    meanFreePath = Mathf.Max(meanFreePath, 1.0f);
+    meanFreePath = Mathf.Clamp(meanFreePath, 1.0f, float.MaxValue);
-public DensityVolumeProperties GetProperties()
+public DensityVolumeData GetData()
-    DensityVolumeProperties properties = new DensityVolumeProperties();
+    DensityVolumeData data = new DensityVolumeData();
-    properties.scattering = GetScatteringCoefficient();
-    properties.extinction = GetExtinctionCoefficient();
+    data.extinction = VolumeRenderingUtils.ExtinctionFromMeanFreePath(meanFreePath);
+    data.scattering = VolumeRenderingUtils.ScatteringFromExtinctionAndAlbedo(data.extinction, (Vector3)(Vector4)albedo);
-    return properties;
+    return data;
-public List<OrientedBBox> bounds;
-public List<DensityVolumeProperties> properties;
+public List<OrientedBBox> bounds;
+public List<DensityVolumeData> density;
}
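A short usage sketch of the new static conversion helpers, with illustrative values:

void ConversionRoundTrip()
{
    float   meanFreePath = 10.0f; // meters
    float   extinction   = VolumeRenderingUtils.ExtinctionFromMeanFreePath(meanFreePath); // 0.1 m^-1
    Vector3 albedo       = new Vector3(0.5f, 0.5f, 0.5f);
    Vector3 scattering   = VolumeRenderingUtils.ScatteringFromExtinctionAndAlbedo(extinction, albedo); // 0.05
    Vector3 absorption   = VolumeRenderingUtils.AbsorptionFromExtinctionAndScattering(extinction, scattering); // 0.05
}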
public class VolumetricLightingSystem

ComputeShader m_VolumeVoxelizationCS = null;
ComputeShader m_VolumetricLightingCS = null;
-List<VBuffer> m_VBuffers = null;
-List<OrientedBBox> m_VisibleVolumeBounds = null;
-List<DensityVolumeProperties> m_VisibleVolumeProperties = null;
-public const int k_MaxVisibleVolumeCount = 512;
+List<VBuffer> m_VBuffers = null;
+List<OrientedBBox> m_VisibleVolumeBounds = null;
+List<DensityVolumeData> m_VisibleVolumeData = null;
+public const int k_MaxVisibleVolumeCount = 512;
-static ComputeBuffer s_VisibleVolumePropertiesBuffer = null;
+static ComputeBuffer s_VisibleVolumeDataBuffer = null;
float m_VBufferNearPlane = 0.5f; // Distance in meters; dynamic modifications not handled by reprojection
float m_VBufferFarPlane = 64.0f; // Distance in meters; dynamic modifications not handled by reprojection

{
if (preset == VolumetricLightingPreset.Off) return;
-m_VolumeVoxelizationCS = asset.renderPipelineResources.volumeVoxelizationCS;
-m_VolumetricLightingCS = asset.renderPipelineResources.volumetricLightingCS;
-m_VBuffers = new List<VBuffer>();
-m_VisibleVolumeBounds = new List<OrientedBBox>();
-m_VisibleVolumeProperties = new List<DensityVolumeProperties>();
-s_VisibleVolumeBoundsBuffer = new ComputeBuffer(k_MaxVisibleVolumeCount, System.Runtime.InteropServices.Marshal.SizeOf(typeof(OrientedBBox)));
-s_VisibleVolumePropertiesBuffer = new ComputeBuffer(k_MaxVisibleVolumeCount, System.Runtime.InteropServices.Marshal.SizeOf(typeof(DensityVolumeProperties)));
+m_VolumeVoxelizationCS = asset.renderPipelineResources.volumeVoxelizationCS;
+m_VolumetricLightingCS = asset.renderPipelineResources.volumetricLightingCS;
+m_VBuffers = new List<VBuffer>();
+m_VisibleVolumeBounds = new List<OrientedBBox>();
+m_VisibleVolumeData = new List<DensityVolumeData>();
+s_VisibleVolumeBoundsBuffer = new ComputeBuffer(k_MaxVisibleVolumeCount, System.Runtime.InteropServices.Marshal.SizeOf(typeof(OrientedBBox)));
+s_VisibleVolumeDataBuffer = new ComputeBuffer(k_MaxVisibleVolumeCount, System.Runtime.InteropServices.Marshal.SizeOf(typeof(DensityVolumeData)));
}
public void Cleanup()

m_VBuffers[i].Destroy();
}
-m_VBuffers = null;
-m_VisibleVolumeBounds = null;
-m_VisibleVolumeProperties = null;
+m_VBuffers = null;
+m_VisibleVolumeBounds = null;
+m_VisibleVolumeData = null;
-CoreUtils.SafeRelease(s_VisibleVolumePropertiesBuffer);
+CoreUtils.SafeRelease(s_VisibleVolumeDataBuffer);
}
public void ResizeVBuffer(HDCamera camera, int screenWidth, int screenHeight)

{
if (preset == VolumetricLightingPreset.Off) return;
-HomogeneousDensityVolume globalVolume = HomogeneousDensityVolume.GetGlobalHomogeneousDensityVolume();
-// TODO: may want to cache these results somewhere.
-DensityVolumeProperties globalVolumeProperties = (globalVolume != null) ? globalVolume.parameters.GetProperties()
-                                                                        : DensityVolumeProperties.GetNeutralProperties();
-float asymmetry = globalVolume != null ? globalVolume.parameters.asymmetry : 0;
-cmd.SetGlobalVector(HDShaderIDs._GlobalScattering, globalVolumeProperties.scattering);
-cmd.SetGlobalFloat( HDShaderIDs._GlobalExtinction, globalVolumeProperties.extinction);
-cmd.SetGlobalFloat( HDShaderIDs._GlobalAsymmetry,  asymmetry);
+// Modify the near plane.
+// Warning: it can screw up the reprojection. However, we have to do it in order for clustered lighting to work correctly.
+m_VBufferNearPlane = camera.camera.nearClipPlane;
VBuffer vBuffer = FindVBuffer(camera.GetViewID());
Debug.Assert(vBuffer != null);

-SetPreconvolvedAmbientLightProbe(cmd, asymmetry);
+// Get the interpolated asymmetry value.
+var fog = VolumeManager.instance.stack.GetComponent<VolumetricFog>();
+SetPreconvolvedAmbientLightProbe(cmd, fog.asymmetry);
cmd.SetGlobalVector( HDShaderIDs._VBufferResolution, new Vector4(w, h, 1.0f / w, 1.0f / h));
cmd.SetGlobalVector( HDShaderIDs._VBufferSliceCount, new Vector4(d, 1.0f / d));
cmd.SetGlobalVector( HDShaderIDs._VBufferDepthEncodingParams, ComputeLogarithmicDepthEncodingParams(m_VBufferNearPlane, m_VBufferFarPlane, k_LogScale));

if (preset == VolumetricLightingPreset.Off) return densityVolumes;
+var visualEnvironment = VolumeManager.instance.stack.GetComponent<VisualEnvironment>();
+if (visualEnvironment.fogType != FogType.Volumetric) return densityVolumes;
using (new ProfilingSample(cmd, "Prepare Visible Density Volume List"))
{
Vector3 camPosition = camera.camera.transform.position;

}
m_VisibleVolumeBounds.Clear();
-m_VisibleVolumeProperties.Clear();
+m_VisibleVolumeData.Clear();
// Collect all visible finite volume data, and upload it to the GPU.
HomogeneousDensityVolume[] volumes = Object.FindObjectsOfType(typeof(HomogeneousDensityVolume)) as HomogeneousDensityVolume[];

HomogeneousDensityVolume volume = volumes[i];
// Only test active finite volumes.
-if (volume.enabled && volume.parameters.IsLocalVolume())
+if (volume.enabled)
{
// TODO: cache these?
var obb = OrientedBBox.Create(volume.transform);

if (GeometryUtils.Overlap(obb, camera.frustum, 6, 8))
{
// TODO: cache these?
-    var properties = volume.parameters.GetProperties();
+    var data = volume.parameters.GetData();
-    m_VisibleVolumeProperties.Add(properties);
+    m_VisibleVolumeData.Add(data);
-    s_VisibleVolumePropertiesBuffer.SetData(m_VisibleVolumeProperties);
+    s_VisibleVolumeDataBuffer.SetData(m_VisibleVolumeData);
-    densityVolumes.properties = m_VisibleVolumeProperties;
+    densityVolumes.density = m_VisibleVolumeData;
return densityVolumes;
}

float vFoV = camera.camera.fieldOfView * Mathf.Deg2Rad;
Vector4 resolution = new Vector4(w, h, 1.0f / w, 1.0f / h);
Matrix4x4 transform = HDUtils.ComputePixelCoordToWorldSpaceViewDirectionMatrix(vFoV, resolution, camera.viewMatrix, false);
-cmd.SetComputeTextureParam(m_VolumeVoxelizationCS, kernel, HDShaderIDs._VBufferDensity, vBuffer.GetDensityBuffer());
-cmd.SetComputeBufferParam( m_VolumeVoxelizationCS, kernel, HDShaderIDs._VolumeBounds, s_VisibleVolumeBoundsBuffer);
-cmd.SetComputeBufferParam( m_VolumeVoxelizationCS, kernel, HDShaderIDs._VolumeProperties, s_VisibleVolumePropertiesBuffer);
+cmd.SetComputeTextureParam(m_VolumeVoxelizationCS, kernel, HDShaderIDs._VBufferDensity, vBuffer.GetDensityBuffer());
+cmd.SetComputeBufferParam( m_VolumeVoxelizationCS, kernel, HDShaderIDs._VolumeBounds, s_VisibleVolumeBoundsBuffer);
+cmd.SetComputeBufferParam( m_VolumeVoxelizationCS, kernel, HDShaderIDs._VolumeData, s_VisibleVolumeDataBuffer);
// TODO: set the constant buffer data only once.
cmd.SetComputeMatrixParam( m_VolumeVoxelizationCS, HDShaderIDs._VBufferCoordToViewDirWS, transform);

using (new ProfilingSample(cmd, "Volumetric Lighting"))
{
-VBuffer vBuffer = FindVBuffer(camera.GetViewID());
-Debug.Assert(vBuffer != null);
+var visualEnvironment = VolumeManager.instance.stack.GetComponent<VisualEnvironment>();
-HomogeneousDensityVolume globalVolume = HomogeneousDensityVolume.GetGlobalHomogeneousDensityVolume();
-float asymmetry = globalVolume != null ? globalVolume.parameters.asymmetry : 0;
-if (globalVolume == null)
+if (visualEnvironment.fogType != FogType.Volumetric)
// CoreUtils.SetRenderTarget(cmd, vBuffer.GetLightingIntegralBuffer(), ClearFlag.Color, CoreUtils.clearColorAllBlack);
// CoreUtils.SetRenderTarget(cmd, vBuffer.GetLightingFeedbackBuffer(), ClearFlag.Color, CoreUtils.clearColorAllBlack);
// CoreUtils.SetRenderTarget(cmd, vBuffer.GetDensityBuffer(), ClearFlag.Color, CoreUtils.clearColorAllBlack);
+VBuffer vBuffer = FindVBuffer(camera.GetViewID());
+Debug.Assert(vBuffer != null);
// Only available in the Play Mode because all the frame counters in the Edit Mode are broken.
bool enableClustered = settings.lightLoopSettings.enableTileAndCluster;

int sampleIndex = rfc % 7;
Vector4 offset = new Vector4(xySeq[sampleIndex].x, xySeq[sampleIndex].y, zSeq[sampleIndex], rfc);
+// Get the interpolated asymmetry value.
+var fog = VolumeManager.instance.stack.GetComponent<VolumetricFog>();
-cmd.SetComputeFloatParam( m_VolumetricLightingCS, HDShaderIDs._CornetteShanksConstant, CornetteShanksPhasePartConstant(asymmetry));
+cmd.SetComputeFloatParam( m_VolumetricLightingCS, HDShaderIDs._CornetteShanksConstant, CornetteShanksPhasePartConstant(fog.asymmetry));
cmd.SetComputeTextureParam(m_VolumetricLightingCS, kernel, HDShaderIDs._VBufferDensity, vBuffer.GetDensityBuffer()); // Read
cmd.SetComputeTextureParam(m_VolumetricLightingCS, kernel, HDShaderIDs._VBufferLightingIntegral, vBuffer.GetLightingIntegralBuffer()); // Write
if (enableReprojection)
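CornetteShanksPhasePartConstant bundles the angle-independent factor of the Cornette-Shanks phase function; a C# restatement of that factor (the constant/per-sample split is my reading of the name):

static float CornetteShanksPhasePartConstant(float asymmetry)
{
    float g = asymmetry;
    // The remaining angular part, (1 + cos^2) / (1 + g^2 - 2g*cos)^(3/2), is applied per sample.
    return (3.0f / (8.0f * Mathf.PI)) * (1.0f - g * g) / (2.0f + g * g);
}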

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/SubsurfaceScattering/SubsurfaceScattering.compute (2 changes)


void SubsurfaceScattering(uint2 groupId : SV_GroupID,
uint groupThreadId : SV_GroupThreadID)
{
+    groupThreadId &= GROUP_SIZE_2D - 1; // Help the compiler
+    // Note: any factor of 64 is a suitable wave size for our algorithm.
uint waveIndex = WaveReadFirstLane(groupThreadId / 64);
uint laneIndex = groupThreadId % 64;

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Sky/AtmosphericScattering/AtmosphericScattering.cs (27 changes)


-using System;
+using System;
+using System.Diagnostics;
using UnityEngine.Rendering;

public abstract class AtmosphericScattering : VolumeComponent
{
-    static readonly int m_TypeParam = Shader.PropertyToID("_AtmosphericScatteringType");
// Fog Color
static readonly int m_ColorModeParam = Shader.PropertyToID("_FogColorMode");
static readonly int m_FogColorDensityParam = Shader.PropertyToID("_FogColorDensity");

public static void PushNeutralShaderParameters(CommandBuffer cmd)
{
-    cmd.SetGlobalFloat(m_TypeParam, (float)FogType.None);
+    cmd.SetGlobalInt(HDShaderIDs._AtmosphericScatteringType, (int)FogType.None);
+    // In case volumetric lighting is enabled, we need to make sure that all rendering passes
+    // (not just the atmospheric scattering one) receive neutral parameters.
+    if (ShaderConfig.s_VolumetricLightingPreset != 0)
+    {
+        var data = DensityVolumeData.GetNeutralValues();
+        cmd.SetGlobalVector(HDShaderIDs._GlobalScattering, data.scattering);
+        cmd.SetGlobalFloat( HDShaderIDs._GlobalExtinction, data.extinction);
+        cmd.SetGlobalFloat( HDShaderIDs._GlobalAsymmetry, 0.0f);
+    }
-    // Not used by the volumetric fog.
-    if (frameSettings.enableAtmosphericScattering)
-        cmd.SetGlobalFloat(m_TypeParam, (float)type);
-    else
-        cmd.SetGlobalFloat(m_TypeParam, (float)FogType.None);
+    Debug.Assert(frameSettings.enableAtmosphericScattering);
+    cmd.SetGlobalInt(HDShaderIDs._AtmosphericScatteringType, (int)type);
// Fog Color
cmd.SetGlobalFloat(m_ColorModeParam, (float)colorMode.value);

{
None,
Linear,
-    Exponential
+    Exponential,
+    Volumetric
}
[GenerateHLSL]

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Sky/AtmosphericScattering/AtmosphericScattering.cs.hlsl (1 change)


#define FOGTYPE_NONE (0)
#define FOGTYPE_LINEAR (1)
#define FOGTYPE_EXPONENTIAL (2)
+#define FOGTYPE_VOLUMETRIC (3)
//
// UnityEngine.Experimental.Rendering.HDPipeline.FogColorMode: static fields

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Sky/AtmosphericScattering/AtmosphericScattering.hlsl (59 changes)


#endif
CBUFFER_START(AtmosphericScattering)
-float _AtmosphericScatteringType;
+int   _AtmosphericScatteringType;
// Common
float _FogColorMode;
float4 _FogColorDensity; // color in rgb, density in alpha

float3 fogColor = 0;
float fogFactor = 0;
-#if (SHADEROPTIONS_VOLUMETRIC_LIGHTING_PRESET != 0)
-float4 volFog = SampleInScatteredRadianceAndTransmittance(TEXTURE3D_PARAM(_VBufferLighting, s_trilinear_clamp_sampler),
-                                                          posInput.positionNDC, posInput.linearDepth,
-                                                          _VBufferResolution,
-                                                          _VBufferSliceCount.xy,
-                                                          _VBufferDepthEncodingParams,
-                                                          _VBufferDepthDecodingParams);
-fogColor = volFog.rgb;
-fogFactor = 1 - volFog.a;
-#else
+switch (_AtmosphericScatteringType)
+{
+    case FOGTYPE_LINEAR:
+    {
+        fogColor = GetFogColor(posInput);
+        fogFactor = _FogDensity * saturate((posInput.linearDepth - _LinearFogStart) * _LinearFogOneOverRange) * saturate((_LinearFogHeightEnd - GetAbsolutePositionWS(posInput.positionWS).y) * _LinearFogHeightOneOverRange);
+        break;
+    }
+    case FOGTYPE_EXPONENTIAL:
+    {
+        fogColor = GetFogColor(posInput);
+        float distance = length(GetWorldSpaceViewDir(posInput.positionWS));
+        float fogHeight = max(0.0, GetAbsolutePositionWS(posInput.positionWS).y - _ExpFogBaseHeight);
+        fogFactor = _FogDensity * TransmittanceHomogeneousMedium(_ExpFogHeightAttenuation, fogHeight) * (1.0f - TransmittanceHomogeneousMedium(1.0f / _ExpFogDistance, distance));
+        break;
+    }
+    case FOGTYPE_VOLUMETRIC:
+    {
+#if (SHADEROPTIONS_VOLUMETRIC_LIGHTING_PRESET != 0)
+        float4 volFog = SampleVolumetricLighting(TEXTURE3D_PARAM(_VBufferLighting, s_linear_clamp_sampler),
+                                                 posInput.positionNDC,
+                                                 posInput.linearDepth,
+                                                 _VBufferResolution,
+                                                 _VBufferSliceCount.xy,
+                                                 _VBufferDepthEncodingParams,
+                                                 _VBufferDepthDecodingParams,
+                                                 true, true);
-if (_AtmosphericScatteringType == FOGTYPE_EXPONENTIAL)
-{
-    fogColor = GetFogColor(posInput);
-    float distance = length(GetWorldSpaceViewDir(posInput.positionWS));
-    float fogHeight = max(0.0, GetAbsolutePositionWS(posInput.positionWS).y - _ExpFogBaseHeight);
-    fogFactor = _FogDensity * TransmittanceHomogeneousMedium(_ExpFogHeightAttenuation, fogHeight) * (1.0f - TransmittanceHomogeneousMedium(1.0f / _ExpFogDistance, distance));
-}
-else if (_AtmosphericScatteringType == FOGTYPE_LINEAR)
-{
-    fogColor = GetFogColor(posInput);
-    fogFactor = _FogDensity * saturate((posInput.linearDepth - _LinearFogStart) * _LinearFogOneOverRange) * saturate((_LinearFogHeightEnd - GetAbsolutePositionWS(posInput.positionWS).y) * _LinearFogHeightOneOverRange);
-}
-else // NONE
-{
+        fogFactor = 1 - volFog.a;                              // Opacity from transmittance
+        fogColor  = volFog.rgb * min(rcp(fogFactor), FLT_MAX); // Un-premultiply, clamp to avoid (0 * INF = NaN)
+#endif
+        break;
+    }
-}
-#endif
return float4(fogColor, fogFactor);
}
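The un-premultiply in the volumetric case is worth spelling out: the V-Buffer stores radiance already weighted by opacity, so dividing by fogFactor recovers the blend color, and the min() keeps 0 * INF from turning into NaN when fogFactor is 0. A C# restatement (illustrative):

static Vector3 UnpremultiplyFogColor(Vector3 premultipliedRadiance, float transmittance)
{
    float fogFactor = 1.0f - transmittance;                    // opacity
    float scale = Mathf.Min(1.0f / fogFactor, float.MaxValue); // clamped rcp, avoids 0 * INF = NaN
    return premultipliedRadiance * scale;
}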

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Sky/OpaqueAtmosphericScattering.shader (1 change)


// #pragma enable_d3d11_debug_symbols
#include "CoreRP/ShaderLibrary/Common.hlsl"
+#include "CoreRP/ShaderLibrary/Color.hlsl"
#include "../ShaderVariables.hlsl"
#include "AtmosphericScattering/AtmosphericScattering.hlsl"

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Sky/VisualEnvironment.cs (14 changes)


-using System;
+using System;
+using System.Collections;
+using System.Collections.Generic;
using UnityEngine;

public void PushFogShaderParameters(CommandBuffer cmd, FrameSettings frameSettings)
{
if (!frameSettings.enableAtmosphericScattering)
{
AtmosphericScattering.PushNeutralShaderParameters(cmd);
return;
}
switch (fogType.value)
{
case FogType.None:

case FogType.Exponential:
{
var fogSettings = VolumeManager.instance.stack.GetComponent<ExponentialFog>();
fogSettings.PushShaderParameters(cmd, frameSettings);
break;
}
+case FogType.Volumetric:
+{
+    var fogSettings = VolumeManager.instance.stack.GetComponent<VolumetricFog>();
+    fogSettings.PushShaderParameters(cmd, frameSettings);
+    break;
+}

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/Sky/AtmosphericScattering/VolumetricFogEditor.cs (34 changes, new file)


+using System.Collections;
+using System.Collections.Generic;
+using UnityEngine;
+using UnityEditor;
+using UnityEngine.Experimental.Rendering.HDPipeline;
+using UnityEditor.Experimental.Rendering;
+namespace UnityEditor.Experimental.Rendering.HDPipeline
+{
+    [VolumeComponentEditor(typeof(VolumetricFog))]
+    public class VolumetricFogEditor : AtmosphericScatteringEditor
+    {
+        private SerializedDataParameter m_Albedo;
+        private SerializedDataParameter m_MeanFreePath;
+        private SerializedDataParameter m_Asymmetry;
+        public override void OnEnable()
+        {
+            base.OnEnable();
+            var o = new PropertyFetcher<VolumetricFog>(serializedObject);
+            m_Albedo       = Unpack(o.Find(x => x.albedo));
+            m_MeanFreePath = Unpack(o.Find(x => x.meanFreePath));
+            m_Asymmetry    = Unpack(o.Find(x => x.asymmetry));
+        }
+        public override void OnInspectorGUI()
+        {
+            PropertyField(m_Albedo);
+            PropertyField(m_MeanFreePath);
+            PropertyField(m_Asymmetry);
+        }
+    }
+}

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/Sky/AtmosphericScattering/VolumetricFogEditor.cs.meta (11 changes, new file)


+fileFormatVersion: 2
+guid: a530ef8d0c07494409d34294d8e04a89
+MonoImporter:
+  externalObjects: {}
+  serializedVersion: 2
+  defaultReferences: []
+  executionOrder: 0
+  icon: {instanceID: 0}
+  userData:
+  assetBundleName:
+  assetBundleVariant:

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Sky/AtmosphericScattering/VolumetricFog.cs (67 changes, new file)


+using System;
+using System.Collections;
+using System.Collections.Generic;
+using UnityEngine;
+using UnityEngine.Rendering;
+namespace UnityEngine.Experimental.Rendering.HDPipeline
+{
+    public class VolumetricFog : AtmosphericScattering
+    {
+        public ColorParameter        albedo       = new ColorParameter(new Color(0.5f, 0.5f, 0.5f));
+        public MinFloatParameter     meanFreePath = new MinFloatParameter(float.MaxValue, 1.0f);
+        public ClampedFloatParameter asymmetry    = new ClampedFloatParameter(0.0f, -1.0f, 1.0f);
+        // Override the volume blending function.
+        public override void Override(VolumeComponent state, float interpFactor)
+        {
+            VolumetricFog other = state as VolumetricFog;
+            float   thisExtinction  = VolumeRenderingUtils.ExtinctionFromMeanFreePath(meanFreePath);
+            Vector3 thisScattering  = VolumeRenderingUtils.ScatteringFromExtinctionAndAlbedo(thisExtinction, (Vector3)(Vector4)albedo.value);
+            float   otherExtinction = VolumeRenderingUtils.ExtinctionFromMeanFreePath(other.meanFreePath);
+            Vector3 otherScattering = VolumeRenderingUtils.ScatteringFromExtinctionAndAlbedo(otherExtinction, (Vector3)(Vector4)other.albedo.value);
+            float   blendExtinction = Mathf.Lerp(otherExtinction, thisExtinction, interpFactor);
+            Vector3 blendScattering = Vector3.Lerp(otherScattering, thisScattering, interpFactor);
+            float   blendAsymmetry  = Mathf.Lerp(other.asymmetry, asymmetry, interpFactor);
+            float blendMeanFreePath = VolumeRenderingUtils.MeanFreePathFromExtinction(blendExtinction);
+            Color blendAlbedo       = (Color)(Vector4)VolumeRenderingUtils.AlbedoFromMeanFreePathAndScattering(blendMeanFreePath, blendScattering);
+            blendAlbedo.a = 1.0f;
+            if (meanFreePath.overrideState)
+            {
+                other.meanFreePath.value = blendMeanFreePath;
+            }
+            if (albedo.overrideState)
+            {
+                other.albedo.value = blendAlbedo;
+            }
+            if (asymmetry.overrideState)
+            {
+                other.asymmetry.value = blendAsymmetry;
+            }
+        }
+        public override void PushShaderParameters(CommandBuffer cmd, FrameSettings frameSettings)
+        {
+            DensityVolumeParameters param;
+            param.albedo       = albedo;
+            param.meanFreePath = meanFreePath;
+            param.asymmetry    = asymmetry;
+            DensityVolumeData data = param.GetData();
+            cmd.SetGlobalInt(HDShaderIDs._AtmosphericScatteringType, (int)FogType.Volumetric);
+            cmd.SetGlobalVector(HDShaderIDs._GlobalScattering, data.scattering);
+            cmd.SetGlobalFloat( HDShaderIDs._GlobalExtinction, data.extinction);
+            cmd.SetGlobalFloat( HDShaderIDs._GlobalAsymmetry,  asymmetry);
+        }
+    }
+}
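A worked example of why this override blends in physical space: lerping extinction and converting back is not the same as lerping the mean free path directly (values illustrative):

void BlendExample()
{
    float mfpA = 10.0f, mfpB = 50.0f; // two mean free paths, in meters
    float extA = VolumeRenderingUtils.ExtinctionFromMeanFreePath(mfpA); // 0.1
    float extB = VolumeRenderingUtils.ExtinctionFromMeanFreePath(mfpB); // 0.02
    float extBlend = Mathf.Lerp(extB, extA, 0.5f);                      // 0.06
    float mfpBlend = VolumeRenderingUtils.MeanFreePathFromExtinction(extBlend); // ~16.7 m
    // A naive lerp of the parameter itself would have produced 30 m instead.
}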

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Sky/AtmosphericScattering/VolumetricFog.cs.meta (11 changes, new file)


+fileFormatVersion: 2
+guid: 29b00527f85bb3346a4d2cb710971587
+MonoImporter:
+  externalObjects: {}
+  serializedVersion: 2
+  defaultReferences: []
+  executionOrder: 0
+  icon: {instanceID: 0}
+  userData:
+  assetBundleName:
+  assetBundleVariant: