浏览代码
Merge pull request #755 from EvgeniiG/volumetrics
Add the initial implementation of volumetric lighting (branch: main)
GitHub
7 年前
当前提交
1461533c
共有 35 个文件被更改,包括 1618 次插入 和 71 次删除
-
24ScriptableRenderPipeline/Core/CoreRP/ShaderLibrary/Common.hlsl
-
116ScriptableRenderPipeline/Core/CoreRP/ShaderLibrary/GeometricTools.hlsl
-
6ScriptableRenderPipeline/Core/CoreRP/ShaderLibrary/Random.hlsl
-
90ScriptableRenderPipeline/Core/CoreRP/ShaderLibrary/VolumeRendering.hlsl
-
1ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/HDAssetFactory.cs
-
26ScriptableRenderPipeline/HDRenderPipeline/HDRP/HDRenderPipeline.cs
-
17ScriptableRenderPipeline/HDRenderPipeline/HDRP/HDStringConstants.cs
-
5ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting.meta
-
4ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Deferred.shader
-
10ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightEvaluation.hlsl
-
3ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightLoop/LightLoop.cs
-
28ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightLoop/LightLoopDef.hlsl
-
9ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/Lit/Lit.hlsl
-
2ScriptableRenderPipeline/HDRenderPipeline/HDRP/RenderPipelineResources/HDRenderPipelineResources.asset
-
1ScriptableRenderPipeline/HDRenderPipeline/HDRP/RenderPipelineResources/RenderPipelineResources.cs
-
10ScriptableRenderPipeline/HDRenderPipeline/HDRP/ShaderConfig.cs
-
1ScriptableRenderPipeline/HDRenderPipeline/HDRP/ShaderConfig.cs.hlsl
-
7ScriptableRenderPipeline/HDRenderPipeline/HDRP/ShaderVariables.hlsl
-
34ScriptableRenderPipeline/HDRenderPipeline/HDRP/Sky/AtmosphericScattering/AtmosphericScattering.hlsl
-
40ScriptableRenderPipeline/Core/CoreRP/ShaderLibrary/Filtering.hlsl
-
9ScriptableRenderPipeline/Core/CoreRP/ShaderLibrary/Filtering.hlsl.meta
-
113ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/VBuffer.hlsl
-
9ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/VBuffer.hlsl.meta
-
10ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics.meta
-
13ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/HomogeneousFog.cs.meta
-
10ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/Resources.meta
-
10ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/VolumetricLighting.cs.hlsl.meta
-
13ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/VolumetricLighting.cs.meta
-
28ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/VolumetricLighting.cs.hlsl
-
10ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/Resources/VolumetricLighting.compute.meta
-
493ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/Resources/VolumetricLighting.compute
-
38ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/HomogeneousFog.cs
-
499ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/VolumetricLighting.cs
|
|||
fileFormatVersion: 2 |
|||
guid: 3002976b0b09954499dd1f6e00169b06 |
|||
guid: 5e0fe6d5bfdaab148b33c776c94a500f |
|||
timeCreated: 1474297943 |
|||
licenseType: Pro |
|||
externalObjects: {} |
|||
userData: |
|||
assetBundleName: |
|||
assetBundleVariant: |
|
|||
#ifndef UNITY_FILTERING_INCLUDED |
|||
#define UNITY_FILTERING_INCLUDED |
|||
|
|||
// Cardinal (interpolating) B-Spline of the 2nd degree (3rd order). Support = 3x3. |
|||
// The fractional coordinate of each part is assumed to be in the [0, 1] range (centered on 0.5). |
|||
// https://www.desmos.com/calculator/47j9r9lolm |
|||
// Left segment of the interpolating quadratic B-Spline: 0.5 * x^2.
real2 BSpline2IntLeft(real2 x)
{
    return x * (0.5 * x);
}
|||
|
|||
// Middle segment of the interpolating quadratic B-Spline: -x^2 + x + 0.5.
real2 BSpline2IntMiddle(real2 x)
{
    real2 oneMinusX = 1 - x;
    return oneMinusX * x + 0.5;
}
|||
|
|||
// Right segment of the interpolating quadratic B-Spline: 0.5 * x^2 - x + 0.5 == 0.5 * (1 - x)^2.
real2 BSpline2IntRight(real2 x)
{
    real2 halfXMinusOne = 0.5 * x - 1;
    return halfXMinusOne * x + 0.5;
}
|||
|
|||
// Compute weights & offsets for 4x bilinear taps for the biquadratic B-Spline filter. |
|||
// The fractional coordinate should be in the [0, 1] range (centered on 0.5). |
|||
// Inspired by: http://vec3.ca/bicubic-filtering-in-fewer-taps/ |
|||
// Computes weights & offsets so the biquadratic B-Spline filter (3x3 support)
// can be evaluated with only 4 bilinear taps.
// 'fracCoord' is the fractional texel coordinate in [0, 1] (centered on 0.5).
// Inspired by: http://vec3.ca/bicubic-filtering-in-fewer-taps/
void BiquadraticFilter(real2 fracCoord, out real2 weights[2], out real2 offsets[2])
{
    // Evaluate the three B-Spline basis segments at the fractional coordinate.
    real2 bl = BSpline2IntLeft(fracCoord);
    real2 bm = BSpline2IntMiddle(fracCoord);
    real2 br = 1 - bl - bm; // Partition of unity: left + middle + right == 1

    // Fold the 3 basis weights into 2 bilinear taps:
    // tap 0 lerps between the left and middle texels,
    // tap 1 lerps between the middle and right texels.
    weights[0] = bl + 0.5 * bm;
    weights[1] = br + 0.5 * bm;
    offsets[0] = -0.5 + 0.5 * bm * rcp(weights[0]);
    offsets[1] =  0.5 + br * rcp(weights[1]);
}
|||
|
|||
#endif // UNITY_FILTERING_INCLUDED |
|
|||
fileFormatVersion: 2 |
|||
guid: 54ca0b3b7814f804aac1450b38477c74 |
|||
ShaderImporter: |
|||
externalObjects: {} |
|||
defaultTextures: [] |
|||
nonModifiableTextures: [] |
|||
userData: |
|||
assetBundleName: |
|||
assetBundleVariant: |
|
|||
#ifndef UNITY_VBUFFER_INCLUDED |
|||
#define UNITY_VBUFFER_INCLUDED |
|||
|
|||
// Interpolation in the log space is non-linear. |
|||
// Therefore, given 'logEncodedDepth', we compute a new depth value |
|||
// which allows us to perform HW interpolation which is linear in the view space. |
|||
float ComputeLerpPositionForLogEncoding(float linearDepth, float logEncodedDepth,
                                        float4 VBufferScaleAndSliceCount,
                                        float4 VBufferDepthEncodingParams)
{
    float z = linearDepth;
    float d = logEncodedDepth;

    float numSlices = VBufferScaleAndSliceCount.z;
    float rcpNumSlices = VBufferScaleAndSliceCount.w;

    // Indices of the two slices whose centers bracket 'd'
    // (slice centers sit at half-texel offsets along the depth axis).
    float s0 = floor(d * numSlices - 0.5);
    float s1 = ceil(d * numSlices - 0.5);
    // Log-encoded depths at the centers of those two slices.
    float d0 = saturate(s0 * rcpNumSlices + (0.5 * rcpNumSlices));
    float d1 = saturate(s1 * rcpNumSlices + (0.5 * rcpNumSlices));
    // Corresponding linear (view-space) depths of the slice centers.
    float z0 = DecodeLogarithmicDepth(d0, VBufferDepthEncodingParams);
    float z1 = DecodeLogarithmicDepth(d1, VBufferDepthEncodingParams);

    // Compute the linear interpolation weight.
    float t = saturate((z - z0) / (z1 - z0));
    // Remap so the HW lerp between the two slice centers becomes linear in view space.
    // The two centers are exactly 'rcpNumSlices' apart in the encoded coordinate.
    return d0 + t * rcpNumSlices;
}
|||
|
|||
// Performs trilinear reconstruction of the V-Buffer. |
|||
// If (clampToEdge == false), out-of-bounds loads return 0. |
|||
float4 SampleVBuffer(TEXTURE3D_ARGS(VBufferLighting, trilinearSampler), bool clampToEdge,
                     float2 positionNDC, float linearDepth,
                     float4 VBufferScaleAndSliceCount,
                     float4 VBufferDepthEncodingParams)
{
    // NOTE(review): 'numSlices' and 'rcpNumSlices' are not referenced below —
    // likely leftovers; consider removing.
    float numSlices = VBufferScaleAndSliceCount.z;
    float rcpNumSlices = VBufferScaleAndSliceCount.w;

    // Account for the visible area of the V-Buffer.
    float2 uv = positionNDC * VBufferScaleAndSliceCount.xy;

    // The distance between slices is log-encoded.
    float z = linearDepth;
    float d = EncodeLogarithmicDepth(z, VBufferDepthEncodingParams);

    // Unity doesn't support samplers clamping to border, so we have to do it ourselves.
    // TODO: add the proper sampler support.
    bool isInBounds = Min3(uv.x, uv.y, d) > 0 && Max3(uv.x, uv.y, d) < 1;

    [branch] if (clampToEdge || isInBounds)
    {
#if 0
        // We could ignore non-linearity at the cost of accuracy.
        // TODO: visually test this option (especially in motion).
        float w = d;
#else
        // Adjust the texture coordinate for HW trilinear sampling
        // (log encoding makes the depth axis non-linear in view space).
        float w = ComputeLerpPositionForLogEncoding(z, d, VBufferScaleAndSliceCount, VBufferDepthEncodingParams);
#endif

        return SAMPLE_TEXTURE3D_LOD(VBufferLighting, trilinearSampler, float3(uv, w), 0);
    }
    else
    {
        // Out-of-bounds loads return 0 (see the contract in the header comment).
        return 0;
    }
}
|||
|
|||
// Returns interpolated {volumetric radiance, transmittance}. The sampler clamps to edge. |
|||
float4 SampleInScatteredRadianceAndTransmittance(TEXTURE3D_ARGS(VBufferLighting, trilinearSampler),
                                                 float2 positionNDC, float linearDepth,
                                                 float4 VBufferResolution,
                                                 float4 VBufferScaleAndSliceCount,
                                                 float4 VBufferDepthEncodingParams)
{
#ifdef RECONSTRUCTION_FILTER_TRILINEAR
    // Cheap path: a single HW trilinear tap (clamped to edge).
    float4 L = SampleVBuffer(TEXTURE3D_PARAM(VBufferLighting, trilinearSampler), true,
                             positionNDC, linearDepth,
                             VBufferScaleAndSliceCount, VBufferDepthEncodingParams);
#else
    // Perform biquadratic reconstruction in XY, linear in Z, using 4x trilinear taps.

    // Account for the visible area of the V-Buffer.
    float2 xy = positionNDC * (VBufferResolution.xy * VBufferScaleAndSliceCount.xy);
    float2 ic = floor(xy); // Integer texel coordinate
    float2 fc = frac(xy);  // Fractional texel coordinate

    // The distance between slices is log-encoded.
    float z = linearDepth;
    float d = EncodeLogarithmicDepth(z, VBufferDepthEncodingParams);

    // Adjust the texture coordinate for HW trilinear sampling.
    float w = ComputeLerpPositionForLogEncoding(z, d, VBufferScaleAndSliceCount, VBufferDepthEncodingParams);

    float2 weights[2], offsets[2];
    BiquadraticFilter(1 - fc, weights, offsets); // Inverse-translate the filter centered around 0.5

    float2 rcpRes = VBufferResolution.zw;

    // TODO: reconstruction should be performed in the perceptual space (e.i., after tone mapping).
    // But our VBuffer is linear. How to achieve that?
    // See "A Fresh Look at Generalized Sampling", p. 51.
    float4 L = (weights[0].x * weights[0].y) * SAMPLE_TEXTURE3D_LOD(VBufferLighting, trilinearSampler, float3((ic + float2(offsets[0].x, offsets[0].y)) * rcpRes, w), 0)  // Top left
             + (weights[1].x * weights[0].y) * SAMPLE_TEXTURE3D_LOD(VBufferLighting, trilinearSampler, float3((ic + float2(offsets[1].x, offsets[0].y)) * rcpRes, w), 0)  // Top right
             + (weights[0].x * weights[1].y) * SAMPLE_TEXTURE3D_LOD(VBufferLighting, trilinearSampler, float3((ic + float2(offsets[0].x, offsets[1].y)) * rcpRes, w), 0)  // Bottom left
             + (weights[1].x * weights[1].y) * SAMPLE_TEXTURE3D_LOD(VBufferLighting, trilinearSampler, float3((ic + float2(offsets[1].x, offsets[1].y)) * rcpRes, w), 0); // Bottom right
#endif

    // TODO: add some animated noise to the reconstructed radiance.
    // The alpha channel stores optical depth; convert it to transmittance on the way out.
    return float4(L.rgb, Transmittance(L.a));
}
|||
|
|||
#endif // UNITY_VBUFFER_INCLUDED |
|
|||
fileFormatVersion: 2 |
|||
guid: a9a6d43965a406e43b8b3c3851981e2e |
|||
ShaderImporter: |
|||
externalObjects: {} |
|||
defaultTextures: [] |
|||
nonModifiableTextures: [] |
|||
userData: |
|||
assetBundleName: |
|||
assetBundleVariant: |
|
|||
fileFormatVersion: 2 |
|||
guid: 1fe4fc72895e4bb4f90ff44b47e76051 |
|||
folderAsset: yes |
|||
timeCreated: 1503411233 |
|||
licenseType: Pro |
|||
DefaultImporter: |
|||
externalObjects: {} |
|||
userData: |
|||
assetBundleName: |
|||
assetBundleVariant: |
|
|||
fileFormatVersion: 2 |
|||
guid: 8f608e240d5376341bcef2478d231457 |
|||
timeCreated: 1503411233 |
|||
licenseType: Pro |
|||
MonoImporter: |
|||
externalObjects: {} |
|||
serializedVersion: 2 |
|||
defaultReferences: [] |
|||
executionOrder: 0 |
|||
icon: {instanceID: 0} |
|||
userData: |
|||
assetBundleName: |
|||
assetBundleVariant: |
|
|||
fileFormatVersion: 2 |
|||
guid: 333b470add5766f44a744f476efc19a8 |
|||
folderAsset: yes |
|||
timeCreated: 1503591964 |
|||
licenseType: Pro |
|||
DefaultImporter: |
|||
externalObjects: {} |
|||
userData: |
|||
assetBundleName: |
|||
assetBundleVariant: |
|
|||
fileFormatVersion: 2 |
|||
guid: d5c6a48928753954e8b1a84bbde280ec |
|||
timeCreated: 1504275050 |
|||
licenseType: Pro |
|||
ShaderImporter: |
|||
externalObjects: {} |
|||
defaultTextures: [] |
|||
userData: |
|||
assetBundleName: |
|||
assetBundleVariant: |
|
|||
fileFormatVersion: 2 |
|||
guid: 3090aceb9ee51fc4f9d9830cfef9684c |
|||
timeCreated: 1504273866 |
|||
licenseType: Pro |
|||
MonoImporter: |
|||
externalObjects: {} |
|||
serializedVersion: 2 |
|||
defaultReferences: [] |
|||
executionOrder: 0 |
|||
icon: {instanceID: 0} |
|||
userData: |
|||
assetBundleName: |
|||
assetBundleVariant: |
|
|||
// |
|||
// This file was automatically generated. Please don't edit by hand. |
|||
// |
|||
|
|||
#ifndef VOLUMETRICLIGHTING_CS_HLSL |
|||
#define VOLUMETRICLIGHTING_CS_HLSL |
|||
// Generated from UnityEngine.Experimental.Rendering.HDPipeline.VolumeProperties |
|||
// PackingRules = Exact |
|||
// NOTE(review): this file is auto-generated from the C# VolumeProperties struct
// (see the header above); edit the C# source, not this file.
// PackingRules = Exact, so the field order/types must match the C# layout.
struct VolumeProperties
{
    float3 scattering;
    float extinction;
};

//
// Accessors for UnityEngine.Experimental.Rendering.HDPipeline.VolumeProperties
//
// Returns the scattering coefficient of the volume.
float3 GetScattering(VolumeProperties value)
{
    return value.scattering;
}
// Returns the extinction coefficient of the volume.
float GetExtinction(VolumeProperties value)
{
    return value.extinction;
}
|||
|
|||
|
|||
#endif |
|
|||
fileFormatVersion: 2 |
|||
guid: 799166e2ee6a4b041bba9e74f6942097 |
|||
timeCreated: 1503570390 |
|||
licenseType: Pro |
|||
ComputeShaderImporter: |
|||
externalObjects: {} |
|||
currentAPIMask: 4 |
|||
userData: |
|||
assetBundleName: |
|||
assetBundleVariant: |
|
|||
//-------------------------------------------------------------------------------------------------- |
|||
// Definitions |
|||
//-------------------------------------------------------------------------------------------------- |
|||
|
|||
#pragma kernel VolumetricLightingAllLights VolumetricLighting=VolumetricLightingAllLights ENABLE_REPROJECTION=0 LIGHTLOOP_SINGLE_PASS |
|||
#pragma kernel VolumetricLightingAllLightsReproj VolumetricLighting=VolumetricLightingAllLightsReproj ENABLE_REPROJECTION=1 LIGHTLOOP_SINGLE_PASS |
|||
#pragma kernel VolumetricLightingClustered VolumetricLighting=VolumetricLightingClustered ENABLE_REPROJECTION=0 LIGHTLOOP_TILE_PASS USE_CLUSTERED_LIGHTLIST |
|||
#pragma kernel VolumetricLightingClusteredReproj VolumetricLighting=VolumetricLightingClusteredReproj ENABLE_REPROJECTION=1 LIGHTLOOP_TILE_PASS USE_CLUSTERED_LIGHTLIST |
|||
|
|||
#pragma enable_d3d11_debug_symbols |
|||
|
|||
#define DEBUG_REPROJECTION 0 |
|||
|
|||
#include "../../../ShaderPass/ShaderPass.cs.hlsl" |
|||
#define SHADERPASS SHADERPASS_VOLUMETRIC_LIGHTING |
|||
|
|||
#include "../../../ShaderConfig.cs.hlsl" |
|||
#if (SHADEROPTIONS_VOLUMETRIC_LIGHTING_PRESET == 1) |
|||
// E.g. for 1080p: (1920/8)x(1080/8)x(128) = 4,147,200 voxels |
|||
#define VBUFFER_TILE_SIZE 8 |
|||
#define VBUFFER_SLICE_COUNT 128 |
|||
#else |
|||
// E.g. for 1080p: (1920/4)x(1080/4)x(256) = 33,177,600 voxels |
|||
#define VBUFFER_TILE_SIZE 4 |
|||
#define VBUFFER_SLICE_COUNT 256 |
|||
#endif |
|||
|
|||
#define GROUP_SIZE_1D 16 |
|||
#define GROUP_SIZE_2D (GROUP_SIZE_1D * GROUP_SIZE_1D) |
|||
|
|||
//-------------------------------------------------------------------------------------------------- |
|||
// Included headers |
|||
//-------------------------------------------------------------------------------------------------- |
|||
|
|||
#include "CoreRP/ShaderLibrary/Common.hlsl" |
|||
#include "CoreRP/ShaderLibrary/Filtering.hlsl" |
|||
#include "CoreRP/ShaderLibrary/VolumeRendering.hlsl" |
|||
#include "CoreRP/ShaderLibrary/SpaceFillingCurves.hlsl" |
|||
|
|||
#include "../VolumetricLighting.cs.hlsl" |
|||
#include "../../../ShaderVariables.hlsl" |
|||
|
|||
#define UNITY_MATERIAL_VOLUMETRIC // Define before including Lighting.hlsl and Material.hlsl |
|||
#include "../../../Lighting/Lighting.hlsl" // Includes Material.hlsl |
|||
#include "../../../Lighting/LightEvaluation.hlsl" |
|||
#include "../../../Lighting/VBuffer.hlsl" |
|||
|
|||
//-------------------------------------------------------------------------------------------------- |
|||
// Inputs & outputs |
|||
//-------------------------------------------------------------------------------------------------- |
|||
|
|||
RW_TEXTURE3D(float4, _VBufferLightingIntegral); // RGB = radiance, A = optical depth |
|||
RW_TEXTURE3D(float4, _VBufferLightingFeedback); // RGB = radiance, A = interval length |
|||
TEXTURE3D(_VBufferLightingHistory); // RGB = radiance, A = interval length |
|||
|
|||
// TODO: avoid creating another Constant Buffer... |
|||
CBUFFER_START(UnityVolumetricLighting) |
|||
float4 _VBufferSampleOffset; // {x, y, z}, w = rendered frame count |
|||
float4x4 _VBufferCoordToViewDirWS; // Actually just 3x3, but Unity can only set 4x4 |
|||
CBUFFER_END |
|||
|
|||
//-------------------------------------------------------------------------------------------------- |
|||
// Implementation |
|||
//-------------------------------------------------------------------------------------------------- |
|||
|
|||
// A camera ray through the center (or a stratified sub-position) of a V-Buffer texel.
struct Ray
{
    float3 originWS;     // Camera position (see GetCurrentViewPosition in the kernel)
    float3 directionWS;  // Normalized, stratified
    float  ratioLenToZ;  // Length of the un-normalized ray whose ViewSpaceZ == 1;
                         // converts a view-space Z value into a distance along the ray
    float3 centerDirWS;  // Not normalized, centered (passes through the texel center)
};
|||
|
|||
// Returns the world-space point at distance 't' along the stratified ray direction.
float3 GetPointAtDistance(Ray ray, float t)
{
    float3 displacement = t * ray.directionWS;
    return ray.originWS + displacement;
}
|||
|
|||
// Returns the world-space point at parameter 't' along the (un-normalized) center direction.
float3 GetCenterAtDistance(Ray ray, float t)
{
    float3 displacement = t * ray.centerDirWS;
    return ray.originWS + displacement;
}
|||
|
|||
// Computes the light integral (in-scattered radiance) within the voxel. |
|||
// Multiplication by the scattering coefficient and the phase function is performed outside. |
|||
// Computes the light integral (in-scattered radiance) within the voxel spanning [t0, t1]
// along 'ray'. 'dt' = t1 - t0; 'rndVal' is the per-frame stratification value;
// 'extinction' is the (homogeneous) extinction coefficient of the medium.
// Multiplication by the scattering coefficient and the phase function is performed outside.
float3 EvaluateVoxelLighting(LightLoopContext context, uint featureFlags, PositionInputs posInput,
                             Ray ray, float t0, float t1, float dt, float rndVal, float extinction
#ifdef LIGHTLOOP_TILE_PASS
                             , uint clusterIndices[2], float clusterDepths[2])
#else
                             )
#endif
{
    float3 voxelRadiance = 0;

    BakeLightingData unused; // Unused for now, so define once

    if (featureFlags & LIGHTFEATUREFLAGS_DIRECTIONAL)
    {
        // One shared importance sample along the interval for all directional lights.
        float tOffset, weight;
        ImportanceSampleHomogeneousMedium(rndVal, extinction, dt, tOffset, weight);

        float t = t0 + tOffset;
        posInput.positionWS = GetPointAtDistance(ray, t);

        for (uint i = 0; i < _DirectionalLightCount; ++i)
        {
            // Fetch the light.
            DirectionalLightData light = _DirectionalLightDatas[i];
            float3 L = -light.forward; // Lights point backwards in Unity

            float3 color; float attenuation;
            EvaluateLight_Directional(context, posInput, light, unused, 0, L,
                                      color, attenuation);

            // Note: the 'weight' accounts for transmittance from 't0' to 't'.
            float intensity = attenuation * weight;

            // Compute the amount of in-scattered radiance.
            voxelRadiance += intensity * color;
        }
    }

#ifdef LIGHTLOOP_TILE_PASS
    // Loop over 1 or 2 light clusters (the voxel may straddle a cluster boundary).
    int cluster = 0;
    do
    {
        float tMin = max(t0, ray.ratioLenToZ * clusterDepths[cluster]);
        float tMax = t1;

        if (cluster == 0 && (clusterIndices[0] != clusterIndices[1]))
        {
            // First cluster only covers the interval up to the second cluster's near depth.
            tMax = min(t1, ray.ratioLenToZ * clusterDepths[1]);
        }
#else
    float tMin = t0;
    float tMax = t1;
#endif // LIGHTLOOP_TILE_PASS

        if (featureFlags & LIGHTFEATUREFLAGS_PUNCTUAL)
        {
            uint lightCount, lightStart;

#ifdef LIGHTLOOP_TILE_PASS
            GetCountAndStartCluster(posInput.tileCoord, clusterIndices[cluster], LIGHTCATEGORY_PUNCTUAL,
                                    lightStart, lightCount);
#else
            lightCount = _PunctualLightCount;
            lightStart = 0;
#endif // LIGHTLOOP_TILE_PASS

            if (lightCount > 0)
            {
                LightData light = FetchLight(lightStart, 0);

                uint i = 0, last = lightCount - 1;

                // Box lights require special handling (see the next while loop).
                while (i <= last && light.lightType != GPULIGHTTYPE_PROJECTOR_BOX)
                {
                    float tEntr = tMin;
                    float tExit = tMax;

                    bool sampleLight = true;

                    // Perform ray-cone intersection for pyramid and spot lights.
                    if (light.lightType != GPULIGHTTYPE_POINT)
                    {
                        float lenMul = 1;

                        if (light.lightType == GPULIGHTTYPE_PROJECTOR_PYRAMID)
                        {
                            // 'light.right' and 'light.up' vectors are pre-scaled on the CPU
                            // s.t. if you were to place them at the distance of 1 directly in front
                            // of the light, they would give you the "footprint" of the light.
                            // For spot lights, the cone fit is exact.
                            // For pyramid lights, however, this is the "inscribed" cone
                            // (contained within the pyramid), and we want to intersect
                            // the "escribed" cone (which contains the pyramid).
                            // Therefore, we have to scale the radii by the sqrt(2).
                            lenMul = rsqrt(2);
                        }

                        float3 coneAxisX = lenMul * light.right;
                        float3 coneAxisY = lenMul * light.up;

                        sampleLight = IntersectRayCone(ray.originWS, ray.directionWS,
                                                       light.positionWS, light.forward,
                                                       coneAxisX, coneAxisY,
                                                       tMin, tMax, tEntr, tExit);
                    }

                    if (sampleLight)
                    {
                        // We are unable to adequately sample features larger
                        // than the half of the length of the integration interval
                        // divided by the number of temporal samples (7).
                        // Therefore, we apply this hack to reduce flickering.
                        float hackMinDistSq = Sq(dt * (0.5 / 7));

                        float t, distSq, rcpPdf;
                        ImportanceSamplePunctualLight(rndVal, light.positionWS,
                                                      ray.originWS, ray.directionWS,
                                                      tEntr, tExit, t, distSq, rcpPdf,
                                                      hackMinDistSq);

                        posInput.positionWS = GetPointAtDistance(ray, t);

                        float3 lightToSample = posInput.positionWS - light.positionWS;
                        float dist = sqrt(distSq);
                        float3 L = -lightToSample * rsqrt(distSq);

                        float3 color; float attenuation;
                        EvaluateLight_Punctual(context, posInput, light, unused, 0, L, dist, distSq,
                                               color, attenuation);

                        float intensity = attenuation * rcpPdf;

                        // Compute transmittance from 't0' to 't'.
                        intensity *= TransmittanceHomogeneousMedium(extinction, t - t0);

                        // Compute the amount of in-scattered radiance.
                        voxelRadiance += color * intensity;
                    }

                    light = FetchLight(lightStart, min(++i, last));
                }

                while (i <= last) // GPULIGHTTYPE_PROJECTOR_BOX
                {
                    // NOTE(review): this fetch pre-increments 'i' before processing,
                    // which appears to discard the box light already fetched at the exit
                    // of the previous loop (e.g. when the list starts with a box light,
                    // index 0 is never evaluated and 'last' may be evaluated twice).
                    // Verify against the light-list ordering guarantees.
                    light = FetchLight(lightStart, min(++i, last));
                    light.lightType = GPULIGHTTYPE_PROJECTOR_BOX;

                    // Convert the box light from OBB to AABB.
                    // 'light.right' and 'light.up' vectors are pre-scaled on the CPU by (2/w) and (2/h).
                    float3x3 rotMat = float3x3(light.right, light.up, light.forward);

                    float3 o = mul(rotMat, ray.originWS - light.positionWS);
                    float3 d = mul(rotMat, ray.directionWS);

                    float range = light.size.x;
                    float3 boxPt0 = float3(-1, -1, 0);
                    float3 boxPt1 = float3( 1,  1, range);

                    float tEntr, tExit;

                    if (IntersectRayAABB(o, d, boxPt0, boxPt1, tMin, tMax, tEntr, tExit))
                    {
                        float tOffset, weight;
                        ImportanceSampleHomogeneousMedium(rndVal, extinction, tExit - tEntr, tOffset, weight);

                        float t = tEntr + tOffset;
                        posInput.positionWS = GetPointAtDistance(ray, t);

                        float3 L = -light.forward;

                        float3 color; float attenuation;
                        EvaluateLight_Punctual(context, posInput, light, unused, 0, L, 1, 1,
                                               color, attenuation);

                        // Note: the 'weight' accounts for transmittance from 'tEntr' to 't'.
                        float intensity = attenuation * weight;

                        // Compute transmittance from 't0' to 'tEntr'.
                        intensity *= TransmittanceHomogeneousMedium(extinction, tEntr - t0);

                        // Compute the amount of in-scattered radiance.
                        voxelRadiance += intensity * color;
                    }
                }
            }
        }
#ifdef LIGHTLOOP_TILE_PASS
        cluster++;
        // Check whether the voxel is completely inside the light cluster.
    } while ((cluster < 2) && (clusterIndices[0] != clusterIndices[1]));
#endif // LIGHTLOOP_TILE_PASS

    return voxelRadiance;
}
|||
|
|||
// Computes the in-scattered radiance along the ray. |
|||
// Computes the in-scattered radiance along the ray, marching slice by slice through the
// V-Buffer and accumulating radiance and optical depth into _VBufferLightingIntegral
// (and, with reprojection enabled, _VBufferLightingFeedback).
void FillVolumetricLightingBuffer(LightLoopContext context, uint featureFlags,
                                  PositionInputs posInput, Ray ray)
{
    float z0 = _VBufferDepthEncodingParams.x; // Start integration from the near plane
    float t0 = ray.ratioLenToZ * z0;
    float de = rcp(VBUFFER_SLICE_COUNT);      // Log-encoded distance between slices

    float3 totalRadiance = 0;
    float opticalDepth = 0;

    // 'max' with a runtime value keeps the loop bound non-constant; see the TODO below.
    uint sliceCountHack = max(VBUFFER_SLICE_COUNT, (uint)_VBufferDepthEncodingParams.x); // Prevent unrolling...

#ifdef LIGHTLOOP_TILE_PASS
    // Our voxel is not necessarily completely inside a single light cluster.
    // Note that Z-binning can solve this problem, as we can iterate over all Z-bins
    // to compute min/max light indices, and then use this range for the entire slice.
    uint clusterIndices[2];
    float clusterDepths[2];
    clusterIndices[0] = GetLightClusterIndex(posInput.tileCoord, z0);
    clusterDepths[0] = GetLightClusterMinLinearDepth(posInput.tileCoord, clusterIndices[0]);
#endif // LIGHTLOOP_TILE_PASS

    // TODO: replace 'sliceCountHack' with VBUFFER_SLICE_COUNT when the shader compiler bug is fixed.
    for (uint slice = 0; slice < sliceCountHack; slice++)
    {
        float e1 = slice * de + de; // (slice + 1) / sliceCount
        float z1 = DecodeLogarithmicDepth(e1, _VBufferDepthEncodingParams);
        float t1 = ray.ratioLenToZ * z1;
        float dt = t1 - t0;

#ifdef LIGHTLOOP_TILE_PASS
        clusterIndices[1] = GetLightClusterIndex(posInput.tileCoord, z1);
        clusterDepths[1] = GetLightClusterMinLinearDepth(posInput.tileCoord, clusterIndices[1]);
#endif

        // Compute the -exact- position of the center of the voxel.
        // It's important since the accumulated value of the integral is stored at the center.
        // We will use it for participating media sampling and reprojection.
        float tc = t0 + 0.5 * dt;
        float3 centerWS = GetCenterAtDistance(ray, tc);

        // Sample the participating medium at 'tc' (or 'centerWS').
        // We consider it to be constant along the interval [t0, t1] (within the voxel).
        // TODO: piecewise linear.
        float3 scattering = _GlobalFog_Scattering;
        float extinction = _GlobalFog_Extinction;

#if ENABLE_REPROJECTION
        // This is a sequence of 7 equidistant numbers from 1/14 to 13/14.
        // Each of them is the centroid of the interval of length 2/14.
        float rndVal = _VBufferSampleOffset.z;
#else
        // Without reprojection, always sample the center of the interval.
        float rndVal = 0.5;
#endif

        float3 voxelRadiance = EvaluateVoxelLighting(context, featureFlags, posInput,
                                                     ray, t0, t1, dt, rndVal, extinction
#ifdef LIGHTLOOP_TILE_PASS
                                                     , clusterIndices, clusterDepths);
#else
                                                     );
#endif

#if ENABLE_REPROJECTION
        // Reproject the history at 'centerWS'.
        float2 reprojPosNDC = ComputeNormalizedDeviceCoordinates(centerWS, _PrevViewProjMatrix);
        float reprojZ = mul(_PrevViewProjMatrix, float4(centerWS, 1)).w;
        float4 reprojValue = SampleVBuffer(TEXTURE3D_PARAM(_VBufferLightingHistory, s_trilinear_clamp_sampler),
                                           false, reprojPosNDC, reprojZ,
                                           _VBufferScaleAndSliceCount,
                                           _VBufferDepthEncodingParams);

        // Compute the exponential moving average over 'n' frames:
        // X = (1 - a) * ValueAtFrame[n] + a * AverageOverPreviousFrames.
        // We want each sample to be uniformly weighted by (1 / n):
        // X = (1 / n) * Sum{i from 1 to n}{ValueAtFrame[i]}.
        // Therefore, we get:
        // (1 - a) = (1 / n) => a = (1 - 1 / n) = (n - 1) / n,
        // X = (1 / n) * ValueAtFrame[n] + (1 - 1 / n) * AverageOverPreviousFrames.
        // Why does it work? We need to make the following assumption:
        // AverageOverPreviousFrames ≈ AverageOverFrames[n - 1].
        // AverageOverFrames[n - 1] = (1 / (n - 1)) * Sum{i from 1 to n - 1}{ValueAtFrame[i]}.
        // This implies that the reprojected (accumulated) value has mostly converged.
        // X = (1 / n) * ValueAtFrame[n] + ((n - 1) / n) * (1 / (n - 1)) * Sum{i from 1 to n - 1}{ValueAtFrame[i]}.
        // X = (1 / n) * ValueAtFrame[n] + (1 / n) * Sum{i from 1 to n - 1}{ValueAtFrame[i]}.
        // X = Sum{i from 1 to n}{ValueAtFrame[i] / n}.
        float numFrames = 7;
        float frameWeight = 1 / numFrames;
        float historyWeight = 1 - frameWeight;

        // The accuracy of the integral linearly decreases with the length of the interval.
        // Therefore, reprojecting longer intervals should result in a lower confidence.
        // TODO: doesn't seem to be worth it, removed for now.

        // Perform temporal blending.
        // Both radiance values are obtained by integrating over line segments of different length.
        // Blending only makes sense if the length of both intervals is the same.
        // Therefore, the reprojected radiance needs to be rescaled by (frame_dt / reproj_dt).
        // A zero alpha (interval length) marks an invalid/out-of-bounds history sample.
        bool reprojSuccess = reprojValue.a != 0;
        float blendFactor = reprojSuccess ? historyWeight : 0;
        float reprojRcpLen = reprojSuccess ? rcp(reprojValue.a) : 0;
        float lengthScale = dt * reprojRcpLen;
        float3 reprojRadiance = reprojValue.rgb;
        float3 blendedRadiance = (1 - blendFactor) * voxelRadiance + blendFactor * lengthScale * reprojRadiance;

        // Store the feedback for the voxel.
        // TODO: dynamic lights (which update their position, rotation, cookie or shadow at runtime)
        // do not support reprojection and should neither read nor write to the history buffer.
        // to the history buffer. This will cause them to alias, but it is the only way
        // to prevent ghosting.
        _VBufferLightingFeedback[uint3(posInput.positionSS, slice)] = float4(blendedRadiance, dt);
#else
        float3 blendedRadiance = voxelRadiance;
#endif

#if DEBUG_REPROJECTION
        // NOTE(review): 'reprojValue' only exists when ENABLE_REPROJECTION is 1;
        // enabling DEBUG_REPROJECTION on the non-reprojection kernels will not compile.
        if (distance(voxelRadiance, reprojValue.rgb) > 0.1) blendedRadiance = float3(1000, 0, 0);
#endif

        // Compute the transmittance from the camera to 't0'.
        float transmittance = Transmittance(opticalDepth);

        // Integral{a, b}{Transmittance(0, t) * L_s(t) dt} = Transmittance(0, a) * Integral{a, b}{Transmittance(0, t - a) * L_s(t) dt}.
        totalRadiance += (transmittance * IsotropicPhaseFunction()) * scattering * blendedRadiance;

        // Compute the optical depth up to the center of the interval.
        opticalDepth += 0.5 * extinction * dt;

        // Store the voxel data.
        _VBufferLightingIntegral[uint3(posInput.positionSS, slice)] = float4(totalRadiance, opticalDepth);

        // Compute the optical depth up to the end of the interval.
        opticalDepth += 0.5 * extinction * dt;

        t0 = t1;

#ifdef LIGHTLOOP_TILE_PASS
        // Slide the cluster window: the far cluster becomes the near one for the next slice.
        clusterIndices[0] = clusterIndices[1];
        clusterDepths[0] = clusterDepths[1];
#endif // LIGHTLOOP_TILE_PASS
    }
}
|||
|
|||
// Compute kernel entry point (one of 4 variants, see the #pragma kernel lines):
// each thread integrates volumetric lighting along one V-Buffer texel's camera ray.
[numthreads(GROUP_SIZE_2D, 1, 1)]
void VolumetricLighting(uint2 groupId       : SV_GroupID,
                        uint  groupThreadId : SV_GroupThreadID)
{
    // Perform compile-time checks.
    if (!IsPower2(VBUFFER_TILE_SIZE) || !IsPower2(TILE_SIZE_CLUSTERED)) return;

    // Note: any factor of 64 is a suitable wave size for our algorithm.
    // NOTE(review): 'waveIndex' and 'quadIndex' are not used below — candidates for removal.
    uint waveIndex = WaveReadFirstLane(groupThreadId / 64);
    uint laneIndex = groupThreadId % 64;
    uint quadIndex = laneIndex / 4;

    // Arrange threads in the Morton order to optimally match the memory layout of GCN tiles.
    uint2 groupCoord = DecodeMorton2D(groupThreadId);
    uint2 groupOffset = groupId * GROUP_SIZE_1D;
    uint2 voxelCoord = groupOffset + groupCoord;
    uint2 tileCoord = voxelCoord * VBUFFER_TILE_SIZE / TILE_SIZE_CLUSTERED;

    uint voxelsPerClusterTile = Sq((uint)(TILE_SIZE_CLUSTERED / VBUFFER_TILE_SIZE));

    if (voxelsPerClusterTile >= 64)
    {
        // TODO: this is a compile-time test, make sure the compiler actually scalarizes.
        tileCoord = WaveReadFirstLane(tileCoord);
    }

    // Skip threads that fall outside the V-Buffer.
    [branch] if (voxelCoord.x >= (uint)_VBufferResolution.x ||
                 voxelCoord.y >= (uint)_VBufferResolution.y)
    {
        return;
    }

    float2 centerCoord = voxelCoord + 0.5;
#if ENABLE_REPROJECTION
    // Jitter the sample position within the texel for temporal super-sampling.
    float2 sampleCoord = centerCoord + _VBufferSampleOffset.xy;
#else
    float2 sampleCoord = centerCoord;
#endif

    // Compute the (stratified) ray direction s.t. its ViewSpaceZ = 1.
    float3 rayDir = mul(-float3(sampleCoord, 1), (float3x3)_VBufferCoordToViewDirWS);
    float lenSq = dot(rayDir, rayDir);
    float lenRcp = rsqrt(lenSq);
    float len = lenSq * lenRcp; // |rayDir| = lenSq / |rayDir|

#if ENABLE_REPROJECTION
    // Compute the ray direction which passes through the center of the voxel s.t. its ViewSpaceZ = 1.
    float3 rayCenterDir = mul(-float3(centerCoord, 1), (float3x3)_VBufferCoordToViewDirWS);
#else
    float3 rayCenterDir = rayDir;
#endif

    Ray ray;
    ray.originWS = GetCurrentViewPosition();
    ray.ratioLenToZ = len;
    ray.directionWS = rayDir * lenRcp;    // Normalized
    ray.centerDirWS = rayCenterDir * lenRcp;

    // TODO
    LightLoopContext context;
    context.shadowContext = InitShadowContext();
    uint featureFlags = 0xFFFFFFFF; // Evaluate all light categories for now

    PositionInputs posInput = GetPositionInput(voxelCoord, _VBufferResolution.zw, tileCoord);

    FillVolumetricLightingBuffer(context, featureFlags, posInput, ray);
}
|
|||
namespace UnityEngine.Experimental.Rendering.HDPipeline
{
    // Scene component which describes a homogeneous (constant-density) participating medium.
    // A component with unbounded (infinite) bounds acts as the global fog; see
    // HDRenderPipeline.GetGlobalFogComponent().
    [ExecuteInEditMode]
    [AddComponentMenu("RenderPipeline/High Definition/Homogenous Fog", -1)]
    public class HomogeneousFog : MonoBehaviour
    {
        // Bounds, albedo and mean free path of the medium; edited in the Inspector.
        public VolumeParameters volumeParameters = new VolumeParameters();

        // Intentionally empty: the presence of OnEnable()/OnDisable() gives the component
        // an enable checkbox in the Inspector, which the pipeline relies on (the global
        // fog lookup filters on 'enabled'). The previously-present empty Awake()/Update()
        // stubs were removed: Unity invokes these magic methods even when empty, adding
        // needless per-instance overhead.
        private void OnEnable()
        {
        }

        private void OnDisable()
        {
        }

        private void OnValidate()
        {
            // Keep user-edited values within their legal ranges.
            volumeParameters.Constrain();
        }

        void OnDrawGizmos()
        {
            // Visualize the volume in the Scene view; an unbounded volume has no
            // meaningful box to draw.
            if (volumeParameters != null && !volumeParameters.IsVolumeUnbounded())
            {
                Gizmos.DrawWireCube(volumeParameters.bounds.center, volumeParameters.bounds.size);
            }
        }
    }
} // UnityEngine.Experimental.Rendering.HDPipeline
|
|
|||
using System; |
|||
using UnityEngine.Rendering; |
|||
|
|||
namespace UnityEngine.Experimental.Rendering.HDPipeline |
|||
{ |
|||
|
|||
// GPU-visible description of a participating medium (mirrored to HLSL via [GenerateHLSL];
// do not reorder the fields).
[GenerateHLSL]
public struct VolumeProperties
{
    public Vector3 scattering; // Scattering coefficient per channel, [0, 1]
    public float   extinction; // Extinction coefficient (1 / mean free path)

    // A medium which neither scatters nor absorbs light.
    public static VolumeProperties GetNeutralVolumeProperties()
    {
        var neutral = new VolumeProperties();

        neutral.scattering = Vector3.zero;
        neutral.extinction = 0.0f;

        return neutral;
    }
} // struct VolumeProperties
|
|||
|
|||
// Artist-facing parameterization of a homogeneous medium. Converted into the
// GPU-friendly VolumeProperties representation via GetProperties().
[Serializable]
public class VolumeParameters
{
    public Bounds bounds;       // Position and dimensions in meters
    public Color  albedo;       // Single scattering albedo [0, 1]
    public float  meanFreePath; // In meters [1, inf]. Should be chromatic - this is an optimization!

    public VolumeParameters()
    {
        bounds       = new Bounds(Vector3.zero, Vector3.positiveInfinity);
        albedo       = new Color(0.5f, 0.5f, 0.5f);
        meanFreePath = 10.0f;
    }

    // True when the bounding box extends to infinity along every axis,
    // i.e. the medium fills the entire scene (global fog).
    public bool IsVolumeUnbounded()
    {
        Vector3 size = bounds.size;

        return float.IsPositiveInfinity(size.x) &&
               float.IsPositiveInfinity(size.y) &&
               float.IsPositiveInfinity(size.z);
    }

    // Absorption = max(extinction - scattering, 0), per channel.
    public Vector3 GetAbsorptionCoefficient()
    {
        float   extinction = GetExtinctionCoefficient();
        Vector3 sigmaT     = new Vector3(extinction, extinction, extinction);

        return Vector3.Max(sigmaT - GetScatteringCoefficient(), Vector3.zero);
    }

    // Scattering = albedo * extinction, per channel.
    public Vector3 GetScatteringCoefficient()
    {
        float extinction = GetExtinctionCoefficient();

        return new Vector3(albedo.r, albedo.g, albedo.b) * extinction;
    }

    // Extinction is the reciprocal of the mean free path.
    public float GetExtinctionCoefficient()
    {
        return 1.0f / meanFreePath;
    }

    // Clamp user-edited values to their legal ranges (called from OnValidate).
    public void Constrain()
    {
        // Negative dimensions make no sense.
        bounds.size = Vector3.Max(bounds.size, Vector3.zero);

        albedo.r = Mathf.Clamp01(albedo.r);
        albedo.g = Mathf.Clamp01(albedo.g);
        albedo.b = Mathf.Clamp01(albedo.b);

        // Keep the extinction coefficient within (0, 1].
        meanFreePath = Mathf.Max(meanFreePath, 1.0f);
    }

    // Bake the parameters down to the GPU representation.
    public VolumeProperties GetProperties()
    {
        var properties = new VolumeProperties();

        properties.scattering = GetScatteringCoefficient();
        properties.extinction = GetExtinctionCoefficient();

        return properties;
    }
} // class VolumeParameters
|
|||
|
|||
public partial class HDRenderPipeline : RenderPipeline |
|||
{ |
|||
// Quality presets for the volumetric lighting VBuffer.
public enum VolumetricLightingPreset
{
    Off,    // Volumetric lighting disabled
    Normal, // 8x8 pixel tiles, 128 depth slices (see ComputeVBufferTileSize/SliceCount)
    Ultra,  // 4x4 pixel tiles, 256 depth slices
    Count   // Number of entries above; used as a clamp bound
};
|||
|
|||
// Active preset, derived from the shader-config value.
// NOTE(review): Math.Min clamps to Count (not Count - 1), so an out-of-range config
// value maps to the invalid 'Count' preset and would trip the Debug.Assert in
// ComputeVBufferTileSize - presumably the intent was Count - 1; confirm.
VolumetricLightingPreset m_VolumetricLightingPreset
{ get { return (VolumetricLightingPreset)Math.Min(ShaderConfig.s_VolumetricLightingPreset, (int)VolumetricLightingPreset.Count); } }

// Compute shader providing the VolumetricLighting* kernels.
ComputeShader m_VolumetricLightingCS { get { return m_Asset.renderPipelineResources.volumetricLightingCS; } }

float m_VBufferNearPlane = 0.5f;  // Distance in meters; dynamic modifications not handled by reprojection
float m_VBufferFarPlane = 64.0f;  // Distance in meters; dynamic modifications not handled by reprojection
const int k_VBufferCount = 3;     // 0 and 1 - history (prev) and feedback (next), 2 - integral (curr)

// Per-view VBuffers: k_VBufferCount consecutive entries per registered view.
RenderTexture[] m_VBufferLighting = null;
RenderTargetIdentifier[] m_VBufferLightingRT = null;

// Registry mapping view slots to camera instance IDs; fixed capacity of 8 views.
int m_ViewCount = 0;
int[] m_ViewIdArray = new int[8];
|||
|
|||
// Maps a camera instance ID to its slot in m_ViewIdArray.
// Returns -1 when the view has not been registered (no VBuffer created yet).
int ViewOffsetFromViewId(int viewId)
{
    Debug.Assert(m_ViewCount == 0 || m_ViewIdArray != null);

    for (int i = 0; i < m_ViewCount; i++)
    {
        if (m_ViewIdArray[i] == viewId)
        {
            // IDs are unique (CreateVBuffer only registers unseen IDs),
            // so we can stop at the first match instead of scanning the
            // whole registry as the previous version did.
            return i;
        }
    }

    return -1;
}
|||
|
|||
// Edge length (in pixels) of the square screen tile covered by a single voxel.
// Returns 0 for the 'Off' preset.
public static int ComputeVBufferTileSize(VolumetricLightingPreset preset)
{
    if (preset == VolumetricLightingPreset.Normal) return 8;
    if (preset == VolumetricLightingPreset.Ultra) return 4;
    if (preset == VolumetricLightingPreset.Off) return 0;

    Debug.Assert(false, "Encountered an unexpected VolumetricLightingPreset.");
    return 0;
}
|||
|
|||
// Number of depth slices along the view ray for the given quality preset.
// Returns 0 for the 'Off' preset.
public static int ComputeVBufferSliceCount(VolumetricLightingPreset preset)
{
    if (preset == VolumetricLightingPreset.Normal) return 128;
    if (preset == VolumetricLightingPreset.Ultra) return 256;
    if (preset == VolumetricLightingPreset.Off) return 0;

    Debug.Assert(false, "Encountered an unexpected VolumetricLightingPreset.");
    return 0;
}
|||
|
|||
// Since a single voxel corresponds to a tile (e.g. 8x8) of pixels,
// the VBuffer can potentially extend past the boundaries of the viewport.
// The function returns the fraction of the {width, height} of the VBuffer visible on screen,
// and writes the VBuffer dimensions into 'w', 'h' and 'd'.
Vector2 ComputeVBufferResolutionAndScale(float screenWidth, float screenHeight,
                                         ref int w, ref int h, ref int d)
{
    int t = ComputeVBufferTileSize(m_VolumetricLightingPreset);

    // The 'Off' preset reports a tile size of 0. This method is also reachable with
    // that preset (e.g. from SetVolumetricLightingData), so bail out explicitly
    // instead of crashing with an integer division by zero.
    if (t == 0)
    {
        w = h = d = 0;
        return Vector2.one;
    }

    // Ceil(ScreenSize / TileSize).
    w = ((int)screenWidth + t - 1) / t;
    h = ((int)screenHeight + t - 1) / t;
    d = ComputeVBufferSliceCount(m_VolumetricLightingPreset);

    return new Vector2(screenWidth / (w * t), screenHeight / (h * t));
}
|||
|
|||
// Ensures the view's VBuffer exists and matches the requested screen size;
// recreates it otherwise. Safe to call every frame.
void ResizeVBuffer(int viewId, int screenWidth, int screenHeight)
{
    int viewOffset = ViewOffsetFromViewId(viewId);

    if (viewOffset >= 0)
    {
        // The view is already registered; compare the cached resolution to the request.
        int w = 0, h = 0, d = 0;
        ComputeVBufferResolutionAndScale(screenWidth, screenHeight, ref w, ref h, ref d);

        Debug.Assert(m_VBufferLighting != null);
        Debug.Assert(m_VBufferLighting.Length >= (viewOffset + 1) * k_VBufferCount);
        Debug.Assert(m_VBufferLighting[viewOffset * k_VBufferCount] != null);

        RenderTexture vBuffer = m_VBufferLighting[viewOffset * k_VBufferCount];

        bool upToDate = (w == vBuffer.width)  &&
                        (h == vBuffer.height) &&
                        (d == vBuffer.volumeDepth);

        if (upToDate)
        {
            // Resolution matches; keep the existing buffers.
            return;
        }
    }

    // Unknown view or stale resolution: (re)create the VBuffer.
    CreateVBuffer(viewId, screenWidth, screenHeight);
}
|||
|
|||
// Allocates the triple of 3D render textures (history, feedback, integral) for a view,
// registering the view first if it has never been seen. Any previous buffers for the
// view are destroyed.
void CreateVBuffer(int viewId, int screenWidth, int screenHeight)
{
    // Clean up first. Also removes the view from the registry if present.
    DestroyVBuffer(viewId);

    int viewOffset = ViewOffsetFromViewId(viewId);

    if (viewOffset < 0)
    {
        // Not found. Push back.
        viewOffset = m_ViewCount++;
        Debug.Assert(viewOffset < 8); // m_ViewIdArray has a fixed capacity of 8
        m_ViewIdArray[viewOffset] = viewId;

        if (m_VBufferLighting == null)
        {
            // Lazy initialize.
            m_VBufferLighting = new RenderTexture[k_VBufferCount];
            m_VBufferLightingRT = new RenderTargetIdentifier[k_VBufferCount];
        }
        else if (m_VBufferLighting.Length < m_ViewCount * k_VBufferCount)
        {
            // Grow by reallocation and copy.
            RenderTexture[] newArray = new RenderTexture[m_ViewCount * k_VBufferCount];
            RenderTargetIdentifier[] newArrayRT = new RenderTargetIdentifier[m_ViewCount * k_VBufferCount];

            for (int i = 0, n = m_VBufferLighting.Length; i < n; i++)
            {
                newArray[i] = m_VBufferLighting[i];
                newArrayRT[i] = m_VBufferLightingRT[i];
            }

            // Swap in the larger arrays; the old arrays become garbage
            // (the RenderTextures themselves are carried over, not released).
            m_VBufferLighting = newArray;
            m_VBufferLightingRT = newArrayRT;
        }
    }

    Debug.Assert(m_VBufferLighting != null);

    int w = 0, h = 0, d = 0;
    ComputeVBufferResolutionAndScale(screenWidth, screenHeight, ref w, ref h, ref d);

    // Create all k_VBufferCount textures for this view's slot range.
    for (int i = viewOffset * k_VBufferCount,
             n = viewOffset * k_VBufferCount + k_VBufferCount; i < n; i++)
    {
        m_VBufferLighting[i] = new RenderTexture(w, h, 0, RenderTextureFormat.ARGBHalf, RenderTextureReadWrite.Linear);
        m_VBufferLighting[i].filterMode = FilterMode.Trilinear; // Custom
        m_VBufferLighting[i].dimension = TextureDimension.Tex3D; // TODO: request the thick 3D tiling layout
        m_VBufferLighting[i].volumeDepth = d;
        m_VBufferLighting[i].enableRandomWrite = true; // Written as a UAV by the compute shader
        m_VBufferLighting[i].Create();

        m_VBufferLightingRT[i] = new RenderTargetIdentifier(m_VBufferLighting[i]);
    }
}
|||
|
|||
// Releases the view's VBuffer textures and removes the view from the registry
// using swap-with-last. No-op if the view is not registered.
void DestroyVBuffer(int viewId)
{
    int viewOffset = ViewOffsetFromViewId(viewId);

    if (viewOffset < 0)
    {
        // Not found.
        return;
    }

    int lastOffset = m_ViewCount - 1;
    Debug.Assert(lastOffset >= 0);

    if (m_VBufferLighting != null)
    {
        Debug.Assert(m_VBufferLighting.Length >= m_ViewCount * k_VBufferCount);

        for (int i = 0; i < k_VBufferCount; i++)
        {
            int viewBuffer = viewOffset * k_VBufferCount + i;
            int lastBuffer = lastOffset * k_VBufferCount + i;

            // Release the GPU memory of the view being destroyed.
            if (m_VBufferLighting[viewBuffer] != null)
            {
                m_VBufferLighting[viewBuffer].Release();
            }

            // Swap with the last element.
            m_VBufferLighting[viewBuffer] = m_VBufferLighting[lastBuffer];
            m_VBufferLightingRT[viewBuffer] = m_VBufferLightingRT[lastBuffer];

            // Clear the vacated slot. The previous version left the old reference
            // behind, keeping a (possibly released) RenderTexture alive past
            // m_ViewCount; this also self-heals the viewOffset == lastOffset case,
            // where the slot would otherwise point at a released texture.
            m_VBufferLighting[lastBuffer] = null;
            m_VBufferLightingRT[lastBuffer] = default(RenderTargetIdentifier);
        }
    }

    // Swap with the last element and shrink the registry.
    m_ViewIdArray[viewOffset] = m_ViewIdArray[lastOffset];
    m_ViewCount--;
}
|||
|
|||
// Uses a logarithmic depth encoding.
// Near plane: depth = 0; far plane: depth = 1.
// x = n, y = log2(f/n), z = 1/n, w = 1/log2(f/n).
public static Vector4 ComputeLogarithmicDepthEncodingParams(float nearPlane, float farPlane)
{
    float n = nearPlane;
    float f = farPlane;

    // log2 of the near-to-far ratio; its reciprocal normalizes the encoded depth to [0, 1].
    float logRatio = Mathf.Log(f / n, 2);

    return new Vector4(n, logRatio, 1.0f / n, 1.0f / logRatio);
}
|||
|
|||
// Returns NULL if a global fog component does not exist, or is not enabled.
// A "global" fog is an enabled HomogeneousFog whose volume is unbounded.
public static HomogeneousFog GetGlobalFogComponent()
{
    HomogeneousFog[] fogComponents = Object.FindObjectsOfType(typeof(HomogeneousFog)) as HomogeneousFog[];

    foreach (HomogeneousFog fogComponent in fogComponents)
    {
        if (fogComponent.enabled && fogComponent.volumeParameters.IsVolumeUnbounded())
        {
            // First qualifying component wins.
            return fogComponent;
        }
    }

    return null;
}
|||
|
|||
// History buffer (read): holds last frame's result. Ping-pongs with the feedback
// buffer based on the parity of the rendered frame count.
RenderTargetIdentifier GetVBufferLightingHistory(int viewOffset) // From the previous frame
{
    return m_VBufferLightingRT[viewOffset * k_VBufferCount + ((Time.renderedFrameCount + 0) & 1)]; // Does not work in the Scene view
}

// Feedback buffer (write): receives this frame's result for use next frame.
// The other half of the history/feedback ping-pong pair (slots 0 and 1).
RenderTargetIdentifier GetVBufferLightingFeedback(int viewOffset) // For the next frame
{
    return m_VBufferLightingRT[viewOffset * k_VBufferCount + ((Time.renderedFrameCount + 1) & 1)]; // Does not work in the Scene view
}

// Integral buffer: the current frame's integrated result, always slot 2 of the
// view's triple; this is what shaders sample via _VBufferLighting.
RenderTargetIdentifier GetVBufferLightingIntegral(int viewOffset) // Of the current frame
{
    return m_VBufferLightingRT[viewOffset * k_VBufferCount + 2];
}
|||
|
|||
// Binds the global fog properties and the VBuffer constants/texture for this camera
// so that lighting shaders can sample the volumetric results.
public void SetVolumetricLightingData(HDCamera camera, CommandBuffer cmd)
{
    HomogeneousFog globalFogComponent = GetGlobalFogComponent();

    // TODO: may want to cache these results somewhere.
    // Fall back to a neutral (non-scattering) medium when no global fog exists.
    VolumeProperties globalFogProperties = (globalFogComponent != null) ? globalFogComponent.volumeParameters.GetProperties()
                                                                        : VolumeProperties.GetNeutralVolumeProperties();

    cmd.SetGlobalVector(HDShaderIDs._GlobalFog_Scattering, globalFogProperties.scattering);
    cmd.SetGlobalFloat( HDShaderIDs._GlobalFog_Extinction, globalFogProperties.extinction);

    int w = 0, h = 0, d = 0;
    Vector2 scale = ComputeVBufferResolutionAndScale(camera.screenSize.x, camera.screenSize.y, ref w, ref h, ref d);

    int viewId = camera.camera.GetInstanceID();
    int viewOffset = ViewOffsetFromViewId(viewId);

    // The VBuffer for this view must have been created by now.
    Debug.Assert(viewOffset >= 0 && viewOffset < 8);

    // xy = resolution in voxels, zw = reciprocal resolution.
    cmd.SetGlobalVector( HDShaderIDs._VBufferResolution, new Vector4(w, h, 1.0f / w, 1.0f / h));
    // xy = visible fraction of the buffer, z = slice count, w = reciprocal slice count.
    cmd.SetGlobalVector( HDShaderIDs._VBufferScaleAndSliceCount, new Vector4(scale.x, scale.y, d, 1.0f / d));
    cmd.SetGlobalVector( HDShaderIDs._VBufferDepthEncodingParams, ComputeLogarithmicDepthEncodingParams(m_VBufferNearPlane, m_VBufferFarPlane));
    cmd.SetGlobalTexture(HDShaderIDs._VBufferLighting, GetVBufferLightingIntegral(viewOffset));
}
|||
|
|||
// Ref: https://en.wikipedia.org/wiki/Close-packing_of_equal_spheres
// The returned {x, y} coordinates (and all spheres) are all within the (-0.5, 0.5)^2 range.
// The pattern has been rotated by 15 degrees to maximize the resolution along X and Y:
// https://www.desmos.com/calculator/kcpfvltz7c
Vector2[] GetHexagonalClosePackedSpheres7()
{
    float r = 0.17054068870105443882f; // Sphere radius
    float d = 2 * r;                   // Sphere diameter
    float s = r * Mathf.Sqrt(3);       // Vertical offset between rows

    // Try to keep the weighted average as close to the center (0.5) as possible.
    //  (7)(5)    ( )( )    ( )( )    ( )( )    ( )( )    ( )(o)    ( )(x)    (o)(x)    (x)(x)
    // (2)(1)(3) ( )(o)( ) (o)(x)( ) (x)(x)(o) (x)(x)(x) (x)(x)(x) (x)(x)(x) (x)(x)(x) (x)(x)(x)
    //  (4)(6)    ( )( )    ( )( )    ( )( )    (o)( )    (x)( )    (x)(o)    (x)(x)    (x)(x)
    Vector2[] coords =
    {
        new Vector2( 0,  0),
        new Vector2(-d,  0),
        new Vector2( d,  0),
        new Vector2(-r, -s),
        new Vector2( r,  s),
        new Vector2( r, -s),
        new Vector2(-r,  s),
    };

    // Rotate the sampling pattern by 15 degrees.
    const float cos15 = 0.96592582628906828675f;
    const float sin15 = 0.25881904510252076235f;

    for (int i = 0; i < coords.Length; i++)
    {
        float x = coords[i].x;
        float y = coords[i].y;

        coords[i] = new Vector2(x * cos15 - y * sin15,
                                x * sin15 + y * cos15);
    }

    return coords;
}
|||
|
|||
// Dispatches the volumetric lighting compute shader for the given camera,
// filling the view's VBuffer with in-scattered radiance and optical depth.
void VolumetricLightingPass(HDCamera camera, CommandBuffer cmd)
{
    if (m_VolumetricLightingPreset == VolumetricLightingPreset.Off) return;

    using (new ProfilingSample(cmd, "Volumetric Lighting"))
    {
        int viewId = camera.camera.GetInstanceID(); // Warning: different views can use the same camera
        int viewOffset = ViewOffsetFromViewId(viewId);

        // The VBuffer for this view must have been created by now.
        Debug.Assert(viewOffset >= 0 && viewOffset < 8);

        if (GetGlobalFogComponent() == null)
        {
            // Clear the render target instead of running the shader.
            // CoreUtils.SetRenderTarget(cmd, GetVBufferLightingIntegral(viewOffset), ClearFlag.Color, CoreUtils.clearColorAllBlack);
            // return;

            // Clearing 3D textures does not seem to work!
            // Use the workaround by running the full shader with no volume.
        }

        bool enableClustered = m_FrameSettings.lightLoopSettings.enableTileAndCluster;
        // Reprojection requires reliable frame counters, which are only available in Play Mode.
        bool enableReprojection = Application.isPlaying && camera.camera.cameraType == CameraType.Game;

        int kernel;

        // Select the kernel variant matching the light-loop mode and reprojection support.
        if (enableReprojection)
        {
            // Only available in the Play Mode because all the frame counters in the Edit Mode are broken.
            kernel = m_VolumetricLightingCS.FindKernel(enableClustered ? "VolumetricLightingClusteredReproj"
                                                                      : "VolumetricLightingAllLightsReproj");
        }
        else
        {
            kernel = m_VolumetricLightingCS.FindKernel(enableClustered ? "VolumetricLightingClustered"
                                                                      : "VolumetricLightingAllLights");
        }

        int w = 0, h = 0, d = 0;
        Vector2 scale = ComputeVBufferResolutionAndScale(camera.screenSize.x, camera.screenSize.y, ref w, ref h, ref d);
        float vFoV = camera.camera.fieldOfView * Mathf.Deg2Rad;

        // Compose the matrix which allows us to compute the world space view direction.
        // Compute it using the scaled resolution to account for the visible area of the VBuffer.
        Vector4 scaledRes = new Vector4(w * scale.x, h * scale.y, 1.0f / (w * scale.x), 1.0f / (h * scale.y));
        Matrix4x4 transform = HDUtils.ComputePixelCoordToWorldSpaceViewDirectionMatrix(vFoV, scaledRes, camera.viewMatrix, false);

        camera.SetupComputeShader(m_VolumetricLightingCS, cmd);

        // Per-frame jitter offsets: 7 XY positions (hexagonal close packing) ...
        Vector2[] xySeq = GetHexagonalClosePackedSpheres7();

        // ... and 7 Z offsets. This is a sequence of 7 equidistant numbers from 1/14 to 13/14.
        // Each of them is the centroid of the interval of length 2/14.
        // They've been rearranged in a sequence of pairs {small, large}, s.t. (small + large) = 1.
        // That way, the running average position is close to 0.5.
        // | 6 | 2 | 4 | 1 | 5 | 3 | 7 |
        // |   |   |   | o |   |   |   |
        // |   | o |   | x |   |   |   |
        // |   | x |   | x |   | o |   |
        // |   | x | o | x |   | x |   |
        // |   | x | x | x | o | x |   |
        // | o | x | x | x | x | x |   |
        // | x | x | x | x | x | x | o |
        // | x | x | x | x | x | x | x |
        float[] zSeq = {7.0f/14.0f, 3.0f/14.0f, 11.0f/14.0f, 5.0f/14.0f, 9.0f/14.0f, 1.0f/14.0f, 13.0f/14.0f};

        // Cycle through the 7 jitter samples; w carries the raw frame count to the shader.
        int rfc = Time.renderedFrameCount;
        int sampleIndex = rfc % 7;
        Vector4 offset = new Vector4(xySeq[sampleIndex].x, xySeq[sampleIndex].y, zSeq[sampleIndex], rfc);

        // TODO: set 'm_VolumetricLightingPreset'.
        cmd.SetComputeVectorParam( m_VolumetricLightingCS, HDShaderIDs._VBufferSampleOffset, offset);
        cmd.SetComputeMatrixParam( m_VolumetricLightingCS, HDShaderIDs._VBufferCoordToViewDirWS, transform);
        cmd.SetComputeTextureParam(m_VolumetricLightingCS, kernel, HDShaderIDs._VBufferLightingHistory, GetVBufferLightingHistory(viewOffset)); // Read
        cmd.SetComputeTextureParam(m_VolumetricLightingCS, kernel, HDShaderIDs._VBufferLightingFeedback, GetVBufferLightingFeedback(viewOffset)); // Write
        cmd.SetComputeTextureParam(m_VolumetricLightingCS, kernel, HDShaderIDs._VBufferLightingIntegral, GetVBufferLightingIntegral(viewOffset)); // Write

        // The shader defines GROUP_SIZE_1D = 16.
        // Dispatch one thread group per 16x16 block of voxels, rounding up.
        cmd.DispatchCompute(m_VolumetricLightingCS, kernel, (w + 15) / 16, (h + 15) / 16, 1);
    }
}
|||
} // class HDRenderPipeline
|
|||
} // namespace UnityEngine.Experimental.Rendering.HDPipeline
|
撰写
预览
正在加载...
取消
保存
Reference in new issue