
Perform proper bilateral filtering

Branch: main
Evgenii Golubev, 8 years ago
Commit be01c5e2
5 files changed, 60 insertions(+), 48 deletions(-)
  1. Assets/ScriptableRenderLoop/HDRenderPipeline/HDRenderPipeline.cs (22 changes)
  2. Assets/ScriptableRenderLoop/HDRenderPipeline/Material/Lit/Resources/CombineSubsurfaceScattering.shader (69 changes)
  3. Assets/ScriptableRenderLoop/HDRenderPipeline/SceneSettings/SubsurfaceScatteringParameters.cs (4 changes)
  4. Assets/ScriptableRenderLoop/HDRenderPipeline/Utilities.cs (3 changes)
  5. Assets/ScriptableRenderLoop/ShaderLibrary/Common.hlsl (10 changes)

Assets/ScriptableRenderLoop/HDRenderPipeline/HDRenderPipeline.cs (22 changes)


public Vector4 screenSize;
public Matrix4x4 viewProjectionMatrix;
public Matrix4x4 invViewProjectionMatrix;
public Matrix4x4 invProjectionMatrix;
}
public class GBufferManager

m_DebugViewMaterialGBuffer = Utilities.CreateEngineMaterial("Hidden/HDRenderPipeline/DebugViewMaterialGBuffer");
m_FilterSubsurfaceScattering = Utilities.CreateEngineMaterial("Hidden/HDRenderPipeline/CombineSubsurfaceScattering");
- m_FilterSubsurfaceScattering.SetFloat("_FilterHorizontal", 0);
+ m_FilterSubsurfaceScattering.DisableKeyword("FILTER_HORIZONTAL");
- m_FilterAndCombineSubsurfaceScattering.SetFloat("_FilterHorizontal", 1);
+ m_FilterAndCombineSubsurfaceScattering.EnableKeyword("FILTER_HORIZONTAL");
m_DebugDisplayShadowMap = Utilities.CreateEngineMaterial("Hidden/HDRenderPipeline/DebugDisplayShadowMap");
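
The SetFloat-driven branch is replaced by a compile-time shader variant: `#pragma multi_compile _ FILTER_HORIZONTAL` (see the shader below) builds both variants, and each pass material pins one through its keyword set. A minimal sketch of the pattern; the variable names here are illustrative, not from the commit:

    // Two materials share one shader; each selects a different variant.
    Material verticalPass = Utilities.CreateEngineMaterial("Hidden/HDRenderPipeline/CombineSubsurfaceScattering");
    verticalPass.DisableKeyword("FILTER_HORIZONTAL");   // uses the #else (vertical) path

    Material horizontalPass = Utilities.CreateEngineMaterial("Hidden/HDRenderPipeline/CombineSubsurfaceScattering");
    horizontalPass.EnableKeyword("FILTER_HORIZONTAL");  // uses the FILTER_HORIZONTAL path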

// Currently, forward-rendered objects do not output split lighting required for the SSS pass.
if (debugParameters.ShouldUseForwardRenderingOnly()) return;
// Assume that the height of the projection window is 2 meters.
float distanceToProjectionWindow = 1.0f / Mathf.Tan(0.5f * Mathf.Deg2Rad * hdCamera.camera.fieldOfView);
- // Load the kernel data.
+ // Upload the kernel data.
Vector4[] kernelData = new Vector4[SubsurfaceScatteringParameters.maxNumProfiles * SubsurfaceScatteringProfile.numSamples];
for (int j = 0, m = sssParameters.profiles.Length; j < m; j++)
{

}
}
MaterialPropertyBlock properties = new MaterialPropertyBlock();
m_FilterSubsurfaceScattering.SetFloat("_DistToProjWindow", distanceToProjectionWindow);
m_FilterSubsurfaceScattering.SetFloat("_BilateralScale", 0.05f * sssParameters.bilateralScale);
m_FilterSubsurfaceScattering.SetMatrix("_InvProjMatrix", hdCamera.invProjectionMatrix);
- m_CameraFilteringBufferRT, m_CameraStencilBufferRT);
+ m_CameraFilteringBufferRT, m_CameraStencilBufferRT, properties);
m_FilterAndCombineSubsurfaceScattering.SetFloat("_DistToProjWindow", distanceToProjectionWindow);
m_FilterAndCombineSubsurfaceScattering.SetFloat("_BilateralScale", 0.05f * sssParameters.bilateralScale);
m_FilterAndCombineSubsurfaceScattering.SetMatrix("_InvProjMatrix", hdCamera.invProjectionMatrix);
- m_CameraColorBufferRT, m_CameraStencilBufferRT);
+ m_CameraColorBufferRT, m_CameraStencilBufferRT, properties);
context.ExecuteCommandBuffer(cmd);
cmd.Dispose();
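
Two notes on the hunk above. The projection-window constant follows from placing a 2-meter-tall window at distance d from the eye: tan(fov / 2) = 1 / d, hence d = 1 / tan(fov / 2). And since the kernel-upload loop body is elided in this view, here is a hedged sketch of what it plausibly does; the `filterKernel` field name is an assumption, not visible in this diff:

    // Flatten the per-profile kernels into one array for SetVectorArray; Unity is
    // assumed to bind the flat array to the 2-D HLSL array _FilterKernels[8][7].
    int n = SubsurfaceScatteringProfile.numSamples;
    Vector4[] kernelData = new Vector4[SubsurfaceScatteringParameters.maxNumProfiles * n];
    for (int j = 0; j < sssParameters.profiles.Length; j++)
    {
        for (int i = 0; i < n; i++)
        {
            kernelData[j * n + i] = sssParameters.profiles[j].filterKernel[i]; // hypothetical field
        }
    }
    m_FilterSubsurfaceScattering.SetVectorArray("_FilterKernels", kernelData);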

Assets/ScriptableRenderLoop/HDRenderPipeline/Material/Lit/Resources/CombineSubsurfaceScattering.shader (69 changes)


#pragma vertex Vert
#pragma fragment Frag
+ #pragma multi_compile _ FILTER_HORIZONTAL
//-------------------------------------------------------------------------------------
// Include
//-------------------------------------------------------------------------------------

#define N_PROFILES 8
#define N_SAMPLES 7
float _BilateralScale;   // Uses world-space units
float _DistToProjWindow; // The height of the projection window is 2 meters
- float _FilterHorizontal; // Vertical = 0, horizontal = 1
float4 _FilterKernels[N_PROFILES][N_SAMPLES]; // RGB = weights, A = radial distance
float4x4 _InvProjMatrix;
SAMPLER2D(sampler_IrradianceSource);
#define bilinearSampler sampler_IrradianceSource
//-------------------------------------------------------------------------------------
// Implementation

{
PositionInputs posInput = GetPositionInput(input.positionCS.xy, _ScreenSize.zw);
- float rawDepth = LOAD_TEXTURE2D(_CameraDepthTexture, posInput.unPositionSS).r;
- float centerDepth = LinearEyeDepth(rawDepth, _ZBufferParams);
- float2 gBufferValue = LOAD_TEXTURE2D(_GBufferTexture2, posInput.unPositionSS).ra;
- float radiusScale = gBufferValue.x;
- float profileID = gBufferValue.y * N_PROFILES;
- float filterRadius = radiusScale * _DistToProjWindow / centerDepth;
+ float2 gBufferData = LOAD_TEXTURE2D(_GBufferTexture2, posInput.unPositionSS).ra;
+ float radiusScale = gBufferData.x * 0.01;
+ int profileID = int(gBufferData.y * N_PROFILES);
+ // Reconstruct the view-space position.
+ float rawDepth = LOAD_TEXTURE2D(_CameraDepthTexture, posInput.unPositionSS).r;
+ float3 centerPosVS = ComputeViewSpacePosition(posInput.positionSS, rawDepth, _InvProjMatrix);
- float x, y;
- sincos(PI / 3, y, x);
- float2 unitDirection = _FilterHorizontal ? float2(x, y) : float2(-y, x);
- float2 scaledDirection = filterRadius * unitDirection;
- // Premultiply with the inverse of the screen size.
- scaledDirection *= _ScreenSize.zw;
+ // Compute the dimensions of the surface fragment viewed as a quad facing the camera.
+ float fragWidth  = ddx(centerPosVS.x);
+ float fragHeight = ddy(centerPosVS.y);
+ float stepSizeX = rcp(fragWidth);
+ float stepSizeY = rcp(fragHeight);
+ #ifdef FILTER_HORIZONTAL
+     float stepSize = stepSizeX;
+     float2 unitDirection = float2(1, 0);
+ #else
+     float stepSize = stepSizeY;
+     float2 unitDirection = float2(0, 1);
+ #endif
+ float2 scaledDirection = radiusScale * stepSize * unitDirection;
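
The derivative trick above converts a world-space kernel radius into a pixel offset: ddx(centerPosVS.x) is the view-space width of one pixel at this fragment, so its reciprocal is pixels per meter. A CPU-side sketch of the same arithmetic, with the derivative passed in as an assumed input:

    // metersPerPixelX stands in for ddx(centerPosVS.x) from the shader.
    static float RadiusInPixels(float radiusMeters, float metersPerPixelX)
    {
        float stepSize = 1.0f / metersPerPixelX; // pixels per meter at this fragment
        return radiusMeters * stepSize;          // kernel radius expressed in pixels
    }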
+ float inv2MaxVariance = _FilterKernels[profileID][0].a;
float3 sampleWeight = _FilterKernels[profileID][0].rgb;
float2 samplePosition = posInput.unPositionSS;
float3 centerIrradiance = sampleIrradiance;
// Accumulate filtered irradiance (already weighted by (albedo / Pi)).
float3 filteredIrradiance = sampleIrradiance * sampleWeight;

{
- samplePosition = posInput.positionSS + scaledDirection * _FilterKernels[profileID][i].a;
- sampleWeight = _FilterKernels[profileID][i].rgb;
+ samplePosition = posInput.unPositionSS + scaledDirection * _FilterKernels[profileID][i].a;
+ sampleWeight = _FilterKernels[profileID][i].rgb;
- sampleIrradiance = SAMPLE_TEXTURE2D_LOD(_IrradianceSource, bilinearSampler, samplePosition, 0).rgb;
- rawDepth = SAMPLE_TEXTURE2D_LOD(_CameraDepthTexture, bilinearSampler, samplePosition, 0).r;
+ sampleIrradiance = LOAD_TEXTURE2D(_IrradianceSource, samplePosition).rgb;
+ rawDepth = LOAD_TEXTURE2D(_CameraDepthTexture, samplePosition).r;
// Ref #1: Skin Rendering by Pseudo-Separable Cross Bilateral Filtering.
// Ref #2: Separable SSS, Supplementary Materials, Section E.
- float depthDiff = abs(sampleDepth - centerDepth);
- float scaleDiff = radiusScale * _DistToProjWindow * _BilateralScale;
- float t = saturate(depthDiff / scaleDiff);
+ float zDistance = radiusScale * sampleDepth - (radiusScale * centerPosVS.z);
+ sampleWeight *= exp(-zDistance * zDistance * inv2MaxVariance);
- // TODO: use real-world distances for weighting.
- filteredIrradiance += lerp(sampleIrradiance, centerIrradiance, t) * sampleWeight;
+ filteredIrradiance += sampleIrradiance * sampleWeight;
}
return filteredIrradiance;
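
This is the core of the commit: instead of lerping each tap toward the center irradiance by a depth heuristic, every tap keeps its own irradiance and is attenuated by a Gaussian falloff in scaled depth difference, with 1/(2σ²) precomputed on the CPU (see SubsurfaceScatteringParameters.cs below). A C# restatement of the per-tap weight, under the assumption that inv2MaxVariance is _FilterKernels[profileID][0].a as declared above:

    // Gaussian bilateral weight of one tap; mirrors the shader's
    // exp(-zDistance * zDistance * inv2MaxVariance).
    static float BilateralWeight(float sampleDepthVS, float centerDepthVS,
                                 float radiusScale, float inv2MaxVariance)
    {
        float zDistance = radiusScale * (sampleDepthVS - centerDepthVS);
        return (float)System.Math.Exp(-zDistance * zDistance * inv2MaxVariance);
    }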

Assets/ScriptableRenderLoop/HDRenderPipeline/SceneSettings/SubsurfaceScatteringParameters.cs (4 changes)


m_FilterKernel[i].z *= 1.0f / weightSum.z;
}
// Store (1 / (2 * variance)) instead of the distance to the 1st sample (which is implicitly 0).
float weightedStdDev = Mathf.Lerp(maxStdDev1, maxStdDev2, m_LerpWeight);
m_FilterKernel[0].w = 1.0f / (2.0f * weightedStdDev * weightedStdDev);
m_KernelNeedsUpdate = false;
}
}
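
Packing 1/(2σ²) into sample 0's .w is safe because the first tap sits at radial distance 0, and it leaves the shader with a single multiply-and-exp per tap. A small worked example with an illustrative σ (not taken from the asset):

    double sigma = 0.3;                                // a hypothetical max standard deviation
    double inv2Variance = 1.0 / (2.0 * sigma * sigma); // the value stored in m_FilterKernel[0].w
    double dz = 0.3;                                   // a depth delta of exactly one sigma
    System.Console.WriteLine(System.Math.Exp(-dz * dz * inv2Variance)); // ~0.6065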

Assets/ScriptableRenderLoop/HDRenderPipeline/Utilities.cs (3 changes)


var gpuProj = GL.GetGPUProjectionMatrix(camera.projectionMatrix, false);
var gpuVP = gpuProj * camera.worldToCameraMatrix;
hdCamera.viewProjectionMatrix = gpuVP;
+ hdCamera.invProjectionMatrix = gpuProj.inverse;
return hdCamera;
}
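
The inverse is taken from gpuProj rather than camera.projectionMatrix because GL.GetGPUProjectionMatrix bakes in platform conventions (flipped Y, depth range), and the shader-side reconstruction must invert exactly what the GPU applied. A round-trip sanity sketch, assuming some Camera camera is in scope:

    var gpuProj = GL.GetGPUProjectionMatrix(camera.projectionMatrix, false);
    var invProj = gpuProj.inverse;
    Vector4 clip = new Vector4(0.25f, -0.5f, 0.5f, 1.0f); // an arbitrary clip-space point
    Vector4 view = invProj * clip;                        // back to view space (homogeneous)
    Vector4 back = gpuProj * view;                        // forward again
    Debug.Assert((back / back.w - clip / clip.w).sqrMagnitude < 1e-4f);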

Assets/ScriptableRenderLoop/ShaderLibrary/Common.hlsl (10 changes)


posInput.positionCS *= posInput.depthVS;
}
float3 ComputeViewSpacePosition(float2 positionSS, float rawDepth, float4x4 invProjMatrix)
{
float4 positionCS = float4(positionSS * 2.0 - 1.0, rawDepth, 1.0);
float4 positionVS = mul(invProjMatrix, positionCS);
// The view space uses a right-handed coordinate system.
positionVS.z = -positionVS.z;
return positionVS.xyz / positionVS.w;
}
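
For reference, a C# mirror of the function above, handy for validating the reconstruction on the CPU; positionSS is the [0, 1] screen UV and rawDepth is the post-projection depth, as in the shader:

    static Vector3 ComputeViewSpacePosition(Vector2 positionSS, float rawDepth, Matrix4x4 invProj)
    {
        Vector4 positionCS = new Vector4(positionSS.x * 2f - 1f,
                                         positionSS.y * 2f - 1f,
                                         rawDepth, 1f);
        Vector4 positionVS = invProj * positionCS;
        positionVS.z = -positionVS.z; // right-handed view space: flip Z to get positive depth
        return new Vector3(positionVS.x, positionVS.y, positionVS.z) / positionVS.w;
    }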
// depthOffsetVS is always in the direction of the view vector (V)
void ApplyDepthOffsetPositionInput(float3 V, float depthOffsetVS, inout PositionInputs posInput)
{

// Simply adding the offset along the view vector is sufficient for the world-space position.
posInput.positionWS += V * depthOffsetVS;
}
// Generates a triangle in homogeneous clip space, s.t.
// v0 = (-1, -1, 1), v1 = (3, -1, 1), v2 = (-1, 3, 1).
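
The usual construction behind such a fullscreen triangle derives each vertex from its vertex ID with bit tricks; a C# sketch of the arithmetic, which for id = 0, 1, 2 yields exactly the v0, v1, v2 listed above:

    static Vector2 FullScreenTriangleVertex(int id) // z is a constant 1 in clip space
    {
        Vector2 v = new Vector2((id << 1) & 2, id & 2); // (0,0), (2,0), (0,2)
        return v * 2f - Vector2.one;                    // (-1,-1), (3,-1), (-1,3)
    }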
