
Merge pull request #293 from keijiro/ssao-update

[SSAO] General refactoring
/RenderPassXR_Sandbox
GitHub committed 8 years ago
Current commit
0be7a6d4
5 files changed, with 77 additions and 115 deletions
  1. Assets/ScriptableRenderPipeline/HDRenderPipeline/HDRenderPipeline.cs (2 changes)
  2. Assets/ScriptableRenderPipeline/HDRenderPipeline/Lighting/AmbientOcclusion/AmbientOcclusion.cs (80 changes)
  3. Assets/ScriptableRenderPipeline/HDRenderPipeline/Lighting/AmbientOcclusion/Resources/CommonAmbientOcclusion.hlsl (38 changes)
  4. Assets/ScriptableRenderPipeline/HDRenderPipeline/Lighting/AmbientOcclusion/Resources/Denoising.hlsl (8 changes)
  5. Assets/ScriptableRenderPipeline/HDRenderPipeline/Lighting/AmbientOcclusion/Resources/Estimation.hlsl (64 changes)

Assets/ScriptableRenderPipeline/HDRenderPipeline/HDRenderPipeline.cs (2 changes)

  using (new Utilities.ProfilingSample("Build Light list and render shadows", renderContext))
  {
      // TODO: Everything here (SSAO, shadows, light list build, material and light classification) can be parallelized with async compute
-     m_SsaoEffect.Render(ssaoSettingsToUse, this, hdCamera, renderContext, GetDepthTexture(), m_Asset.renderingSettings.useForwardRenderingOnly);
+     m_SsaoEffect.Render(ssaoSettingsToUse, this, hdCamera, renderContext, m_Asset.renderingSettings.useForwardRenderingOnly);
      m_LightLoop.PrepareLightsForGPU(m_ShadowSettings, cullResults, camera);
      m_LightLoop.RenderShadows(renderContext, cullResults);
      renderContext.SetupCameraProperties(camera); // Need to recall SetupCameraProperties after m_ShadowPass.Render
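The TODO above sketches the longer-term plan: SSAO, shadow rendering, light-list building and classification are independent enough to overlap on an async compute queue. As a rough illustration only (this commit does not implement it, and the sketch assumes a compute-shader AO pass plus the ScriptableRenderContext.ExecuteCommandBufferAsync API from later Unity versions):

    // Hypothetical sketch; not part of this commit.
    var asyncCmd = new CommandBuffer { name = "SSAO (async)" };
    // ... record the AO estimation as compute dispatches here ...
    renderContext.ExecuteCommandBufferAsync(asyncCmd, ComputeQueueType.Background);
    asyncCmd.Release();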

Assets/ScriptableRenderPipeline/HDRenderPipeline/Lighting/AmbientOcclusion/AmbientOcclusion.cs (80 changes)


  internal static readonly int _AOBuffer = Shader.PropertyToID("_AmbientOcclusionTexture");
  internal static readonly int _TempTex1 = Shader.PropertyToID("_TempTex1");
  internal static readonly int _TempTex2 = Shader.PropertyToID("_TempTex2");
- internal static readonly int _CameraDepthTexture = Shader.PropertyToID("_CameraDepthTexture");

+ CommandBuffer m_Command;

+ // For the AO buffer, use R8 or RHalf if available.
+ static RenderTextureFormat GetAOBufferFormat()
+ {
+     if (SystemInfo.SupportsRenderTextureFormat(RenderTextureFormat.R8))
+         return RenderTextureFormat.R8;
+     if (SystemInfo.SupportsRenderTextureFormat(RenderTextureFormat.RHalf))
+         return RenderTextureFormat.RHalf;
+     return RenderTextureFormat.Default;
+ }

  public ScreenSpaceAmbientOcclusionEffect()
  {}

  m_Material.hideFlags = HideFlags.DontSave;
  }

- public void Render(ScreenSpaceAmbientOcclusionSettings.Settings settings, HDRenderPipeline hdRP, HDCamera hdCamera, ScriptableRenderContext renderContext, RenderTargetIdentifier depthID, bool isForward)
+ public void Render(ScreenSpaceAmbientOcclusionSettings.Settings settings, HDRenderPipeline hdRP, HDCamera hdCamera, ScriptableRenderContext renderContext, bool isForward)

- const RenderTextureFormat kFormat = RenderTextureFormat.ARGB32;
+ const RenderTextureFormat kTempFormat = RenderTextureFormat.ARGB32;

+ if (m_Command == null)
+ {
+     m_Command = new CommandBuffer { name = "Ambient Occlusion" };
+ }
+ else
+ {
+     m_Command.Clear();
+ }

- var cmd2 = new CommandBuffer { name = "Setup neutral Ambient Occlusion (1x1)" };
- cmd2.SetGlobalTexture("_AmbientOcclusionTexture", PostProcessing.RuntimeUtilities.blackTexture); // Neutral is black, see the comment in the shaders
- renderContext.ExecuteCommandBuffer(cmd2);
- cmd2.Dispose();
- return;
+ m_Command.SetGlobalTexture(Uniforms._AOBuffer, PostProcessing.RuntimeUtilities.blackTexture); // Neutral is black, see the comment in the shaders
+ renderContext.ExecuteCommandBuffer(m_Command);
+ return;
  }

  var width = hdCamera.camera.pixelWidth;

  m_Material.SetFloat(Uniforms._Downsample, 1.0f / downsize);
  m_Material.SetFloat(Uniforms._SampleCount, settings.sampleCount);

- // Start building a command buffer.
- var cmd = new CommandBuffer { name = "Ambient Occlusion" };
- cmd.SetGlobalTexture(Uniforms._CameraDepthTexture, depthID);
  // Note: the GBuffer is bound automatically.
- cmd.GetTemporaryRT(Uniforms._TempTex1, width / downsize, height / downsize, 0, kFilter, kFormat, kRWMode);
- cmd.SetGlobalTexture(Uniforms._MainTex, depthID);
- Utilities.DrawFullScreen(cmd, m_Material, hdCamera, Uniforms._TempTex1, null, 0);
- hdRP.PushFullScreenDebugTexture(cmd, Uniforms._TempTex1, hdCamera.camera, renderContext, FullScreenDebugMode.SSAOBeforeFiltering);
+ m_Command.GetTemporaryRT(Uniforms._TempTex1, width / downsize, height / downsize, 0, kFilter, kTempFormat, kRWMode);
+ Utilities.DrawFullScreen(m_Command, m_Material, hdCamera, Uniforms._TempTex1, null, 0);
+ hdRP.PushFullScreenDebugTexture(m_Command, Uniforms._TempTex1, hdCamera.camera, renderContext, FullScreenDebugMode.SSAOBeforeFiltering);

- cmd.GetTemporaryRT(Uniforms._TempTex2, width, height, 0, kFilter, kFormat, kRWMode);
- cmd.SetGlobalTexture(Uniforms._MainTex, Uniforms._TempTex1);
- Utilities.DrawFullScreen(cmd, m_Material, hdCamera, Uniforms._TempTex2, null, 1);
- cmd.ReleaseTemporaryRT(Uniforms._TempTex1);
+ m_Command.GetTemporaryRT(Uniforms._TempTex2, width, height, 0, kFilter, kTempFormat, kRWMode);
+ m_Command.SetGlobalTexture(Uniforms._MainTex, Uniforms._TempTex1);
+ Utilities.DrawFullScreen(m_Command, m_Material, hdCamera, Uniforms._TempTex2, null, 1);
+ m_Command.ReleaseTemporaryRT(Uniforms._TempTex1);

- cmd.GetTemporaryRT(Uniforms._TempTex1, width, height, 0, kFilter, kFormat, kRWMode);
- cmd.SetGlobalTexture(Uniforms._MainTex, Uniforms._TempTex2);
- Utilities.DrawFullScreen(cmd, m_Material, hdCamera, Uniforms._TempTex1, null, 2);
- cmd.ReleaseTemporaryRT(Uniforms._TempTex2);
+ m_Command.GetTemporaryRT(Uniforms._TempTex1, width, height, 0, kFilter, kTempFormat, kRWMode);
+ m_Command.SetGlobalTexture(Uniforms._MainTex, Uniforms._TempTex2);
+ Utilities.DrawFullScreen(m_Command, m_Material, hdCamera, Uniforms._TempTex1, null, 2);
+ m_Command.ReleaseTemporaryRT(Uniforms._TempTex2);

- cmd.GetTemporaryRT(Uniforms._AOBuffer, width, height, 0, kFilter, kFormat, kRWMode);
- cmd.SetGlobalTexture(Uniforms._MainTex, Uniforms._TempTex1);
- Utilities.DrawFullScreen(cmd, m_Material, hdCamera, Uniforms._AOBuffer, null, 3);
- cmd.ReleaseTemporaryRT(Uniforms._TempTex1);
+ m_Command.GetTemporaryRT(Uniforms._AOBuffer, width, height, 0, kFilter, GetAOBufferFormat(), kRWMode);
+ m_Command.SetGlobalTexture(Uniforms._MainTex, Uniforms._TempTex1);
+ Utilities.DrawFullScreen(m_Command, m_Material, hdCamera, Uniforms._AOBuffer, null, 3);
+ m_Command.ReleaseTemporaryRT(Uniforms._TempTex1);

- cmd.SetGlobalTexture("_AmbientOcclusionTexture", Uniforms._AOBuffer);
- hdRP.PushFullScreenDebugTexture(cmd, Uniforms._AOBuffer, hdCamera.camera, renderContext, FullScreenDebugMode.SSAO);
+ m_Command.SetGlobalTexture("_AmbientOcclusionTexture", Uniforms._AOBuffer);
+ hdRP.PushFullScreenDebugTexture(m_Command, Uniforms._AOBuffer, hdCamera.camera, renderContext, FullScreenDebugMode.SSAO);

- renderContext.ExecuteCommandBuffer(cmd);
- cmd.Dispose();
+ renderContext.ExecuteCommandBuffer(m_Command);

+ if (m_Command != null) m_Command.Dispose();
  }
  }
  }
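Two ideas carry most of this file's diff: the per-frame CommandBuffer allocations (cmd/cmd2) are replaced by a single m_Command member that is created once and Clear()ed on later frames, which avoids per-frame garbage; and the final AO target drops ARGB32 in favour of the smallest single-channel format the platform supports. A minimal self-contained sketch of the same pattern (class and method names here are illustrative, not from the commit):

    using UnityEngine;
    using UnityEngine.Rendering;

    class ReusableCommandBufferExample
    {
        CommandBuffer m_Command;

        // Pick the cheapest single-channel format the platform supports,
        // mirroring GetAOBufferFormat() above.
        static RenderTextureFormat GetBufferFormat()
        {
            if (SystemInfo.SupportsRenderTextureFormat(RenderTextureFormat.R8))
                return RenderTextureFormat.R8;
            if (SystemInfo.SupportsRenderTextureFormat(RenderTextureFormat.RHalf))
                return RenderTextureFormat.RHalf;
            return RenderTextureFormat.Default;
        }

        public void Render(ScriptableRenderContext renderContext)
        {
            // Allocate once, then re-record into the same buffer each frame.
            if (m_Command == null)
                m_Command = new CommandBuffer { name = "Ambient Occlusion" };
            else
                m_Command.Clear();

            // ... GetTemporaryRT / DrawFullScreen / SetGlobalTexture calls go here ...

            renderContext.ExecuteCommandBuffer(m_Command);
            // Intentionally not disposed here; see Cleanup().
        }

        public void Cleanup()
        {
            if (m_Command != null) m_Command.Dispose();
        }
    }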

Assets/ScriptableRenderPipeline/HDRenderPipeline/Lighting/AmbientOcclusion/Resources/CommonAmbientOcclusion.hlsl (38 changes)


  #define UNITY_HDRENDERPIPELINE_AMBIENTOCCLUSION_COMMON

  #include "../../../../ShaderLibrary/Common.hlsl"

- TEXTURE2D(_CameraDepthTexture);
- SAMPLER2D(sampler_CameraDepthTexture);
  DECLARE_GBUFFER_TEXTURE(_GBufferTexture);

  return frac(52.9829189 * frac(f));
  }

- // Boundary check for depth sampler
- // (returns a very large value if it lies out of bounds)
- float CheckBounds(float2 uv, float d)
+ // Check if the depth value is valid.
+ bool CheckDepth(float rawDepth)
  {
-     float ob = any(uv < 0) + any(uv > 1);
-     ob += (d <= 0.00001);
+     return rawDepth > 0.00001;
-     ob += (d >= 0.99999);
+     return rawDepth < 0.99999;
-     return ob * 1e8;
  }

  // AO/normal packed format conversion

  return p.yzw * 2.0 - 1.0;
  }

  // Depth/normal sampling
- float SampleDepth(uint2 unPositionSS)
- {
-     float z = LOAD_TEXTURE2D(_CameraDepthTexture, unPositionSS).x;
-     return LinearEyeDepth(z, _ZBufferParams) + CheckBounds(float2(0.5, 0.5), z); // TODO: We should use the stencil to not affect the sky and save the CheckBounds cost - also uv can't be out of bounds on xy... so put a constant here
- }

- half3 SampleNormal(BSDFData bsdfData)
+ half3 SampleNormal(uint2 unPositionSS)
  {
+     float3 unused;
+     BSDFData bsdfData;
+     FETCH_GBUFFER(gbuffer, _GBufferTexture, unPositionSS);
+     DECODE_FROM_GBUFFER(gbuffer, 0xFFFFFFFF, bsdfData, unused);
      return mul((float3x3)unity_WorldToCamera, bsdfData.normalWS);
  }

  return smoothstep(kGeometryCoeff, 1.0, dot(d1, d2));
  }

+ // TODO: Test. We may need to use the full matrix here to recover the view-space position, as this may not work with an oblique projection (planar reflection)
  // Reconstruct view-space position from UV and depth.
  // p11_22 = (unity_CameraProjection._11, unity_CameraProjection._22)
  // p13_31 = (unity_CameraProjection._13, unity_CameraProjection._23)
  float3 ReconstructViewPos(float2 uv, float depth, float2 p11_22, float2 p13_31)
  {
      return float3((uv * 2.0 - 1.0 - p13_31) / p11_22 * depth, depth);
  }

  // Default vertex shader
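ReconstructViewPos only needs four projection-matrix entries because, for a perspective projection, ndc.xy = (p11, p22) * view.xy / view.z + (p13, p23); solving for view.xy gives exactly the expression in the function. A C# mirror of the same arithmetic, useful for checking it on the CPU (illustrative, not from the commit):

    using UnityEngine;

    static class ViewPosReconstruction
    {
        // uv in [0,1], depth = linear eye depth, proj = unity_CameraProjection.
        // Matches the HLSL above; note the TODO: an oblique (planar-reflection)
        // projection needs the full inverse matrix instead.
        public static Vector3 Reconstruct(Vector2 uv, float depth, Matrix4x4 proj)
        {
            float p11 = proj.m00, p22 = proj.m11; // _11, _22
            float p13 = proj.m02, p23 = proj.m12; // _13, _23
            return new Vector3(
                (uv.x * 2f - 1f - p13) / p11 * depth,
                (uv.y * 2f - 1f - p23) / p22 * depth,
                depth);
        }
    }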

Assets/ScriptableRenderPipeline/HDRenderPipeline/Lighting/AmbientOcclusion/Resources/Denoising.hlsl (8 changes)


  half4 p2b = SAMPLE_TEXTURE2D(_MainTex, sampler_MainTex, uv + delta * 3.2307692308);

  #if defined(AO_DENOISE_CENTER_NORMAL)
-     half3 unused;
-     BSDFData bsdfData;
-     FETCH_GBUFFER(gbuffer, _GBufferTexture, posInput.unPositionSS);
-     DECODE_FROM_GBUFFER(gbuffer, 0xFFFFFFFF, bsdfData, unused);
-     half3 n0 = SampleNormal(bsdfData);
+     half3 n0 = SampleNormal(posInput.unPositionSS);
  #else
      half3 n0 = GetPackedNormal(p0);
  #endif
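Incidentally, the tap offset in this separable blur (delta * 3.2307692308, alongside the companion offset 1.3846153846 typically paired with it) is not arbitrary: folding two adjacent discrete Gaussian taps into one bilinear fetch places the sample at their weighted centroid, offset = (o1*w1 + o2*w2) / (w1 + w2). A quick check of that arithmetic, assuming the widely used 9-tap Gaussian weights (the derivation itself is not part of this commit):

    using System;

    static class GaussianOffsets
    {
        static void Main()
        {
            // Normalized weights of a discrete 9-tap Gaussian (offsets 0..4);
            // a standard choice for linear-sampled blurs.
            double[] w = { 0.2270270270, 0.1945945946, 0.1216216216,
                           0.0540540541, 0.0162162162 };

            // Merge taps (1,2) and (3,4) into single bilinear fetches.
            double o12 = (1 * w[1] + 2 * w[2]) / (w[1] + w[2]);
            double o34 = (3 * w[3] + 4 * w[4]) / (w[3] + w[4]);

            Console.WriteLine($"{o12:F10}"); // 1.3846153846
            Console.WriteLine($"{o34:F10}"); // 3.2307692308, the offset used above
        }
    }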

Assets/ScriptableRenderPipeline/HDRenderPipeline/Lighting/AmbientOcclusion/Resources/Estimation.hlsl (64 changes)


  half _Downsample;
  int _SampleCount;

  // Sample point picker
- float3 PickSamplePoint(float2 uv, float index)
+ float3 SampleInsideHemisphere(float2 uv, half3 norm, int index)

- // Uniformly distributed points on a unit sphere http://goo.gl/X2F1Ho
- // FIXME: This was added to avoid an NVIDIA driver issue.
- // vvvvvvvvvvvv
- float u = frac(UVRandom(0.0, index + uv.x * 1e-10) + gn) * 2.0 - 1.0;
- float theta = (UVRandom(1.0, index + uv.x * 1e-10) + gn) * TWO_PI;
- float3 v = float3(SinCos(theta).yx * sqrt(1.0 - u * u), u);
- // Make them distributed between [0, _Radius]
- float l = sqrt((index + 1.0) / _SampleCount) * _Radius;
- return v * l;
+ float2 u = frac(Hammersley2d(index, _SampleCount) + gn);
+ float3 v = SampleSphereUniform(u.x, u.y);
+ v *= sqrt((index + 1.0) / _SampleCount) * _Radius;
+ return faceforward(v, -norm, v);

  // input.positionCS is SV_Position
- PositionInputs posInput = GetPositionInput(input.positionCS.xy, _ScreenSize.zw / _Downsample);
- float2 uv = posInput.positionSS;
+ PositionInputs posInput = GetPositionInput(input.positionCS.xy / _Downsample, _ScreenSize.zw);

- half3 unused;
- BSDFData bsdfData;
- FETCH_GBUFFER(gbuffer, _GBufferTexture, posInput.unPositionSS / _Downsample);
- DECODE_FROM_GBUFFER(gbuffer, 0xFFFFFFFF, bsdfData, unused);
+ // Get the normal, depth and view-space position of the center point.
+ half3 norm_o = SampleNormal(posInput.unPositionSS);

- // Parameters used in coordinate conversion
- float3x3 proj = (float3x3)unity_CameraProjection;
- float2 p11_22 = float2(unity_CameraProjection._11, unity_CameraProjection._22);
- float2 p13_31 = float2(unity_CameraProjection._13, unity_CameraProjection._23);
+ float depth_o_raw = LOAD_TEXTURE2D(_MainDepthTexture, posInput.unPositionSS).x;
+ float depth_o = LinearEyeDepth(depth_o_raw, _ZBufferParams);

- // View-space normal and depth
- half3 norm_o = SampleNormal(bsdfData);
- float depth_o = SampleDepth(posInput.unPositionSS / _Downsample);
+ if (!CheckDepth(depth_o_raw)) return PackAONormal(0, norm_o); // TODO: We should use the stencil to not affect the sky

  // Reconstruct the view-space position.
- float3 vpos_o = ReconstructViewPos(uv, depth_o, p11_22, p13_31);
+ float3 vpos_o = ComputeViewSpacePosition(posInput.positionSS, depth_o_raw, _InvProjMatrix);

  float ao = 0.0;

- // Sample point
- float3 v_s1 = PickSamplePoint(uv, s);
- v_s1 = faceforward(v_s1, -norm_o, v_s1);
- float3 vpos_s1 = vpos_o + v_s1;
- // Reproject the sample point
- float3 spos_s1 = mul(proj, vpos_s1);
- float2 uv_s1_01 = (spos_s1.xy / vpos_s1.z + 1.0) * 0.5;
- // Depth at the sample point
- float depth_s1 = SampleDepth(uint2(uv_s1_01 * _ScreenSize.xy));
+ // Sample inside the hemisphere defined by the normal.
+ float3 vpos_s1 = vpos_o + SampleInsideHemisphere(posInput.positionSS, norm_o, s);

- // Relative position of the sample point
- float3 vpos_s2 = ReconstructViewPos(uv_s1_01, depth_s1, p11_22, p13_31);
- float3 v_s2 = vpos_s2 - vpos_o;
+ // Project the sample point and get its view-space position.
+ float2 spos_s1 = float2(dot(unity_CameraProjection[0].xyz, vpos_s1),
+                         dot(unity_CameraProjection[1].xyz, vpos_s1));
+ float2 uv_s1_01 = (spos_s1 / vpos_s1.z + 1.0) * 0.5;
+ float depth_s1_raw = LOAD_TEXTURE2D(_MainDepthTexture, uint2(uv_s1_01 * _ScreenSize.xy)).x;
+ float3 vpos_s2 = ComputeViewSpacePosition(uv_s1_01, depth_s1_raw, _InvProjMatrix);
+ float3 v_s2 = vpos_s2 - vpos_o;

- ao += a1 / a2;
+ ao += CheckDepth(depth_s1_raw) ? a1 / a2 : 0;
  }

  // Apply intensity normalization/amplifier/contrast.
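The new sampler leans on two library helpers: Hammersley2d(i, n) pairs i/n with the base-2 radical inverse of i (a bit reversal), and SampleSphereUniform maps that 2D point uniformly onto the unit sphere; the sqrt((index + 1) / _SampleCount) factor spreads sample radii so points fill the ball of radius _Radius evenly, and faceforward folds any sample below the surface into the normal's hemisphere. A standalone C# sketch of the same construction (the per-pixel jitter gn is omitted, and the helper bodies are common formulations rather than the exact library code):

    using System;
    using System.Numerics;

    static class HemisphereSampling
    {
        // Van der Corput radical inverse, base 2 (bit reversal of i).
        static float RadicalInverseVdC(uint i)
        {
            i = (i << 16) | (i >> 16);
            i = ((i & 0x55555555u) << 1) | ((i & 0xAAAAAAAAu) >> 1);
            i = ((i & 0x33333333u) << 2) | ((i & 0xCCCCCCCCu) >> 2);
            i = ((i & 0x0F0F0F0Fu) << 4) | ((i & 0xF0F0F0F0u) >> 4);
            i = ((i & 0x00FF00FFu) << 8) | ((i & 0xFF00FF00u) >> 8);
            return i * 2.3283064365386963e-10f; // divide by 2^32
        }

        static Vector2 Hammersley2d(uint i, uint n)
            => new Vector2((float)i / n, RadicalInverseVdC(i));

        // Map a 2D point uniformly onto the unit sphere.
        static Vector3 SampleSphereUniform(float u, float v)
        {
            float z = 1f - 2f * u;
            float r = MathF.Sqrt(MathF.Max(0f, 1f - z * z));
            float phi = 2f * MathF.PI * v;
            return new Vector3(r * MathF.Cos(phi), r * MathF.Sin(phi), z);
        }

        // One sample inside the hemisphere around 'norm', radius-scaled
        // with sqrt((index + 1) / count) as in the shader above.
        static Vector3 SampleInsideHemisphere(Vector3 norm, uint index, uint count, float radius)
        {
            Vector2 u = Hammersley2d(index, count);
            Vector3 v = SampleSphereUniform(u.X, u.Y);
            v *= MathF.Sqrt((index + 1f) / count) * radius;
            return Vector3.Dot(v, norm) < 0f ? -v : v; // faceforward
        }
    }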
