浏览代码

HDRenderPipeline: Update SSAO effect

/Branch_Batching2
Sebastien Lagarde 8 年前
当前提交
36142eb8
共有 29 个文件被更改,包括 562 次插入1043 次删除
  1. 7
      Assets/ScriptableRenderPipeline/HDRenderPipeline/Debug/DebugViewMaterialGBuffer.shader
  2. 9
      Assets/ScriptableRenderPipeline/HDRenderPipeline/Editor/SceneSettingsManagementWindow.cs
  3. 22
      Assets/ScriptableRenderPipeline/HDRenderPipeline/HDRenderPipeline.cs
  4. 20
      Assets/ScriptableRenderPipeline/HDRenderPipeline/HDRenderPipelineAsset.cs
  5. 13
      Assets/ScriptableRenderPipeline/HDRenderPipeline/Lighting/TilePass/TilePass.hlsl
  6. 30
      Assets/ScriptableRenderPipeline/HDRenderPipeline/Lighting/TilePass/TilePassLoop.hlsl
  7. 2
      Assets/ScriptableRenderPipeline/HDRenderPipeline/Material/Lit/Lit.hlsl
  8. 2
      Assets/ScriptableRenderPipeline/HDRenderPipeline/SceneSettings/CommonSettings.cs
  9. 6
      Assets/ScriptableRenderPipeline/ShaderLibrary/CommonLighting.hlsl
  10. 3
      Assets/ScriptableRenderPipeline/HDRenderPipeline/Lighting/AmbientOcclusion/Resources/AmbientOcclusion.shader
  11. 151
      Assets/ScriptableRenderPipeline/HDRenderPipeline/Lighting/AmbientOcclusion/AmbientOcclusion.cs
  12. 131
      Assets/ScriptableRenderPipeline/HDRenderPipeline/Lighting/AmbientOcclusion/Resources/CommonAmbientOcclusion.hlsl
  13. 47
      Assets/ScriptableRenderPipeline/HDRenderPipeline/Lighting/AmbientOcclusion/Resources/Composition.hlsl
  14. 69
      Assets/ScriptableRenderPipeline/HDRenderPipeline/Lighting/AmbientOcclusion/Resources/Denoising.hlsl
  15. 83
      Assets/ScriptableRenderPipeline/HDRenderPipeline/Lighting/AmbientOcclusion/Resources/Estimation.hlsl
  16. 9
      Assets/ScriptableRenderPipeline/HDRenderPipeline/ScreenSpace.meta
  17. 1001
      Assets/ScriptableRenderPipeline/HDRenderPipeline/HDRenderPipeline.cs.orig
  18. 0
      /Assets/ScriptableRenderPipeline/HDRenderPipeline/Lighting/AmbientOcclusion.meta
  19. 0
      /Assets/ScriptableRenderPipeline/HDRenderPipeline/Lighting/AmbientOcclusion/Resources.meta
  20. 0
      /Assets/ScriptableRenderPipeline/HDRenderPipeline/Lighting/AmbientOcclusion/Resources/AmbientOcclusion.shader.meta
  21. 0
      /Assets/ScriptableRenderPipeline/HDRenderPipeline/Lighting/AmbientOcclusion/Resources/CommonAmbientOcclusion.hlsl.meta
  22. 0
      /Assets/ScriptableRenderPipeline/HDRenderPipeline/Lighting/AmbientOcclusion/Resources/Composition.hlsl.meta
  23. 0
      /Assets/ScriptableRenderPipeline/HDRenderPipeline/Lighting/AmbientOcclusion/Resources/Denoising.hlsl.meta
  24. 0
      /Assets/ScriptableRenderPipeline/HDRenderPipeline/Lighting/AmbientOcclusion/Resources/Estimation.hlsl.meta
  25. 0
      /Assets/ScriptableRenderPipeline/HDRenderPipeline/Lighting/AmbientOcclusion/Resources/AmbientOcclusion.shader
  26. 0
      /Assets/ScriptableRenderPipeline/HDRenderPipeline/Lighting/AmbientOcclusion/AmbientOcclusion.cs.meta

7
Assets/ScriptableRenderPipeline/HDRenderPipeline/Debug/DebugViewMaterialGBuffer.shader


struct Attributes
{
float3 positionOS : POSITION;
uint vertexID : SV_VertexID;
};
struct Varyings

Varyings Vert(Attributes input)
{
// TODO: implement SV_vertexID full screen quad
float3 positionWS = TransformObjectToWorld(input.positionOS);
output.positionCS = TransformWorldToHClip(positionWS);
output.positionCS = GetFullScreenTriangleVertexPosition(input.vertexID);
return output;
}

9
Assets/ScriptableRenderPipeline/HDRenderPipeline/Editor/SceneSettingsManagementWindow.cs


CreateAsset<CommonSettings>("NewCommonSettings");
}
if (GUILayout.Button("Create new HDRI sky params"))
if (GUILayout.Button("Create new HDRI Sky params"))
if (GUILayout.Button("Create new Procedural sky params"))
if (GUILayout.Button("Create new Procedural Sky params"))
}
if (GUILayout.Button("Create new Ambient Occlusion params"))
{
CreateAsset<ScreenSpaceAmbientOcclusionSettings>("NewAmbientOcclusionParameters");
}
EditorGUILayout.Space();

22
Assets/ScriptableRenderPipeline/HDRenderPipeline/HDRenderPipeline.cs


m_SkyManager.skySettings = asset.skySettingsToUse;
m_PostProcessContext = new PostProcessRenderContext();
m_SsaoEffect = new ScreenSpaceAmbientOcclusionEffect(m_gbufferManager.GetGBuffers());
m_SsaoEffect = new ScreenSpaceAmbientOcclusionEffect();
m_SsaoEffect.Build(asset.renderPipelineResources);
}
void InitializeDebugMaterials()

m_SkyManager.Cleanup();
if (m_SsaoEffect != null)
m_SsaoEffect.Cleanup();
m_SsaoEffect.Cleanup();
#if UNITY_EDITOR
SupportedRenderingFeatures.active = SupportedRenderingFeatures.Default;

if (!m_Asset.renderingSettings.useForwardRenderingOnly)
{
CopyDepthBufferIfNeeded(renderContext);
// Now the depth texture and the GBuffer are ready. We can run some screen-space effects on it.
if (m_Owner.commonSettingsToUse != null)
{
m_SsaoEffect.Render(m_Owner.commonSettingsToUse.screenSpaceAmbientOcclusionSettings, camera, renderContext, GetDepthTexture());
}
}
if (debugDisplaySettings.IsDebugMaterialDisplayEnabled())

{
using (new Utilities.ProfilingSample("Build Light list and render shadows", renderContext))
{
// TODO: Everything here (SSAO, Shadow, Build light list, material and light classification can be parallelize with Async compute)
// Note: Currently there is no SSAO in forward as we don't have normal
ScreenSpaceAmbientOcclusionSettings.Settings ssaoSettings = m_Asset.ssaoSettingsToUse;
if (/* ssaoSettings != null && */ !m_Asset.renderingSettings.useForwardRenderingOnly)
{
m_SsaoEffect.Render(m_Asset.ssaoSettingsToUse, camera, renderContext, GetDepthTexture());
}
m_LightLoop.BuildGPULightLists(camera, renderContext, m_CameraDepthStencilBufferRT); // TODO: Use async compute here to run light culling during shadow
m_LightLoop.BuildGPULightLists(camera, renderContext, m_CameraDepthStencilBufferRT);
}
}

20
Assets/ScriptableRenderPipeline/HDRenderPipeline/HDRenderPipelineAsset.cs


ShadowSettings m_ShadowSettings = ShadowSettings.Default;
[SerializeField]
TextureSettings m_TextureSettings = TextureSettings.Default;
public ShadowSettings shadowSettings
{
get { return m_ShadowSettings; }

}
}
/*
*/
public SkySettings skySettingsToUse
{

return m_SkySettings;
}
}
[SerializeField]
private ScreenSpaceAmbientOcclusionSettings.Settings m_SsaoSettings;
public ScreenSpaceAmbientOcclusionSettings.Settings ssaoSettingsToUse
{
get
{
if (ScreenSpaceAmbientOcclusionSettingsSingleton.overrideSettings)
return ScreenSpaceAmbientOcclusionSettingsSingleton.overrideSettings.settings;
return m_SsaoSettings;
}
}
// Default Material / Shader
[SerializeField]
Material m_DefaultDiffuseMaterial;

13
Assets/ScriptableRenderPipeline/HDRenderPipeline/Lighting/TilePass/TilePass.hlsl


struct LightLoopContext
{
// Visible from Material
float ambientOcclusion;
// Not visible from Material (user should not use these properties in Material)
int sampleShadow;
int sampleReflection;
ShadowContext shadowContext;

return SAMPLE_TEXTURECUBE_LOD(_SkyTexture, sampler_SkyTexture, texCoord, lod);
}
}
//-----------------------------------------------------------------------------
// AmbientOcclusion
// ----------------------------------------------------------------------------
TEXTURE2D(_AmbientOcclusionTexture);
// TODO: Create a variant for it
#define APPLY_AMBIENT_OCCLUSION

30
Assets/ScriptableRenderPipeline/HDRenderPipeline/Lighting/TilePass/TilePassLoop.hlsl


out float3 specularLighting)
{
LightLoopContext context;
#ifdef APPLY_AMBIENT_OCCLUSION
context.ambientOcclusion = LOAD_TEXTURE2D(_AmbientOcclusionTexture, unPositionSS).x;
#else
context.ambientOcclusion = 1.0;
#endif
context.sampleShadow = 0;
context.sampleReflection = 0;
context.shadowContext = InitShadowContext();

{
float3 localDiffuseLighting, localSpecularLighting;
EvaluateBSDF_Directional(context, V, posInput, prelightData, _DirectionalLightDatas[i], bsdfData,
localDiffuseLighting, localSpecularLighting);
EvaluateBSDF_Directional( context, V, posInput, prelightData, _DirectionalLightDatas[i], bsdfData,
localDiffuseLighting, localSpecularLighting);
diffuseLighting += localDiffuseLighting;
specularLighting += localSpecularLighting;

{
float3 localDiffuseLighting, localSpecularLighting;
EvaluateBSDF_Punctual(context, V, posInput, prelightData, _LightDatas[FetchIndex(punctualLightStart, i)], bsdfData,
localDiffuseLighting, localSpecularLighting);
EvaluateBSDF_Punctual( context, V, posInput, prelightData, _LightDatas[FetchIndex(punctualLightStart, i)], bsdfData,
localDiffuseLighting, localSpecularLighting);
diffuseLighting += localDiffuseLighting;
specularLighting += localSpecularLighting;

if(_LightDatas[areaIndex].lightType == GPULIGHTTYPE_LINE)
{
EvaluateBSDF_Line(context, V, posInput, prelightData, _LightDatas[areaIndex], bsdfData,
localDiffuseLighting, localSpecularLighting);
EvaluateBSDF_Line( context, V, posInput, prelightData, _LightDatas[areaIndex], bsdfData,
localDiffuseLighting, localSpecularLighting);
EvaluateBSDF_Area(context, V, posInput, prelightData, _LightDatas[areaIndex], bsdfData,
localDiffuseLighting, localSpecularLighting);
EvaluateBSDF_Area( context, V, posInput, prelightData, _LightDatas[areaIndex], bsdfData,
localDiffuseLighting, localSpecularLighting);
}

// TODO: currently apply GI at the same time as reflection
#ifdef PROCESS_ENV_LIGHT
// Add indirect diffuse + emissive (if any)
diffuseLighting += bakeDiffuseLighting;
diffuseLighting += bakeDiffuseLighting * context.ambientOcclusion;
#endif
ApplyDebug(context, posInput.positionWS, diffuseLighting, specularLighting);

out float3 specularLighting)
{
LightLoopContext context;
#ifdef APPLY_AMBIENT_OCCLUSION
context.ambientOcclusion = LOAD_TEXTURE2D(_AmbientOcclusionTexture, unPositionSS).x;
#else
context.ambientOcclusion = 1.0;
#endif
context.sampleShadow = 0;
context.sampleReflection = 0;
context.shadowContext = InitShadowContext();

specularLighting += iblSpecularLighting;
// Add indirect diffuse + emissive (if any)
diffuseLighting += bakeDiffuseLighting;
diffuseLighting += bakeDiffuseLighting * context.ambientOcclusion;
ApplyDebug(context, posInput.positionWS, diffuseLighting, specularLighting);
}

2
Assets/ScriptableRenderPipeline/HDRenderPipeline/Material/Lit/Lit.hlsl


specularLighting = preLD.rgb * preLightData.specularFGD;
// Apply specular occlusion on it
specularLighting *= bsdfData.specularOcclusion;
specularLighting *= bsdfData.specularOcclusion * GetSpecularOcclusion(preLightData.NdotV, lightLoopContext.ambientOcclusion, bsdfData.roughness);
diffuseLighting = float3(0.0, 0.0, 0.0);
#endif

2
Assets/ScriptableRenderPipeline/HDRenderPipeline/SceneSettings/CommonSettings.cs


get { return m_Settings; }
set { m_Settings = value; }
}
public ScreenSpaceAmbientOcclusionSettings screenSpaceAmbientOcclusionSettings;
}
}

6
Assets/ScriptableRenderPipeline/ShaderLibrary/CommonLighting.hlsl


return specularOcclusion * specularOcclusion;
}
// Ref: Moving Frostbite to PBR - Gotanda siggraph 2011
float GetSpecularOcclusion(float NdotV, float ambientOcclusion, float roughness)
{
return saturate(pow(NdotV + ambientOcclusion, exp2(-16.0 * roughness - 1.0)) - 1.0 + ambientOcclusion);
}
//-----------------------------------------------------------------------------
// Helper functions
//-----------------------------------------------------------------------------

3
Assets/ScriptableRenderPipeline/HDRenderPipeline/Lighting/AmbientOcclusion/Resources/AmbientOcclusion.shader


ENDHLSL
}
// 3: Composition
// 3: Final filtering
Blend Zero OneMinusSrcColor, Zero OneMinusSrcAlpha
HLSLPROGRAM
#pragma vertex Vert
#pragma fragment Frag

151
Assets/ScriptableRenderPipeline/HDRenderPipeline/Lighting/AmbientOcclusion/AmbientOcclusion.cs


using System;
using UnityEngine;
using UnityEngine.Rendering;
using UnityEngine.Experimental.PostProcessing;
namespace UnityEngine.Experimental.Rendering.HDPipeline
{
public class ScreenSpaceAmbientOcclusionSettings : ScriptableObject
{
    // Serializable parameter container for the screen-space AO effect.
    // Values are clamped by OnValidate() whenever set through the properties.
    [Serializable]
    public struct Settings
    {
        [Range(0, 2)]
        [SerializeField]
        float m_Intensity;
        [SerializeField]
        float m_Radius;
        [Range(1, 32)]
        [SerializeField]
        int m_SampleCount;
        [SerializeField]
        bool m_Downsampling;

        // Occlusion strength multiplier, clamped to [0, 2].
        public float intensity { set { m_Intensity = value; OnValidate(); } get { return m_Intensity; } }
        // Sampling radius of the AO kernel, clamped to be non-negative.
        public float radius { set { m_Radius = value; OnValidate(); } get { return m_Radius; } }
        // Number of AO samples per pixel, clamped to [1, 32].
        public int sampleCount { set { m_SampleCount = value; OnValidate(); } get { return m_SampleCount; } }
        // When true, the AO estimation pass runs at half resolution.
        public bool downsampling { set { m_Downsampling = value; } get { return m_Downsampling; } }

        void OnValidate()
        {
            m_Intensity = Mathf.Clamp(m_Intensity, 0f, 2f);
            m_Radius = Mathf.Max(0f, m_Radius);
            // Bug fix: the original `Mathf.Min(1, Mathf.Max(32, m_SampleCount))`
            // always evaluated to 1. The intent (matching [Range(1, 32)]) is a
            // clamp to the [1, 32] interval.
            m_SampleCount = Mathf.Clamp(m_SampleCount, 1, 32);
        }

        public static readonly Settings s_Defaultsettings = new Settings
        {
            m_Intensity = 1.0f,
            m_Radius = 0.5f,
            m_SampleCount = 8,
            m_Downsampling = true
        };
    }

    [SerializeField]
    Settings m_Settings = Settings.s_Defaultsettings;

    public Settings settings
    {
        get { return m_Settings; }
        set { m_Settings = value; }
    }
}
public class ScreenSpaceAmbientOcclusionSettingsSingleton : Singleton<ScreenSpaceAmbientOcclusionSettingsSingleton>
{
    // Per-instance storage; only reachable through the static property below.
    private ScreenSpaceAmbientOcclusionSettings currentSettings { get; set; }

    // Global override hook: when this returns a non-null asset, its settings
    // take precedence over the ones serialized on the pipeline asset
    // (see ssaoSettingsToUse in HDRenderPipelineAsset).
    public static ScreenSpaceAmbientOcclusionSettings overrideSettings
    {
        get { return instance.currentSettings; }
        set { instance.currentSettings = value; }
    }
}
public sealed class ScreenSpaceAmbientOcclusionEffect
{
    // Cached shader property IDs used by the AO passes.
    static class Uniforms
    {
        internal static readonly int _Intensity = Shader.PropertyToID("_Intensity");
        internal static readonly int _Radius = Shader.PropertyToID("_Radius");
        internal static readonly int _Downsample = Shader.PropertyToID("_Downsample");
        internal static readonly int _SampleCount = Shader.PropertyToID("_SampleCount");
        internal static readonly int _AOBuffer = Shader.PropertyToID("_AmbientOcclusionTexture");
        internal static readonly int _TempTex1 = Shader.PropertyToID("_TempTex1");
        internal static readonly int _TempTex2 = Shader.PropertyToID("_TempTex2");
        internal static readonly int _CameraDepthTexture = Shader.PropertyToID("_CameraDepthTexture");
    }

    PropertySheet m_Sheet;
    // NOTE(review): removed the dead field `m_AmbientOcclusionRT` - it was
    // declared readonly but never assigned or read anywhere in this class.

    public ScreenSpaceAmbientOcclusionEffect()
    {}

    // Creates the runtime material/property sheet. Must be called once
    // before Render(); paired with Cleanup().
    public void Build(RenderPipelineResources renderPipelinesResources)
    {
        var material = Utilities.CreateEngineMaterial("Hidden/HDPipeline/ScreenSpace/AmbientOcclusion");
        // TODO: Don't we need to also free the material ?
        m_Sheet = new PropertySheet(material);
    }

    // Runs the four AO passes (0: estimation, 1: horizontal denoise,
    // 2: vertical denoise, 3: final filtering) and binds the result as the
    // global _AmbientOcclusionTexture for the lighting pass.
    public void Render(ScreenSpaceAmbientOcclusionSettings.Settings settings, Camera camera, ScriptableRenderContext renderContext, RenderTargetIdentifier depthID)
    {
        /* if (settings == null) return; */ // TODO
        const RenderTextureFormat kFormat = RenderTextureFormat.ARGB32;
        const RenderTextureReadWrite kRWMode = RenderTextureReadWrite.Linear;
        const FilterMode kFilter = FilterMode.Bilinear;

        var width = camera.pixelWidth;
        var height = camera.pixelHeight;
        // Estimation pass optionally runs at half resolution.
        var downsize = settings.downsampling ? 2 : 1;

        // Provide the settings via uniforms.
        m_Sheet.properties.SetFloat(Uniforms._Intensity, settings.intensity);
        m_Sheet.properties.SetFloat(Uniforms._Radius, settings.radius);
        m_Sheet.properties.SetFloat(Uniforms._Downsample, 1.0f / downsize);
        m_Sheet.properties.SetFloat(Uniforms._SampleCount, settings.sampleCount);

        // Start building a command buffer.
        var cmd = new CommandBuffer { name = "Ambient Occlusion" };
        cmd.SetGlobalTexture(Uniforms._CameraDepthTexture, depthID);
        // Note: GBuffer is automatically bind

        // AO estimation.
        cmd.GetTemporaryRT(Uniforms._TempTex1, width / downsize, height / downsize, 0, kFilter, kFormat, kRWMode);
        cmd.BlitFullscreenTriangle(depthID, Uniforms._TempTex1, m_Sheet, 0);

        // Denoising (horizontal pass).
        cmd.GetTemporaryRT(Uniforms._TempTex2, width, height, 0, kFilter, kFormat, kRWMode);
        cmd.BlitFullscreenTriangle(Uniforms._TempTex1, Uniforms._TempTex2, m_Sheet, 1);
        cmd.ReleaseTemporaryRT(Uniforms._TempTex1);

        // Denoising (vertical pass).
        cmd.GetTemporaryRT(Uniforms._TempTex1, width, height, 0, kFilter, kFormat, kRWMode);
        cmd.BlitFullscreenTriangle(Uniforms._TempTex2, Uniforms._TempTex1, m_Sheet, 2);
        cmd.ReleaseTemporaryRT(Uniforms._TempTex2);

        // Final filtering
        cmd.GetTemporaryRT(Uniforms._AOBuffer, width, height, 0, kFilter, kFormat, kRWMode);
        cmd.BlitFullscreenTriangle(Uniforms._TempTex1, Uniforms._AOBuffer, depthID, m_Sheet, 3);
        cmd.ReleaseTemporaryRT(Uniforms._TempTex1);

        // Setup texture for lighting pass (automagic of unity)
        cmd.SetGlobalTexture("_AmbientOcclusionTexture", Uniforms._AOBuffer);

        // Register the command buffer and release it.
        renderContext.ExecuteCommandBuffer(cmd);
        cmd.Dispose();
    }

    // Releases the property sheet created in Build().
    // NOTE(review): whether the wrapped material also needs explicit
    // destruction depends on PropertySheet.Release() - see TODO in Build().
    public void Cleanup()
    {
        if (m_Sheet != null)
            m_Sheet.Release();
    }
}
}

131
Assets/ScriptableRenderPipeline/HDRenderPipeline/Lighting/AmbientOcclusion/Resources/CommonAmbientOcclusion.hlsl


#ifndef UNITY_HDRENDERPIPELINE_AMBIENTOCCLUSION_COMMON
#define UNITY_HDRENDERPIPELINE_AMBIENTOCCLUSION_COMMON
#include "../../../../ShaderLibrary/Common.hlsl"
#include "../../../ShaderConfig.cs.hlsl"
#include "../../../ShaderVariables.hlsl"
#define UNITY_MATERIAL_LIT // Needs to be defined before including Material.hlsl
#include "../../../Material/Material.hlsl"
TEXTURE2D(_CameraDepthTexture);
SAMPLER2D(sampler_CameraDepthTexture);
DECLARE_GBUFFER_TEXTURE(_GBufferTexture);
// The constant below determines the contrast of occlusion. This allows
// users to control over/under occlusion. At the moment, this is not exposed
// to the editor because it's rarely useful.
static const float kContrast = 0.6;
// The constant below controls the geometry-awareness of the bilateral
// filter. The higher value, the more sensitive it is.
static const float kGeometryCoeff = 0.8;
// The constants below are used in the AO estimator. Beta is mainly used
// for suppressing self-shadowing noise, and Epsilon is used to prevent
// calculation underflow. See the paper (Morgan 2011 http://goo.gl/2iz3P)
// for further details of these constants.
static const float kBeta = 0.002;
// A small value used for avoiding self-occlusion.
static const float kEpsilon = 1e-6;
// Returns (sin(theta), cos(theta)) as a float2, using the HLSL sincos
// intrinsic to compute both in one call.
float2 SinCos(float theta)
{
    float s, c;
    sincos(theta, s, c);
    return float2(s, c);
}
// Pseudo random number generator seeded with 2D coordinates (the classic
// frac(sin(dot(...)) * bignum) shader hash). Returns a value in [0, 1).
float UVRandom(float u, float v)
{
    return frac(43758.5453 * sin(dot(float2(12.9898, 78.233), float2(u, v))));
}
// Interleaved gradient function from Jimenez 2014 http://goo.gl/eomGso
// Produces a stable per-pixel dither value in [0, 1); uv is first quantized
// to integer pixel coordinates via _ScreenParams so the pattern is fixed in
// screen space regardless of sub-pixel variation.
float GradientNoise(float2 uv)
{
uv = floor(uv * _ScreenParams.xy);
float f = dot(float2(0.06711056, 0.00583715), uv);
return frac(52.9829189 * frac(f));
}
// Boundary check for depth sampler
// (returns a very large value if it lies out of bounds)
// 'ob' accumulates bool-to-float conversions: it becomes non-zero when uv is
// outside [0, 1] on any axis, or when the raw depth sits at the far plane
// (i.e. sky pixels), so callers adding the result to a depth effectively
// reject the sample.
float CheckBounds(float2 uv, float d)
{
float ob = any(uv < 0) + any(uv > 1);
#if defined(UNITY_REVERSED_Z)
// With a reversed depth buffer the far plane is near depth 0.
ob += (d <= 0.00001);
#else
ob += (d >= 0.99999);
#endif
return ob * 1e8;
}
// AO/normal packed format conversion: occlusion goes in .x and the normal is
// remapped from [-1, 1] to [0, 1] in .yzw so both fit in an ARGB32 target.
half4 PackAONormal(half ao, half3 n)
{
return half4(ao, n * 0.5 + 0.5);
}
// Extracts the occlusion term from a packed sample.
half GetPackedAO(half4 p)
{
return p.x;
}
// Extracts and un-remaps the normal from a packed sample.
half3 GetPackedNormal(half4 p)
{
return p.yzw * 2.0 - 1.0;
}
// Depth/normal sampling
// Fetches device depth at the given pixel coordinate and converts it to
// linear eye-space depth. CheckBounds pushes out-of-bounds / far-plane (sky)
// pixels to a huge depth so they never contribute occlusion.
float SampleDepth(uint2 unPositionSS)
{
float z = LOAD_TEXTURE2D(_CameraDepthTexture, unPositionSS).x;
// Bug fix: the original passed an undefined identifier 'uv' to CheckBounds.
// Reconstruct the normalized [0, 1] coordinate from the pixel position
// (_ScreenSize.zw holds the inverse resolution - same convention as the
// GetPositionInput calls in the Frag shaders; confirm against ShaderVariables).
float2 uv = unPositionSS * _ScreenSize.zw;
return LinearEyeDepth(z, _ZBufferParams) + CheckBounds(uv, z); // TODO: We should use the stencil to not affect the sky and save CheckBounds cost
}
// Returns the GBuffer shading normal transformed from world space into
// camera (view) space via unity_WorldToCamera; the AO estimator works
// entirely in view space.
half3 SampleNormal(BSDFData bsdfData)
{
return mul((float3x3)unity_WorldToCamera, bsdfData.normalWS);
}
// Normal vector comparer (for geometry-aware weighting)
// Returns 1 when the two normals agree, smoothly falling to 0 as their dot
// product drops below kGeometryCoeff - used to stop AO bleeding across edges.
half CompareNormal(half3 d1, half3 d2)
{
return smoothstep(kGeometryCoeff, 1.0, dot(d1, d2));
}
// TODO: Test. We may need to use full matrix here to reconver VS position as it may not work in case of oblique projection (planar reflection)
// Reconstruct view-space position from UV and depth.
// p11_22 = (unity_CameraProjection._11, unity_CameraProjection._22)
// p13_31 = (unity_CameraProjection._13, unity_CameraProjection._23)
// Inverts the symmetric-frustum projection: xy is un-projected from NDC and
// scaled by linear depth, z is the linear depth itself.
float3 ReconstructViewPos(float2 uv, float depth, float2 p11_22, float2 p13_31)
{
return float3((uv * 2.0 - 1.0 - p13_31) / p11_22 * depth, depth);
}
// Default vertex shader
// Generates a full-screen triangle procedurally from SV_VertexID; no vertex
// buffer is required. Shared by all AO passes.
struct Attributes
{
uint vertexID : SV_VertexID;
};
struct Varyings
{
float4 positionCS : SV_POSITION;
};
Varyings Vert(Attributes input)
{
Varyings output;
output.positionCS = GetFullScreenTriangleVertexPosition(input.vertexID);
return output;
}
#endif // UNITY_HDRENDERPIPELINE_AMBIENTOCCLUSION_COMMON

47
Assets/ScriptableRenderPipeline/HDRenderPipeline/Lighting/AmbientOcclusion/Resources/Composition.hlsl


#ifndef UNITY_HDRENDERPIPELINE_AMBIENTOCCLUSION_COMPOSITION
#define UNITY_HDRENDERPIPELINE_AMBIENTOCCLUSION_COMPOSITION
#include "CommonAmbientOcclusion.hlsl"
half _Downsample;
TEXTURE2D(_MainTex);
SAMPLER2D(sampler_MainTex);
float4 _MainTex_TexelSize;
// Final geometry-aware 5-tap box filter over the denoised AO buffer.
// The occlusion of each diagonal tap is weighted by how well its packed
// normal matches the center normal, preventing bleeding across edges.
half4 Frag(Varyings input) : SV_Target // Bug fix: output semantic was missing (sibling Frag shaders declare it).
{
    // input.positionCS is SV_Position
    PositionInputs posInput = GetPositionInput(input.positionCS.xy, _ScreenSize.zw);
    // Bug fix: was declared 'float uv', silently truncating the 2D screen
    // coordinate to a scalar.
    float2 uv = posInput.positionSS;

    float2 delta = _MainTex_TexelSize.xy / _Downsample; // TODO: is it correct, we have already bilateral upsample here ?

    // 5-tap box blur filter.
    half4 p0 = SAMPLE_TEXTURE2D(_MainTex, sampler_MainTex, uv);
    half4 p1 = SAMPLE_TEXTURE2D(_MainTex, sampler_MainTex, uv + float2(-delta.x, -delta.y));
    half4 p2 = SAMPLE_TEXTURE2D(_MainTex, sampler_MainTex, uv + float2(+delta.x, -delta.y));
    half4 p3 = SAMPLE_TEXTURE2D(_MainTex, sampler_MainTex, uv + float2(-delta.x, +delta.y));
    half4 p4 = SAMPLE_TEXTURE2D(_MainTex, sampler_MainTex, uv + float2(+delta.x, +delta.y));

    half3 n0 = GetPackedNormal(p0);

    // Geometry-aware weighting.
    half w0 = 1.0;
    half w1 = CompareNormal(n0, GetPackedNormal(p1));
    half w2 = CompareNormal(n0, GetPackedNormal(p2));
    half w3 = CompareNormal(n0, GetPackedNormal(p3));
    half w4 = CompareNormal(n0, GetPackedNormal(p4));

    half ao;
    ao  = GetPackedAO(p0) * w0;
    ao += GetPackedAO(p1) * w1;
    ao += GetPackedAO(p2) * w2;
    ao += GetPackedAO(p3) * w3;
    ao += GetPackedAO(p4) * w4;
    ao /= w0 + w1 + w2 + w3 + w4;

    return half4(ao, 0, 0, 0);
}
#endif // UNITY_HDRENDERPIPELINE_AMBIENTOCCLUSION_COMPOSITION

69
Assets/ScriptableRenderPipeline/HDRenderPipeline/Lighting/AmbientOcclusion/Resources/Denoising.hlsl


#ifndef UNITY_HDRENDERPIPELINE_AMBIENTOCCLUSION_DENOISING
#define UNITY_HDRENDERPIPELINE_AMBIENTOCCLUSION_DENOISING
#include "CommonAmbientOcclusion.hlsl"
half _Downsample;
TEXTURE2D(_MainTex);
SAMPLER2D(sampler_MainTex);
float4 _MainTex_TexelSize;
// Separable geometry-aware Gaussian denoiser over the packed AO/normal
// buffer. Compiled per-direction: AO_DENOISE_HORIZONTAL vs vertical selects
// the blur axis; AO_DENOISE_CENTER_NORMAL re-reads the center normal from
// the GBuffer instead of the packed buffer.
half4 Frag(Varyings input) : SV_Target
{
    // input.positionCS is SV_Position
    PositionInputs posInput = GetPositionInput(input.positionCS.xy, _ScreenSize.zw);
    // Bug fix: was declared 'float uv', silently truncating the 2D screen
    // coordinate to a scalar.
    float2 uv = posInput.positionSS;

#if defined(AO_DENOISE_HORIZONTAL)
    // Horizontal pass: Always use 2 texels interval to match to
    // the dither pattern.
    float2 delta = float2(_MainTex_TexelSize.x * 2.0, 0.0);
#else // AO_DENOISE_VERTICAL
    // Vertical pass: Apply _Downsample to match to the dither
    // pattern in the original occlusion buffer.
    float2 delta = float2(0.0, _MainTex_TexelSize.y / _Downsample * 2.0);
#endif

    // 5-tap Gaussian with linear sampling.
    half4 p0  = SAMPLE_TEXTURE2D(_MainTex, sampler_MainTex, uv);
    half4 p1a = SAMPLE_TEXTURE2D(_MainTex, sampler_MainTex, uv - delta * 1.3846153846);
    half4 p1b = SAMPLE_TEXTURE2D(_MainTex, sampler_MainTex, uv + delta * 1.3846153846);
    half4 p2a = SAMPLE_TEXTURE2D(_MainTex, sampler_MainTex, uv - delta * 3.2307692308);
    half4 p2b = SAMPLE_TEXTURE2D(_MainTex, sampler_MainTex, uv + delta * 3.2307692308);

#if defined(AO_DENOISE_CENTER_NORMAL)
    half3 unused;
    BSDFData bsdfData;
    FETCH_GBUFFER(gbuffer, _GBufferTexture, posInput.unPositionSS);
    DECODE_FROM_GBUFFER(gbuffer, 0xFFFFFFFF, bsdfData, unused);
    half3 n0 = SampleNormal(bsdfData);
#else
    half3 n0 = GetPackedNormal(p0);
#endif

    // Geometry-aware weighting (Gaussian coefficients scaled by normal
    // agreement with the center tap).
    half w0  = 0.2270270270;
    half w1a = CompareNormal(n0, GetPackedNormal(p1a)) * 0.3162162162;
    half w1b = CompareNormal(n0, GetPackedNormal(p1b)) * 0.3162162162;
    half w2a = CompareNormal(n0, GetPackedNormal(p2a)) * 0.0702702703;
    half w2b = CompareNormal(n0, GetPackedNormal(p2b)) * 0.0702702703;

    half s;
    s  = GetPackedAO(p0)  * w0;
    s += GetPackedAO(p1a) * w1a;
    s += GetPackedAO(p1b) * w1b;
    s += GetPackedAO(p2a) * w2a;
    s += GetPackedAO(p2b) * w2b;
    s /= w0 + w1a + w1b + w2a + w2b;

    return PackAONormal(s, n0);
}
#endif // UNITY_HDRENDERPIPELINE_AMBIENTOCCLUSION_DENOISING

83
Assets/ScriptableRenderPipeline/HDRenderPipeline/Lighting/AmbientOcclusion/Resources/Estimation.hlsl


#ifndef UNITY_HDRENDERPIPELINE_AMBIENTOCCLUSION_ESTIMATION
#define UNITY_HDRENDERPIPELINE_AMBIENTOCCLUSION_ESTIMATION
#include "CommonAmbientOcclusion.hlsl"
half _Intensity;
float _Radius;
half _Downsample;
int _SampleCount;
// Sample point picker
// Returns the index-th kernel offset: a point on the unit sphere, scaled so
// radii grow as sqrt((index+1)/count) to fill the [0, _Radius] ball evenly.
// Randomization is seeded per-pixel via GradientNoise + UVRandom.
float3 PickSamplePoint(float2 uv, float index)
{
// Uniformly distributed points on a unit sphere http://goo.gl/X2F1Ho
float gn = GradientNoise(uv * _Downsample);
// FIXME: The uv.x * 1e-10 term was added to avoid a NVIDIA driver issue.
// vvvvvvvvvvvv
float u = frac(UVRandom(0.0, index + uv.x * 1e-10) + gn) * 2.0 - 1.0;
float theta = (UVRandom(1.0, index + uv.x * 1e-10) + gn) * TWO_PI;
float3 v = float3(SinCos(theta).yx * sqrt(1.0 - u * u), u);
// Make them distributed between [0, _Radius]
float l = sqrt((index + 1.0) / _SampleCount) * _Radius;
return v * l;
}
// Distance-based AO estimator based on Morgan 2011 http://goo.gl/2iz3P
// For each pixel: reconstructs the view-space position, scatters
// _SampleCount kernel points into the normal-facing hemisphere, reprojects
// each to screen space, and accumulates obscurance from the depth buffer.
half4 Frag(Varyings input) : SV_Target
{
    // input.positionCS is SV_Position
    PositionInputs posInput = GetPositionInput(input.positionCS.xy, _ScreenSize.zw);
    // Bug fix: was declared 'float uv', silently truncating the 2D screen
    // coordinate to a scalar.
    float2 uv = posInput.positionSS;

    half3 unused;
    BSDFData bsdfData;
    FETCH_GBUFFER(gbuffer, _GBufferTexture, posInput.unPositionSS);
    DECODE_FROM_GBUFFER(gbuffer, 0xFFFFFFFF, bsdfData, unused);

    // Parameters used in coordinate conversion
    float3x3 proj = (float3x3)unity_CameraProjection;
    float2 p11_22 = float2(unity_CameraProjection._11, unity_CameraProjection._22);
    float2 p13_31 = float2(unity_CameraProjection._13, unity_CameraProjection._23);

    // View space normal and depth
    half3 norm_o = SampleNormal(bsdfData);
    float depth_o = SampleDepth(posInput.unPositionSS);

    // Reconstruct the view-space position.
    float3 vpos_o = ReconstructViewPos(uv, depth_o, p11_22, p13_31);

    float ao = 0.0;

    // TODO: Setup several variant based on number of sample count to avoid dynamic loop here
    for (int s = 0; s < _SampleCount; s++)
    {
        // Sample point, flipped into the hemisphere around the surface normal.
        float3 v_s1 = PickSamplePoint(uv, s);
        v_s1 = faceforward(v_s1, -norm_o, v_s1);
        float3 vpos_s1 = vpos_o + v_s1;

        // Reproject the sample point
        float3 spos_s1 = mul(proj, vpos_s1);
        float2 uv_s1_01 = (spos_s1.xy / vpos_s1.z + 1.0) * 0.5;

        // Depth at the sample point
        float depth_s1 = SampleDepth(uint2(uv_s1_01 * _ScreenSize.xy));

        // Relative position of the sample point
        float3 vpos_s2 = ReconstructViewPos(uv_s1_01, depth_s1, p11_22, p13_31);
        float3 v_s2 = vpos_s2 - vpos_o;

        // Estimate the obscurance value (kBeta suppresses self-shadowing
        // noise, kEpsilon prevents division underflow).
        float a1 = max(dot(v_s2, norm_o) - kBeta * depth_o, 0.0);
        float a2 = dot(v_s2, v_s2) + kEpsilon;
        ao += a1 / a2;
    }

    // Apply intensity normalization/amplifier/contrast.
    ao = pow(max(0, ao * _Radius * _Intensity / _SampleCount), kContrast);

    return PackAONormal(ao, norm_o);
}
#endif // UNITY_HDRENDERPIPELINE_AMBIENTOCCLUSION_ESTIMATION

9
Assets/ScriptableRenderPipeline/HDRenderPipeline/ScreenSpace.meta


fileFormatVersion: 2
guid: dfbd1e12822bf6f4695aedb562176965
folderAsset: yes
timeCreated: 1494634490
licenseType: Pro
DefaultImporter:
userData:
assetBundleName:
assetBundleVariant:

1001
Assets/ScriptableRenderPipeline/HDRenderPipeline/HDRenderPipeline.cs.orig
文件差异内容过多而无法显示
查看文件

/Assets/ScriptableRenderPipeline/HDRenderPipeline/ScreenSpace/AmbientOcclusion.meta → /Assets/ScriptableRenderPipeline/HDRenderPipeline/Lighting/AmbientOcclusion.meta

/Assets/ScriptableRenderPipeline/HDRenderPipeline/ScreenSpace/AmbientOcclusion/Resources.meta → /Assets/ScriptableRenderPipeline/HDRenderPipeline/Lighting/AmbientOcclusion/Resources.meta

/Assets/ScriptableRenderPipeline/HDRenderPipeline/ScreenSpace/AmbientOcclusion/Resources/AmbientOcclusion.shader.meta → /Assets/ScriptableRenderPipeline/HDRenderPipeline/Lighting/AmbientOcclusion/Resources/AmbientOcclusion.shader.meta

/Assets/ScriptableRenderPipeline/HDRenderPipeline/ScreenSpace/AmbientOcclusion/Resources/Common.hlsl.meta → /Assets/ScriptableRenderPipeline/HDRenderPipeline/Lighting/AmbientOcclusion/Resources/CommonAmbientOcclusion.hlsl.meta

/Assets/ScriptableRenderPipeline/HDRenderPipeline/ScreenSpace/AmbientOcclusion/Resources/Composition.hlsl.meta → /Assets/ScriptableRenderPipeline/HDRenderPipeline/Lighting/AmbientOcclusion/Resources/Composition.hlsl.meta

/Assets/ScriptableRenderPipeline/HDRenderPipeline/ScreenSpace/AmbientOcclusion/Resources/Denoising.hlsl.meta → /Assets/ScriptableRenderPipeline/HDRenderPipeline/Lighting/AmbientOcclusion/Resources/Denoising.hlsl.meta

/Assets/ScriptableRenderPipeline/HDRenderPipeline/ScreenSpace/AmbientOcclusion/Resources/Estimation.hlsl.meta → /Assets/ScriptableRenderPipeline/HDRenderPipeline/Lighting/AmbientOcclusion/Resources/Estimation.hlsl.meta

/Assets/ScriptableRenderPipeline/HDRenderPipeline/ScreenSpace/AmbientOcclusion/Resources/AmbientOcclusion.shader → /Assets/ScriptableRenderPipeline/HDRenderPipeline/Lighting/AmbientOcclusion/Resources/AmbientOcclusion.shader

/Assets/ScriptableRenderPipeline/HDRenderPipeline/ScreenSpace/AmbientOcclusion/AmbientOcclusion.cs.meta → /Assets/ScriptableRenderPipeline/HDRenderPipeline/Lighting/AmbientOcclusion/AmbientOcclusion.cs.meta

正在加载...
取消
保存