
Changed the way we handle reading from the currently bound depth buffer.

- The depth buffer is no longer copied on PS4.
- On other platforms, the depth buffer is copied only once, and the copy is used as a texture in every shader that needs it. The depth buffer render target is no longer changed. A condensed sketch of the new pattern follows the file list below.
/Branch_Batching2
Julien Ignace · 7 years ago
Current commit: 2b3c73fa
14 changed files with 115 additions and 89 deletions
  1. Assets/ScriptableRenderPipeline/HDRenderPipeline/Debug/Resources/DebugViewMaterialGBuffer.shader (4 changes)
  2. Assets/ScriptableRenderPipeline/HDRenderPipeline/Debug/Resources/DebugViewTiles.shader (7 changes)
  3. Assets/ScriptableRenderPipeline/HDRenderPipeline/HDRenderPipeline.asset (3 changes)
  4. Assets/ScriptableRenderPipeline/HDRenderPipeline/HDRenderPipeline.cs (122 changes)
  5. Assets/ScriptableRenderPipeline/HDRenderPipeline/Lighting/LightLoop.cs (2 changes)
  6. Assets/ScriptableRenderPipeline/HDRenderPipeline/Lighting/Resources/Deferred.shader (5 changes)
  7. Assets/ScriptableRenderPipeline/HDRenderPipeline/Lighting/TilePass/Resources/shadeopaque.compute (5 changes)
  8. Assets/ScriptableRenderPipeline/HDRenderPipeline/Lighting/TilePass/TilePass.cs (20 changes)
  9. Assets/ScriptableRenderPipeline/HDRenderPipeline/Material/Lit/Resources/CombineSubsurfaceScattering.shader (5 changes)
  10. Assets/ScriptableRenderPipeline/HDRenderPipeline/ShaderPass/ShaderPassForwardUnlit.hlsl (1 change)
  11. Assets/ScriptableRenderPipeline/HDRenderPipeline/ShaderVariables.hlsl (5 changes)
  12. Assets/ScriptableRenderPipeline/HDRenderPipeline/Sky/ProceduralSky/ProceduralSkyRenderer.cs (5 changes)
  13. Assets/ScriptableRenderPipeline/HDRenderPipeline/Sky/ProceduralSky/Resources/AtmosphericScattering.hlsl (18 changes)
  14. Assets/ScriptableRenderPipeline/HDRenderPipeline/Sky/ProceduralSky/Resources/SkyProcedural.shader (2 changes)
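
The core of the change, condensed into a short C# sketch. This is a paraphrase of the HDRenderPipeline.cs hunks shown below, not additional code from the commit; all names come from the diff itself:

    // Only PS4 is currently known to support reading from a depth buffer while it is bound;
    // every other platform works from a copy.
    bool NeedDepthBufferCopy()
    {
        return SystemInfo.graphicsDeviceType != GraphicsDeviceType.PlayStation4;
    }

    // Publish whatever shaders should read under a single name, "_MainDepthTexture":
    // the copy on most platforms, or the bound depth buffer itself on PS4.
    void CopyDepthBufferIfNeeded(ScriptableRenderContext renderContext)
    {
        var cmd = new CommandBuffer();
        if (NeedDepthBufferCopy())
            cmd.CopyTexture(m_CameraDepthStencilBufferRT, m_CameraDepthStencilBufferCopyRT); // GPU-side copy
        cmd.SetGlobalTexture("_MainDepthTexture", GetDepthTexture());
        renderContext.ExecuteCommandBuffer(cmd);
        cmd.Dispose();
    }

Because shaders only ever reference _MainDepthTexture, they do not need to know whether they are reading the live depth buffer or its copy.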

Assets/ScriptableRenderPipeline/HDRenderPipeline/Debug/Resources/DebugViewMaterialGBuffer.shader (4 changes)


DECLARE_GBUFFER_TEXTURE(_GBufferTexture);
TEXTURE2D_FLOAT(_CameraDepthTexture);
SAMPLER2D(sampler_CameraDepthTexture);
int _DebugViewMaterial;
struct Attributes

{
// input.positionCS is SV_Position
PositionInputs posInput = GetPositionInput(input.positionCS.xy, _ScreenSize.zw);
float depth = LOAD_TEXTURE2D(_CameraDepthTexture, posInput.unPositionSS).x;
float depth = LOAD_TEXTURE2D(_MainDepthTexture, posInput.unPositionSS).x;
UpdatePositionInput(depth, _InvViewProjMatrix, _ViewProjMatrix, posInput);
FETCH_GBUFFER(gbuffer, _GBufferTexture, posInput.unPositionSS);

Assets/ScriptableRenderPipeline/HDRenderPipeline/Debug/Resources/DebugViewTiles.shader (7 changes)


// variable declaration
//-------------------------------------------------------------------------------------
TEXTURE2D_FLOAT(_CameraDepthTexture);
SAMPLER2D(sampler_CameraDepthTexture);
uint _ViewTilesFlags;
float2 _MousePixelCoord;

{
// positionCS is SV_Position
PositionInputs posInput = GetPositionInput(positionCS.xy, _ScreenSize.zw);
float depth = LOAD_TEXTURE2D(_CameraDepthTexture, posInput.unPositionSS).x;
float depth = LOAD_TEXTURE2D(_MainDepthTexture, posInput.unPositionSS).x;
UpdatePositionInput(depth, _InvViewProjMatrix, _ViewProjMatrix, posInput);
int2 pixelCoord = posInput.unPositionSS.xy;

if (tileCoord.y < LIGHTCATEGORY_COUNT && tileCoord.x < maxLights + 3)
{
PositionInputs mousePosInput = GetPositionInput(_MousePixelCoord, _ScreenSize.zw);
float depthMouse = LOAD_TEXTURE2D(_CameraDepthTexture, mousePosInput.unPositionSS).x;
float depthMouse = LOAD_TEXTURE2D(_MainDepthTexture, mousePosInput.unPositionSS).x;
UpdatePositionInput(depthMouse, _InvViewProjMatrix, _ViewProjMatrix, mousePosInput);
uint category = (LIGHTCATEGORY_COUNT - 1) - tileCoord.y;

Assets/ScriptableRenderPipeline/HDRenderPipeline/HDRenderPipeline.asset (3 changes)


m_Script: {fileID: 11500000, guid: f365a473b136bef4797c7281a02cd510, type: 3}
m_Name: HDRenderPipeline
m_EditorClassIdentifier:
m_LightLoopProducer: {fileID: 0}
m_LightLoopProducer: {fileID: 11400000, guid: bf8cd9ae03ff7d54c89603e67be0bfc5,
type: 2}
globalDebugSettings:
debugOverlayRatio: 0.33
displayMaterialDebug: 0

Assets/ScriptableRenderPipeline/HDRenderPipeline/HDRenderPipeline.cs (122 changes)


readonly int m_CameraColorBuffer;
readonly int m_CameraSubsurfaceBuffer;
readonly int m_CameraFilteringBuffer;
readonly int m_CameraDepthStencilBuffer;
readonly int m_CameraDepthStencilBufferCopy; // This is temporary, we will need to provide the correct opaque depth buffer to transparent without needing a copy
readonly int m_CameraStencilBuffer;
readonly int m_VelocityBuffer;
readonly int m_DistortionBuffer;

readonly RenderTargetIdentifier m_CameraSubsurfaceBufferRT;
readonly RenderTargetIdentifier m_CameraFilteringBufferRT;
readonly RenderTargetIdentifier m_CameraDepthStencilBufferRT;
readonly RenderTargetIdentifier m_CameraDepthStencilBufferCopyRT;
// 'm_CameraStencilBufferRT' is a temporary copy of the stencil buffer and should be removed
// once we are able to read from the depth buffer and perform the stencil test simultaneously.
readonly RenderTargetIdentifier m_CameraStencilBufferRT;
private RenderTexture m_CameraDepthStencilBuffer = null;
private RenderTexture m_CameraDepthStencilBufferCopy = null;
private RenderTargetIdentifier m_CameraDepthStencilBufferRT;
private RenderTargetIdentifier m_CameraDepthStencilBufferCopyRT;
// Detect when window size is changing
int m_CurrentWidth;
int m_CurrentHeight;

m_CameraColorBuffer = Shader.PropertyToID("_CameraColorTexture");
m_CameraSubsurfaceBuffer = Shader.PropertyToID("_CameraSubsurfaceTexture");
m_CameraFilteringBuffer = Shader.PropertyToID("_CameraFilteringBuffer");
m_CameraDepthStencilBuffer = Shader.PropertyToID("_CameraDepthTexture");
m_CameraDepthStencilBufferCopy = Shader.PropertyToID("_CameraDepthTextureCopy");
m_CameraStencilBuffer = Shader.PropertyToID("_CameraStencilTexture");
m_CameraDepthStencilBufferRT = new RenderTargetIdentifier(m_CameraDepthStencilBuffer);
m_CameraDepthStencilBufferCopyRT = new RenderTargetIdentifier(m_CameraDepthStencilBufferCopy);
m_CameraStencilBufferRT = new RenderTargetIdentifier(m_CameraStencilBuffer);
m_DebugViewMaterialGBuffer = Utilities.CreateEngineMaterial("Hidden/HDRenderPipeline/DebugViewMaterialGBuffer");

};
#endif
void CreateDepthBuffer(Camera camera)
{
if (m_CameraDepthStencilBuffer != null)
{
m_CameraDepthStencilBuffer.Release();
}
m_CameraDepthStencilBuffer = new RenderTexture(camera.pixelWidth, camera.pixelHeight, 24, RenderTextureFormat.Depth);
m_CameraDepthStencilBuffer.filterMode = FilterMode.Point;
m_CameraDepthStencilBuffer.Create();
m_CameraDepthStencilBufferRT = new RenderTargetIdentifier(m_CameraDepthStencilBuffer);
if (NeedDepthBufferCopy())
{
if (m_CameraDepthStencilBufferCopy != null)
{
m_CameraDepthStencilBufferCopy.Release();
}
m_CameraDepthStencilBufferCopy = new RenderTexture(camera.pixelWidth, camera.pixelHeight, 24, RenderTextureFormat.Depth);
m_CameraDepthStencilBufferCopy.filterMode = FilterMode.Point;
m_CameraDepthStencilBufferCopy.Create();
m_CameraDepthStencilBufferCopyRT = new RenderTargetIdentifier(m_CameraDepthStencilBufferCopy);
}
}
void Resize(Camera camera)
{
// TODO: Detect when RenderDoc has just been loaded and force a resize in that case, as RenderDoc often requires resources to be reallocated.

if (m_LightLoop == null)
return;
if (camera.pixelWidth != m_CurrentWidth || camera.pixelHeight != m_CurrentHeight || m_LightLoop.NeedResize())
if (camera.pixelWidth != m_CurrentWidth || camera.pixelHeight != m_CurrentHeight)
if (m_CurrentWidth > 0 && m_CurrentHeight > 0)
CreateDepthBuffer(camera);
if(m_LightLoop.NeedResize())
m_LightLoop.ReleaseResolutionDependentBuffers();
}
if (m_CurrentWidth > 0 && m_CurrentHeight > 0)
{
m_LightLoop.ReleaseResolutionDependentBuffers();
}
m_LightLoop.AllocResolutionDependentBuffers(camera.pixelWidth, camera.pixelHeight);
m_LightLoop.AllocResolutionDependentBuffers(camera.pixelWidth, camera.pixelHeight);
}
// update recorded window resolution
m_CurrentWidth = camera.pixelWidth;

m_LightLoop.PushGlobalParams(hdCamera.camera, renderContext);
}
bool NeedDepthBufferCopy()
{
// For now we consider only PS4 to be able to read from a bound depth buffer. Need to test/implement for other platforms.
return SystemInfo.graphicsDeviceType != GraphicsDeviceType.PlayStation4;
}
Texture GetDepthTexture()
{
if (NeedDepthBufferCopy())
return m_CameraDepthStencilBufferCopy;
else
return m_CameraDepthStencilBuffer;
}
private void CopyDepthBufferIfNeeded(ScriptableRenderContext renderContext)
{
var cmd = new CommandBuffer();
if (NeedDepthBufferCopy())
{
using (new Utilities.ProfilingSample("Copy depth-stencil buffer", renderContext))
{
cmd.CopyTexture(m_CameraDepthStencilBufferRT, m_CameraDepthStencilBufferCopyRT);
}
}
cmd.SetGlobalTexture("_MainDepthTexture", GetDepthTexture());
renderContext.ExecuteCommandBuffer(cmd);
cmd.Dispose();
}
public override void Render(ScriptableRenderContext renderContext, Camera[] cameras)
{
base.Render(renderContext, cameras);

RenderForwardOnlyOpaqueDepthPrepass(cullResults, camera, renderContext);
RenderGBuffer(cullResults, camera, renderContext);
// 'm_CameraStencilBufferRT' is a temporary copy of the stencil buffer and should be removed
// once we are able to read from the depth buffer and perform the stencil test simultaneously.
using (new Utilities.ProfilingSample("Copy depth-stencil buffer", renderContext))
// If full forward rendering, we have not rendered anything yet, so there is no need to copy the buffer.
// If deferred, the depth buffer is complete (regular GBuffer + ForwardOnly depth prepass are done), so we can copy it safely.
if(!m_Owner.renderingSettings.useForwardRenderingOnly)
var cmd = new CommandBuffer();
cmd.CopyTexture(m_CameraDepthStencilBufferRT, m_CameraStencilBufferRT);
renderContext.ExecuteCommandBuffer(cmd);
cmd.Dispose();
CopyDepthBufferIfNeeded(renderContext);
}
if (globalDebugSettings.materialDebugSettings.debugViewMaterial != 0)

RenderForward(cullResults, camera, renderContext, true); // Render deferred or forward opaque
RenderForwardOnlyOpaque(cullResults, camera, renderContext);
// 'm_CameraDepthStencilBufferCopyRT' is a temporary copy of the depth texture and should be removed
// once we are able to read from the depth buffer during transparent pass.
using (new Utilities.ProfilingSample("Copy depth-stencil buffer after all opaque", renderContext))
// If full forward rendering, we have just rendered everything, so we can copy the depth buffer.
// If deferred, nothing needs to be copied anymore.
if(m_Owner.renderingSettings.useForwardRenderingOnly)
var cmd = new CommandBuffer();
cmd.CopyTexture(m_CameraDepthStencilBufferRT, m_CameraDepthStencilBufferCopyRT);
renderContext.ExecuteCommandBuffer(cmd);
cmd.Dispose();
CopyDepthBufferIfNeeded(renderContext);
}
RenderSky(hdCamera, renderContext);

if (enableSSS)
{
// Output split lighting for materials tagged with the SSS stencil bit.
m_LightLoop.RenderDeferredLighting(hdCamera, renderContext, globalDebugSettings.lightingDebugSettings, colorRTs, m_CameraStencilBufferRT, true, enableSSS);
m_LightLoop.RenderDeferredLighting(hdCamera, renderContext, globalDebugSettings.lightingDebugSettings, colorRTs, m_CameraDepthStencilBufferRT, new RenderTargetIdentifier(GetDepthTexture()), true, enableSSS);
m_LightLoop.RenderDeferredLighting(hdCamera, renderContext, globalDebugSettings.lightingDebugSettings, colorRTs, m_CameraStencilBufferRT, false, enableSSS);
m_LightLoop.RenderDeferredLighting(hdCamera, renderContext, globalDebugSettings.lightingDebugSettings, colorRTs, m_CameraDepthStencilBufferRT, new RenderTargetIdentifier(GetDepthTexture()), false, enableSSS);
}
// Combines specular lighting and diffuse lighting with subsurface scattering.

m_FilterSubsurfaceScattering.SetVectorArray("_HalfRcpWeightedVariances", sssParameters.halfRcpWeightedVariances);
cmd.SetGlobalTexture("_IrradianceSource", m_CameraSubsurfaceBufferRT);
Utilities.DrawFullScreen(cmd, m_FilterSubsurfaceScattering, hdCamera,
m_CameraFilteringBufferRT, m_CameraStencilBufferRT);
m_CameraFilteringBufferRT, m_CameraDepthStencilBufferRT);
// Perform the horizontal SSS filtering pass, and combine diffuse and specular lighting.
m_FilterAndCombineSubsurfaceScattering.SetMatrix("_InvProjMatrix", hdCamera.invProjectionMatrix);

Utilities.DrawFullScreen(cmd, m_FilterAndCombineSubsurfaceScattering, hdCamera,
m_CameraColorBufferRT, m_CameraStencilBufferRT);
m_CameraColorBufferRT, m_CameraDepthStencilBufferRT);
context.ExecuteCommandBuffer(cmd);
cmd.Dispose();

cmd.GetTemporaryRT(m_CameraColorBuffer, w, h, 0, FilterMode.Point, RenderTextureFormat.ARGBHalf, RenderTextureReadWrite.Linear, 1, true); // Enable UAV
cmd.GetTemporaryRT(m_CameraSubsurfaceBuffer, w, h, 0, FilterMode.Point, RenderTextureFormat.RGB111110Float, RenderTextureReadWrite.Linear, 1, true); // Enable UAV
cmd.GetTemporaryRT(m_CameraFilteringBuffer, w, h, 0, FilterMode.Point, RenderTextureFormat.RGB111110Float, RenderTextureReadWrite.Linear, 1, true); // Enable UAV
cmd.GetTemporaryRT(m_CameraDepthStencilBuffer, w, h, 24, FilterMode.Point, RenderTextureFormat.Depth);
cmd.GetTemporaryRT(m_CameraDepthStencilBufferCopy, w, h, 24, FilterMode.Point, RenderTextureFormat.Depth);
cmd.GetTemporaryRT(m_CameraStencilBuffer, w, h, 24, FilterMode.Point, RenderTextureFormat.Depth);
if (!m_Owner.renderingSettings.ShouldUseForwardRenderingOnly())
{
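
To make the scattered Render() hunks above easier to follow, here is a simplified outline of where the copy now happens. It only uses calls that appear in the diff, with everything else elided:

    RenderForwardOnlyOpaqueDepthPrepass(cullResults, camera, renderContext);
    RenderGBuffer(cullResults, camera, renderContext);
    if (!m_Owner.renderingSettings.useForwardRenderingOnly)
        CopyDepthBufferIfNeeded(renderContext);   // deferred: depth is complete after GBuffer + forward-only prepass
    RenderForward(cullResults, camera, renderContext, true);       // opaque
    RenderForwardOnlyOpaque(cullResults, camera, renderContext);
    if (m_Owner.renderingSettings.useForwardRenderingOnly)
        CopyDepthBufferIfNeeded(renderContext);   // forward-only: depth is complete only after all opaque rendering
    RenderSky(hdCamera, renderContext);           // sky and everything after it read _MainDepthTexture

Either way the copy is performed at most once per frame, which is what the commit message means by "copied only once".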

Assets/ScriptableRenderPipeline/HDRenderPipeline/Lighting/LightLoop.cs (2 changes)


public virtual void RenderDeferredLighting(HDCamera hdCamera, ScriptableRenderContext renderContext,
LightingDebugSettings lightDebugParameters,
RenderTargetIdentifier[] colorBuffers, RenderTargetIdentifier stencilBuffer,
RenderTargetIdentifier[] colorBuffers, RenderTargetIdentifier depthStencilBuffer, RenderTargetIdentifier depthStencilTexture,
bool outputSplitLightingForSSS, bool enableSSS) {}
public virtual void RenderForward(Camera camera, ScriptableRenderContext renderContext, bool renderOpaque) {}

Assets/ScriptableRenderPipeline/HDRenderPipeline/Lighting/Resources/Deferred.shader (5 changes)


DECLARE_GBUFFER_TEXTURE(_GBufferTexture);
TEXTURE2D_FLOAT(_CameraDepthTexture);
SAMPLER2D(sampler_CameraDepthTexture);
struct Attributes
{
uint vertexID : SV_VertexID;

{
// input.positionCS is SV_Position
PositionInputs posInput = GetPositionInput(input.positionCS.xy, _ScreenSize.zw);
float depth = LOAD_TEXTURE2D(_CameraDepthTexture, posInput.unPositionSS).x;
float depth = LOAD_TEXTURE2D(_MainDepthTexture, posInput.unPositionSS).x;
UpdatePositionInput(depth, _InvViewProjMatrix, _ViewProjMatrix, posInput);
float3 V = GetWorldSpaceNormalizeViewDir(posInput.positionWS);

Assets/ScriptableRenderPipeline/HDRenderPipeline/Lighting/TilePass/Resources/shadeopaque.compute (5 changes)


DECLARE_GBUFFER_TEXTURE(_GBufferTexture);
TEXTURE2D_FLOAT(_CameraDepthTexture);
SAMPLER2D(sampler_CameraDepthTexture);
#ifdef OUTPUT_SPLIT_LIGHTING
RWTexture2D<float4> specularLightingUAV;
RWTexture2D<float3> diffuseLightingUAV;

// input.positionCS is SV_Position
uint2 pixelCoord = dispatchThreadId;
PositionInputs posInput = GetPositionInput(pixelCoord.xy, _ScreenSize.zw);
float depth = LOAD_TEXTURE2D(_CameraDepthTexture, posInput.unPositionSS).x;
float depth = LOAD_TEXTURE2D(_MainDepthTexture, posInput.unPositionSS).x;
UpdatePositionInput(depth, _InvViewProjMatrix, _ViewProjMatrix, posInput);
float3 V = GetWorldSpaceNormalizeViewDir(posInput.positionWS);

Assets/ScriptableRenderPipeline/HDRenderPipeline/Lighting/TilePass/TilePass.cs (20 changes)


public override void RenderDeferredLighting(HDCamera hdCamera, ScriptableRenderContext renderContext,
LightingDebugSettings lightDebugSettings,
RenderTargetIdentifier[] colorBuffers, RenderTargetIdentifier stencilBuffer,
RenderTargetIdentifier[] colorBuffers, RenderTargetIdentifier depthStencilBuffer, RenderTargetIdentifier depthStencilTexture,
bool outputSplitLightingForSSS, bool enableSSS)
{
var bUseClusteredForDeferred = !usingFptl;

// This is a brute-force debug renderer, used to debug tile/cluster, which renders all the lights
if (outputSplitLightingForSSS)
{
Utilities.DrawFullScreen(cmd, m_SingleDeferredMaterialMRT, hdCamera, colorBuffers, stencilBuffer);
Utilities.DrawFullScreen(cmd, m_SingleDeferredMaterialMRT, hdCamera, colorBuffers, depthStencilBuffer);
Utilities.DrawFullScreen(cmd, m_SingleDeferredMaterialSRT, hdCamera, colorBuffers[0], stencilBuffer);
Utilities.DrawFullScreen(cmd, m_SingleDeferredMaterialSRT, hdCamera, colorBuffers[0], depthStencilBuffer);
}
}
else

// TODO: get rid of this by making global parameters visible to compute shaders
BindGlobalParams(cmd, shadeOpaqueShader, kernel, camera, renderContext);
cmd.SetComputeTextureParam(shadeOpaqueShader, kernel, "_CameraDepthTexture", Shader.PropertyToID("_CameraDepthTexture"));
cmd.SetComputeTextureParam(shadeOpaqueShader, kernel, "_MainDepthTexture", depthStencilTexture);
cmd.SetComputeTextureParam(shadeOpaqueShader, kernel, "_GBufferTexture0", Shader.PropertyToID("_GBufferTexture0"));
cmd.SetComputeTextureParam(shadeOpaqueShader, kernel, "_GBufferTexture1", Shader.PropertyToID("_GBufferTexture1"));
cmd.SetComputeTextureParam(shadeOpaqueShader, kernel, "_GBufferTexture2", Shader.PropertyToID("_GBufferTexture2"));

if (outputSplitLightingForSSS)
{
Utilities.SelectKeyword(m_DeferredDirectMaterialMRT, "USE_CLUSTERED_LIGHTLIST", "USE_FPTL_LIGHTLIST", bUseClusteredForDeferred);
Utilities.DrawFullScreen(cmd, m_DeferredDirectMaterialMRT, hdCamera, colorBuffers, stencilBuffer);
Utilities.DrawFullScreen(cmd, m_DeferredDirectMaterialMRT, hdCamera, colorBuffers, depthStencilBuffer);
Utilities.DrawFullScreen(cmd, m_DeferredIndirectMaterialMRT, hdCamera, colorBuffers, stencilBuffer);
Utilities.DrawFullScreen(cmd, m_DeferredIndirectMaterialMRT, hdCamera, colorBuffers, depthStencilBuffer);
Utilities.DrawFullScreen(cmd, m_DeferredDirectMaterialSRT, hdCamera, colorBuffers[0], stencilBuffer);
Utilities.DrawFullScreen(cmd, m_DeferredDirectMaterialSRT, hdCamera, colorBuffers[0], depthStencilBuffer);
Utilities.DrawFullScreen(cmd, m_DeferredIndirectMaterialSRT, hdCamera, colorBuffers[0], stencilBuffer);
Utilities.DrawFullScreen(cmd, m_DeferredIndirectMaterialSRT, hdCamera, colorBuffers[0], depthStencilBuffer);
}
}
else

Utilities.SelectKeyword(m_DeferredAllMaterialMRT, "USE_CLUSTERED_LIGHTLIST", "USE_FPTL_LIGHTLIST", bUseClusteredForDeferred);
Utilities.DrawFullScreen(cmd, m_DeferredAllMaterialMRT, hdCamera, colorBuffers, stencilBuffer);
Utilities.DrawFullScreen(cmd, m_DeferredAllMaterialMRT, hdCamera, colorBuffers, depthStencilBuffer);
Utilities.DrawFullScreen(cmd, m_DeferredAllMaterialSRT, hdCamera, colorBuffers[0], stencilBuffer);
Utilities.DrawFullScreen(cmd, m_DeferredAllMaterialSRT, hdCamera, colorBuffers[0], depthStencilBuffer);
}
}
}
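
The signature change above is worth spelling out: RenderDeferredLighting now receives two depth arguments instead of one. depthStencilBuffer is still bound as the depth-stencil target of the full-screen draws (so the stencil test for the SSS split keeps working), while depthStencilTexture is the readable depth (the copy, or the buffer itself on PS4) that gets bound to the compute shader as _MainDepthTexture; per the TODO in the hunk above, global parameters are not yet visible to compute shaders, hence the explicit SetComputeTextureParam. The call site in HDRenderPipeline.cs, copied from the diff with comments added here, looks like:

    m_LightLoop.RenderDeferredLighting(hdCamera, renderContext, globalDebugSettings.lightingDebugSettings,
        colorRTs,
        m_CameraDepthStencilBufferRT,                  // bound as the depth-stencil render target (stencil test)
        new RenderTargetIdentifier(GetDepthTexture()), // readable depth, ends up as _MainDepthTexture
        true, enableSSS);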

Assets/ScriptableRenderPipeline/HDRenderPipeline/Material/Lit/Resources/CombineSubsurfaceScattering.shader (5 changes)


float4 _HalfRcpWeightedVariances[N_PROFILES]; // RGB for chromatic, A for achromatic
float4x4 _InvProjMatrix;
TEXTURE2D_FLOAT(_CameraDepthTexture);
TEXTURE2D(_GBufferTexture2);
TEXTURE2D(_IrradianceSource);

float invDistScale = rcp(distScale);
// Reconstruct the view-space position.
float rawDepth = LOAD_TEXTURE2D(_CameraDepthTexture, posInput.unPositionSS).r;
float rawDepth = LOAD_TEXTURE2D(_MainDepthTexture, posInput.unPositionSS).r;
float3 centerPosVS = ComputeViewSpacePosition(posInput.positionSS, rawDepth, _InvProjMatrix);
// Compute the dimensions of the surface fragment viewed as a quad facing the camera.

samplePosition = posInput.unPositionSS + scaledDirection * _FilterKernels[profileID][i].a;
sampleWeight = _FilterKernels[profileID][i].rgb;
rawDepth = LOAD_TEXTURE2D(_CameraDepthTexture, samplePosition).r;
rawDepth = LOAD_TEXTURE2D(_MainDepthTexture, samplePosition).r;
sampleIrradiance = LOAD_TEXTURE2D(_IrradianceSource, samplePosition).rgb;
// Apply bilateral weighting.

Assets/ScriptableRenderPipeline/HDRenderPipeline/ShaderPass/ShaderPassForwardUnlit.hlsl (1 change)


BSDFData bsdfData = ConvertSurfaceDataToBSDFData(surfaceData);
return float4(Linear01Depth((_MainDepthTexture.Sample(sampler_MainDepthTexture, posInput.positionSS.xy).x), _ZBufferParams).xxx , 1.0);
// TODO: we must not access bsdfData here, it break the genericity of the code!
return float4(bsdfData.color + builtinData.emissiveColor, builtinData.opacity);
}

Assets/ScriptableRenderPipeline/HDRenderPipeline/ShaderVariables.hlsl (5 changes)


float4 unity_SHC;
CBUFFER_END
// Use the regular depth camera texture sampler for sampling this: sampler_CameraDepthTexture
TEXTURE2D_FLOAT(_CameraDepthTextureCopy);
SAMPLER2D(sampler_CameraDepthTextureCopy);
TEXTURE2D_FLOAT(_MainDepthTexture);
SAMPLER2D(sampler_MainDepthTexture);
// Main lightmap
TEXTURE2D(unity_Lightmap);

Assets/ScriptableRenderPipeline/HDRenderPipeline/Sky/ProceduralSky/ProceduralSkyRenderer.cs (5 changes)


var cmd = new CommandBuffer { name = "" };
if (!renderForCubemap)
{
cmd.SetGlobalTexture("_CameraDepthTexture", builtinParams.depthBuffer);
}
cmd.DrawMesh(builtinParams.skyMesh, Matrix4x4.identity, m_ProceduralSkyMaterial, 0, 0, properties);
builtinParams.renderContext.ExecuteCommandBuffer(cmd);
cmd.Dispose();

Assets/ScriptableRenderPipeline/HDRenderPipeline/Sky/ProceduralSky/Resources/AtmosphericScattering.hlsl (18 changes)


uniform float _MiePhaseAnisotropy;
uniform float _MieExtinctionFactor;
SAMPLER2D(sampler_CameraDepthTexture);
#define SRL_BilinearSampler sampler_CameraDepthTexture // Used for all textures
SAMPLER2D(sampler_MainDepthTexture);
#define SRL_BilinearSampler sampler_MainDepthTexture // Used for all textures
TEXTURE2D_FLOAT(_CameraDepthTexture);
TEXTURE2D_FLOAT(_MainDepthTexture);
TEXTURE2D(_OcclusionTexture);
float HenyeyGreensteinPhase(float g, float cosTheta) {

#if defined(ATMOSPHERICS_OCCLUSION_EDGE_FIXUP)
float4 baseUV = float4(uv.x, uv.y, 0.f, 0.f);
float cDepth = SAMPLE_TEXTURE2D_LOD(_CameraDepthTexture, SRL_BilinearSampler, baseUV, 0.f).r;
float cDepth = SAMPLE_TEXTURE2D_LOD(_MainDepthTexture, SRL_BilinearSampler, baseUV, 0.f).r;
baseUV.xy = uv + _DepthTextureScaledTexelSize.zy; xDepth.x = SAMPLE_TEXTURE2D_LOD(_CameraDepthTexture, SRL_BilinearSampler, baseUV);
baseUV.xy = uv + _DepthTextureScaledTexelSize.xy; xDepth.y = SAMPLE_TEXTURE2D_LOD(_CameraDepthTexture, SRL_BilinearSampler, baseUV);
baseUV.xy = uv + _DepthTextureScaledTexelSize.xw; xDepth.z = SAMPLE_TEXTURE2D_LOD(_CameraDepthTexture, SRL_BilinearSampler, baseUV);
baseUV.xy = uv + _DepthTextureScaledTexelSize.zw; xDepth.w = SAMPLE_TEXTURE2D_LOD(_CameraDepthTexture, SRL_BilinearSampler, baseUV);
baseUV.xy = uv + _DepthTextureScaledTexelSize.zy; xDepth.x = SAMPLE_TEXTURE2D_LOD(_MainDepthTexture, SRL_BilinearSampler, baseUV);
baseUV.xy = uv + _DepthTextureScaledTexelSize.xy; xDepth.y = SAMPLE_TEXTURE2D_LOD(_MainDepthTexture, SRL_BilinearSampler, baseUV);
baseUV.xy = uv + _DepthTextureScaledTexelSize.xw; xDepth.z = SAMPLE_TEXTURE2D_LOD(_MainDepthTexture, SRL_BilinearSampler, baseUV);
baseUV.xy = uv + _DepthTextureScaledTexelSize.zw; xDepth.w = SAMPLE_TEXTURE2D_LOD(_MainDepthTexture, SRL_BilinearSampler, baseUV);
xDepth.x = LinearEyeDepth(xDepth.x, _ZBufferParams);
xDepth.y = LinearEyeDepth(xDepth.y, _ZBufferParams);

#define SURFACE_SCATTER_APPLY(i, color) color += (i.worldPos + i.scatterCoords1.xyz + i.scatterCoords2.xyz) * 0.000001f
#endif
#endif //FILE_ATMOSPHERICSCATTERING
#endif //FILE_ATMOSPHERICSCATTERING

Assets/ScriptableRenderPipeline/HDRenderPipeline/Sky/ProceduralSky/Resources/SkyProcedural.shader (2 changes)


#ifdef PERFORM_SKY_OCCLUSION_TEST
// Determine whether the sky is occluded by the scene geometry.
// Do not perform blending with the environment map if the sky is occluded.
float depthRaw = max(skyDepth, LOAD_TEXTURE2D(_CameraDepthTexture, posInput.unPositionSS).r);
float depthRaw = max(skyDepth, LOAD_TEXTURE2D(_MainDepthTexture, posInput.unPositionSS).r);
float skyTexWeight = (depthRaw > skyDepth) ? 0.0 : 1.0;
#else
float depthRaw = skyDepth;
