
Global rename

positionSS                   -> positionNDC
unPositionSS                 -> positionSS
unTileCoord                  -> tileCoord
ComputeScreenSpacePosition() -> ComputeNormalizedDeviceCoordinates()
Evgenii Golubev, 7 years ago
Current commit: 0ed1b80b
29 changed files, with 113 additions and 118 deletions
  1. ScriptableRenderPipeline/Core/ShaderLibrary/Common.hlsl (67 changed lines)
  2. ScriptableRenderPipeline/Core/ShaderLibrary/Shadow/Shadow.hlsl (8 changed lines)
  3. ScriptableRenderPipeline/Core/ShaderLibrary/Tessellation.hlsl (6 changed lines)
  4. ScriptableRenderPipeline/HDRenderPipeline/Debug/DebugViewMaterialGBuffer.shader (6 changed lines)
  5. ScriptableRenderPipeline/HDRenderPipeline/Debug/DebugViewTiles.shader (8 changed lines)
  6. ScriptableRenderPipeline/HDRenderPipeline/Lighting/Deferred.shader (6 changed lines)
  7. ScriptableRenderPipeline/HDRenderPipeline/Lighting/TilePass/Deferred.compute (6 changed lines)
  8. ScriptableRenderPipeline/HDRenderPipeline/Lighting/TilePass/DeferredDirectionalShadow.compute (2 changed lines)
  9. ScriptableRenderPipeline/HDRenderPipeline/Lighting/TilePass/Shadow.hlsl (4 changed lines)
  10. ScriptableRenderPipeline/HDRenderPipeline/Lighting/TilePass/TilePassLoop.hlsl (4 changed lines)
  11. ScriptableRenderPipeline/HDRenderPipeline/Lighting/TilePass/materialflags.compute (2 changed lines)
  12. ScriptableRenderPipeline/HDRenderPipeline/Material/LayeredLit/LayeredLitData.hlsl (2 changed lines)
  13. ScriptableRenderPipeline/HDRenderPipeline/Material/Lit/Lit.hlsl (32 changed lines)
  14. ScriptableRenderPipeline/HDRenderPipeline/Material/Lit/LitData.hlsl (2 changed lines)
  15. ScriptableRenderPipeline/HDRenderPipeline/Material/Lit/Resources/CopyStencilBuffer.shader (4 changed lines)
  16. ScriptableRenderPipeline/HDRenderPipeline/Material/Lit/Resources/SubsurfaceScattering.compute (6 changed lines)
  17. ScriptableRenderPipeline/HDRenderPipeline/Material/Lit/Resources/SubsurfaceScattering.shader (10 changed lines)
  18. ScriptableRenderPipeline/HDRenderPipeline/RenderPipelineResources/CameraMotionVectors.shader (2 changed lines)
  19. ScriptableRenderPipeline/HDRenderPipeline/ShaderPass/FragInputs.hlsl (2 changed lines)
  20. ScriptableRenderPipeline/HDRenderPipeline/ShaderPass/ShaderPassDepthOnly.hlsl (6 changed lines)
  21. ScriptableRenderPipeline/HDRenderPipeline/ShaderPass/ShaderPassDistortion.hlsl (6 changed lines)
  22. ScriptableRenderPipeline/HDRenderPipeline/ShaderPass/ShaderPassForward.hlsl (6 changed lines)
  23. ScriptableRenderPipeline/HDRenderPipeline/ShaderPass/ShaderPassForwardUnlit.hlsl (6 changed lines)
  24. ScriptableRenderPipeline/HDRenderPipeline/ShaderPass/ShaderPassGBuffer.hlsl (8 changed lines)
  25. ScriptableRenderPipeline/HDRenderPipeline/ShaderPass/ShaderPassLightTransport.hlsl (6 changed lines)
  26. ScriptableRenderPipeline/HDRenderPipeline/ShaderPass/ShaderPassVelocity.hlsl (8 changed lines)
  27. ScriptableRenderPipeline/HDRenderPipeline/ShaderPass/VaryingMesh.hlsl (2 changed lines)
  28. ScriptableRenderPipeline/HDRenderPipeline/Sky/BlacksmithlSky/Resources/SkyBlacksmith.shader (2 changed lines)
  29. ScriptableRenderPipeline/HDRenderPipeline/Sky/OpaqueAtmosphericScattering.shader (2 changed lines)

ScriptableRenderPipeline/Core/ShaderLibrary/Common.hlsl (67 changed lines)


// Z buffer to linear depth.
// Correctly handles oblique view frustums. Only valid for projection matrices!
// Ref: An Efficient Depth Linearization Method for Oblique View Frustums, Eq. 6.
- float LinearEyeDepth(float2 positionSS, float deviceDepth, float4 invProjParam)
+ float LinearEyeDepth(float2 positionNDC, float deviceDepth, float4 invProjParam)
- float4 positionCS = float4(positionSS * 2.0 - 1.0, deviceDepth, 1.0);
+ float4 positionCS = float4(positionNDC * 2.0 - 1.0, deviceDepth, 1.0);
float viewSpaceZ = rcp(dot(positionCS, invProjParam));
// The view space uses a right-handed coordinate system.
return -viewSpaceZ;
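(For reference: per the comment above, this overload is the oblique-frustum-safe one; the two-argument LinearEyeDepth(deviceDepth, _ZBufferParams) overload used elsewhere in this diff is the cheaper path. A minimal usage sketch, assuming invProjParam is a per-camera vector precomputed from the inverse projection matrix as the Eq. 6 reference suggests; posInput comes from the PositionInputs helpers below:)

// Cheap path, as used for the depth-pyramid samples later in this diff.
float linearDepthFast    = LinearEyeDepth(deviceDepth, _ZBufferParams);
// Oblique-safe path, fed with the renamed positionNDC in [0, 1).
float linearDepthOblique = LinearEyeDepth(posInput.positionNDC, deviceDepth, invProjParam);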

// (position = positionCS) => (clipSpaceTransform = use default)
// (position = positionVS) => (clipSpaceTransform = UNITY_MATRIX_P)
// (position = positionWS) => (clipSpaceTransform = UNITY_MATRIX_VP)
- float2 ComputeScreenSpacePosition(float3 position, float4x4 clipSpaceTransform = k_identity4x4)
+ float2 ComputeNormalizedDeviceCoordinates(float3 position, float4x4 clipSpaceTransform = k_identity4x4)
- float2 positionSS = positionCS.xy * (rcp(positionCS.w) * 0.5) + 0.5;
+ float2 positionNDC = positionCS.xy * (rcp(positionCS.w) * 0.5) + 0.5;
- positionSS.y = 1.0 - positionSS.y;
+ positionNDC.y = 1.0 - positionNDC.y;
- return positionSS;
+ return positionNDC;
- float4 ComputeClipSpacePosition(float2 positionSS, float deviceDepth)
+ float4 ComputeClipSpacePosition(float2 positionNDC, float deviceDepth)
- positionSS.y = 1.0 - positionSS.y;
+ positionNDC.y = 1.0 - positionNDC.y;
- return float4(positionSS * 2.0 - 1.0, deviceDepth, 1.0);
+ return float4(positionNDC * 2.0 - 1.0, deviceDepth, 1.0);
- float3 ComputeViewSpacePosition(float2 positionSS, float deviceDepth, float4x4 invProjMatrix)
+ float3 ComputeViewSpacePosition(float2 positionNDC, float deviceDepth, float4x4 invProjMatrix)
- float4 positionCS = ComputeClipSpacePosition(positionSS, deviceDepth);
+ float4 positionCS = ComputeClipSpacePosition(positionNDC, deviceDepth);
float4 positionVS = mul(invProjMatrix, positionCS);
// The view space uses a right-handed coordinate system.
positionVS.z = -positionVS.z;

- float3 ComputeWorldSpacePosition(float2 positionSS, float deviceDepth, float4x4 invViewProjMatrix)
+ float3 ComputeWorldSpacePosition(float2 positionNDC, float deviceDepth, float4x4 invViewProjMatrix)
- float4 positionCS = ComputeClipSpacePosition(positionSS, deviceDepth);
+ float4 positionCS = ComputeClipSpacePosition(positionNDC, deviceDepth);
float4 hpositionWS = mul(invViewProjMatrix, positionCS);
return hpositionWS.xyz / hpositionWS.w;
}
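(A minimal sketch of how these helpers chain together after the rename, pieced together from the call sites later in this diff; _MainDepthTexture, _ScreenSize and UNITY_MATRIX_I_VP are the names those call sites use, and this is essentially what UpdatePositionInput below does internally:)

PositionInputs posInput = GetPositionInput(input.positionCS.xy, _ScreenSize.zw);
float deviceDepth = LOAD_TEXTURE2D(_MainDepthTexture, posInput.positionSS).x; // integer pixel coordinates
float3 positionWS = ComputeWorldSpacePosition(posInput.positionNDC, deviceDepth, UNITY_MATRIX_I_VP); // [0, 1) coordinates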

struct PositionInputs
{
// TODO: improve the naming convention.
// Some options:
// positionNDC, positionSS, tileCoordSS
// pixelCoordUV, pixelCoordSS, tileCoordSS
// pixelCoordSS, pixelIndexSS, tileIndexSS
- float3 positionWS; // World space position (could be camera-relative)
- float2 positionSS; // Screen space pixel position : [0, 1) (with the half-pixel offset)
- uint2 unPositionSS; // Screen space pixel index : [0, NumPixels)
- uint2 unTileCoord; // Screen space tile index : [0, NumTiles)
- float deviceDepth; // Depth from the depth buffer : [0, 1]
- float linearDepth; // View space Z coordinate : [Near, Far]
+ float3 positionWS; // World space position (could be camera-relative)
+ float2 positionNDC; // Normalized screen space UVs : [0, 1) (with the half-pixel offset)
+ uint2 positionSS; // Screen space pixel coordinates : [0, NumPixels)
+ uint2 tileCoord; // Screen space tile coordinates : [0, NumTiles)
+ float deviceDepth; // Depth from the depth buffer : [0, 1]
+ float linearDepth; // View space Z coordinate : [Near, Far]
- // If a compute shader call this function unPositionSS is an integer usually calculate like: uint2 unPositionSS = groupId.xy * BLOCK_SIZE + groupThreadId.xy
+ // If a compute shader call this function positionSS is an integer usually calculate like: uint2 positionSS = groupId.xy * BLOCK_SIZE + groupThreadId.xy
- PositionInputs GetPositionInput(float2 unPositionSS, float2 invScreenSize, uint2 unTileCoord) // Specify explicit tile coordinates so that we can easily make it lane invariant for compute evaluation.
+ PositionInputs GetPositionInput(float2 positionSS, float2 invScreenSize, uint2 tileCoord) // Specify explicit tile coordinates so that we can easily make it lane invariant for compute evaluation.
- posInput.positionSS = unPositionSS;
+ posInput.positionNDC = positionSS;
- posInput.positionSS.xy += float2(0.5, 0.5);
+ posInput.positionNDC.xy += float2(0.5, 0.5);
- posInput.positionSS *= invScreenSize;
+ posInput.positionNDC *= invScreenSize;
- posInput.unPositionSS = uint2(unPositionSS);
- posInput.unTileCoord = unTileCoord;
+ posInput.positionSS = uint2(positionSS);
+ posInput.tileCoord = tileCoord;
- PositionInputs GetPositionInput(float2 unPositionSS, float2 invScreenSize)
+ PositionInputs GetPositionInput(float2 positionSS, float2 invScreenSize)
- return GetPositionInput(unPositionSS, invScreenSize, uint2(0, 0));
+ return GetPositionInput(positionSS, invScreenSize, uint2(0, 0));
}
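(A minimal sketch of the two ways GetPositionInput is driven in this diff. The pixel-shader form mirrors the debug and deferred passes below; the compute-shader form follows the comment above, so BLOCK_SIZE, groupId and groupThreadId are that comment's assumptions, and tileCoord is passed explicitly so it stays lane invariant:)

// Pixel shader: SV_Position provides the pixel coordinates directly.
PositionInputs psInput = GetPositionInput(input.positionCS.xy, _ScreenSize.zw, uint2(input.positionCS.xy) / GetTileSize());
// Compute shader: rebuild the pixel coordinate from the dispatch IDs.
uint2 pixelCoord = groupId.xy * BLOCK_SIZE + groupThreadId.xy;
PositionInputs csInput = GetPositionInput(pixelCoord, _ScreenSize.zw, tileCoord);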
// From forward

void UpdatePositionInput(float deviceDepth, float4x4 invViewProjMatrix, float4x4 viewProjMatrix, inout PositionInputs posInput)
{
posInput.deviceDepth = deviceDepth;
- posInput.positionWS = ComputeWorldSpacePosition(posInput.positionSS, deviceDepth, invViewProjMatrix);
+ posInput.positionWS = ComputeWorldSpacePosition(posInput.positionNDC, deviceDepth, invViewProjMatrix);
// The compiler should optimize this (less expensive than reconstruct depth VS from depth buffer)
posInput.linearDepth = mul(viewProjMatrix, float4(posInput.positionWS, 1.0)).w;

// LOD dithering transition helper
// LOD0 must use this function with ditherFactor 1..0
// LOD1 must use this function with ditherFactor 0..1
- void LODDitheringTransition(uint2 unPositionSS, float ditherFactor)
+ void LODDitheringTransition(uint2 positionSS, float ditherFactor)
- float p = GenerateHashedRandomFloat(unPositionSS);
+ float p = GenerateHashedRandomFloat(positionSS);
// We want to have a symmetry between 0..0.5 ditherFactor and 0.5..1 so no pixels are transparent during the transition
// this is handled by this test which reverse the pattern

ScriptableRenderPipeline/Core/ShaderLibrary/Shadow/Shadow.hlsl (8 changed lines)


// shadow sampling prototypes
float GetPunctualShadowAttenuation( ShadowContext shadowContext, float3 positionWS, float3 normalWS, int shadowDataIndex, float4 L );
- float GetPunctualShadowAttenuation( ShadowContext shadowContext, float3 positionWS, float3 normalWS, int shadowDataIndex, float4 L, float2 unPositionSS );
+ float GetPunctualShadowAttenuation( ShadowContext shadowContext, float3 positionWS, float3 normalWS, int shadowDataIndex, float4 L, float2 positionSS );
- float GetDirectionalShadowAttenuation( ShadowContext shadowContext, float3 positionWS, float3 normalWS, int shadowDataIndex, float3 L, float2 unPositionSS );
+ float GetDirectionalShadowAttenuation( ShadowContext shadowContext, float3 positionWS, float3 normalWS, int shadowDataIndex, float3 L, float2 positionSS );
#include "ShadowSampling.hlsl" // sampling patterns (don't modify)
#include "ShadowAlgorithms.hlsl" // engine default algorithms (don't modify)

return EvalShadow_PunctualDepth(shadowContext, positionWS, normalWS, shadowDataIndex, L);
}
- float GetPunctualShadowAttenuation( ShadowContext shadowContext, float3 positionWS, float3 normalWS, int shadowDataIndex, float4 L, float2 unPositionSS )
+ float GetPunctualShadowAttenuation( ShadowContext shadowContext, float3 positionWS, float3 normalWS, int shadowDataIndex, float4 L, float2 positionSS )
{
return GetPunctualShadowAttenuation( shadowContext, positionWS, normalWS, shadowDataIndex, L );
}

return EvalShadow_CascadedDepth_Blend( shadowContext, positionWS, normalWS, shadowDataIndex, L );
}
- float GetDirectionalShadowAttenuation( ShadowContext shadowContext, float3 positionWS, float3 normalWS, int shadowDataIndex, float3 L, float2 unPositionSS )
+ float GetDirectionalShadowAttenuation( ShadowContext shadowContext, float3 positionWS, float3 normalWS, int shadowDataIndex, float3 L, float2 positionSS )
{
return GetDirectionalShadowAttenuation( shadowContext, positionWS, normalWS, shadowDataIndex, L );
}

ScriptableRenderPipeline/Core/ShaderLibrary/Tessellation.hlsl (6 changed lines)


float3 GetScreenSpaceTessFactor(float3 p0, float3 p1, float3 p2, float4x4 viewProjectionMatrix, float4 screenSize, float triangleSize)
{
// Get screen space adaptive scale factor
- float2 edgeScreenPosition0 = ComputeScreenSpacePosition(p0, viewProjectionMatrix) * screenSize.xy;
- float2 edgeScreenPosition1 = ComputeScreenSpacePosition(p1, viewProjectionMatrix) * screenSize.xy;
- float2 edgeScreenPosition2 = ComputeScreenSpacePosition(p2, viewProjectionMatrix) * screenSize.xy;
+ float2 edgeScreenPosition0 = ComputeNormalizedDeviceCoordinates(p0, viewProjectionMatrix) * screenSize.xy;
+ float2 edgeScreenPosition1 = ComputeNormalizedDeviceCoordinates(p1, viewProjectionMatrix) * screenSize.xy;
+ float2 edgeScreenPosition2 = ComputeNormalizedDeviceCoordinates(p2, viewProjectionMatrix) * screenSize.xy;
float EdgeScale = 1.0 / triangleSize; // Edge size in reality, but name is simpler
float3 tessFactor;
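(A sketch of how the pixel-space corner positions computed above typically become per-edge tessellation factors; the exact edge-to-factor mapping inside GetScreenSpaceTessFactor may differ, so treat this as illustrative only:)

// Edge length in pixels divided by the target on-screen triangle size.
tessFactor.x = distance(edgeScreenPosition1, edgeScreenPosition2) * EdgeScale;
tessFactor.y = distance(edgeScreenPosition2, edgeScreenPosition0) * EdgeScale;
tessFactor.z = distance(edgeScreenPosition0, edgeScreenPosition1) * EdgeScale;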

ScriptableRenderPipeline/HDRenderPipeline/Debug/DebugViewMaterialGBuffer.shader (6 changed lines)


{
// input.positionCS is SV_Position
PositionInputs posInput = GetPositionInput(input.positionCS.xy, _ScreenSize.zw);
- float depth = LOAD_TEXTURE2D(_MainDepthTexture, posInput.unPositionSS).x;
+ float depth = LOAD_TEXTURE2D(_MainDepthTexture, posInput.positionSS).x;
- DECODE_FROM_GBUFFER(posInput.unPositionSS, UINT_MAX, bsdfData, bakeLightingData.bakeDiffuseLighting);
+ DECODE_FROM_GBUFFER(posInput.positionSS, UINT_MAX, bsdfData, bakeLightingData.bakeDiffuseLighting);
- DecodeShadowMask(LOAD_TEXTURE2D(_ShadowMaskTexture, posInput.unPositionSS), bakeLightingData.bakeShadowMask);
+ DecodeShadowMask(LOAD_TEXTURE2D(_ShadowMaskTexture, posInput.positionSS), bakeLightingData.bakeShadowMask);
#endif
// Init to not expected value

ScriptableRenderPipeline/HDRenderPipeline/Debug/DebugViewTiles.shader (8 changed lines)


{
// positionCS is SV_Position
PositionInputs posInput = GetPositionInput(input.positionCS.xy, _ScreenSize.zw, uint2(input.positionCS.xy) / GetTileSize());
- float depth = LOAD_TEXTURE2D(_MainDepthTexture, posInput.unPositionSS).x;
+ float depth = LOAD_TEXTURE2D(_MainDepthTexture, posInput.positionSS).x;
- int2 pixelCoord = posInput.unPositionSS.xy;
+ int2 pixelCoord = posInput.positionSS.xy;
int2 tileCoord = (float2)pixelCoord / GetTileSize();
int2 mouseTileCoord = _MousePixelCoord / GetTileSize();
int2 offsetInTile = pixelCoord - tileCoord * GetTileSize();

// Tile overlap counter
if (n >= 0)
{
- result = OverlayHeatMap(int2(posInput.unPositionSS.xy) & (GetTileSize() - 1), n);
+ result = OverlayHeatMap(int2(posInput.positionSS.xy) & (GetTileSize() - 1), n);
}
#ifdef SHOW_LIGHT_CATEGORIES

if (tileCoord.y < LIGHTCATEGORY_COUNT && tileCoord.x < maxLights + 3)
{
PositionInputs mousePosInput = GetPositionInput(_MousePixelCoord, _ScreenSize.zw, mouseTileCoord);
- float depthMouse = LOAD_TEXTURE2D(_MainDepthTexture, mousePosInput.unPositionSS).x;
+ float depthMouse = LOAD_TEXTURE2D(_MainDepthTexture, mousePosInput.positionSS).x;
UpdatePositionInput(depthMouse, UNITY_MATRIX_I_VP, UNITY_MATRIX_VP, mousePosInput);
uint category = (LIGHTCATEGORY_COUNT - 1) - tileCoord.y;

ScriptableRenderPipeline/HDRenderPipeline/Lighting/Deferred.shader (6 changed lines)


// input.positionCS is SV_Position
PositionInputs posInput = GetPositionInput(input.positionCS.xy, _ScreenSize.zw, uint2(input.positionCS.xy) / GetTileSize());
- float depth = LOAD_TEXTURE2D(_MainDepthTexture, posInput.unPositionSS).x;
+ float depth = LOAD_TEXTURE2D(_MainDepthTexture, posInput.positionSS).x;
- DECODE_FROM_GBUFFER(posInput.unPositionSS, MATERIAL_FEATURE_MASK_FLAGS, bsdfData, bakeLightingData.bakeDiffuseLighting);
+ DECODE_FROM_GBUFFER(posInput.positionSS, MATERIAL_FEATURE_MASK_FLAGS, bsdfData, bakeLightingData.bakeDiffuseLighting);
- DecodeShadowMask(LOAD_TEXTURE2D(_ShadowMaskTexture, posInput.unPositionSS), bakeLightingData.bakeShadowMask);
+ DecodeShadowMask(LOAD_TEXTURE2D(_ShadowMaskTexture, posInput.positionSS), bakeLightingData.bakeShadowMask);
#endif
PreLightData preLightData = GetPreLightData(V, posInput, bsdfData);

ScriptableRenderPipeline/HDRenderPipeline/Lighting/TilePass/Deferred.compute (6 changed lines)


PositionInputs posInput = GetPositionInput(pixelCoord.xy, _ScreenSize.zw, tileCoord);
- float depth = LOAD_TEXTURE2D(_MainDepthTexture, posInput.unPositionSS).x;
+ float depth = LOAD_TEXTURE2D(_MainDepthTexture, posInput.positionSS).x;
// For indirect case: we can still overlap inside a tile with the sky/background, reject it
// Can't rely on stencil as we are in compute shader

BSDFData bsdfData;
BakeLightingData bakeLightingData;
- DECODE_FROM_GBUFFER(posInput.unPositionSS, featureFlags, bsdfData, bakeLightingData.bakeDiffuseLighting);
+ DECODE_FROM_GBUFFER(posInput.positionSS, featureFlags, bsdfData, bakeLightingData.bakeDiffuseLighting);
- DecodeShadowMask(LOAD_TEXTURE2D(_ShadowMaskTexture, posInput.unPositionSS), bakeLightingData.bakeShadowMask);
+ DecodeShadowMask(LOAD_TEXTURE2D(_ShadowMaskTexture, posInput.positionSS), bakeLightingData.bakeShadowMask);
#endif
PreLightData preLightData = GetPreLightData(V, posInput, bsdfData);

ScriptableRenderPipeline/HDRenderPipeline/Lighting/TilePass/DeferredDirectionalShadow.compute (2 changed lines)


PositionInputs posInput = GetPositionInput(pixelCoord.xy, _ScreenSize.zw, tileCoord);
- float depth = LOAD_TEXTURE2D(_MainDepthTexture, posInput.unPositionSS).x;
+ float depth = LOAD_TEXTURE2D(_MainDepthTexture, posInput.positionSS).x;
UpdatePositionInput(depth, UNITY_MATRIX_I_VP, UNITY_MATRIX_VP, posInput);
ShadowContext shadowContext = InitShadowContext();

ScriptableRenderPipeline/HDRenderPipeline/Lighting/TilePass/Shadow.hlsl (4 changed lines)


return EvalShadow_CascadedDepth_Blend( shadowContext, algo, tex, compSamp, positionWS, normalWS, shadowDataIndex, L );
}
- float GetDirectionalShadowAttenuation( ShadowContext shadowContext, float3 positionWS, float3 normalWS, int shadowDataIndex, float3 L, float2 unPositionSS )
+ float GetDirectionalShadowAttenuation( ShadowContext shadowContext, float3 positionWS, float3 normalWS, int shadowDataIndex, float3 L, float2 positionSS )
{
return GetDirectionalShadowAttenuation( shadowContext, positionWS, normalWS, shadowDataIndex, L );
}

return EvalShadow_PunctualDepth( shadowContext, algo, tex, compSamp, positionWS, normalWS, shadowDataIndex, L );
#endif
}
- float GetPunctualShadowAttenuation( ShadowContext shadowContext, float3 positionWS, float3 normalWS, int shadowDataIndex, float4 L, float2 unPositionSS )
+ float GetPunctualShadowAttenuation( ShadowContext shadowContext, float3 positionWS, float3 normalWS, int shadowDataIndex, float4 L, float2 positionSS )
{
return GetPunctualShadowAttenuation( shadowContext, positionWS, normalWS, shadowDataIndex, L );
}

ScriptableRenderPipeline/HDRenderPipeline/Lighting/TilePass/TilePassLoop.hlsl (4 changed lines)


// Calculate the offset in global light index light for current light category
int GetTileOffset(PositionInputs posInput, uint lightCategory)
{
- uint2 tileIndex = posInput.unTileCoord;
+ uint2 tileIndex = posInput.tileCoord;
return (tileIndex.y + lightCategory * _NumTileFtplY) * _NumTileFtplX + tileIndex.x;
}

void GetCountAndStartCluster(PositionInputs posInput, uint lightCategory, out uint start, out uint lightCount)
{
- uint2 tileIndex = posInput.unTileCoord;
+ uint2 tileIndex = posInput.tileCoord;
float logBase = g_fClustBase;
if (g_isLogBaseBufferEnabled)

ScriptableRenderPipeline/HDRenderPipeline/Lighting/TilePass/materialflags.compute (2 changed lines)


if (UnpackByte(LOAD_TEXTURE2D(_StencilTexture, uCrd).r) != STENCILLIGHTINGUSAGE_NO_LIGHTING) // This test is we are the sky/background or not
{
PositionInputs posInput = GetPositionInput(uCrd, invScreenSize);
- materialFeatureFlags |= MATERIAL_FEATURE_FLAGS_FROM_GBUFFER(posInput.unPositionSS);
+ materialFeatureFlags |= MATERIAL_FEATURE_FLAGS_FROM_GBUFFER(posInput.positionSS);
}
}

ScriptableRenderPipeline/HDRenderPipeline/Material/LayeredLit/LayeredLitData.hlsl (2 changed lines)


void GetSurfaceAndBuiltinData(FragInputs input, float3 V, inout PositionInputs posInput, out SurfaceData surfaceData, out BuiltinData builtinData)
{
#ifdef LOD_FADE_CROSSFADE // enable dithering LOD transition if user select CrossFade transition in LOD group
- LODDitheringTransition(posInput.unPositionSS, unity_LODFade.x);
+ LODDitheringTransition(posInput.positionSS, unity_LODFade.x);
#endif
ApplyDoubleSidedFlipOrMirror(input); // Apply double sided flip on the vertex normal

ScriptableRenderPipeline/HDRenderPipeline/Material/Lit/Lit.hlsl (32 changed lines)


uint2 depthSize = uint2(_PyramidDepthMipSize.xy);
// Get the depth of the approximated back plane
- float pyramidDepth = LOAD_TEXTURE2D_LOD(_PyramidDepthTexture, posInputs.positionSS * (depthSize >> 2), 2).r;
+ float pyramidDepth = LOAD_TEXTURE2D_LOD(_PyramidDepthTexture, posInputs.positionNDC * (depthSize >> 2), 2).r;
float depth = LinearEyeDepth(pyramidDepth, _ZBufferParams);
// Distance from point to the back plane

// Must be in sync with RT declared in HDRenderPipeline.cs ::Rebuild
void EncodeIntoGBuffer( SurfaceData surfaceData,
float3 bakeDiffuseLighting,
- uint2 unPositionSS,
+ uint2 positionSS,
#if SHADEROPTIONS_PACK_GBUFFER_IN_U16
out GBufferType0 outGBufferU0,
out GBufferType1 outGBufferU1

}
void DecodeFromGBuffer(
- uint2 unPositionSS,
+ uint2 positionSS,
- GBufferType0 inGBufferU0 = LOAD_TEXTURE2D(_GBufferTexture0, unPositionSS);
- GBufferType1 inGBufferU1 = LOAD_TEXTURE2D(_GBufferTexture1, unPositionSS);
+ GBufferType0 inGBufferU0 = LOAD_TEXTURE2D(_GBufferTexture0, positionSS);
+ GBufferType1 inGBufferU1 = LOAD_TEXTURE2D(_GBufferTexture1, positionSS);
- GBufferType0 inGBuffer0 = LOAD_TEXTURE2D(_GBufferTexture0, unPositionSS);
- GBufferType1 inGBuffer1 = LOAD_TEXTURE2D(_GBufferTexture1, unPositionSS);
- GBufferType2 inGBuffer2 = LOAD_TEXTURE2D(_GBufferTexture2, unPositionSS);
- GBufferType3 inGBuffer3 = LOAD_TEXTURE2D(_GBufferTexture3, unPositionSS);
+ GBufferType0 inGBuffer0 = LOAD_TEXTURE2D(_GBufferTexture0, positionSS);
+ GBufferType1 inGBuffer1 = LOAD_TEXTURE2D(_GBufferTexture1, positionSS);
+ GBufferType2 inGBuffer2 = LOAD_TEXTURE2D(_GBufferTexture2, positionSS);
+ GBufferType3 inGBuffer3 = LOAD_TEXTURE2D(_GBufferTexture3, positionSS);
#endif
ZERO_INITIALIZE(BSDFData, bsdfData);

// Function call from the material classification compute shader
// Note that as we store materialId on two buffer (for anisotropy case), the code need to load 2 RGBA8 buffer
- uint MaterialFeatureFlagsFromGBuffer(uint2 unPositionSS)
+ uint MaterialFeatureFlagsFromGBuffer(uint2 positionSS)
- unPositionSS,
+ positionSS,
UINT_MAX,
bsdfData,
unused

[branch] if (lightData.shadowIndex >= 0)
{
#ifdef _SURFACE_TYPE_TRANSPARENT
- shadow = GetDirectionalShadowAttenuation(lightLoopContext.shadowContext, positionWS, N, lightData.shadowIndex, L, posInput.unPositionSS);
+ shadow = GetDirectionalShadowAttenuation(lightLoopContext.shadowContext, positionWS, N, lightData.shadowIndex, L, posInput.positionSS);
- shadow = LOAD_TEXTURE2D(_DeferredShadowTexture, posInput.unPositionSS).x;
+ shadow = LOAD_TEXTURE2D(_DeferredShadowTexture, posInput.positionSS).x;
#endif
#ifdef SHADOWS_SHADOWMASK

// TODO: make projector lights cast shadows.
float3 offset = float3(0.0, 0.0, 0.0); // GetShadowPosOffset(nDotL, normal);
float4 L_dist = float4(L, sqrt(distSq));
- shadow = GetPunctualShadowAttenuation(lightLoopContext.shadowContext, positionWS + offset, N, lightData.shadowIndex, L_dist, posInput.unPositionSS);
+ shadow = GetPunctualShadowAttenuation(lightLoopContext.shadowContext, positionWS + offset, N, lightData.shadowIndex, L_dist, posInput.positionSS);
#ifdef SHADOWS_SHADOWMASK
// Note: Legacy Unity have two shadow mask mode. ShadowMask (ShadowMask contain static objects shadow and ShadowMap contain only dynamic objects shadow, final result is the minimun of both value)
// and ShadowMask_Distance (ShadowMask contain static objects shadow and ShadowMap contain everything and is blend with ShadowMask based on distance (Global distance setup in QualitySettigns)).

// Calculate screen space coordinates of refracted point in back plane
float4 refractedBackPointCS = mul(UNITY_MATRIX_VP, float4(refractedBackPointWS, 1.0));
- float2 refractedBackPointSS = ComputeScreenSpacePosition(refractedBackPointCS);
+ float2 refractedBackPointSS = ComputeNormalizedDeviceCoordinates(refractedBackPointCS);
uint2 depthSize = uint2(_PyramidDepthMipSize.xy);
float refractedBackPointDepth = LinearEyeDepth(LOAD_TEXTURE2D_LOD(_PyramidDepthTexture, refractedBackPointSS * depthSize, 0).r, _ZBufferParams);

// Ambient occlusion use for indirect lighting (reflection probe, baked diffuse lighting)
#ifndef _SURFACE_TYPE_TRANSPARENT
- float indirectAmbientOcclusion = 1.0 - LOAD_TEXTURE2D(_AmbientOcclusionTexture, posInput.unPositionSS).x;
+ float indirectAmbientOcclusion = 1.0 - LOAD_TEXTURE2D(_AmbientOcclusionTexture, posInput.positionSS).x;
// Ambient occlusion use for direct lighting (directional, punctual, area)
float directAmbientOcclusion = lerp(1.0, indirectAmbientOcclusion, _AmbientOcclusionParam.w);
#else

ScriptableRenderPipeline/HDRenderPipeline/Material/Lit/LitData.hlsl (2 changed lines)


void GetSurfaceAndBuiltinData(FragInputs input, float3 V, inout PositionInputs posInput, out SurfaceData surfaceData, out BuiltinData builtinData)
{
#ifdef LOD_FADE_CROSSFADE // enable dithering LOD transition if user select CrossFade transition in LOD group
- LODDitheringTransition(posInput.unPositionSS, unity_LODFade.x);
+ LODDitheringTransition(posInput.positionSS, unity_LODFade.x);
#endif
ApplyDoubleSidedFlipOrMirror(input); // Apply double sided flip on the vertex normal

ScriptableRenderPipeline/HDRenderPipeline/Material/Lit/Resources/CopyStencilBuffer.shader (4 changed lines)


[earlydepthstencil]
float4 Frag(Varyings input) : SV_Target // use SV_StencilRef in D3D 11.3+
{
- uint2 positionSS = (uint2)input.positionCS.xy;
+ uint2 positionNDC = (uint2)input.positionCS.xy;
- _HTile[positionSS / 8] = _StencilRef;
+ _HTile[positionNDC / 8] = _StencilRef;
#endif
return PackByte(_StencilRef);

ScriptableRenderPipeline/HDRenderPipeline/Material/Lit/Resources/SubsurfaceScattering.compute (6 changed lines)


#if SSS_USE_TANGENT_PLANE
float3 relPosVS = vec.x * tangentX + vec.y * tangentY;
float3 positionVS = centerPosVS + relPosVS;
- float2 positionSS = ComputeScreenSpacePosition(positionCS, projMatrix);
+ float2 positionNDC = ComputeNormalizedDeviceCoordinates(positionCS, projMatrix);
- position = (int2)(positionSS * _ScreenSize.xy);
+ position = (int2)(positionNDC * _ScreenSize.xy);
xy2 = dot(relPosVS.xy, relPosVS.xy);
#else
position = (int2)(centerCoord + vec * pixelsPerMm);

float maxDistance = _ShapeParams[profileID].a;
// Reconstruct the view-space position corresponding to the central sample.
- float2 centerPosSS = posInput.positionSS;
+ float2 centerPosSS = posInput.positionNDC;
float2 cornerPosSS = centerPosSS + 0.5 * _ScreenSize.zw;
float3 centerPosVS = ComputeViewSpacePosition(centerPosSS, centerDepth, UNITY_MATRIX_I_P);
float3 cornerPosVS = ComputeViewSpacePosition(cornerPosSS, centerDepth, UNITY_MATRIX_I_P);

ScriptableRenderPipeline/HDRenderPipeline/Material/Lit/Resources/SubsurfaceScattering.shader (10 changed lines)


BSDFData bsdfData;
float3 unused;
- DECODE_FROM_GBUFFER(posInput.unPositionSS, featureFlags, bsdfData, unused);
+ DECODE_FROM_GBUFFER(posInput.positionSS, featureFlags, bsdfData, unused);
int profileID = bsdfData.subsurfaceProfile;
float distScale = bsdfData.subsurfaceRadius;

// TODO: copy its neighborhood into LDS.
- float2 centerPosition = posInput.unPositionSS;
+ float2 centerPosition = posInput.positionSS;
- float2 centerPosSS = posInput.positionSS;
+ float2 centerPosSS = posInput.positionNDC;
float2 cornerPosSS = centerPosSS + 0.5 * _ScreenSize.zw;
float centerDepth = LOAD_TEXTURE2D(_MainDepthTexture, centerPosition).r;
float3 centerPosVS = ComputeViewSpacePosition(centerPosSS, centerDepth, UNITY_MATRIX_I_P);

#endif
// Take the first (central) sample.
- float2 samplePosition = posInput.unPositionSS;
+ float2 samplePosition = posInput.positionSS;
float3 sampleWeight = _FilterKernelsBasic[profileID][0].rgb;
float3 sampleIrradiance = LOAD_TEXTURE2D(_IrradianceSource, samplePosition).rgb;

[unroll]
for (int i = 1; i < SSS_BASIC_N_SAMPLES; i++)
{
- samplePosition = posInput.unPositionSS + rotatedDirection * _FilterKernelsBasic[profileID][i].a;
+ samplePosition = posInput.positionSS + rotatedDirection * _FilterKernelsBasic[profileID][i].a;
sampleWeight = _FilterKernelsBasic[profileID][i].rgb;
sampleIrradiance = LOAD_TEXTURE2D(_IrradianceSource, samplePosition).rgb;

ScriptableRenderPipeline/HDRenderPipeline/RenderPipelineResources/CameraMotionVectors.shader (2 changed lines)


float4 Frag(Varyings input) : SV_Target
{
PositionInputs posInput = GetPositionInput(input.positionCS.xy, _ScreenSize.zw);
- float depth = LOAD_TEXTURE2D(_MainDepthTexture, posInput.unPositionSS).x;
+ float depth = LOAD_TEXTURE2D(_MainDepthTexture, posInput.positionSS).x;
UpdatePositionInput(depth, UNITY_MATRIX_I_VP, UNITY_MATRIX_VP, posInput);
float4 worldPos = float4(posInput.positionWS, 1.0);
float4 prevPos = worldPos;

ScriptableRenderPipeline/HDRenderPipeline/ShaderPass/FragInputs.hlsl (2 changed lines)


// Contain value return by SV_POSITION (That is name positionCS in PackedVarying).
// xy: unormalized screen position (offset by 0.5), z: device depth, w: depth in view space
// Note: SV_POSITION is the result of the clip space position provide to the vertex shaders that is transform by the viewport
- float4 unPositionSS; // In case depth offset is use, positionWS.w is equal to depth offset
+ float4 positionSS; // In case depth offset is use, positionWS.w is equal to depth offset
float3 positionWS;
float2 texCoord0;
float2 texCoord1;

ScriptableRenderPipeline/HDRenderPipeline/ShaderPass/ShaderPassDepthOnly.hlsl (6 changed lines)


{
FragInputs input = UnpackVaryingsMeshToFragInputs(packedInput.vmesh);
- // input.unPositionSS is SV_Position
- PositionInputs posInput = GetPositionInput(input.unPositionSS.xy, _ScreenSize.zw);
- UpdatePositionInput(input.unPositionSS.z, input.unPositionSS.w, input.positionWS, posInput);
+ // input.positionSS is SV_Position
+ PositionInputs posInput = GetPositionInput(input.positionSS.xy, _ScreenSize.zw);
+ UpdatePositionInput(input.positionSS.z, input.positionSS.w, input.positionWS, posInput);
float3 V = GetWorldSpaceNormalizeViewDir(input.positionWS);
SurfaceData surfaceData;

ScriptableRenderPipeline/HDRenderPipeline/ShaderPass/ShaderPassDistortion.hlsl (6 changed lines)


{
FragInputs input = UnpackVaryingsMeshToFragInputs(packedInput.vmesh);
- // input.unPositionSS is SV_Position
- PositionInputs posInput = GetPositionInput(input.unPositionSS.xy, _ScreenSize.zw);
- UpdatePositionInput(input.unPositionSS.z, input.unPositionSS.w, input.positionWS, posInput);
+ // input.positionSS is SV_Position
+ PositionInputs posInput = GetPositionInput(input.positionSS.xy, _ScreenSize.zw);
+ UpdatePositionInput(input.positionSS.z, input.positionSS.w, input.positionWS, posInput);
float3 V = GetWorldSpaceNormalizeViewDir(input.positionWS);
// Perform alpha testing + get distortion

ScriptableRenderPipeline/HDRenderPipeline/ShaderPass/ShaderPassForward.hlsl (6 changed lines)


{
FragInputs input = UnpackVaryingsMeshToFragInputs(packedInput.vmesh);
- // input.unPositionSS is SV_Position
- PositionInputs posInput = GetPositionInput(input.unPositionSS.xy, _ScreenSize.zw, uint2(input.unPositionSS.xy) / GetTileSize());
- UpdatePositionInput(input.unPositionSS.z, input.unPositionSS.w, input.positionWS, posInput);
+ // input.positionSS is SV_Position
+ PositionInputs posInput = GetPositionInput(input.positionSS.xy, _ScreenSize.zw, uint2(input.positionSS.xy) / GetTileSize());
+ UpdatePositionInput(input.positionSS.z, input.positionSS.w, input.positionWS, posInput);
float3 V = GetWorldSpaceNormalizeViewDir(input.positionWS);
SurfaceData surfaceData;

ScriptableRenderPipeline/HDRenderPipeline/ShaderPass/ShaderPassForwardUnlit.hlsl (6 changed lines)


{
FragInputs input = UnpackVaryingsMeshToFragInputs(packedInput.vmesh);
- // input.unPositionSS is SV_Position
- PositionInputs posInput = GetPositionInput(input.unPositionSS.xy, _ScreenSize.zw);
- UpdatePositionInput(input.unPositionSS.z, input.unPositionSS.w, input.positionWS, posInput);
+ // input.positionSS is SV_Position
+ PositionInputs posInput = GetPositionInput(input.positionSS.xy, _ScreenSize.zw);
+ UpdatePositionInput(input.positionSS.z, input.positionSS.w, input.positionWS, posInput);
float3 V = GetWorldSpaceNormalizeViewDir(input.positionWS);
SurfaceData surfaceData;

ScriptableRenderPipeline/HDRenderPipeline/ShaderPass/ShaderPassGBuffer.hlsl (8 changed lines)


{
FragInputs input = UnpackVaryingsMeshToFragInputs(packedInput.vmesh);
- // input.unPositionSS is SV_Position
- PositionInputs posInput = GetPositionInput(input.unPositionSS.xy, _ScreenSize.zw);
- UpdatePositionInput(input.unPositionSS.z, input.unPositionSS.w, input.positionWS, posInput);
+ // input.positionSS is SV_Position
+ PositionInputs posInput = GetPositionInput(input.positionSS.xy, _ScreenSize.zw);
+ UpdatePositionInput(input.positionSS.z, input.positionSS.w, input.positionWS, posInput);
float3 V = GetWorldSpaceNormalizeViewDir(input.positionWS);
SurfaceData surfaceData;

float3 bakeDiffuseLighting = GetBakedDiffuseLigthing(surfaceData, builtinData, bsdfData, preLightData);
- ENCODE_INTO_GBUFFER(surfaceData, bakeDiffuseLighting, posInput.unPositionSS, outGBuffer);
+ ENCODE_INTO_GBUFFER(surfaceData, bakeDiffuseLighting, posInput.positionSS, outGBuffer);
ENCODE_SHADOWMASK_INTO_GBUFFER(float4(builtinData.shadowMask0, builtinData.shadowMask1, builtinData.shadowMask2, builtinData.shadowMask3), outShadowMaskBuffer);
ENCODE_VELOCITY_INTO_GBUFFER(builtinData.velocity, outVelocityBuffer);

ScriptableRenderPipeline/HDRenderPipeline/ShaderPass/ShaderPassLightTransport.hlsl (6 changed lines)


{
FragInputs input = UnpackVaryingsMeshToFragInputs(packedInput.vmesh);
- // input.unPositionSS is SV_Position
- PositionInputs posInput = GetPositionInput(input.unPositionSS.xy, _ScreenSize.zw);
- UpdatePositionInput(input.unPositionSS.z, input.unPositionSS.w, input.positionWS, posInput);
+ // input.positionSS is SV_Position
+ PositionInputs posInput = GetPositionInput(input.positionSS.xy, _ScreenSize.zw);
+ UpdatePositionInput(input.positionSS.z, input.positionSS.w, input.positionWS, posInput);
// No position and depth in case of light transport
float3 V = float3(0.0, 0.0, 1.0); // No vector view in case of light transport

ScriptableRenderPipeline/HDRenderPipeline/ShaderPass/ShaderPassVelocity.hlsl (8 changed lines)


struct VaryingsPassToPS
{
// Note: Z component is not use currently
- // This is the clip space position. Warning, do not confuse with the value of positionCS in PackedVarying which is SV_POSITION and store in unPositionSS
+ // This is the clip space position. Warning, do not confuse with the value of positionCS in PackedVarying which is SV_POSITION and store in positionSS
float4 positionCS;
float4 previousPositionCS;
};

{
FragInputs input = UnpackVaryingsMeshToFragInputs(packedInput.vmesh);
- // input.unPositionSS is SV_Position
- PositionInputs posInput = GetPositionInput(input.unPositionSS.xy, _ScreenSize.zw);
- UpdatePositionInput(input.unPositionSS.z, input.unPositionSS.w, input.positionWS, posInput);
+ // input.positionSS is SV_Position
+ PositionInputs posInput = GetPositionInput(input.positionSS.xy, _ScreenSize.zw);
+ UpdatePositionInput(input.positionSS.z, input.positionSS.w, input.positionWS, posInput);
float3 V = GetWorldSpaceNormalizeViewDir(input.positionWS);
// Perform alpha testing + get velocity

ScriptableRenderPipeline/HDRenderPipeline/ShaderPass/VaryingMesh.hlsl (2 changed lines)


{
FragInputs output = InitializeFragInputs();
- output.unPositionSS = input.positionCS; // input.positionCS is SV_Position
+ output.positionSS = input.positionCS; // input.positionCS is SV_Position
#ifdef VARYINGS_NEED_POSITION_WS
output.positionWS.xyz = input.interpolators0.xyz;

ScriptableRenderPipeline/HDRenderPipeline/Sky/BlacksmithlSky/Resources/SkyBlacksmith.shader (2 changed lines)


#ifdef PERFORM_SKY_OCCLUSION_TEST
// Determine whether the sky is occluded by the scene geometry.
// Do not perform blending with the environment map if the sky is occluded.
- float deviceDepth = max(_SkyDepth, LOAD_TEXTURE2D(_MainDepthTexture, posInput.unPositionSS).r);
+ float deviceDepth = max(_SkyDepth, LOAD_TEXTURE2D(_MainDepthTexture, posInput.positionSS).r);
float skyTexWeight = (deviceDepth > _SkyDepth) ? 0.0 : 1.0;
#else
float deviceDepth = _SkyDepth;

ScriptableRenderPipeline/HDRenderPipeline/Sky/OpaqueAtmosphericScattering.shader (2 changed lines)


float4 Frag(Varyings input) : SV_Target
{
PositionInputs posInput = GetPositionInput(input.positionCS.xy, _ScreenSize.zw);
- float depth = LOAD_TEXTURE2D(_MainDepthTexture, posInput.unPositionSS).x;
+ float depth = LOAD_TEXTURE2D(_MainDepthTexture, posInput.positionSS).x;
UpdatePositionInput(depth, UNITY_MATRIX_I_VP, UNITY_MATRIX_VP, posInput);
return EvaluateAtmosphericScattering(posInput);
