
Merge remote-tracking branch 'refs/remotes/origin/master' into Improve-forward-opaque-pass

sebastienlagarde, 7 years ago
Current commit
2bc6907f
40 files changed, with 750 insertions and 377 deletions
  1. 148  ScriptableRenderPipeline/Core/CoreRP/GeometryUtils.cs
  2. 14  ScriptableRenderPipeline/Core/CoreRP/ShaderLibrary/Common.hlsl
  3. 20  ScriptableRenderPipeline/HDRenderPipeline/HDRP/Camera/HDCamera.cs
  4. 3  ScriptableRenderPipeline/HDRenderPipeline/HDRP/HDRenderPipeline.cs
  5. 8  ScriptableRenderPipeline/HDRenderPipeline/HDRP/HDStringConstants.cs
  6. 2  ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Deferred.shader
  7. 2  ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightEvaluation.hlsl
  8. 2  ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightLoop/Deferred.compute
  9. 2  ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightLoop/LightLoop.cs
  10. 10  ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/Resources/VolumetricLighting.compute
  11. 31  ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/VBuffer.hlsl
  12. 125  ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/VolumetricLighting.cs
  13. 18  ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/DiffusionProfile/DiffusionProfileSettings.cs
  14. 44  ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/Lit/Lit.hlsl
  15. 3  ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/SubsurfaceScattering/SubsurfaceScattering.compute
  16. 35  ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/SubsurfaceScattering/SubsurfaceScattering.hlsl
  17. 3  ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/SubsurfaceScattering/SubsurfaceScattering.shader
  18. 2  ScriptableRenderPipeline/HDRenderPipeline/HDRP/ShaderPass/ShaderPassForward.hlsl
  19. 8  ScriptableRenderPipeline/HDRenderPipeline/HDRP/ShaderVariables.hlsl
  20. 2  ScriptableRenderPipeline/HDRenderPipeline/HDRP/Sky/AtmosphericScattering/AtmosphericScattering.hlsl
  21. 5  ScriptableRenderPipeline/LightweightPipeline/LWRP/Data/LightweightPipelineAsset.cs
  22. 2  ScriptableRenderPipeline/LightweightPipeline/LWRP/Data/LightweightPipelineResources.asset
  23. 1  ScriptableRenderPipeline/LightweightPipeline/LWRP/Data/LightweightPipelineResources.cs
  24. 122  ScriptableRenderPipeline/LightweightPipeline/LWRP/LightweightPipeline.cs
  25. 30  ScriptableRenderPipeline/LightweightPipeline/LWRP/ShaderLibrary/Core.hlsl
  26. 9  ScriptableRenderPipeline/LightweightPipeline/LWRP/ShaderLibrary/Lighting.hlsl
  27. 4  ScriptableRenderPipeline/LightweightPipeline/LWRP/ShaderLibrary/LightweightPassLit.hlsl
  28. 6  ScriptableRenderPipeline/LightweightPipeline/LWRP/ShaderLibrary/LightweightPassShadow.hlsl
  29. 151  ScriptableRenderPipeline/LightweightPipeline/LWRP/ShaderLibrary/Shadows.hlsl
  30. 8  ScriptableRenderPipeline/LightweightPipeline/LWRP/Shaders/LightweightStandard.shader
  31. 8  ScriptableRenderPipeline/LightweightPipeline/LWRP/Shaders/LightweightStandardSimpleLighting.shader
  32. 8  ScriptableRenderPipeline/LightweightPipeline/LWRP/Shaders/LightweightStandardTerrain.shader
  33. 48  ScriptableRenderPipeline/Core/CoreRP/GeometryUtils.cs.hlsl
  34. 9  ScriptableRenderPipeline/Core/CoreRP/GeometryUtils.cs.hlsl.meta
  35. 59  ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/HomogeneousDensityVolume.cs
  36. 11  ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/HomogeneousDensityVolume.cs.meta
  37. 85  ScriptableRenderPipeline/LightweightPipeline/LWRP/Shaders/LightweightScreenSpaceShadows.shader
  38. 9  ScriptableRenderPipeline/LightweightPipeline/LWRP/Shaders/LightweightScreenSpaceShadows.shader.meta
  39. 13  ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/HomogeneousFog.cs.meta
  40. 57  ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/HomogeneousFog.cs

148
ScriptableRenderPipeline/Core/CoreRP/GeometryUtils.cs


namespace UnityEngine.Experimental.Rendering
{
public struct Frustum
{
public Plane[] planes; // Left, right, top, bottom, near, far
public Vector3[] corners; // Positions of the 8 corners
// The frustum will be camera-relative if given a camera-relative VP matrix.
public static Frustum Create(Matrix4x4 viewProjMatrix, bool depth_0_1, bool reverseZ)
{
Frustum frustum = new Frustum();
frustum.planes = new Plane[6];
frustum.corners = new Vector3[8];
GeometryUtility.CalculateFrustumPlanes(viewProjMatrix, frustum.planes);
float nd = -1.0f;
if (depth_0_1)
{
nd = 0.0f;
// See "Fast Extraction of Viewing Frustum Planes" by Gribb and Hartmann.
Vector3 f = new Vector3(viewProjMatrix.m20, viewProjMatrix.m21, viewProjMatrix.m22);
float s = (float)(1.0 / Math.Sqrt(f.sqrMagnitude));
Plane np = new Plane(s * f, s * viewProjMatrix.m23);
frustum.planes[4] = np;
}
if (reverseZ)
{
Plane tmp = frustum.planes[4];
frustum.planes[4] = frustum.planes[5];
frustum.planes[5] = tmp;
}
Matrix4x4 invViewProjMatrix = viewProjMatrix.inverse;
// Unproject 8 frustum points.
frustum.corners[0] = invViewProjMatrix.MultiplyPoint(new Vector3(-1, -1, 1));
frustum.corners[1] = invViewProjMatrix.MultiplyPoint(new Vector3( 1, -1, 1));
frustum.corners[2] = invViewProjMatrix.MultiplyPoint(new Vector3(-1, 1, 1));
frustum.corners[3] = invViewProjMatrix.MultiplyPoint(new Vector3( 1, 1, 1));
frustum.corners[4] = invViewProjMatrix.MultiplyPoint(new Vector3(-1, -1, nd));
frustum.corners[5] = invViewProjMatrix.MultiplyPoint(new Vector3( 1, -1, nd));
frustum.corners[6] = invViewProjMatrix.MultiplyPoint(new Vector3(-1, 1, nd));
frustum.corners[7] = invViewProjMatrix.MultiplyPoint(new Vector3( 1, 1, nd));
return frustum;
}
} // struct Frustum
[GenerateHLSL]
public struct OrientedBBox
{
public Vector3 center;
public float extentX;
public Vector3 right;
public float extentY;
public Vector3 up;
public float extentZ;
public static OrientedBBox Create(Transform t)
{
OrientedBBox obb = new OrientedBBox();
obb.center = t.position;
obb.right = t.right;
obb.up = t.up;
obb.extentX = 0.5f * t.localScale.x;
obb.extentY = 0.5f * t.localScale.y;
obb.extentZ = 0.5f * t.localScale.z;
return obb;
}
} // struct OrientedBBox
// Returns 'true' if the OBB intersects (or is inside) the frustum, 'false' otherwise.
// 'cameraRelativeOffset' can be used to intersect a world-space OBB with a camera-relative frustum.
public static bool Overlap(OrientedBBox obb, Vector3 cameraRelativeOffset,
Frustum frustum, int numPlanes, int numCorners)
{
Vector3 center = obb.center + cameraRelativeOffset;
Vector3 forward = Vector3.Cross(obb.up, obb.right);
bool overlap = true;
// Test the OBB against frustum planes. Frustum planes are inward-facing.
// The OBB is outside if it's entirely behind one of the frustum planes.
// See "Real-Time Rendering", 3rd Edition, 16.10.2.
for (int i = 0; overlap && i < numPlanes; i++)
{
Vector3 n = frustum.planes[i].normal;
float d = frustum.planes[i].distance;
// Max projection of the half-diagonal onto the normal (always positive).
float maxHalfDiagProj = obb.extentX * Mathf.Abs(Vector3.Dot(n, obb.right))
+ obb.extentY * Mathf.Abs(Vector3.Dot(n, obb.up))
+ obb.extentZ * Mathf.Abs(Vector3.Dot(n, forward));
// Negative distance -> center behind the plane (outside).
float centerToPlaneDist = Vector3.Dot(n, center) + d;
// outside = maxHalfDiagProj < -centerToPlaneDist
// outside = maxHalfDiagProj + centerToPlaneDist < 0
// overlap = overlap && !outside
overlap = overlap && (maxHalfDiagProj + centerToPlaneDist >= 0);
}
if (numCorners == 0) return overlap;
// Test the frustum corners against OBB planes. The OBB planes are outward-facing.
// The frustum is outside if all of its corners are entirely in front of one of the OBB planes.
// See "Correct Frustum Culling" by Inigo Quilez.
// We can exploit the symmetry of the box by only testing against 3 planes rather than 6.
Plane[] planes = new Plane[3];
planes[0].normal = obb.right;
planes[0].distance = obb.extentX;
planes[1].normal = obb.up;
planes[1].distance = obb.extentY;
planes[2].normal = forward;
planes[2].distance = obb.extentZ;
for (int i = 0; overlap && i < 3; i++)
{
Plane plane = planes[i];
// We need a separate counter for the "box fully inside frustum" case.
bool outsidePos = true; // Positive normal
bool outsideNeg = true; // Reversed normal
// Merge 2 loops. Continue as long as all points are outside either plane.
for (int j = 0; j < numCorners; j++)
{
float proj = Vector3.Dot(plane.normal, frustum.corners[j] - center);
outsidePos = outsidePos && ( proj > plane.distance);
outsideNeg = outsideNeg && (-proj > plane.distance);
}
overlap = overlap && !(outsidePos || outsideNeg);
}
return overlap;
}
public static readonly Matrix4x4 FlipMatrixLHSRHS = Matrix4x4.Scale(new Vector3(1, 1, -1));
public static Vector4 Plane(Vector3 position, Vector3 normal)

else
return Matrix4x4.Perspective(camera.fieldOfView, camera.aspect, camera.nearClipPlane, camera.farClipPlane);
}
}
} // class GeometryUtils
}
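
For orientation, a minimal usage sketch of the new culling API (not part of the commit; the MonoBehaviour, camera and volume references are hypothetical, and zeroing the view translation is only one way to build a camera-relative matrix):

using UnityEngine;
using UnityEngine.Experimental.Rendering;

// Hypothetical component: cull a single volume against the camera frustum on the CPU.
public class VolumeCullingExample : MonoBehaviour
{
    public Camera cam;        // hypothetical camera reference
    public Transform volume;  // hypothetical density volume transform

    void Update()
    {
        // One way to build a camera-relative view-projection matrix: remove the view translation.
        Matrix4x4 view = cam.worldToCameraMatrix;
        view.SetColumn(3, new Vector4(0f, 0f, 0f, 1f));
        Matrix4x4 viewProj = GL.GetGPUProjectionMatrix(cam.projectionMatrix, true) * view;

        // depth_0_1 = true, reverseZ = true mirrors the HDCamera call in this commit.
        Frustum frustum = Frustum.Create(viewProj, true, true);
        OrientedBBox obb = OrientedBBox.Create(volume);

        // The world-space OBB is shifted into camera-relative space via the offset argument.
        bool visible = GeometryUtils.Overlap(obb, -cam.transform.position, frustum, 6, 8);
        Debug.Log(visible ? "volume overlaps frustum" : "volume culled");
    }
}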

14
ScriptableRenderPipeline/Core/CoreRP/ShaderLibrary/Common.hlsl


#endif // #ifndef real
// SHADER_TARGET in compute shaders is only reported starting with 2018.2; for now, define our own.
// (Note: only target 4.5 and above supports compute shaders.)
#ifdef SHADER_STAGE_COMPUTE
# ifndef SHADER_TARGET
# if defined(SHADER_API_METAL) || defined(SHADER_API_VULKAN)
# define SHADER_TARGET 45
# else
# define SHADER_TARGET 50
# endif
# endif
#endif
// Include language header
#if defined(SHADER_API_D3D11)
#include "API/D3D11.hlsl"

#if (SHADER_TARGET >= 45)
uint FastLog2(uint x)
{
return firstbithigh(x) - 1u;
return firstbithigh(x);
}
#endif
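
For clarity, a C# rendition of the corrected FastLog2 (illustration only, not in the diff): the index of the highest set bit is already floor(log2(x)), which is what firstbithigh(x) returns, so the old "- 1u" under-counted by one.

// Illustrative C# equivalent of the GPU intrinsic path above.
static uint FastLog2(uint x)
{
    // Index of the highest set bit equals floor(log2(x)) for x > 0.
    uint log = 0;
    while ((x >>= 1) != 0) log++;
    return log;
}
// FastLog2(1) == 0, FastLog2(8) == 3, FastLog2(255) == 7.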

20
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Camera/HDCamera.cs


public Matrix4x4 projMatrix;
public Matrix4x4 nonJitteredProjMatrix;
public Vector4 screenSize;
public Plane[] frustumPlanes;
public Frustum frustum;
public Vector4[] frustumPlaneEquations;
public Camera camera;
public uint taaFrameIndex;

public HDCamera(Camera cam)
{
camera = cam;
frustumPlanes = new Plane[6];
frustum = new Frustum();
frustumPlaneEquations = new Vector4[6];
viewMatrixStereo = new Matrix4x4[2];

prevViewProjMatrix *= cameraDisplacement; // Now prevViewProjMatrix correctly transforms this frame's camera-relative positionWS
}
// Warning: near and far planes appear to be broken (or rather far plane seems broken)
GeometryUtility.CalculateFrustumPlanes(viewProjMatrix, frustumPlanes);
frustum = Frustum.Create(viewProjMatrix, true, true);
for (int i = 0; i < 4; i++)
// Left, right, top, bottom, near, far.
for (int i = 0; i < 6; i++)
// Left, right, top, bottom.
frustumPlaneEquations[i] = new Vector4(frustumPlanes[i].normal.x, frustumPlanes[i].normal.y, frustumPlanes[i].normal.z, frustumPlanes[i].distance);
frustumPlaneEquations[i] = new Vector4(frustum.planes[i].normal.x, frustum.planes[i].normal.y, frustum.planes[i].normal.z, frustum.planes[i].distance);
// Near, far.
Vector4 forward = (camera.cameraType == CameraType.Reflection) ? camera.worldToCameraMatrix.GetRow(2) : new Vector4(camera.transform.forward.x, camera.transform.forward.y, camera.transform.forward.z, 0.0f);
// We need to switch the forward direction based on handedness (Reminder: regular cameras have a negative determinant in Unity, while reflection probes follow the DX convention and have a positive determinant)
forward = viewParam.x < 0.0f ? forward : -forward;
frustumPlaneEquations[4] = new Vector4( forward.x, forward.y, forward.z, -Vector3.Dot(forward, relPos) - camera.nearClipPlane);
frustumPlaneEquations[5] = new Vector4(-forward.x, -forward.y, -forward.z, Vector3.Dot(forward, relPos) + camera.farClipPlane);
m_LastFrameActive = Time.frameCount;
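
As a side note, the packed Vector4 plane equations above can be consumed as plain signed-distance tests; a small illustrative C# sketch (not from the commit), using UnityEngine vector types:

// xyz = inward-facing plane normal, w = distance; a non-negative signed distance means "inside".
static bool InsideAllPlanes(Vector4[] planeEquations, Vector3 positionRWS)
{
    foreach (Vector4 p in planeEquations)
    {
        float signedDist = p.x * positionRWS.x + p.y * positionRWS.y + p.z * positionRWS.z + p.w;
        if (signedDist < 0f)
            return false; // behind an inward-facing plane -> outside the frustum
    }
    return true;
}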

3
ScriptableRenderPipeline/HDRenderPipeline/HDRP/HDRenderPipeline.cs


}
}
// The pass only requires the volume properties, and can run async.
m_VolumetricLightingModule.VoxelizeDensityVolumes(hdCamera, cmd);
// Render the volumetric lighting.
// The pass requires the volume properties, the light list and the shadows, and can run async.
m_VolumetricLightingModule.VolumetricLightingPass(hdCamera, cmd, m_FrameSettings);

8
ScriptableRenderPipeline/HDRenderPipeline/HDRP/HDStringConstants.cs


public static readonly int _Result1 = Shader.PropertyToID("_Result1");
public static readonly int _AmbientProbeCoeffs = Shader.PropertyToID("_AmbientProbeCoeffs");
public static readonly int _GlobalFog_Extinction = Shader.PropertyToID("_GlobalFog_Extinction");
public static readonly int _GlobalFog_Scattering = Shader.PropertyToID("_GlobalFog_Scattering");
public static readonly int _GlobalFog_Asymmetry = Shader.PropertyToID("_GlobalFog_Asymmetry");
public static readonly int _GlobalExtinction = Shader.PropertyToID("_GlobalExtinction");
public static readonly int _GlobalScattering = Shader.PropertyToID("_GlobalScattering");
public static readonly int _GlobalAsymmetry = Shader.PropertyToID("_GlobalAsymmetry");
public static readonly int _VBufferScaleAndSliceCount = Shader.PropertyToID("_VBufferScaleAndSliceCount");
public static readonly int _VBufferSliceCount = Shader.PropertyToID("_VBufferSliceCount");
public static readonly int _VBufferDepthEncodingParams = Shader.PropertyToID("_VBufferDepthEncodingParams");
public static readonly int _VBufferDepthDecodingParams = Shader.PropertyToID("_VBufferDepthDecodingParams");
public static readonly int _VBufferCoordToViewDirWS = Shader.PropertyToID("_VBufferCoordToViewDirWS");

2
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Deferred.shader


Outputs outputs;
#ifdef OUTPUT_SPLIT_LIGHTING
if (_EnableSubsurfaceScattering != 0 && PixelHasSubsurfaceScattering(bsdfData))
if (_EnableSubsurfaceScattering != 0 && ShouldOutputSplitLighting(bsdfData))
{
outputs.specularLighting = float4(specularLighting, 1.0);
outputs.diffuseLighting = TagLightingForSSS(diffuseLighting);

2
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightEvaluation.hlsl


#if (SHADEROPTIONS_VOLUMETRIC_LIGHTING_PRESET != 0)
float distVol = (lightData.lightType == GPULIGHTTYPE_PROJECTOR_BOX) ? distances.w : distances.x;
attenuation *= TransmittanceHomogeneousMedium(_GlobalFog_Extinction, distVol);
attenuation *= TransmittanceHomogeneousMedium(_GlobalExtinction, distVol);
#endif
// Projector lights always have cookies, so we can perform clipping inside the if().

2
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightLoop/Deferred.compute


float3 specularLighting;
LightLoop(V, posInput, preLightData, bsdfData, bakeLightingData, featureFlags, diffuseLighting, specularLighting);
if (_EnableSubsurfaceScattering != 0 && PixelHasSubsurfaceScattering(bsdfData))
if (_EnableSubsurfaceScattering != 0 && ShouldOutputSplitLighting(bsdfData))
{
specularLightingUAV[pixelCoord] = float4(specularLighting, 1.0);
diffuseLightingUAV[pixelCoord] = TagLightingForSSS(diffuseLighting);

2
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightLoop/LightLoop.cs


public const int k_MaxCascadeCount = 4; //Should be not less than m_Settings.directionalLightCascadeCount;
static readonly Vector3 k_BoxCullingExtentThreshold = Vector3.one * 0.01f;
// Static keyword is required here else we get a "DestroyBuffer can only be call in main thread"
// Static keyword is required here else we get a "DestroyBuffer can only be called from the main thread"
static ComputeBuffer s_DirectionalLightDatas = null;
static ComputeBuffer s_LightDatas = null;
static ComputeBuffer s_EnvLightDatas = null;

10
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/Resources/VolumetricLighting.compute


CBUFFER_START(UnityVolumetricLighting)
float4 _VBufferSampleOffset; // {x, y, z}, w = rendered frame count
float4x4 _VBufferCoordToViewDirWS; // Actually just 3x3, but Unity can only set 4x4
float _CornetteShanksConstant; // CornetteShanksPhasePartConstant(_GlobalFog_Asymmetry)
float _CornetteShanksConstant; // CornetteShanksPhasePartConstant(_GlobalAsymmetry)
CBUFFER_END
//--------------------------------------------------------------------------------------------------

// Sample the participating medium at 'tc' (or 'centerWS').
// We consider it to be constant along the interval [t0, t1] (within the voxel).
// TODO: piecewise linear.
float3 scattering = _GlobalFog_Scattering;
float extinction = max(_GlobalFog_Extinction, FLT_MIN); // Avoid NaNs
float asymmetry = _GlobalFog_Asymmetry;
float3 scattering = _GlobalScattering;
float extinction = max(_GlobalExtinction, FLT_MIN); // Avoid NaNs
float asymmetry = _GlobalAsymmetry;
// TODO: define a function ComputeGlobalFogCoefficients(float3 centerWS),
// which allows procedural definition of extinction and scattering.

float reprojZ = mul(_PrevViewProjMatrix, float4(centerWS, 1)).w;
float4 reprojValue = SampleVBuffer(TEXTURE3D_PARAM(_VBufferLightingHistory, s_trilinear_clamp_sampler),
false, reprojPosNDC, reprojZ,
_VBufferScaleAndSliceCount,
_VBufferSliceCount.xy,
_VBufferDepthEncodingParams,
_VBufferDepthDecodingParams);

31
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/VBuffer.hlsl


// Therefore, given 'logEncodedDepth', we compute a new depth value
// which allows us to perform HW interpolation which is linear in the view space.
float ComputeLerpPositionForLogEncoding(float linearDepth, float logEncodedDepth,
float4 VBufferScaleAndSliceCount,
float2 VBufferSliceCount,
float numSlices = VBufferScaleAndSliceCount.z;
float rcpNumSlices = VBufferScaleAndSliceCount.w;
float numSlices = VBufferSliceCount.x;
float rcpNumSlices = VBufferSliceCount.y;
float s0 = floor(d * numSlices - 0.5);
float s1 = ceil(d * numSlices - 0.5);

// If (clampToEdge == false), out-of-bounds loads return 0.
float4 SampleVBuffer(TEXTURE3D_ARGS(VBufferLighting, trilinearSampler), bool clampToEdge,
float2 positionNDC, float linearDepth,
float4 VBufferScaleAndSliceCount,
float2 VBufferSliceCount,
float numSlices = VBufferScaleAndSliceCount.z;
float rcpNumSlices = VBufferScaleAndSliceCount.w;
float numSlices = VBufferSliceCount.x;
float rcpNumSlices = VBufferSliceCount.y;
// Account for the visible area of the V-Buffer.
float2 uv = positionNDC * VBufferScaleAndSliceCount.xy;
float2 uv = positionNDC;
// The distance between slices is log-encoded.
float z = linearDepth;

float w = d;
#else
// Adjust the texture coordinate for HW trilinear sampling.
float w = ComputeLerpPositionForLogEncoding(z, d, VBufferScaleAndSliceCount, VBufferDepthDecodingParams);
float w = ComputeLerpPositionForLogEncoding(z, d, VBufferSliceCount, VBufferDepthDecodingParams);
#endif
return SAMPLE_TEXTURE3D_LOD(VBufferLighting, trilinearSampler, float3(uv, w), 0);

float4 SampleInScatteredRadianceAndTransmittance(TEXTURE3D_ARGS(VBufferLighting, trilinearSampler),
float2 positionNDC, float linearDepth,
float4 VBufferResolution,
float4 VBufferScaleAndSliceCount,
float2 VBufferSliceCount,
float4 VBufferDepthEncodingParams,
float4 VBufferDepthDecodingParams)
{

VBufferScaleAndSliceCount,
VBufferSliceCount,
#else
// Perform biquadratic reconstruction in XY, linear in Z, using 4x trilinear taps.
// Account for the visible area of the V-Buffer.
float2 xy = positionNDC * (VBufferResolution.xy * VBufferScaleAndSliceCount.xy);
#else // Perform biquadratic reconstruction in XY, linear in Z, using 4x trilinear taps.
float2 uv = positionNDC;
float2 xy = uv * VBufferResolution.xy;
float2 ic = floor(xy);
float2 fc = frac(xy);

float w = d;
#else
// Adjust the texture coordinate for HW trilinear sampling.
float w = ComputeLerpPositionForLogEncoding(z, d, VBufferScaleAndSliceCount, VBufferDepthDecodingParams);
float w = ComputeLerpPositionForLogEncoding(z, d, VBufferSliceCount, VBufferDepthDecodingParams);
#endif
float2 weights[2], offsets[2];

125
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/VolumetricLighting.cs


[Serializable]
public class VolumeParameters
{
public Bounds bounds; // Position and dimensions in meters
public bool isLocal; // Enables voxelization
public Color albedo; // Single scattering albedo [0, 1]
public float meanFreePath; // In meters [1, inf]. Should be chromatic - this is an optimization!
public float asymmetry; // Single global parameter for all volumes. TODO: UX

bounds = new Bounds(Vector3.zero, Vector3.positiveInfinity);
isLocal = true;
public bool IsVolumeUnbounded()
public bool IsLocalVolume()
return bounds.size.x == float.PositiveInfinity &&
bounds.size.y == float.PositiveInfinity &&
bounds.size.z == float.PositiveInfinity;
return isLocal;
}
public Vector3 GetAbsorptionCoefficient()

public void Constrain()
{
bounds.size = Vector3.Max(bounds.size, Vector3.zero);
albedo.r = Mathf.Clamp01(albedo.r);
albedo.g = Mathf.Clamp01(albedo.g);
albedo.b = Mathf.Clamp01(albedo.b);

public long viewID = -1; // -1 is invalid; positive for Game Views, 0 otherwise
public RenderTexture[] lightingRTEX = null;
public RenderTargetIdentifier[] lightingRTID = null;
public RenderTexture densityRTEX = null;
public RenderTargetIdentifier densityRTID = -1; // RenderTargetIdentifier cannot be NULL
public RenderTargetIdentifier GetLightingIntegralBuffer() // Of the current frame
{

return lightingRTID[1 + ((Time.renderedFrameCount + 1) & 1)];
}
public RenderTargetIdentifier GetDensityBuffer()
{
Debug.Assert(viewID >= 0);
return densityRTID;
}
public void Create(long viewID, int w, int h, int d)
{
Debug.Assert(viewID >= 0);

ComputeShader m_VolumetricLightingCS = null;
List<VBuffer> m_VBuffers = null;
float m_VBufferNearPlane = 0.5f; // Distance in meters; dynamic modifications not handled by reprojection
float m_VBufferFarPlane = 64.0f; // Distance in meters; dynamic modifications not handled by reprojection
const float k_LogScale = 0.5f;
List<VBuffer> m_VBuffers = null;
List<OrientedBBox> m_VisibleVolumes = null;
List<VolumeProperties> m_VisibleVolumeProperties = null;
public const int k_MaxVisibleVolumeCount = 512;
// Static keyword is required here else we get a "DestroyBuffer can only be called from the main thread"
static ComputeBuffer s_VisibleVolumesBuffer = null;
static ComputeBuffer s_VisibleVolumePropertiesBuffer = null;
float m_VBufferNearPlane = 0.5f; // Distance in meters; dynamic modifications not handled by reprojection
float m_VBufferFarPlane = 64.0f; // Distance in meters; dynamic modifications not handled by reprojection
const float k_LogScale = 0.5f;
m_VolumetricLightingCS = asset.renderPipelineResources.volumetricLightingCS;
m_VBuffers = new List<VBuffer>(0);
m_VolumetricLightingCS = asset.renderPipelineResources.volumetricLightingCS;
m_VBuffers = new List<VBuffer>();
m_VisibleVolumes = new List<OrientedBBox>();
m_VisibleVolumeProperties = new List<VolumeProperties>();
s_VisibleVolumesBuffer = new ComputeBuffer(k_MaxVisibleVolumeCount, System.Runtime.InteropServices.Marshal.SizeOf(typeof(OrientedBBox)));
s_VisibleVolumePropertiesBuffer = new ComputeBuffer(k_MaxVisibleVolumeCount, System.Runtime.InteropServices.Marshal.SizeOf(typeof(VolumeProperties)));
}
public void Cleanup()

m_VBuffers[i].Destroy();
}
m_VBuffers = null;
m_VBuffers = null;
m_VisibleVolumes = null;
m_VisibleVolumeProperties = null;
CoreUtils.SafeRelease(s_VisibleVolumesBuffer);
CoreUtils.SafeRelease(s_VisibleVolumePropertiesBuffer);
}
public void ResizeVBuffer(HDCamera camera, int screenWidth, int screenHeight)

// Since a single voxel corresponds to a tile (e.g. 8x8) of pixels,
// the VBuffer can potentially extend past the boundaries of the viewport.
// The function returns the fraction of the {width, height} of the VBuffer visible on screen.
// Note: for performance reasons, scale is unused (implicitly 1). The error is typically under 1%.
static Vector2 ComputeVBufferResolutionAndScale(VolumetricLightingPreset preset,
int screenWidth, int screenHeight,
ref int w, ref int h, ref int d)

{
if (preset == VolumetricLightingPreset.Off) return;
HomogeneousFog globalFogComponent = HomogeneousFog.GetGlobalFogComponent();
HomogeneousDensityVolume globalVolume = HomogeneousDensityVolume.GetGlobalHomogeneousDensityVolume();
VolumeProperties globalFogProperties = (globalFogComponent != null) ? globalFogComponent.volumeParameters.GetProperties()
VolumeProperties globalVolumeProperties = (globalVolume != null) ? globalVolume.volumeParameters.GetProperties()
float asymmetry = globalFogComponent != null ? globalFogComponent.volumeParameters.asymmetry : 0;
cmd.SetGlobalVector(HDShaderIDs._GlobalFog_Scattering, globalFogProperties.scattering);
cmd.SetGlobalFloat( HDShaderIDs._GlobalFog_Extinction, globalFogProperties.extinction);
cmd.SetGlobalFloat( HDShaderIDs._GlobalFog_Asymmetry, asymmetry);
float asymmetry = globalVolume != null ? globalVolume.volumeParameters.asymmetry : 0;
cmd.SetGlobalVector(HDShaderIDs._GlobalScattering, globalVolumeProperties.scattering);
cmd.SetGlobalFloat( HDShaderIDs._GlobalExtinction, globalVolumeProperties.extinction);
cmd.SetGlobalFloat( HDShaderIDs._GlobalAsymmetry, asymmetry);
Vector2 scale = ComputeVBufferResolutionAndScale(preset, (int)camera.screenSize.x, (int)camera.screenSize.y, ref w, ref h, ref d);
ComputeVBufferResolutionAndScale(preset, (int)camera.screenSize.x, (int)camera.screenSize.y, ref w, ref h, ref d);
VBuffer vBuffer = FindVBuffer(camera.GetViewID());
Debug.Assert(vBuffer != null);

cmd.SetGlobalVector( HDShaderIDs._VBufferScaleAndSliceCount, new Vector4(scale.x, scale.y, d, 1.0f / d));
cmd.SetGlobalVector( HDShaderIDs._VBufferSliceCount, new Vector4(d, 1.0f / d));
public void VoxelizeDensityVolumes(HDCamera camera, CommandBuffer cmd)
{
if (preset == VolumetricLightingPreset.Off) return;
Vector3 camPosition = camera.camera.transform.position;
Vector3 camOffset = Vector3.zero; // World-origin-relative
if (ShaderConfig.s_CameraRelativeRendering != 0)
{
camOffset = -camPosition; // Camera-relative
}
m_VisibleVolumes.Clear();
m_VisibleVolumeProperties.Clear();
// Collect all the visible volume data, and upload it to the GPU.
HomogeneousDensityVolume[] volumes = Object.FindObjectsOfType(typeof(HomogeneousDensityVolume)) as HomogeneousDensityVolume[];
foreach (HomogeneousDensityVolume volume in volumes)
{
// Only test active finite volumes.
if (volume.enabled && volume.volumeParameters.IsLocalVolume())
{
// TODO: cache these?
var obb = OrientedBBox.Create(volume.transform);
// Frustum cull on the CPU for now. TODO: do it on the GPU.
if (GeometryUtils.Overlap(obb, camOffset, camera.frustum, 6, 8))
{
// TODO: cache these?
var properties = volume.volumeParameters.GetProperties();
m_VisibleVolumes.Add(obb);
m_VisibleVolumeProperties.Add(properties);
}
}
}
s_VisibleVolumesBuffer.SetData(m_VisibleVolumes);
s_VisibleVolumePropertiesBuffer.SetData(m_VisibleVolumeProperties);
}
// Ref: https://en.wikipedia.org/wiki/Close-packing_of_equal_spheres
// The returned {x, y} coordinates (and all spheres) are all within the (-0.5, 0.5)^2 range.
// The pattern has been rotated by 15 degrees to maximize the resolution along X and Y:

VBuffer vBuffer = FindVBuffer(camera.GetViewID());
Debug.Assert(vBuffer != null);
HomogeneousFog globalFogComponent = HomogeneousFog.GetGlobalFogComponent();
float asymmetry = globalFogComponent != null ? globalFogComponent.volumeParameters.asymmetry : 0;
HomogeneousDensityVolume globalVolume = HomogeneousDensityVolume.GetGlobalHomogeneousDensityVolume();
float asymmetry = globalVolume != null ? globalVolume.volumeParameters.asymmetry : 0;
if (globalFogComponent == null)
if (globalVolume == null)
{
// Clear the render target instead of running the shader.
// CoreUtils.SetRenderTarget(cmd, GetVBufferLightingIntegral(viewOffset), ClearFlag.Color, CoreUtils.clearColorAllBlack);

}
int w = 0, h = 0, d = 0;
Vector2 scale = ComputeVBufferResolutionAndScale(preset, (int)camera.screenSize.x, (int)camera.screenSize.y, ref w, ref h, ref d);
float vFoV = camera.camera.fieldOfView * Mathf.Deg2Rad;
ComputeVBufferResolutionAndScale(preset, (int)camera.screenSize.x, (int)camera.screenSize.y, ref w, ref h, ref d);
// Compute it using the scaled resolution to account for the visible area of the VBuffer.
Vector4 scaledRes = new Vector4(w * scale.x, h * scale.y, 1.0f / (w * scale.x), 1.0f / (h * scale.y));
Matrix4x4 transform = HDUtils.ComputePixelCoordToWorldSpaceViewDirectionMatrix(vFoV, scaledRes, camera.viewMatrix, false);
float vFoV = camera.camera.fieldOfView * Mathf.Deg2Rad;
Vector4 resolution = new Vector4(w, h, 1.0f / w, 1.0f / h);
Matrix4x4 transform = HDUtils.ComputePixelCoordToWorldSpaceViewDirectionMatrix(vFoV, resolution, camera.viewMatrix, false);
camera.SetupComputeShader(m_VolumetricLightingCS, cmd);

18
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/DiffusionProfile/DiffusionProfileSettings.cs


[GenerateHLSL]
public class DiffusionProfileConstants
{
public const int DIFFUSION_PROFILE_COUNT = 16; // Max. number of profiles, including the slot taken by the neutral profile
public const int DIFFUSION_PROFILE_NEUTRAL_ID = 0; // Does not result in blurring
public const int SSS_N_SAMPLES_NEAR_FIELD = 55; // Used for extreme close ups; must be a Fibonacci number
public const int SSS_N_SAMPLES_FAR_FIELD = 21; // Used at a regular distance; must be a Fibonacci number
public const int SSS_LOD_THRESHOLD = 4; // The LoD threshold of the near-field kernel (in pixels)
public const int DIFFUSION_PROFILE_COUNT = 16; // Max. number of profiles, including the slot taken by the neutral profile
public const int DIFFUSION_PROFILE_NEUTRAL_ID = 0; // Does not result in blurring
public const int SSS_N_SAMPLES_NEAR_FIELD = 55; // Used for extreme close ups; must be a Fibonacci number
public const int SSS_N_SAMPLES_FAR_FIELD = 21; // Used at a regular distance; must be a Fibonacci number
public const int SSS_LOD_THRESHOLD = 4; // The LoD threshold of the near-field kernel (in pixels)
public const int SSS_BASIC_N_SAMPLES = 11; // Must be an odd number
public const int SSS_BASIC_DISTANCE_SCALE = 3; // SSS distance units per centimeter
public const int SSS_BASIC_N_SAMPLES = 11; // Must be an odd number
public const int SSS_BASIC_DISTANCE_SCALE = 3; // SSS distance units per centimeter
// <<< Old SSS Model
}

{
public DiffusionProfile[] profiles;
[NonSerialized] public uint texturingModeFlags; // 1 bit/profile; 0 = PreAndPostScatter, 1 = PostScatter
[NonSerialized] public uint transmissionFlags; // 2 bit/profile; 0 = inf. thick, 1 = thin, 2 = regular
[NonSerialized] public uint texturingModeFlags; // 1 bit/profile: 0 = PreAndPostScatter, 1 = PostScatter
[NonSerialized] public uint transmissionFlags; // 1 bit/profile: 0 = regular, 1 = thin
[NonSerialized] public Vector4[] thicknessRemaps; // Remap: 0 = start, 1 = end - start
[NonSerialized] public Vector4[] worldScales; // X = meters per world unit; Y = world units per meter
[NonSerialized] public Vector4[] shapeParams; // RGB = S = 1 / D, A = filter radius
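
The per-profile flag packing described by the updated comments can be read as follows (a sketch with hypothetical helper names, not code from the repository):

// 1 bit per diffusion profile: 0 = PreAndPostScatter, 1 = PostScatter.
static bool UsesPostScatterTexturing(uint texturingModeFlags, int profileIndex)
{
    return ((texturingModeFlags >> profileIndex) & 1u) != 0u;
}

// 1 bit per diffusion profile after this change: 0 = regular (thick), 1 = thin transmission.
static bool UsesThinTransmission(uint transmissionFlags, int profileIndex)
{
    return ((transmissionFlags >> profileIndex) & 1u) != 0u;
}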

44
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/Lit/Lit.hlsl


/* 26 */ LIGHT_FEATURE_MASK_FLAGS_OPAQUE | MATERIAL_FEATURE_MASK_FLAGS, // Catch all case with MATERIAL_FEATURE_MASK_FLAGS is needed in case we disable material classification
};
// Additional bits set in 'bsdfData.materialFeatures' to save registers and simplify feature tracking.
#define MATERIAL_FEATURE_FLAGS_SSS_OUTPUT_SPLIT_LIGHTING ((MATERIAL_FEATURE_MASK_FLAGS + 1) << 0)
#define MATERIAL_FEATURE_FLAGS_SSS_TEXTURING_MODE_OFFSET FastLog2((MATERIAL_FEATURE_MASK_FLAGS + 1) << 1) // 2 bits
#define MATERIAL_FEATURE_FLAGS_TRANSMISSION_MODE_THIN ((MATERIAL_FEATURE_MASK_FLAGS + 1) << 3)
uint FeatureFlagsToTileVariant(uint featureFlags)
{
for (int i = 0; i < NUM_FEATURE_VARIANTS; i++)

bsdfData.diffusionProfile = diffusionProfile;
bsdfData.fresnel0 = _TransmissionTintsAndFresnel0[diffusionProfile].a;
bsdfData.subsurfaceMask = subsurfaceMask;
// Note: ApplySubsurfaceScatteringTexturingMode also tests the diffusionProfile when updating diffuseColor based on SSS
bsdfData.materialFeatures |= MATERIAL_FEATURE_FLAGS_SSS_OUTPUT_SPLIT_LIGHTING;
bsdfData.materialFeatures |= GetSubsurfaceScatteringTexturingMode(bsdfData.diffusionProfile) << MATERIAL_FEATURE_FLAGS_SSS_TEXTURING_MODE_OFFSET;
}
// Assume that bsdfData.diffusionProfile is init

bsdfData.thickness);
#endif
bsdfData.useThickObjectMode = !IsBitSet(asuint(_TransmissionFlags), diffusionProfile);
bool useThinObjectMode = IsBitSet(asuint(_TransmissionFlags), diffusionProfile);
bsdfData.materialFeatures |= useThinObjectMode ? MATERIAL_FEATURE_FLAGS_TRANSMISSION_MODE_THIN : 0;
if (bsdfData.useThickObjectMode)
if (useThinObjectMode)
{
// Apply no displacement.
bsdfData.thickness = 0;
}
else
{
// Compute the thickness in world units along the normal.
float thicknessInMeters = bsdfData.thickness * METERS_PER_MILLIMETER;

}
else
{
// Apply no displacement.
bsdfData.thickness = 0;
}
}

{
if (HasFeatureFlag(bsdfData.materialFeatures, MATERIALFEATUREFLAGS_LIT_SUBSURFACE_SCATTERING))
{
bsdfData.diffuseColor = ApplySubsurfaceScatteringTexturingMode(bsdfData.diffuseColor, bsdfData.diffusionProfile);
uint texturingMode = (bsdfData.materialFeatures >> MATERIAL_FEATURE_FLAGS_SSS_TEXTURING_MODE_OFFSET) & 3;
bsdfData.diffuseColor = ApplySubsurfaceScatteringTexturingMode(texturingMode, bsdfData.diffuseColor);
}
#ifdef DEBUG_DISPLAY

// Subsurface Scattering functions
//-----------------------------------------------------------------------------
bool PixelHasSubsurfaceScattering(BSDFData bsdfData)
bool ShouldOutputSplitLighting(BSDFData bsdfData)
// bsdfData.subsurfaceMask != 0 allows SSS to be enabled per pixel (i.e. as a per-pixel feature), since in the deferred case MATERIALFEATUREFLAGS_LIT_SUBSURFACE_SCATTERING alone is not sufficient,
// but keep testing MATERIALFEATUREFLAGS_LIT_SUBSURFACE_SCATTERING for the forward case
return bsdfData.diffusionProfile != DIFFUSION_PROFILE_NEUTRAL_ID && bsdfData.subsurfaceMask != 0 && HasFeatureFlag(bsdfData.materialFeatures, MATERIALFEATUREFLAGS_LIT_SUBSURFACE_SCATTERING);
return HasFeatureFlag(bsdfData.materialFeatures, MATERIAL_FEATURE_FLAGS_SSS_OUTPUT_SPLIT_LIGHTING);
}
//-----------------------------------------------------------------------------

float negatedNdotL = -NdotL;
// Apply wrapped lighting to better handle thin objects (cards) at grazing angles.
float backNdotL = bsdfData.useThickObjectMode ? negatedNdotL : wrappedNdotL;
bool useThinObjectMode = HasFeatureFlag(bsdfData.materialFeatures, MATERIAL_FEATURE_FLAGS_TRANSMISSION_MODE_THIN);
float backNdotL = useThinObjectMode ? wrappedNdotL : negatedNdotL;
// Apply BSDF-specific diffuse transmission to attenuation. See also: [SSS-NOTE-TRSM]
// We don't multiply by 'bsdfData.diffuseColor' here. It's done only once in PostEvaluateBSDF().

lerp(_AmbientOcclusionParam.rgb, float3(1.0, 1.0, 1.0), directAmbientOcclusion);
#endif
float3 modifiedDiffuseColor;
// bsdfData.subsurfaceMask != 0 allows SSS to be enabled per pixel (i.e. as a per-pixel feature), since in the deferred case MATERIALFEATUREFLAGS_LIT_SUBSURFACE_SCATTERING alone is not sufficient,
// but keep testing MATERIALFEATUREFLAGS_LIT_SUBSURFACE_SCATTERING for the forward case
if (HasFeatureFlag(bsdfData.materialFeatures, MATERIALFEATUREFLAGS_LIT_SUBSURFACE_SCATTERING) && bsdfData.subsurfaceMask != 0)
modifiedDiffuseColor = ApplySubsurfaceScatteringTexturingMode(bsdfData.diffuseColor, bsdfData.diffusionProfile);
else
modifiedDiffuseColor = bsdfData.diffuseColor;
uint texturingMode = (bsdfData.materialFeatures >> MATERIAL_FEATURE_FLAGS_SSS_TEXTURING_MODE_OFFSET) & 3;
float3 modifiedDiffuseColor = ApplySubsurfaceScatteringTexturingMode(texturingMode, bsdfData.diffuseColor);
// Apply the albedo to the direct diffuse lighting (only once). The indirect (baked)
// diffuse lighting has already had the albedo applied in GetBakedDiffuseLigthing().
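
To make the bit layout of the new defines concrete, here is a rough C# model (the width of MATERIAL_FEATURE_MASK_FLAGS is assumed for illustration; only the relative positions follow the defines above):

static class MaterialFeatureBitsSketch
{
    const uint MATERIAL_FEATURE_MASK_FLAGS = 0x0FFF;                                    // assumed width (bits 0-11)
    const uint FLAG_SSS_OUTPUT_SPLIT_LIGHTING = (MATERIAL_FEATURE_MASK_FLAGS + 1) << 0; // 1 bit just above the mask
    const int  TEXTURING_MODE_OFFSET          = 13;                                     // log2((mask + 1) << 1), 2 bits
    const uint FLAG_TRANSMISSION_MODE_THIN    = (MATERIAL_FEATURE_MASK_FLAGS + 1) << 3; // 1 bit above the mode bits

    public static uint PackTexturingMode(uint materialFeatures, uint texturingMode)
    {
        return materialFeatures | (texturingMode << TEXTURING_MODE_OFFSET);
    }

    public static uint UnpackTexturingMode(uint materialFeatures)
    {
        return (materialFeatures >> TEXTURING_MODE_OFFSET) & 3u;
    }
}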

3
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/SubsurfaceScattering/SubsurfaceScattering.compute


// divergence of execution across the warp.
float maxDistInPixels = maxDistance * max(pixelsPerMm.x, pixelsPerMm.y);
float3 albedo = ApplySubsurfaceScatteringTexturingMode(sssData.diffuseColor, profileID);
uint texturingMode = GetSubsurfaceScatteringTexturingMode(profileID);
float3 albedo = ApplySubsurfaceScatteringTexturingMode(texturingMode, sssData.diffuseColor);
[branch] if (distScale == 0 || maxDistInPixels < 1)
{

35
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/SubsurfaceScattering/SubsurfaceScattering.hlsl


// helper functions
// ----------------------------------------------------------------------------
// Returns the modified albedo (diffuse color) for materials with subsurface scattering.
// Ref: Advanced Techniques for Realistic Real-Time Skin Rendering.
float3 ApplySubsurfaceScatteringTexturingMode(float3 color, int diffusionProfile)
// 0: [ albedo = albedo ]
// 1: [ albedo = 1 ]
// 2: [ albedo = sqrt(albedo) ]
uint GetSubsurfaceScatteringTexturingMode(int diffusionProfile)
uint texturingMode = 0;
#if defined(SHADERPASS) && (SHADERPASS == SHADERPASS_SUBSURFACE_SCATTERING)
// If the SSS pass is executed, we know we have SSS enabled.
bool enableSss = true;

// We can enter this function even if SSS is not enabled, because of per-tile material classification
// (if there is SSS inside the tile, we need to enable the feature for the whole tile with a neutral value).
// That is why we test != DIFFUSION_PROFILE_NEUTRAL_ID here, to make sure the neutral profile doesn't affect the scene.
if (enableSss && diffusionProfile != DIFFUSION_PROFILE_NEUTRAL_ID)
if (enableSss)
{
bool performPostScatterTexturing = IsBitSet(asuint(_TexturingModeFlags), diffusionProfile);

#if !defined(SHADERPASS) || (SHADERPASS != SHADERPASS_SUBSURFACE_SCATTERING)
color = float3(1, 1, 1);
#if defined(SHADERPASS) && (SHADERPASS != SHADERPASS_SUBSURFACE_SCATTERING)
texturingMode = 1;
color = sqrt(color);
texturingMode = 2;
}
return texturingMode;
}
// Returns the modified albedo (diffuse color) for materials with subsurface scattering.
// See GetSubsurfaceScatteringTexturingMode() above for more details.
// Ref: Advanced Techniques for Realistic Real-Time Skin Rendering.
float3 ApplySubsurfaceScatteringTexturingMode(uint texturingMode, float3 color)
{
switch (texturingMode)
{
case 2: color = sqrt(color); break;
case 1: color = 1; break;
default: color = color; break;
}
return color;
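
The three texturing modes above (0: albedo unchanged, 1: albedo = 1, 2: albedo = sqrt(albedo)) can be mirrored on the CPU with a small sketch (illustrative only, using UnityEngine color math):

using UnityEngine;

static class SssTexturingSketch
{
    public static Color ApplyTexturingMode(uint texturingMode, Color albedo)
    {
        switch (texturingMode)
        {
            case 2u: // albedo = sqrt(albedo)
                return new Color(Mathf.Sqrt(albedo.r), Mathf.Sqrt(albedo.g), Mathf.Sqrt(albedo.b), albedo.a);
            case 1u: // albedo = 1
                return Color.white;
            default: // mode 0: albedo unchanged
                return albedo;
        }
    }
}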

3
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/SubsurfaceScattering/SubsurfaceScattering.shader


float halfRcpVariance = _HalfRcpWeightedVariances[profileID].a;
#endif
float3 albedo = ApplySubsurfaceScatteringTexturingMode(sssData.diffuseColor, profileID);
uint texturingMode = GetSubsurfaceScatteringTexturingMode(profileID);
float3 albedo = ApplySubsurfaceScatteringTexturingMode(texturingMode, sssData.diffuseColor);
#ifndef SSS_FILTER_HORIZONTAL_AND_COMBINE
albedo = float3(1, 1, 1);

2
ScriptableRenderPipeline/HDRenderPipeline/HDRP/ShaderPass/ShaderPassForward.hlsl


LightLoop(V, posInput, preLightData, bsdfData, bakeLightingData, featureFlags, diffuseLighting, specularLighting);
#ifdef OUTPUT_SPLIT_LIGHTING
if (_EnableSubsurfaceScattering != 0 && PixelHasSubsurfaceScattering(bsdfData))
if (_EnableSubsurfaceScattering != 0 && ShouldOutputSplitLighting(bsdfData))
{
outColor = float4(specularLighting, 1.0);
outDiffuseLighting = float4(TagLightingForSSS(diffuseLighting), 1.0);

8
ScriptableRenderPipeline/HDRenderPipeline/HDRP/ShaderVariables.hlsl


uint _TaaFrameIndex; // [0, 7]
// Volumetric lighting.
float4 _AmbientProbeCoeffs[7]; // 3 bands of SH, packed, rescaled and convolved with the phase function
float _GlobalFog_Asymmetry;
float3 _GlobalFog_Scattering;
float _GlobalFog_Extinction;
float _GlobalAsymmetry;
float3 _GlobalScattering;
float _GlobalExtinction;
float4 _VBufferScaleAndSliceCount; // { fracVisW, fracVisH, count, 1/count }
float4 _VBufferSliceCount; // { count, 1/count, 0, 0 }
float4 _VBufferDepthEncodingParams; // See the call site for description
float4 _VBufferDepthDecodingParams; // See the call site for description
CBUFFER_END

2
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Sky/AtmosphericScattering/AtmosphericScattering.hlsl


float4 volFog = SampleInScatteredRadianceAndTransmittance(TEXTURE3D_PARAM(_VBufferLighting, s_trilinear_clamp_sampler),
posInput.positionNDC, posInput.linearDepth,
_VBufferResolution,
_VBufferScaleAndSliceCount,
_VBufferSliceCount.xy,
_VBufferDepthEncodingParams,
_VBufferDepthDecodingParams);
fogColor = volFog.rgb;

5
ScriptableRenderPipeline/LightweightPipeline/LWRP/Data/LightweightPipelineAsset.cs


#if UNITY_EDITOR
[NonSerialized]
private LightweightPipelineEditorResources m_EditorResourcesAsset;
[MenuItem("Assets/Create/Rendering/Lightweight Pipeline Asset", priority = CoreUtils.assetCreateMenuPriority1)]

public Shader CopyDepthShader
{
get { return resources != null ? resources.CopyDepthShader : null; }
}
public Shader ScreenSpaceShadowShader
{
get { return resources != null ? resources.ScreenSpaceShadowShader : null; }
}
}
}

2
ScriptableRenderPipeline/LightweightPipeline/LWRP/Data/LightweightPipelineResources.asset


m_EditorClassIdentifier:
BlitShader: {fileID: 4800000, guid: c17132b1f77d20942aa75f8429c0f8bc, type: 3}
CopyDepthShader: {fileID: 4800000, guid: d6dae50ee9e1bfa4db75f19f99355220, type: 3}
ScreenSpaceShadowShader: {fileID: 4800000, guid: 0f854b35a0cf61a429bd5dcfea30eddd, type: 3}

1
ScriptableRenderPipeline/LightweightPipeline/LWRP/Data/LightweightPipelineResources.cs


{
public Shader BlitShader;
public Shader CopyDepthShader;
public Shader ScreenSpaceShadowShader;
}

122
ScriptableRenderPipeline/LightweightPipeline/LWRP/LightweightPipeline.cs


// we need use copyColor RT as a work RT.
public static int copyColor;
// Camera depth target. Only used when post processing or soft particles are enabled.
// Camera depth target. Only used when post processing, soft particles, or screen space shadows are enabled.
public static int depth;
// If soft particles are enabled and no depth prepass is performed we need to copy depth.

private static readonly int kMaxVertexLights = 4;
// We have no good approach exposed to skip shader variants; e.g., ideally we would like to skip _CASCADE for all punctual lights
// We combine light and shadow classification keywords to reduce the amount of shader variants.
// Lightweight shader library declares defines based on these keywords to avoid having to check them in the shaders
// Core.hlsl defines _MAIN_LIGHT_DIRECTIONAL and _MAIN_LIGHT_SPOT (point lights can't be main light)
// Shadow.hlsl defines _SHADOWS_ENABLED, _SHADOWS_SOFT, _SHADOWS_CASCADE, _SHADOWS_PERSPECTIVE
private static readonly string[] kMainLightKeywords =
{
"_MAIN_LIGHT_DIRECTIONAL_SHADOW",
"_MAIN_LIGHT_DIRECTIONAL_SHADOW_CASCADE",
"_MAIN_LIGHT_DIRECTIONAL_SHADOW_SOFT",
"_MAIN_LIGHT_DIRECTIONAL_SHADOW_CASCADE_SOFT",
"_MAIN_LIGHT_SPOT_SHADOW",
"_MAIN_LIGHT_SPOT_SHADOW_SOFT"
};
private StringBuilder m_MainLightKeywordString = new StringBuilder(43);
private bool m_IsOffscreenCamera;
private Vector4 kDefaultLightPosition = new Vector4(0.0f, 0.0f, 1.0f, 0.0f);

private const int kMaxCascades = 4;
private int m_ShadowCasterCascadesCount;
private int m_ShadowMapRTID;
private int m_ScreenSpaceShadowMapRTID;
private RenderTargetIdentifier m_ScreenSpaceShadowMapRT;
private RenderTargetIdentifier m_ColorRT;
private RenderTargetIdentifier m_CopyColorRT;
private RenderTargetIdentifier m_DepthRT;

private Material m_BlitMaterial;
private Material m_CopyDepthMaterial;
private Material m_ErrorMaterial;
private Material m_ScreenSpaceShadowsMaterial;
private int m_BlitTexID = Shader.PropertyToID("_BlitTex");
private CopyTextureSupport m_CopyTextureSupport;

ShadowConstantBuffer._ShadowmapSize = Shader.PropertyToID("_ShadowmapSize");
m_ShadowMapRTID = Shader.PropertyToID("_ShadowMap");
m_ScreenSpaceShadowMapRTID = Shader.PropertyToID("_ScreenSpaceShadowMap");
CameraRenderTargetID.color = Shader.PropertyToID("_CameraColorRT");
CameraRenderTargetID.copyColor = Shader.PropertyToID("_CameraCopyColorRT");

m_ShadowMapRT = new RenderTargetIdentifier(m_ShadowMapRTID);
m_ScreenSpaceShadowMapRT = new RenderTargetIdentifier(m_ScreenSpaceShadowMapRTID);
m_ColorRT = new RenderTargetIdentifier(CameraRenderTargetID.color);
m_CopyColorRT = new RenderTargetIdentifier(CameraRenderTargetID.copyColor);

m_BlitQuad = LightweightUtils.CreateQuadMesh(false);
m_BlitMaterial = CoreUtils.CreateEngineMaterial(m_Asset.BlitShader);
m_CopyDepthMaterial = CoreUtils.CreateEngineMaterial(m_Asset.CopyDepthShader);
m_ScreenSpaceShadowsMaterial = CoreUtils.CreateEngineMaterial(m_Asset.ScreenSpaceShadowShader);
m_ErrorMaterial = CoreUtils.CreateEngineMaterial("Hidden/InternalErrorShader");
}

LightData lightData;
InitializeLightData(visibleLights, out lightData);
ShadowPass(visibleLights, ref context, ref lightData);
bool shadows = ShadowPass(visibleLights, ref context, ref lightData);
SetupFrameRenderingConfiguration(out frameRenderingConfiguration, stereoEnabled);
SetupFrameRenderingConfiguration(out frameRenderingConfiguration, shadows, stereoEnabled);
SetupIntermediateResources(frameRenderingConfiguration, ref context);
// SetupCameraProperties does the following:

context.SetupCameraProperties(m_CurrCamera, stereoEnabled);
if (LightweightUtils.HasFlag(frameRenderingConfiguration, FrameRenderingConfiguration.DepthPrePass))
{
// Only screen space shadowmap mode is supported.
if (shadows)
ShadowCollectPass(visibleLights, ref context, ref lightData);
}
// Release temporary RT
cmd.ReleaseTemporaryRT(m_ScreenSpaceShadowMapRTID);
cmd.ReleaseTemporaryRT(CameraRenderTargetID.depthCopy);
cmd.ReleaseTemporaryRT(CameraRenderTargetID.depth);
cmd.ReleaseTemporaryRT(CameraRenderTargetID.color);

}
}
private void ShadowPass(List<VisibleLight> visibleLights, ref ScriptableRenderContext context, ref LightData lightData)
private bool ShadowPass(List<VisibleLight> visibleLights, ref ScriptableRenderContext context, ref LightData lightData)
{
if (m_Asset.AreShadowsEnabled() && lightData.mainLightIndex != -1)
{

if (!LightweightUtils.IsSupportedShadowType(mainLight.lightType))
{
Debug.LogWarning("Only directional and spot shadows are supported by LightweightPipeline.");
return;
return false;
}
// There's no way to map shadow light indices. We need to pass in the original unsorted index.

{
lightData.shadowMapSampleType = LightShadows.None;
}
return shadowsRendered;
return false;
}
private void ShadowCollectPass(List<VisibleLight> visibleLights, ref ScriptableRenderContext context, ref LightData lightData)
{
CommandBuffer cmd = CommandBufferPool.Get("Collect Shadows");
SetupShadowReceiverConstants(cmd, visibleLights[lightData.mainLightIndex]);
SetShadowCollectPassKeywords(cmd, visibleLights[lightData.mainLightIndex], ref lightData);
cmd.GetTemporaryRT(m_ScreenSpaceShadowMapRTID, m_CurrCamera.pixelWidth, m_CurrCamera.pixelHeight, 0, FilterMode.Bilinear, RenderTextureFormat.R8);
cmd.Blit(null, m_ScreenSpaceShadowMapRT, m_ScreenSpaceShadowsMaterial);
context.ExecuteCommandBuffer(cmd);
CommandBufferPool.Release(cmd);
}
private void DepthPass(ref ScriptableRenderContext context)

}
}
private void SetupFrameRenderingConfiguration(out FrameRenderingConfiguration configuration, bool stereoEnabled)
private void SetupFrameRenderingConfiguration(out FrameRenderingConfiguration configuration, bool shadows, bool stereoEnabled)
{
configuration = (stereoEnabled) ? FrameRenderingConfiguration.Stereo : FrameRenderingConfiguration.None;
if (stereoEnabled && XRSettings.eyeTextureDesc.dimension == TextureDimension.Tex2DArray)

}
}
if (shadows)
{
m_RequireDepthTexture = true;
if (!msaaEnabled)
intermediateTexture = true;
}
if (msaaEnabled)
{
configuration |= FrameRenderingConfiguration.Msaa;

{
// If msaa is enabled we don't use a depth renderbuffer as we might not have support to Texture2DMS to resolve depth.
// Instead we use a depth prepass and whenever depth is needed we use the 1 sample depth from prepass.
if (!msaaEnabled)
// Screen space shadows require depth before opaque shading.
if (!msaaEnabled && !shadows)
{
bool supportsDepthCopy = m_CopyTextureSupport != CopyTextureSupport.None && m_Asset.CopyDepthShader.isSupported;
m_DepthRenderBuffer = true;

// The main light has an optimized shader path. This will benefit games that only care about a single light.
// Lightweight pipeline also supports only a single shadow light, if available it will be the main light.
SetupMainLightConstants(cmd, lights, lightData.mainLightIndex);
if (lightData.shadowMapSampleType != LightShadows.None)
SetupShadowReceiverConstants(cmd, lights[lightData.mainLightIndex]);
SetupAdditionalListConstants(cmd, lights, ref lightData);
}

cmd.SetGlobalVectorArray(ShadowConstantBuffer._DirShadowSplitSpheres, m_DirectionalShadowSplitDistances);
cmd.SetGlobalVector(ShadowConstantBuffer._DirShadowSplitSphereRadii, m_DirectionalShadowSplitRadii);
cmd.SetGlobalVector(ShadowConstantBuffer._ShadowOffset0, new Vector4(-invHalfShadowResolution, -invHalfShadowResolution, 0.0f, 0.0f));
cmd.SetGlobalVector(ShadowConstantBuffer._ShadowOffset1, new Vector4( invHalfShadowResolution, -invHalfShadowResolution, 0.0f, 0.0f));
cmd.SetGlobalVector(ShadowConstantBuffer._ShadowOffset2, new Vector4(-invHalfShadowResolution, invHalfShadowResolution, 0.0f, 0.0f));
cmd.SetGlobalVector(ShadowConstantBuffer._ShadowOffset3, new Vector4( invHalfShadowResolution, invHalfShadowResolution, 0.0f, 0.0f));
cmd.SetGlobalVector(ShadowConstantBuffer._ShadowOffset1, new Vector4(invHalfShadowResolution, -invHalfShadowResolution, 0.0f, 0.0f));
cmd.SetGlobalVector(ShadowConstantBuffer._ShadowOffset2, new Vector4(-invHalfShadowResolution, invHalfShadowResolution, 0.0f, 0.0f));
cmd.SetGlobalVector(ShadowConstantBuffer._ShadowOffset3, new Vector4(invHalfShadowResolution, invHalfShadowResolution, 0.0f, 0.0f));
cmd.SetGlobalVector(ShadowConstantBuffer._ShadowmapSize, new Vector4(invShadowResolution, invShadowResolution, m_Asset.ShadowAtlasResolution, m_Asset.ShadowAtlasResolution));
}

int mainLightIndex = lightData.mainLightIndex;
for (int i = 0; i < kMainLightKeywords.Length; ++i)
cmd.DisableShaderKeyword(kMainLightKeywords[i]);
if (mainLightIndex != -1 && (lightData.shadowMapSampleType != LightShadows.None))
{
m_MainLightKeywordString.Length = 0;
m_MainLightKeywordString.Append("_MAIN_LIGHT");
LightType mainLightType = visibleLights[mainLightIndex].lightType;
if (mainLightType == LightType.Directional)
{
m_MainLightKeywordString.Append("_DIRECTIONAL_SHADOW");
if (m_Asset.CascadeCount > 1)
m_MainLightKeywordString.Append("_CASCADE");
}
else
{
m_MainLightKeywordString.Append("_SPOT_SHADOW");
}
if (lightData.shadowMapSampleType == LightShadows.Soft)
m_MainLightKeywordString.Append("_SOFT");
string keyword = m_MainLightKeywordString.ToString();
cmd.EnableShaderKeyword(keyword);
}
int mainLightIndex = lightData.mainLightIndex;
CoreUtils.SetKeyword(cmd, "_MAIN_LIGHT_DIRECTIONAL", mainLightIndex == -1 || visibleLights[mainLightIndex].lightType == LightType.Directional);
CoreUtils.SetKeyword(cmd, "_MAIN_LIGHT_SPOT", mainLightIndex != -1 && visibleLights[mainLightIndex].lightType == LightType.Spot);
CoreUtils.SetKeyword(cmd, "_SHADOWS_ENABLED", lightData.shadowMapSampleType != LightShadows.None);
bool linearFogModeEnabled = false;
bool exponentialFogModeEnabled = false;
if (RenderSettings.fog)

CoreUtils.SetKeyword(cmd, "FOG_LINEAR", linearFogModeEnabled);
CoreUtils.SetKeyword(cmd, "FOG_EXP2", exponentialFogModeEnabled);
}
private void SetShadowCollectPassKeywords(CommandBuffer cmd, VisibleLight shadowLight, ref LightData lightData)
{
bool cascadeShadows = shadowLight.lightType == LightType.Directional && m_Asset.CascadeCount > 1;
CoreUtils.SetKeyword(cmd, "_SHADOWS_SOFT", lightData.shadowMapSampleType == LightShadows.Soft);
CoreUtils.SetKeyword(cmd, "_SHADOWS_CASCADE", cascadeShadows);
}
private bool RenderShadows(ref CullResults cullResults, ref VisibleLight shadowLight, int shadowLightIndex, ref ScriptableRenderContext context)
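
The refactored keyword setup above replaces the hand-built main-light keyword strings with CoreUtils.SetKeyword calls; in spirit that helper amounts to the following sketch (illustrative only):

using UnityEngine.Rendering;

static class KeywordUtilSketch
{
    // Toggle a shader keyword on a command buffer, equivalent in spirit to CoreUtils.SetKeyword.
    public static void SetKeyword(CommandBuffer cmd, string keyword, bool enabled)
    {
        if (enabled)
            cmd.EnableShaderKeyword(keyword);
        else
            cmd.DisableShaderKeyword(keyword);
    }
}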

30
ScriptableRenderPipeline/LightweightPipeline/LWRP/ShaderLibrary/Core.hlsl


#include "Input.hlsl"
///////////////////////////////////////////////////////////////////////////////
// Light Classification defines //
// //
// In order to reduce shader variations main light keywords were combined //
// here we define main light type keywords. //
// Main light is either a shadow casting light or the brightest directional. //
// Lightweight pipeline doesn't support point light shadows so they can't be //
// classified as main light. //
///////////////////////////////////////////////////////////////////////////////
#if defined(_MAIN_LIGHT_DIRECTIONAL_SHADOW) || defined(_MAIN_LIGHT_DIRECTIONAL_SHADOW_CASCADE) || defined(_MAIN_LIGHT_DIRECTIONAL_SHADOW_SOFT) || defined(_MAIN_LIGHT_DIRECTIONAL_SHADOW_CASCADE_SOFT)
#define _MAIN_LIGHT_DIRECTIONAL
#endif
#if defined(_MAIN_LIGHT_SPOT_SHADOW) || defined(_MAIN_LIGHT_SPOT_SHADOW_SOFT)
#define _MAIN_LIGHT_SPOT
#endif
// If there is no shadow-casting light, we classify the main light as directional
#if !defined(_MAIN_LIGHT_DIRECTIONAL) && !defined(_MAIN_LIGHT_SPOT)
#define _MAIN_LIGHT_DIRECTIONAL
#endif
#ifdef _NORMALMAP
#define OUTPUT_NORMAL(IN, OUT) OutputTangentToWorld(IN.tangent, IN.normal, OUT.tangent, OUT.binormal, OUT.normal)
#else

{
half3x3 tangentToWorld = half3x3(tangent, binormal, normal);
return normalize(mul(normalTangent, tangentToWorld));
}
// TODO: A similar function should be already available in SRP lib on master. Use that instead
float4 ComputeScreenPos(float4 positionCS)
{
float4 o = positionCS * 0.5f;
o.xy = float2(o.x, o.y * _ProjectionParams.x) + o.w;
o.zw = positionCS.zw;
return o;
}
half ComputeFogFactor(float z)
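
The ComputeScreenPos helper above defers the perspective divide to the pixel stage; a quick C#-side check of the same mapping (illustrative only; flipSign stands in for _ProjectionParams.x):

using UnityEngine;

static class ScreenPosSketch
{
    // Mirrors the HLSL: o.xy = 0.5 * clip.xy * float2(1, flipSign) + 0.5 * clip.w, then divide by clip.w.
    public static Vector2 ClipToScreenUV(Vector4 clipPos, float flipSign = 1.0f)
    {
        float x = 0.5f * clipPos.x + 0.5f * clipPos.w;
        float y = 0.5f * clipPos.y * flipSign + 0.5f * clipPos.w;
        return new Vector2(x / clipPos.w, y / clipPos.w); // NDC [-1, 1] mapped to UV [0, 1]
    }
}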

9
ScriptableRenderPipeline/LightweightPipeline/LWRP/ShaderLibrary/Lighting.hlsl


InitializeBRDFData(albedo, metallic, specular, smoothness, alpha, brdfData);
Light mainLight = GetMainLight(inputData.positionWS);
mainLight.attenuation *= RealtimeShadowAttenuation(inputData.positionWS, inputData.shadowCoord);
#ifdef _SHADOWS_ENABLED
mainLight.attenuation *= RealtimeShadowAttenuation(inputData.shadowCoord);
#endif
MixRealtimeAndBakedGI(mainLight, inputData.normalWS, inputData.bakedGI, half4(0, 0, 0, 0));
half3 color = GlobalIllumination(brdfData, inputData.bakedGI, occlusion, inputData.normalWS, inputData.viewDirectionWS);

half4 LightweightFragmentBlinnPhong(InputData inputData, half3 diffuse, half4 specularGloss, half shininess, half3 emission, half alpha)
{
Light mainLight = GetMainLight(inputData.positionWS);
mainLight.attenuation *= RealtimeShadowAttenuation(inputData.positionWS, inputData.shadowCoord);
#ifdef _SHADOWS_ENABLED
mainLight.attenuation *= RealtimeShadowAttenuation(inputData.shadowCoord);
#endif
MixRealtimeAndBakedGI(mainLight, inputData.normalWS, inputData.bakedGI, half4(0, 0, 0, 0));
half3 attenuatedLightColor = mainLight.color * mainLight.attenuation;

4
ScriptableRenderPipeline/LightweightPipeline/LWRP/ShaderLibrary/LightweightPassLit.hlsl


half fogFactor = ComputeFogFactor(o.clipPos.z);
o.fogFactorAndVertexLight = half4(fogFactor, vertexLight);
#if defined(_SHADOWS_ENABLED) && !defined(_SHADOWS_CASCADE)
o.shadowCoord = ComputeShadowCoord(o.posWS.xyz);
#ifdef _SHADOWS_ENABLED
o.shadowCoord = ComputeScreenPos(o.clipPos);
#endif
return o;

6
ScriptableRenderPipeline/LightweightPipeline/LWRP/ShaderLibrary/LightweightPassShadow.hlsl


// _ShadowBias.x sign depends on whether the platform has a reversed z buffer
clipPos.z += _ShadowBias.x;
#if defined(UNITY_REVERSED_Z)
clipPos.z = min(clipPos.z, 1.0);
#if UNITY_REVERSED_Z
clipPos.z = min(clipPos.z, clipPos.w * UNITY_NEAR_CLIP_VALUE);
clipPos.z = max(clipPos.z, 0.0);
clipPos.z = max(clipPos.z, clipPos.w * UNITY_NEAR_CLIP_VALUE);
#endif
return clipPos;
}
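
The change above clamps the biased caster depth against the near plane; a small C# sketch of the same logic (illustrative; nearClipValue stands in for UNITY_NEAR_CLIP_VALUE, which is 1.0 with reversed Z and platform-dependent otherwise):

// After the depth bias is applied, keep the shadow caster from crossing the near clip plane.
static float ClampShadowCasterDepth(float clipZ, float clipW, float nearClipValue, bool reversedZ)
{
    return reversedZ
        ? System.Math.Min(clipZ, clipW * nearClipValue)  // reversed Z: clamp from above
        : System.Math.Max(clipZ, clipW * nearClipValue); // standard Z: clamp from below
}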

151
ScriptableRenderPipeline/LightweightPipeline/LWRP/ShaderLibrary/Shadows.hlsl


#define MAX_SHADOW_CASCADES 4
///////////////////////////////////////////////////////////////////////////////
// Light Classification shadow defines //
// //
// In order to reduce shader variations main light keywords were combined //
// here we define shadow keywords. //
///////////////////////////////////////////////////////////////////////////////
#if defined(_MAIN_LIGHT_DIRECTIONAL_SHADOW) || defined(_MAIN_LIGHT_DIRECTIONAL_SHADOW_CASCADE) || defined(_MAIN_LIGHT_DIRECTIONAL_SHADOW_SOFT) || defined(_MAIN_LIGHT_DIRECTIONAL_SHADOW_CASCADE_SOFT) || defined(_MAIN_LIGHT_SPOT_SHADOW) || defined(_MAIN_LIGHT_SPOT_SHADOW_SOFT)
#define _SHADOWS_ENABLED
#endif
#if defined(_MAIN_LIGHT_DIRECTIONAL_SHADOW_SOFT) || defined(_MAIN_LIGHT_DIRECTIONAL_SHADOW_CASCADE_SOFT) || defined(_MAIN_LIGHT_SPOT_SHADOW_SOFT)
#define _SHADOWS_SOFT
#endif
#if defined(_MAIN_LIGHT_DIRECTIONAL_SHADOW_CASCADE) || defined(_MAIN_LIGHT_DIRECTIONAL_SHADOW_CASCADE_SOFT)
#define _SHADOWS_CASCADE
#endif
#if defined(_MAIN_LIGHT_SPOT_SHADOW) || defined(_MAIN_LIGHT_SPOT_SHADOW_SOFT)
#define _SHADOWS_PERSPECTIVE
#endif
TEXTURE2D(_ScreenSpaceShadowMap);
SAMPLER(sampler_ScreenSpaceShadowMap);
TEXTURE2D_SHADOW(_ShadowMap);
SAMPLER_CMP(sampler_ShadowMap);

float4 _ShadowmapSize; // (xy: 1/width and 1/height, zw: width and height)
CBUFFER_END
inline half SampleShadowmap(float4 shadowCoord)
#if UNITY_REVERSED_Z
#define BEYOND_SHADOW_FAR(shadowCoord) shadowCoord.z <= UNITY_RAW_FAR_CLIP_VALUE
#else
#define BEYOND_SHADOW_FAR(shadowCoord) shadowCoord.z >= UNITY_RAW_FAR_CLIP_VALUE
#endif
#define OUTSIDE_SHADOW_BOUNDS(shadowCoord) shadowCoord.x <= 0 || shadowCoord.x >= 1 || shadowCoord.y <= 0 || shadowCoord.y >= 1 || BEYOND_SHADOW_FAR(shadowCoord)
half GetShadowStrength()
#if defined(_SHADOWS_PERSPECTIVE)
return _ShadowData.x;
}
inline half SampleScreenSpaceShadowMap(float4 shadowCoord)
{
shadowCoord.xy = shadowCoord.xy / shadowCoord.w;
half attenuation = SAMPLE_TEXTURE2D(_ScreenSpaceShadowMap, sampler_ScreenSpaceShadowMap, shadowCoord.xy).x;
// Apply shadow strength
return LerpWhiteTo(attenuation, GetShadowStrength());
}
inline real SampleShadowmap(float4 shadowCoord)
{
#endif
half attenuation;
real attenuation;
#ifdef SHADER_API_MOBILE
// 4-tap hardware comparison
half4 attenuation4;
attenuation4.x = SAMPLE_TEXTURE2D_SHADOW(_ShadowMap, sampler_ShadowMap, shadowCoord.xyz + _ShadowOffset0.xyz);
attenuation4.y = SAMPLE_TEXTURE2D_SHADOW(_ShadowMap, sampler_ShadowMap, shadowCoord.xyz + _ShadowOffset1.xyz);
attenuation4.z = SAMPLE_TEXTURE2D_SHADOW(_ShadowMap, sampler_ShadowMap, shadowCoord.xyz + _ShadowOffset2.xyz);
attenuation4.w = SAMPLE_TEXTURE2D_SHADOW(_ShadowMap, sampler_ShadowMap, shadowCoord.xyz + _ShadowOffset3.xyz);
attenuation = dot(attenuation4, 0.25);
#else
real fetchesWeights[9];
real2 fetchesUV[9];
SampleShadow_ComputeSamples_Tent_5x5(_ShadowmapSize, shadowCoord.xy, fetchesWeights, fetchesUV);
#ifdef SHADER_API_MOBILE
// 4-tap hardware comparison
real4 attenuation4;
attenuation4.x = SAMPLE_TEXTURE2D_SHADOW(_ShadowMap, sampler_ShadowMap, shadowCoord.xyz + _ShadowOffset0.xyz);
attenuation4.y = SAMPLE_TEXTURE2D_SHADOW(_ShadowMap, sampler_ShadowMap, shadowCoord.xyz + _ShadowOffset1.xyz);
attenuation4.z = SAMPLE_TEXTURE2D_SHADOW(_ShadowMap, sampler_ShadowMap, shadowCoord.xyz + _ShadowOffset2.xyz);
attenuation4.w = SAMPLE_TEXTURE2D_SHADOW(_ShadowMap, sampler_ShadowMap, shadowCoord.xyz + _ShadowOffset3.xyz);
attenuation = dot(attenuation4, 0.25);
#else
#ifdef _SHADOWS_CASCADE //Assume screen space shadows when cascades enabled
real fetchesWeights[16];
real2 fetchesUV[16];
SampleShadow_ComputeSamples_Tent_7x7(_ShadowmapSize, shadowCoord.xy, fetchesWeights, fetchesUV);
attenuation = fetchesWeights[0] * SAMPLE_TEXTURE2D_SHADOW(_ShadowMap, sampler_ShadowMap, real3(fetchesUV[0].xy, shadowCoord.z));
attenuation += fetchesWeights[1] * SAMPLE_TEXTURE2D_SHADOW(_ShadowMap, sampler_ShadowMap, real3(fetchesUV[1].xy, shadowCoord.z));
attenuation += fetchesWeights[2] * SAMPLE_TEXTURE2D_SHADOW(_ShadowMap, sampler_ShadowMap, real3(fetchesUV[2].xy, shadowCoord.z));
attenuation += fetchesWeights[3] * SAMPLE_TEXTURE2D_SHADOW(_ShadowMap, sampler_ShadowMap, real3(fetchesUV[3].xy, shadowCoord.z));
attenuation += fetchesWeights[4] * SAMPLE_TEXTURE2D_SHADOW(_ShadowMap, sampler_ShadowMap, real3(fetchesUV[4].xy, shadowCoord.z));
attenuation += fetchesWeights[5] * SAMPLE_TEXTURE2D_SHADOW(_ShadowMap, sampler_ShadowMap, real3(fetchesUV[5].xy, shadowCoord.z));
attenuation += fetchesWeights[6] * SAMPLE_TEXTURE2D_SHADOW(_ShadowMap, sampler_ShadowMap, real3(fetchesUV[6].xy, shadowCoord.z));
attenuation += fetchesWeights[7] * SAMPLE_TEXTURE2D_SHADOW(_ShadowMap, sampler_ShadowMap, real3(fetchesUV[7].xy, shadowCoord.z));
attenuation += fetchesWeights[8] * SAMPLE_TEXTURE2D_SHADOW(_ShadowMap, sampler_ShadowMap, real3(fetchesUV[8].xy, shadowCoord.z));
attenuation += fetchesWeights[9] * SAMPLE_TEXTURE2D_SHADOW(_ShadowMap, sampler_ShadowMap, real3(fetchesUV[9].xy, shadowCoord.z));
attenuation += fetchesWeights[10] * SAMPLE_TEXTURE2D_SHADOW(_ShadowMap, sampler_ShadowMap, real3(fetchesUV[10].xy, shadowCoord.z));
attenuation += fetchesWeights[11] * SAMPLE_TEXTURE2D_SHADOW(_ShadowMap, sampler_ShadowMap, real3(fetchesUV[11].xy, shadowCoord.z));
attenuation += fetchesWeights[12] * SAMPLE_TEXTURE2D_SHADOW(_ShadowMap, sampler_ShadowMap, real3(fetchesUV[12].xy, shadowCoord.z));
attenuation += fetchesWeights[13] * SAMPLE_TEXTURE2D_SHADOW(_ShadowMap, sampler_ShadowMap, real3(fetchesUV[13].xy, shadowCoord.z));
attenuation += fetchesWeights[14] * SAMPLE_TEXTURE2D_SHADOW(_ShadowMap, sampler_ShadowMap, real3(fetchesUV[14].xy, shadowCoord.z));
attenuation += fetchesWeights[15] * SAMPLE_TEXTURE2D_SHADOW(_ShadowMap, sampler_ShadowMap, real3(fetchesUV[15].xy, shadowCoord.z));
#else
real fetchesWeights[9];
real2 fetchesUV[9];
SampleShadow_ComputeSamples_Tent_5x5(_ShadowmapSize, shadowCoord.xy, fetchesWeights, fetchesUV);
attenuation = fetchesWeights[0] * SAMPLE_TEXTURE2D_SHADOW(_ShadowMap, sampler_ShadowMap, real3(fetchesUV[0].xy, shadowCoord.z));
attenuation += fetchesWeights[1] * SAMPLE_TEXTURE2D_SHADOW(_ShadowMap, sampler_ShadowMap, real3(fetchesUV[1].xy, shadowCoord.z));
attenuation += fetchesWeights[2] * SAMPLE_TEXTURE2D_SHADOW(_ShadowMap, sampler_ShadowMap, real3(fetchesUV[2].xy, shadowCoord.z));
attenuation += fetchesWeights[3] * SAMPLE_TEXTURE2D_SHADOW(_ShadowMap, sampler_ShadowMap, real3(fetchesUV[3].xy, shadowCoord.z));
attenuation += fetchesWeights[4] * SAMPLE_TEXTURE2D_SHADOW(_ShadowMap, sampler_ShadowMap, real3(fetchesUV[4].xy, shadowCoord.z));
attenuation += fetchesWeights[5] * SAMPLE_TEXTURE2D_SHADOW(_ShadowMap, sampler_ShadowMap, real3(fetchesUV[5].xy, shadowCoord.z));
attenuation += fetchesWeights[6] * SAMPLE_TEXTURE2D_SHADOW(_ShadowMap, sampler_ShadowMap, real3(fetchesUV[6].xy, shadowCoord.z));
attenuation += fetchesWeights[7] * SAMPLE_TEXTURE2D_SHADOW(_ShadowMap, sampler_ShadowMap, real3(fetchesUV[7].xy, shadowCoord.z));
attenuation += fetchesWeights[8] * SAMPLE_TEXTURE2D_SHADOW(_ShadowMap, sampler_ShadowMap, real3(fetchesUV[8].xy, shadowCoord.z));
#endif
attenuation = fetchesWeights[0] * SAMPLE_TEXTURE2D_SHADOW(_ShadowMap, sampler_ShadowMap, real3(fetchesUV[0].xy, shadowCoord.z));
attenuation += fetchesWeights[1] * SAMPLE_TEXTURE2D_SHADOW(_ShadowMap, sampler_ShadowMap, real3(fetchesUV[1].xy, shadowCoord.z));
attenuation += fetchesWeights[2] * SAMPLE_TEXTURE2D_SHADOW(_ShadowMap, sampler_ShadowMap, real3(fetchesUV[2].xy, shadowCoord.z));
attenuation += fetchesWeights[3] * SAMPLE_TEXTURE2D_SHADOW(_ShadowMap, sampler_ShadowMap, real3(fetchesUV[3].xy, shadowCoord.z));
attenuation += fetchesWeights[4] * SAMPLE_TEXTURE2D_SHADOW(_ShadowMap, sampler_ShadowMap, real3(fetchesUV[4].xy, shadowCoord.z));
attenuation += fetchesWeights[5] * SAMPLE_TEXTURE2D_SHADOW(_ShadowMap, sampler_ShadowMap, real3(fetchesUV[5].xy, shadowCoord.z));
attenuation += fetchesWeights[6] * SAMPLE_TEXTURE2D_SHADOW(_ShadowMap, sampler_ShadowMap, real3(fetchesUV[6].xy, shadowCoord.z));
attenuation += fetchesWeights[7] * SAMPLE_TEXTURE2D_SHADOW(_ShadowMap, sampler_ShadowMap, real3(fetchesUV[7].xy, shadowCoord.z));
attenuation += fetchesWeights[8] * SAMPLE_TEXTURE2D_SHADOW(_ShadowMap, sampler_ShadowMap, real3(fetchesUV[8].xy, shadowCoord.z));
#endif
#endif
// Apply shadow strength
attenuation = LerpWhiteTo(attenuation, _ShadowData.x);
// TODO: We can set shadowmap sampler to clamptoborder when we don't have a shadow atlas and avoid xy coord bounds check
return (shadowCoord.x <= 0 || shadowCoord.x >= 1 || shadowCoord.y <= 0 || shadowCoord.y >= 1 || shadowCoord.z >= 1) ? 1.0 : attenuation;
return (OUTSIDE_SHADOW_BOUNDS(shadowCoord)) ? 1.0 : attenuation;
}
inline half ComputeCascadeIndex(float3 positionWS)

return mul(_WorldToShadow[0], float4(positionWS, 1.0));
}
half GetShadowStrength()
half RealtimeShadowAttenuation(float4 shadowCoord)
return _ShadowData.x;
}
half RealtimeShadowAttenuation(float3 positionWS)
{
#if !defined(_SHADOWS_ENABLED)
return 1.0;
#endif
float4 shadowCoord = ComputeShadowCoord(positionWS);
return SampleShadowmap(shadowCoord);
}
half RealtimeShadowAttenuation(float3 positionWS, float4 shadowCoord)
{
#if !defined(_SHADOWS_ENABLED)
return 1.0;
#endif
#ifdef _SHADOWS_CASCADE
shadowCoord = ComputeShadowCoord(positionWS);
#endif
return SampleShadowmap(shadowCoord);
return SampleScreenSpaceShadowMap(shadowCoord);
}
#endif

8
ScriptableRenderPipeline/LightweightPipeline/LWRP/Shaders/LightweightStandard.shader


// -------------------------------------
// Lightweight Pipeline keywords
// We have no good approach exposed to skip shader variants, e.g., ideally we would like to skip _CASCADE for all punctual lights
// Lightweight combines light classification and shadows keywords to reduce shader variants.
// Lightweight shader library declares defines based on these keywords to avoid having to check them in the shaders
// Core.hlsl defines _MAIN_LIGHT_DIRECTIONAL and _MAIN_LIGHT_SPOT (point lights can't be main light)
// Shadow.hlsl defines _SHADOWS_ENABLED, _SHADOWS_SOFT, _SHADOWS_CASCADE, _SHADOWS_PERSPECTIVE
#pragma multi_compile _ _MAIN_LIGHT_DIRECTIONAL_SHADOW _MAIN_LIGHT_DIRECTIONAL_SHADOW_CASCADE _MAIN_LIGHT_DIRECTIONAL_SHADOW_SOFT _MAIN_LIGHT_DIRECTIONAL_SHADOW_CASCADE_SOFT _MAIN_LIGHT_SPOT_SHADOW _MAIN_LIGHT_SPOT_SHADOW_SOFT
#pragma multi_compile _MAIN_LIGHT_DIRECTIONAL _MAIN_LIGHT_SPOT
#pragma multi_compile _ _SHADOWS_ENABLED
#pragma multi_compile _ _MAIN_LIGHT_COOKIE
#pragma multi_compile _ _ADDITIONAL_LIGHTS
#pragma multi_compile _ _VERTEX_LIGHTS

8
ScriptableRenderPipeline/LightweightPipeline/LWRP/Shaders/LightweightStandardSimpleLighting.shader


// -------------------------------------
// Lightweight Pipeline keywords
// We have no good approach exposed to skip shader variants, e.g., ideally we would like to skip _CASCADE for all punctual lights
// Lightweight combines light classification and shadows keywords to reduce shader variants.
// Lightweight shader library declares defines based on these keywords to avoid having to check them in the shaders
// Core.hlsl defines _MAIN_LIGHT_DIRECTIONAL and _MAIN_LIGHT_SPOT (point lights can't be main light)
// Shadow.hlsl defines _SHADOWS_ENABLED, _SHADOWS_SOFT, _SHADOWS_CASCADE, _SHADOWS_PERSPECTIVE
#pragma multi_compile _ _MAIN_LIGHT_DIRECTIONAL_SHADOW _MAIN_LIGHT_DIRECTIONAL_SHADOW_CASCADE _MAIN_LIGHT_DIRECTIONAL_SHADOW_SOFT _MAIN_LIGHT_DIRECTIONAL_SHADOW_CASCADE_SOFT _MAIN_LIGHT_SPOT_SHADOW _MAIN_LIGHT_SPOT_SHADOW_SOFT
#pragma multi_compile _MAIN_LIGHT_DIRECTIONAL _MAIN_LIGHT_SPOT
#pragma multi_compile _ _SHADOWS_ENABLED
#pragma multi_compile _ _MAIN_LIGHT_COOKIE
#pragma multi_compile _ _ADDITIONAL_LIGHTS
#pragma multi_compile _ _VERTEX_LIGHTS

8
ScriptableRenderPipeline/LightweightPipeline/LWRP/Shaders/LightweightStandardTerrain.shader


// -------------------------------------
// Lightweight Pipeline keywords
// We have no good approach exposed to skip shader variants, e.g., ideally we would like to skip _CASCADE for all punctual lights
// Lightweight combines light classification and shadows keywords to reduce shader variants.
// Lightweight shader library declares defines based on these keywords to avoid having to check them in the shaders
// Core.hlsl defines _MAIN_LIGHT_DIRECTIONAL and _MAIN_LIGHT_SPOT (point lights can't be main light)
// Shadow.hlsl defines _SHADOWS_ENABLED, _SHADOWS_SOFT, _SHADOWS_CASCADE, _SHADOWS_PERSPECTIVE
#pragma multi_compile _ _MAIN_LIGHT_DIRECTIONAL_SHADOW _MAIN_LIGHT_DIRECTIONAL_SHADOW_CASCADE _MAIN_LIGHT_DIRECTIONAL_SHADOW_SOFT _MAIN_LIGHT_DIRECTIONAL_SHADOW_CASCADE_SOFT _MAIN_LIGHT_SPOT_SHADOW _MAIN_LIGHT_SPOT_SHADOW_SOFT
#pragma multi_compile _MAIN_LIGHT_DIRECTIONAL _MAIN_LIGHT_SPOT
#pragma multi_compile _ _SHADOWS_ENABLED
#pragma multi_compile _ _MAIN_LIGHT_COOKIE
#pragma multi_compile _ _ADDITIONAL_LIGHTS
#pragma multi_compile _ _VERTEX_LIGHTS

48
ScriptableRenderPipeline/Core/CoreRP/GeometryUtils.cs.hlsl


//
// This file was automatically generated. Please don't edit by hand.
//
#ifndef GEOMETRYUTILS_CS_HLSL
#define GEOMETRYUTILS_CS_HLSL
// Generated from UnityEngine.Experimental.Rendering.OrientedBBox
// PackingRules = Exact
struct OrientedBBox
{
float3 center;
float extentX;
float3 right;
float extentY;
float3 up;
float extentZ;
};
//
// Accessors for UnityEngine.Experimental.Rendering.OrientedBBox
//
float3 GetCenter(OrientedBBox value)
{
return value.center;
}
float GetExtentX(OrientedBBox value)
{
return value.extentX;
}
float3 GetRight(OrientedBBox value)
{
return value.right;
}
float GetExtentY(OrientedBBox value)
{
return value.extentY;
}
float3 GetUp(OrientedBBox value)
{
return value.up;
}
float GetExtentZ(OrientedBBox value)
{
return value.extentZ;
}
#endif
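
For reference, a hedged sketch of the kind of C# declaration that produces such a generated header via SRP Core's `GenerateHLSL` attribute; the real `OrientedBBox` lives in CoreRP/GeometryUtils.cs and its exact attribute arguments may differ, so the struct below is renamed to mark it as an illustration.

```csharp
using UnityEngine;
using UnityEngine.Experimental.Rendering; // GenerateHLSL attribute from CoreRP

// Sketch only: field order mirrors the generated header above so each
// float3/float pair packs into a float4 (PackingRules = Exact).
[GenerateHLSL]
struct OrientedBBoxSketch
{
    public Vector3 center;
    public float   extentX;
    public Vector3 right;
    public float   extentY;
    public Vector3 up;
    public float   extentZ;
}
```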

9
ScriptableRenderPipeline/Core/CoreRP/GeometryUtils.cs.hlsl.meta


fileFormatVersion: 2
guid: c168c5bb5d2fae84499b8ee8f2a3cdbc
ShaderImporter:
externalObjects: {}
defaultTextures: []
nonModifiableTextures: []
userData:
assetBundleName:
assetBundleVariant:

59
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/HomogeneousDensityVolume.cs


namespace UnityEngine.Experimental.Rendering.HDPipeline
{
[ExecuteInEditMode]
[AddComponentMenu("Rendering/Homogeneous Density Volume", 1100)]
public class HomogeneousDensityVolume : MonoBehaviour
{
public VolumeParameters volumeParameters = new VolumeParameters();
private void Awake()
{
}
private void OnEnable()
{
}
private void OnDisable()
{
}
private void Update()
{
}
private void OnValidate()
{
volumeParameters.Constrain();
}
void OnDrawGizmos()
{
if (volumeParameters.IsLocalVolume())
{
Gizmos.color = volumeParameters.albedo;
Gizmos.matrix = transform.localToWorldMatrix;
Gizmos.DrawWireCube(Vector3.zero, Vector3.one);
}
}
// Returns NULL if a global fog component does not exist, or is not enabled.
public static HomogeneousDensityVolume GetGlobalHomogeneousDensityVolume()
{
HomogeneousDensityVolume globalVolume = null;
HomogeneousDensityVolume[] volumes = FindObjectsOfType(typeof(HomogeneousDensityVolume)) as HomogeneousDensityVolume[];
foreach (HomogeneousDensityVolume volume in volumes)
{
if (volume.enabled && !volume.volumeParameters.IsLocalVolume())
{
globalVolume = volume;
break;
}
}
return globalVolume;
}
}
} // UnityEngine.Experimental.Rendering.HDPipeline
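
A small usage sketch, assuming a caller inside the HD pipeline that wants the parameters of the global (non-local) volume and tolerates the NULL case described in the comment above; the class and method names below are illustrative.

```csharp
using UnityEngine.Experimental.Rendering.HDPipeline;

static class GlobalVolumeExample // hypothetical caller
{
    public static VolumeParameters GetGlobalParametersOrNull()
    {
        // Query the scene for the single enabled global (non-local) volume, if any.
        HomogeneousDensityVolume globalVolume = HomogeneousDensityVolume.GetGlobalHomogeneousDensityVolume();
        return globalVolume != null ? globalVolume.volumeParameters : null;
    }
}
```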

11
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/HomogeneousDensityVolume.cs.meta


fileFormatVersion: 2
guid: 1c273c50d71d46a4f98a1d23256a8c63
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

85
ScriptableRenderPipeline/LightweightPipeline/LWRP/Shaders/LightweightScreenSpaceShadows.shader


Shader "Hidden/LightweightPipeline/ScreenSpaceShadows"
{
SubShader
{
Tags{ "RenderPipeline" = "LightweightPipeline" }
HLSLINCLUDE
//Keep compiler quiet about Shadows.hlsl.
#include "CoreRP/ShaderLibrary/Common.hlsl"
#include "CoreRP/ShaderLibrary/EntityLighting.hlsl"
#include "CoreRP/ShaderLibrary/ImageBasedLighting.hlsl"
#include "LWRP/ShaderLibrary/Core.hlsl"
#include "LWRP/ShaderLibrary/Shadows.hlsl"
TEXTURE2D(_CameraDepthTexture);
SAMPLER(sampler_CameraDepthTexture);
struct VertexInput
{
float4 vertex : POSITION;
float2 texcoord : TEXCOORD0;
UNITY_VERTEX_INPUT_INSTANCE_ID
};
struct Interpolators
{
half4 pos : SV_POSITION;
half4 texcoord : TEXCOORD0;
UNITY_VERTEX_INPUT_INSTANCE_ID
};
Interpolators Vertex(VertexInput i)
{
Interpolators o;
UNITY_SETUP_INSTANCE_ID(i);
UNITY_TRANSFER_INSTANCE_ID(i, o);
o.pos = TransformObjectToHClip(i.vertex.xyz);
float4 projPos = o.pos * 0.5;
projPos.xy = projPos.xy + projPos.w;
o.texcoord.xy = i.texcoord;
o.texcoord.zw = projPos.xy;
return o;
}
half Fragment(Interpolators i) : SV_Target
{
UNITY_SETUP_INSTANCE_ID(i);
float deviceDepth = SAMPLE_DEPTH_TEXTURE(_CameraDepthTexture, sampler_CameraDepthTexture, i.texcoord.xy);
#if UNITY_REVERSED_Z
deviceDepth = 1 - deviceDepth;
#endif
deviceDepth = 2 * deviceDepth - 1; //NOTE: Currently must massage depth before computing CS position.
float3 vpos = ComputeViewSpacePosition(i.texcoord.zw, deviceDepth, unity_CameraInvProjection);
float3 wpos = mul(unity_CameraToWorld, float4(vpos, 1)).xyz;
//Fetch shadow coordinates for cascade.
float4 coords = ComputeShadowCoord(wpos);
return SampleShadowmap(coords);
}
ENDHLSL
Pass
{
ZTest Always ZWrite Off
HLSLPROGRAM
#pragma multi_compile _ _SHADOWS_SOFT
#pragma multi_compile _ _SHADOWS_CASCADE
#pragma vertex Vertex
#pragma fragment Fragment
ENDHLSL
}
}
}
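
A hedged C# sketch of how a pipeline could run this screen-space shadow resolve: create a material from the hidden shader above (e.g. `new Material(Shader.Find("Hidden/LightweightPipeline/ScreenSpaceShadows"))`) and blit into a full-screen texture that `SampleScreenSpaceShadowMap()` later reads. The texture format, sizing, and pass ordering are assumptions; only the shader name and the `_ScreenSpaceShadowMap` property come from the code above.

```csharp
using UnityEngine;
using UnityEngine.Rendering;

static class ScreenSpaceShadowsExample // hypothetical driver code
{
    static readonly int s_ShadowTexId = Shader.PropertyToID("_ScreenSpaceShadowMap");

    public static void Render(CommandBuffer cmd, Material ssShadowMaterial, int width, int height)
    {
        // Resolve cascaded shadows into a full-screen texture consumed by the lit pass.
        cmd.GetTemporaryRT(s_ShadowTexId, width, height, 0, FilterMode.Bilinear, RenderTextureFormat.R8);
        cmd.Blit((Texture)null, new RenderTargetIdentifier(s_ShadowTexId), ssShadowMaterial);
        // Release with cmd.ReleaseTemporaryRT(s_ShadowTexId) once the opaque passes have sampled it.
    }
}
```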

9
ScriptableRenderPipeline/LightweightPipeline/LWRP/Shaders/LightweightScreenSpaceShadows.shader.meta


fileFormatVersion: 2
guid: 0f854b35a0cf61a429bd5dcfea30eddd
ShaderImporter:
externalObjects: {}
defaultTextures: []
nonModifiableTextures: []
userData:
assetBundleName:
assetBundleVariant:

13
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/HomogeneousFog.cs.meta


fileFormatVersion: 2
guid: 8f608e240d5376341bcef2478d231457
timeCreated: 1503411233
licenseType: Pro
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

57
ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/HomogeneousFog.cs


namespace UnityEngine.Experimental.Rendering.HDPipeline
{
[ExecuteInEditMode]
[AddComponentMenu("Rendering/Homogenous Fog", 1100)]
public class HomogeneousFog : MonoBehaviour
{
public VolumeParameters volumeParameters = new VolumeParameters();
private void Awake()
{
}
private void OnEnable()
{
}
private void OnDisable()
{
}
private void Update()
{
}
private void OnValidate()
{
volumeParameters.Constrain();
}
void OnDrawGizmos()
{
if (volumeParameters != null && !volumeParameters.IsVolumeUnbounded())
{
Gizmos.DrawWireCube(volumeParameters.bounds.center, volumeParameters.bounds.size);
}
}
// Returns NULL if a global fog component does not exist, or is not enabled.
public static HomogeneousFog GetGlobalFogComponent()
{
HomogeneousFog globalFogComponent = null;
HomogeneousFog[] fogComponents = FindObjectsOfType(typeof(HomogeneousFog)) as HomogeneousFog[];
foreach (HomogeneousFog fogComponent in fogComponents)
{
if (fogComponent.enabled && fogComponent.volumeParameters.IsVolumeUnbounded())
{
globalFogComponent = fogComponent;
break;
}
}
return globalFogComponent;
}
}
} // UnityEngine.Experimental.Rendering.HDPipeline