Merge pull request #940 from EvgeniiG/fix_sss_radius_bug

Fix SSS radius bug (+ misc changes)
/main
GitHub 6 years ago
Current commit
dc676759
26 files changed, with 484 additions and 195 deletions
  1. ScriptableRenderPipeline/Core/CoreRP/GeometryUtils.cs (148 lines changed)
  2. ScriptableRenderPipeline/Core/CoreRP/ShaderLibrary/Common.hlsl (14 lines changed)
  3. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Camera/HDCamera.cs (20 lines changed)
  4. ScriptableRenderPipeline/HDRenderPipeline/HDRP/HDRenderPipeline.cs (3 lines changed)
  5. ScriptableRenderPipeline/HDRenderPipeline/HDRP/HDStringConstants.cs (8 lines changed)
  6. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Deferred.shader (2 lines changed)
  7. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightEvaluation.hlsl (2 lines changed)
  8. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightLoop/Deferred.compute (2 lines changed)
  9. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightLoop/LightLoop.cs (2 lines changed)
  10. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/Resources/VolumetricLighting.compute (10 lines changed)
  11. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/VBuffer.hlsl (31 lines changed)
  12. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/VolumetricLighting.cs (125 lines changed)
  13. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/DiffusionProfile/DiffusionProfileSettings.cs (18 lines changed)
  14. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/Lit/Lit.hlsl (44 lines changed)
  15. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/SubsurfaceScattering/SubsurfaceScattering.compute (3 lines changed)
  16. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/SubsurfaceScattering/SubsurfaceScattering.hlsl (35 lines changed)
  17. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/SubsurfaceScattering/SubsurfaceScattering.shader (3 lines changed)
  18. ScriptableRenderPipeline/HDRenderPipeline/HDRP/ShaderPass/ShaderPassForward.hlsl (2 lines changed)
  19. ScriptableRenderPipeline/HDRenderPipeline/HDRP/ShaderVariables.hlsl (8 lines changed)
  20. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Sky/AtmosphericScattering/AtmosphericScattering.hlsl (2 lines changed)
  21. ScriptableRenderPipeline/Core/CoreRP/GeometryUtils.cs.hlsl (48 lines changed)
  22. ScriptableRenderPipeline/Core/CoreRP/GeometryUtils.cs.hlsl.meta (9 lines changed)
  23. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/HomogeneousDensityVolume.cs (59 lines changed)
  24. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/HomogeneousDensityVolume.cs.meta (11 lines changed)
  25. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/HomogeneousFog.cs.meta (13 lines changed)
  26. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/HomogeneousFog.cs (57 lines changed)

ScriptableRenderPipeline/Core/CoreRP/GeometryUtils.cs (148 lines changed)


namespace UnityEngine.Experimental.Rendering
{
public struct Frustum
{
public Plane[] planes; // Left, right, top, bottom, near, far
public Vector3[] corners; // Positions of the 8 corners
// The frustum will be camera-relative if given a camera-relative VP matrix.
public static Frustum Create(Matrix4x4 viewProjMatrix, bool depth_0_1, bool reverseZ)
{
Frustum frustum = new Frustum();
frustum.planes = new Plane[6];
frustum.corners = new Vector3[8];
GeometryUtility.CalculateFrustumPlanes(viewProjMatrix, frustum.planes);
float nd = -1.0f;
if (depth_0_1)
{
nd = 0.0f;
// See "Fast Extraction of Viewing Frustum Planes" by Gribb and Hartmann.
Vector3 f = new Vector3(viewProjMatrix.m20, viewProjMatrix.m21, viewProjMatrix.m22);
float s = (float)(1.0 / Math.Sqrt(f.sqrMagnitude));
Plane np = new Plane(s * f, s * viewProjMatrix.m23);
frustum.planes[4] = np;
}
if (reverseZ)
{
Plane tmp = frustum.planes[4];
frustum.planes[4] = frustum.planes[5];
frustum.planes[5] = tmp;
}
Matrix4x4 invViewProjMatrix = viewProjMatrix.inverse;
// Unproject 8 frustum points.
frustum.corners[0] = invViewProjMatrix.MultiplyPoint(new Vector3(-1, -1, 1));
frustum.corners[1] = invViewProjMatrix.MultiplyPoint(new Vector3( 1, -1, 1));
frustum.corners[2] = invViewProjMatrix.MultiplyPoint(new Vector3(-1, 1, 1));
frustum.corners[3] = invViewProjMatrix.MultiplyPoint(new Vector3( 1, 1, 1));
frustum.corners[4] = invViewProjMatrix.MultiplyPoint(new Vector3(-1, -1, nd));
frustum.corners[5] = invViewProjMatrix.MultiplyPoint(new Vector3( 1, -1, nd));
frustum.corners[6] = invViewProjMatrix.MultiplyPoint(new Vector3(-1, 1, nd));
frustum.corners[7] = invViewProjMatrix.MultiplyPoint(new Vector3( 1, 1, nd));
return frustum;
}
} // struct Frustum
[GenerateHLSL]
public struct OrientedBBox
{
public Vector3 center;
public float extentX;
public Vector3 right;
public float extentY;
public Vector3 up;
public float extentZ;
public static OrientedBBox Create(Transform t)
{
OrientedBBox obb = new OrientedBBox();
obb.center = t.position;
obb.right = t.right;
obb.up = t.up;
obb.extentX = 0.5f * t.localScale.x;
obb.extentY = 0.5f * t.localScale.y;
obb.extentZ = 0.5f * t.localScale.z;
return obb;
}
} // struct OrientedBBox
// Returns 'true' if the OBB intersects (or is inside) the frustum, 'false' otherwise.
// 'cameraRelativeOffset' can be used to intersect a world-space OBB with a camera-relative frustum.
public static bool Overlap(OrientedBBox obb, Vector3 cameraRelativeOffset,
Frustum frustum, int numPlanes, int numCorners)
{
Vector3 center = obb.center + cameraRelativeOffset;
Vector3 forward = Vector3.Cross(obb.up, obb.right);
bool overlap = true;
// Test the OBB against the frustum planes. Frustum planes are inward-facing.
// The OBB is outside if it's entirely behind one of the frustum planes.
// See "Real-Time Rendering", 3rd Edition, 16.10.2.
for (int i = 0; overlap && i < numPlanes; i++)
{
Vector3 n = frustum.planes[i].normal;
float d = frustum.planes[i].distance;
// Max projection of the half-diagonal onto the normal (always positive).
float maxHalfDiagProj = obb.extentX * Mathf.Abs(Vector3.Dot(n, obb.right))
+ obb.extentY * Mathf.Abs(Vector3.Dot(n, obb.up))
+ obb.extentZ * Mathf.Abs(Vector3.Dot(n, forward));
// Negative distance -> center behind the plane (outside).
float centerToPlaneDist = Vector3.Dot(n, center) + d;
// outside = maxHalfDiagProj < -centerToPlaneDist
// outside = maxHalfDiagProj + centerToPlaneDist < 0
// overlap = overlap && !outside
overlap = overlap && (maxHalfDiagProj + centerToPlaneDist >= 0);
}
if (numCorners == 0) return overlap;
// Test the frustum corners against OBB planes. The OBB planes are outward-facing.
// The frustum is outside if all of its corners are in front of one of the OBB planes.
// See "Correct Frustum Culling" by Inigo Quilez.
// We can exploit the symmetry of the box by only testing against 3 planes rather than 6.
Plane[] planes = new Plane[3];
planes[0].normal = obb.right;
planes[0].distance = obb.extentX;
planes[1].normal = obb.up;
planes[1].distance = obb.extentY;
planes[2].normal = forward;
planes[2].distance = obb.extentZ;
for (int i = 0; overlap && i < 3; i++)
{
Plane plane = planes[i];
// We need a separate counter for the "box fully inside frustum" case.
bool outsidePos = true; // Positive normal
bool outsideNeg = true; // Reversed normal
// Merge 2 loops. Continue as long as all points are outside either plane.
for (int j = 0; j < numCorners; j++)
{
float proj = Vector3.Dot(plane.normal, frustum.corners[j] - center);
outsidePos = outsidePos && ( proj > plane.distance);
outsideNeg = outsideNeg && (-proj > plane.distance);
}
overlap = overlap && !(outsidePos || outsideNeg);
}
return overlap;
}
public static readonly Matrix4x4 FlipMatrixLHSRHS = Matrix4x4.Scale(new Vector3(1, 1, -1));
public static Vector4 Plane(Vector3 position, Vector3 normal)

else
return Matrix4x4.Perspective(camera.fieldOfView, camera.aspect, camera.nearClipPlane, camera.farClipPlane);
}
}
} // class GeometryUtils
}
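The near-plane special case above follows the row-combination scheme from the Gribb-Hartmann paper cited in the comment; GeometryUtility.CalculateFrustumPlanes produces the other five planes, and the near plane is re-derived by hand presumably because the helper assumes a [-1, 1] clip-space depth range while HDRP uses [0, 1]. For reference, a minimal C# sketch of the full extraction is shown below; FrustumPlanesSketch, NormalizedPlane and Extract are hypothetical names and are not part of GeometryUtils.cs.

using UnityEngine;

// A sketch of Gribb-Hartmann frustum plane extraction, assuming clip = viewProj * worldPos
// (column vectors) and a [0, 1] clip-space depth range.
public static class FrustumPlanesSketch
{
    static Plane NormalizedPlane(Vector4 r)
    {
        Vector3 n = new Vector3(r.x, r.y, r.z);
        float invLen = 1.0f / n.magnitude;
        return new Plane(n * invLen, r.w * invLen); // Inward-facing: dot(n, p) + d >= 0 means inside
    }

    public static Plane[] Extract(Matrix4x4 vp)
    {
        Vector4 r0 = vp.GetRow(0), r1 = vp.GetRow(1), r2 = vp.GetRow(2), r3 = vp.GetRow(3);
        return new[]
        {
            NormalizedPlane(r3 + r0), // left:   w + x >= 0
            NormalizedPlane(r3 - r0), // right:  w - x >= 0
            NormalizedPlane(r3 + r1), // bottom: w + y >= 0
            NormalizedPlane(r3 - r1), // top:    w - y >= 0
            NormalizedPlane(r2),      // near:   z >= 0 for [0, 1] depth (use r3 + r2 for [-1, 1])
            NormalizedPlane(r3 - r2), // far:    w - z >= 0
        };
    }
}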

ScriptableRenderPipeline/Core/CoreRP/ShaderLibrary/Common.hlsl (14 lines changed)


#endif // #ifndef real
// SHADER_TARGET is only defined in compute shaders from 2018.2 onwards, so define our own for now
// (Note: only shader model 4.5 and above supports compute shaders)
#ifdef SHADER_STAGE_COMPUTE
# ifndef SHADER_TARGET
# if defined(SHADER_API_METAL) || defined(SHADER_API_VULKAN)
# define SHADER_TARGET 45
# else
# define SHADER_TARGET 50
# endif
# endif
#endif
// Include language header
#if defined(SHADER_API_D3D11)
#include "API/D3D11.hlsl"

#if (SHADER_TARGET >= 45)
uint FastLog2(uint x)
{
return firstbithigh(x) - 1u;
return firstbithigh(x);
}
#endif
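For x > 0, firstbithigh(x) already returns the zero-based index of the most significant set bit, which is floor(log2(x)), so no "- 1u" adjustment is needed (and for x = 1 the subtraction would wrap around in unsigned arithmetic). A rough CPU-side check, written as a C# sketch; the FirstBitHigh loop below only mirrors the intrinsic's behavior for nonzero inputs and is not the HLSL intrinsic itself:

using System;

static class FastLog2Check
{
    // Mirrors firstbithigh for x > 0: index of the most significant set bit.
    static uint FirstBitHigh(uint x)
    {
        uint index = 0;
        while ((x >>= 1) != 0) index++;
        return index;
    }

    static void Main()
    {
        // floor(log2(256)) = 8; subtracting 1 would give 7.
        Console.WriteLine(FirstBitHigh(256u));                      // 8
        Console.WriteLine((uint)Math.Floor(Math.Log(256.0, 2.0)));  // 8
    }
}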

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Camera/HDCamera.cs (20 lines changed)


public Matrix4x4 projMatrix;
public Matrix4x4 nonJitteredProjMatrix;
public Vector4 screenSize;
public Plane[] frustumPlanes;
public Frustum frustum;
public Vector4[] frustumPlaneEquations;
public Camera camera;
public uint taaFrameIndex;

public HDCamera(Camera cam)
{
camera = cam;
frustumPlanes = new Plane[6];
frustum = new Frustum();
frustumPlaneEquations = new Vector4[6];
viewMatrixStereo = new Matrix4x4[2];

prevViewProjMatrix *= cameraDisplacement; // Now prevViewProjMatrix correctly transforms this frame's camera-relative positionWS
}
// Warning: the near and far planes appear to be broken (or rather, the far plane seems broken)
GeometryUtility.CalculateFrustumPlanes(viewProjMatrix, frustumPlanes);
frustum = Frustum.Create(viewProjMatrix, true, true);
for (int i = 0; i < 4; i++)
// Left, right, top, bottom, near, far.
for (int i = 0; i < 6; i++)
// Left, right, top, bottom.
frustumPlaneEquations[i] = new Vector4(frustumPlanes[i].normal.x, frustumPlanes[i].normal.y, frustumPlanes[i].normal.z, frustumPlanes[i].distance);
frustumPlaneEquations[i] = new Vector4(frustum.planes[i].normal.x, frustum.planes[i].normal.y, frustum.planes[i].normal.z, frustum.planes[i].distance);
// Near, far.
Vector4 forward = (camera.cameraType == CameraType.Reflection) ? camera.worldToCameraMatrix.GetRow(2) : new Vector4(camera.transform.forward.x, camera.transform.forward.y, camera.transform.forward.z, 0.0f);
// We need to switch the forward direction based on handedness (reminder: regular cameras have a negative determinant in Unity, while reflection probes follow the DX convention and have a positive determinant)
forward = viewParam.x < 0.0f ? forward : -forward;
frustumPlaneEquations[4] = new Vector4( forward.x, forward.y, forward.z, -Vector3.Dot(forward, relPos) - camera.nearClipPlane);
frustumPlaneEquations[5] = new Vector4(-forward.x, -forward.y, -forward.z, Vector3.Dot(forward, relPos) + camera.farClipPlane);
m_LastFrameActive = Time.frameCount;

ScriptableRenderPipeline/HDRenderPipeline/HDRP/HDRenderPipeline.cs (3 lines changed)


}
}
// The pass only requires the volume properties, and can run async.
m_VolumetricLightingModule.VoxelizeDensityVolumes(hdCamera, cmd);
// Render the volumetric lighting.
// The pass requires the volume properties, the light list and the shadows, and can run async.
m_VolumetricLightingModule.VolumetricLightingPass(hdCamera, cmd, m_FrameSettings);

ScriptableRenderPipeline/HDRenderPipeline/HDRP/HDStringConstants.cs (8 lines changed)


public static readonly int _Result1 = Shader.PropertyToID("_Result1");
public static readonly int _AmbientProbeCoeffs = Shader.PropertyToID("_AmbientProbeCoeffs");
public static readonly int _GlobalFog_Extinction = Shader.PropertyToID("_GlobalFog_Extinction");
public static readonly int _GlobalFog_Scattering = Shader.PropertyToID("_GlobalFog_Scattering");
public static readonly int _GlobalFog_Asymmetry = Shader.PropertyToID("_GlobalFog_Asymmetry");
public static readonly int _GlobalExtinction = Shader.PropertyToID("_GlobalExtinction");
public static readonly int _GlobalScattering = Shader.PropertyToID("_GlobalScattering");
public static readonly int _GlobalAsymmetry = Shader.PropertyToID("_GlobalAsymmetry");
public static readonly int _VBufferScaleAndSliceCount = Shader.PropertyToID("_VBufferScaleAndSliceCount");
public static readonly int _VBufferSliceCount = Shader.PropertyToID("_VBufferSliceCount");
public static readonly int _VBufferDepthEncodingParams = Shader.PropertyToID("_VBufferDepthEncodingParams");
public static readonly int _VBufferDepthDecodingParams = Shader.PropertyToID("_VBufferDepthDecodingParams");
public static readonly int _VBufferCoordToViewDirWS = Shader.PropertyToID("_VBufferCoordToViewDirWS");

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Deferred.shader (2 lines changed)


Outputs outputs;
#ifdef OUTPUT_SPLIT_LIGHTING
if (_EnableSubsurfaceScattering != 0 && PixelHasSubsurfaceScattering(bsdfData))
if (_EnableSubsurfaceScattering != 0 && ShouldOutputSplitLighting(bsdfData))
{
outputs.specularLighting = float4(specularLighting, 1.0);
outputs.diffuseLighting = TagLightingForSSS(diffuseLighting);

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightEvaluation.hlsl (2 lines changed)


#if (SHADEROPTIONS_VOLUMETRIC_LIGHTING_PRESET != 0)
float distVol = (lightData.lightType == GPULIGHTTYPE_PROJECTOR_BOX) ? distances.w : distances.x;
attenuation *= TransmittanceHomogeneousMedium(_GlobalFog_Extinction, distVol);
attenuation *= TransmittanceHomogeneousMedium(_GlobalExtinction, distVol);
#endif
// Projector lights always have cookies, so we can perform clipping inside the if().

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightLoop/Deferred.compute (2 lines changed)


float3 specularLighting;
LightLoop(V, posInput, preLightData, bsdfData, bakeLightingData, featureFlags, diffuseLighting, specularLighting);
if (_EnableSubsurfaceScattering != 0 && PixelHasSubsurfaceScattering(bsdfData))
if (_EnableSubsurfaceScattering != 0 && ShouldOutputSplitLighting(bsdfData))
{
specularLightingUAV[pixelCoord] = float4(specularLighting, 1.0);
diffuseLightingUAV[pixelCoord] = TagLightingForSSS(diffuseLighting);

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/LightLoop/LightLoop.cs (2 lines changed)


public const int k_MaxCascadeCount = 4; // Should not be less than m_Settings.directionalLightCascadeCount
static readonly Vector3 k_BoxCullingExtentThreshold = Vector3.one * 0.01f;
// Static keyword is required here else we get a "DestroyBuffer can only be call in main thread"
// Static keyword is required here else we get a "DestroyBuffer can only be called from the main thread"
static ComputeBuffer s_DirectionalLightDatas = null;
static ComputeBuffer s_LightDatas = null;
static ComputeBuffer s_EnvLightDatas = null;

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/Resources/VolumetricLighting.compute (10 lines changed)


CBUFFER_START(UnityVolumetricLighting)
float4 _VBufferSampleOffset; // {x, y, z}, w = rendered frame count
float4x4 _VBufferCoordToViewDirWS; // Actually just 3x3, but Unity can only set 4x4
float _CornetteShanksConstant; // CornetteShanksPhasePartConstant(_GlobalFog_Asymmetry)
float _CornetteShanksConstant; // CornetteShanksPhasePartConstant(_GlobalAsymmetry)
CBUFFER_END
//--------------------------------------------------------------------------------------------------

// Sample the participating medium at 'tc' (or 'centerWS').
// We consider it to be constant along the interval [t0, t1] (within the voxel).
// TODO: piecewise linear.
float3 scattering = _GlobalFog_Scattering;
float extinction = max(_GlobalFog_Extinction, FLT_MIN); // Avoid NaNs
float asymmetry = _GlobalFog_Asymmetry;
float3 scattering = _GlobalScattering;
float extinction = max(_GlobalExtinction, FLT_MIN); // Avoid NaNs
float asymmetry = _GlobalAsymmetry;
// TODO: define a function ComputeGlobalFogCoefficients(float3 centerWS),
// which allows procedural definition of extinction and scattering.

float reprojZ = mul(_PrevViewProjMatrix, float4(centerWS, 1)).w;
float4 reprojValue = SampleVBuffer(TEXTURE3D_PARAM(_VBufferLightingHistory, s_trilinear_clamp_sampler),
false, reprojPosNDC, reprojZ,
_VBufferScaleAndSliceCount,
_VBufferSliceCount.xy,
_VBufferDepthEncodingParams,
_VBufferDepthDecodingParams);
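_CornetteShanksConstant caches the view-independent part of the Cornette-Shanks phase function, which depends only on the global asymmetry parameter g, so it can be computed once per frame instead of per voxel. The standard form and a plausible split are sketched below in C#; the exact factorization used by CornetteShanksPhasePartConstant is assumed here, not taken from this diff.

using System;

static class CornetteShanksSketch
{
    // View-independent part: 3 / (8 * pi) * (1 - g^2) / (2 + g^2).
    public static float PhasePartConstant(float g)
    {
        return (3.0f / (8.0f * (float)Math.PI)) * (1.0f - g * g) / (2.0f + g * g);
    }

    // View-dependent part, so that Phase(g, cosTheta) = PhasePartConstant(g) * PhasePartVarying(g, cosTheta).
    public static float PhasePartVarying(float g, float cosTheta)
    {
        float d = 1.0f + g * g - 2.0f * g * cosTheta;
        return (1.0f + cosTheta * cosTheta) / (d * (float)Math.Sqrt(d)); // d^(3/2)
    }
}

For g = 0 this reduces to (3 / (16 pi)) * (1 + cos^2(theta)), which integrates to 1 over the sphere.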

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/VBuffer.hlsl (31 lines changed)


// Therefore, given 'logEncodedDepth', we compute a new depth value
// which allows us to perform HW interpolation that is linear in view space.
float ComputeLerpPositionForLogEncoding(float linearDepth, float logEncodedDepth,
float4 VBufferScaleAndSliceCount,
float2 VBufferSliceCount,
float numSlices = VBufferScaleAndSliceCount.z;
float rcpNumSlices = VBufferScaleAndSliceCount.w;
float numSlices = VBufferSliceCount.x;
float rcpNumSlices = VBufferSliceCount.y;
float s0 = floor(d * numSlices - 0.5);
float s1 = ceil(d * numSlices - 0.5);

// If (clampToEdge == false), out-of-bounds loads return 0.
float4 SampleVBuffer(TEXTURE3D_ARGS(VBufferLighting, trilinearSampler), bool clampToEdge,
float2 positionNDC, float linearDepth,
float4 VBufferScaleAndSliceCount,
float2 VBufferSliceCount,
float numSlices = VBufferScaleAndSliceCount.z;
float rcpNumSlices = VBufferScaleAndSliceCount.w;
float numSlices = VBufferSliceCount.x;
float rcpNumSlices = VBufferSliceCount.y;
// Account for the visible area of the V-Buffer.
float2 uv = positionNDC * VBufferScaleAndSliceCount.xy;
float2 uv = positionNDC;
// The distance between slices is log-encoded.
float z = linearDepth;

float w = d;
#else
// Adjust the texture coordinate for HW trilinear sampling.
float w = ComputeLerpPositionForLogEncoding(z, d, VBufferScaleAndSliceCount, VBufferDepthDecodingParams);
float w = ComputeLerpPositionForLogEncoding(z, d, VBufferSliceCount, VBufferDepthDecodingParams);
#endif
return SAMPLE_TEXTURE3D_LOD(VBufferLighting, trilinearSampler, float3(uv, w), 0);

float4 SampleInScatteredRadianceAndTransmittance(TEXTURE3D_ARGS(VBufferLighting, trilinearSampler),
float2 positionNDC, float linearDepth,
float4 VBufferResolution,
float4 VBufferScaleAndSliceCount,
float2 VBufferSliceCount,
float4 VBufferDepthEncodingParams,
float4 VBufferDepthDecodingParams)
{

VBufferScaleAndSliceCount,
VBufferSliceCount,
#else
// Perform biquadratic reconstruction in XY, linear in Z, using 4x trilinear taps.
// Account for the visible area of the V-Buffer.
float2 xy = positionNDC * (VBufferResolution.xy * VBufferScaleAndSliceCount.xy);
#else // Perform biquadratic reconstruction in XY, linear in Z, using 4x trilinear taps.
float2 uv = positionNDC;
float2 xy = uv * VBufferResolution.xy;
float2 ic = floor(xy);
float2 fc = frac(xy);

float w = d;
#else
// Adjust the texture coordinate for HW trilinear sampling.
float w = ComputeLerpPositionForLogEncoding(z, d, VBufferScaleAndSliceCount, VBufferDepthDecodingParams);
float w = ComputeLerpPositionForLogEncoding(z, d, VBufferSliceCount, VBufferDepthDecodingParams);
#endif
float2 weights[2], offsets[2];
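The comment "The distance between slices is log-encoded" means the V-Buffer distributes its depth slices exponentially between the near and far planes, so that nearby fog gets most of the resolution. The actual encoding behind _VBufferDepthEncodingParams/_VBufferDepthDecodingParams is not shown in this diff; the sketch below uses a generic exponential slice distribution purely as an illustration, not HDRP's exact formula.

using System;

static class LogSliceSketch
{
    // Slice i (0..numSlices) sits at view-space depth near * (far/near)^(i/numSlices).
    public static float SliceToDepth(int i, int numSlices, float near, float far)
    {
        return near * (float)Math.Pow(far / near, (float)i / numSlices);
    }

    // Inverse mapping: linear view-space depth to a continuous slice coordinate in [0, numSlices].
    public static float DepthToSlice(float z, int numSlices, float near, float far)
    {
        return numSlices * (float)(Math.Log(z / near) / Math.Log(far / near));
    }
}

With the m_VBufferNearPlane = 0.5 and m_VBufferFarPlane = 64 defaults from VolumetricLighting.cs and, say, 64 slices, this distribution places half of the slices within roughly the first 5.7 meters.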

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/VolumetricLighting.cs (125 lines changed)


[Serializable]
public class VolumeParameters
{
public Bounds bounds; // Position and dimensions in meters
public bool isLocal; // Enables voxelization
public Color albedo; // Single scattering albedo [0, 1]
public float meanFreePath; // In meters [1, inf]. Should be chromatic - this is an optimization!
public float asymmetry; // Single global parameter for all volumes. TODO: UX

bounds = new Bounds(Vector3.zero, Vector3.positiveInfinity);
isLocal = true;
public bool IsVolumeUnbounded()
public bool IsLocalVolume()
return bounds.size.x == float.PositiveInfinity &&
bounds.size.y == float.PositiveInfinity &&
bounds.size.z == float.PositiveInfinity;
return isLocal;
}
public Vector3 GetAbsorptionCoefficient()

public void Constrain()
{
bounds.size = Vector3.Max(bounds.size, Vector3.zero);
albedo.r = Mathf.Clamp01(albedo.r);
albedo.g = Mathf.Clamp01(albedo.g);
albedo.b = Mathf.Clamp01(albedo.b);
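GetAbsorptionCoefficient and GetProperties (referenced elsewhere in this file) derive the optical coefficients from albedo and meanFreePath. Their bodies are not shown in this diff; the sketch below states the standard radiative-transfer relationships as an assumption, not as a copy of the elided code.

using UnityEngine;

static class VolumeOpticsSketch
{
    // extinction = 1 / meanFreePath, scattering = albedo * extinction,
    // absorption = extinction - scattering (all in 1/meters).
    public static void GetCoefficients(Color albedo, float meanFreePath,
                                       out float extinction, out Vector3 scattering, out Vector3 absorption)
    {
        extinction = 1.0f / Mathf.Max(meanFreePath, 1.0f); // the field comment says meanFreePath is in [1, inf] meters
        scattering = new Vector3(albedo.r, albedo.g, albedo.b) * extinction;
        absorption = new Vector3(extinction, extinction, extinction) - scattering;
    }
}

For example, a meanFreePath of 10 m with an albedo of 0.5 gives an extinction of 0.1 per meter, and scattering and absorption of 0.05 per meter per channel.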

public long viewID = -1; // -1 is invalid; positive for Game Views, 0 otherwise
public RenderTexture[] lightingRTEX = null;
public RenderTargetIdentifier[] lightingRTID = null;
public RenderTexture densityRTEX = null;
public RenderTargetIdentifier densityRTID = -1; // RenderTargetIdentifier cannot be NULL
public RenderTargetIdentifier GetLightingIntegralBuffer() // Of the current frame
{

return lightingRTID[1 + ((Time.renderedFrameCount + 1) & 1)];
}
public RenderTargetIdentifier GetDensityBuffer()
{
Debug.Assert(viewID >= 0);
return densityRTID;
}
public void Create(long viewID, int w, int h, int d)
{
Debug.Assert(viewID >= 0);

ComputeShader m_VolumetricLightingCS = null;
List<VBuffer> m_VBuffers = null;
float m_VBufferNearPlane = 0.5f; // Distance in meters; dynamic modifications not handled by reprojection
float m_VBufferFarPlane = 64.0f; // Distance in meters; dynamic modifications not handled by reprojection
const float k_LogScale = 0.5f;
List<VBuffer> m_VBuffers = null;
List<OrientedBBox> m_VisibleVolumes = null;
List<VolumeProperties> m_VisibleVolumeProperties = null;
public const int k_MaxVisibleVolumeCount = 512;
// Static keyword is required here else we get a "DestroyBuffer can only be called from the main thread"
static ComputeBuffer s_VisibleVolumesBuffer = null;
static ComputeBuffer s_VisibleVolumePropertiesBuffer = null;
float m_VBufferNearPlane = 0.5f; // Distance in meters; dynamic modifications not handled by reprojection
float m_VBufferFarPlane = 64.0f; // Distance in meters; dynamic modifications not handled by reprojection
const float k_LogScale = 0.5f;
m_VolumetricLightingCS = asset.renderPipelineResources.volumetricLightingCS;
m_VBuffers = new List<VBuffer>(0);
m_VolumetricLightingCS = asset.renderPipelineResources.volumetricLightingCS;
m_VBuffers = new List<VBuffer>();
m_VisibleVolumes = new List<OrientedBBox>();
m_VisibleVolumeProperties = new List<VolumeProperties>();
s_VisibleVolumesBuffer = new ComputeBuffer(k_MaxVisibleVolumeCount, System.Runtime.InteropServices.Marshal.SizeOf(typeof(OrientedBBox)));
s_VisibleVolumePropertiesBuffer = new ComputeBuffer(k_MaxVisibleVolumeCount, System.Runtime.InteropServices.Marshal.SizeOf(typeof(VolumeProperties)));
}
public void Cleanup()

m_VBuffers[i].Destroy();
}
m_VBuffers = null;
m_VBuffers = null;
m_VisibleVolumes = null;
m_VisibleVolumeProperties = null;
CoreUtils.SafeRelease(s_VisibleVolumesBuffer);
CoreUtils.SafeRelease(s_VisibleVolumePropertiesBuffer);
}
public void ResizeVBuffer(HDCamera camera, int screenWidth, int screenHeight)

// Since a single voxel corresponds to a tile (e.g. 8x8) of pixels,
// the VBuffer can potentially extend past the boundaries of the viewport.
// The function returns the fraction of the {width, height} of the VBuffer visible on screen.
// Note: for performance reasons, scale is unused (implicitly 1). The error is typically under 1%.
static Vector2 ComputeVBufferResolutionAndScale(VolumetricLightingPreset preset,
int screenWidth, int screenHeight,
ref int w, ref int h, ref int d)
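The comments above explain why a scale is needed: the buffer is rounded up to a whole number of voxel tiles, so it can overshoot the viewport, and the returned scale is the visible fraction. A hedged C# sketch of that computation follows; the 8x8 tile is an assumed example, since the per-preset tile and slice counts are not shown in this diff.

using UnityEngine;

static class VBufferResolutionSketch
{
    // Returns the fraction of the {width, height} of the buffer that is visible on screen.
    public static Vector2 ComputeResolutionAndScale(int screenWidth, int screenHeight,
                                                    int tileSize, int sliceCount,
                                                    ref int w, ref int h, ref int d)
    {
        // Round the buffer up to a whole number of tiles (one voxel per tile).
        w = (screenWidth  + tileSize - 1) / tileSize;
        h = (screenHeight + tileSize - 1) / tileSize;
        d = sliceCount;

        // Visible fraction of the (possibly larger) buffer.
        return new Vector2((float)screenWidth  / (w * tileSize),
                           (float)screenHeight / (h * tileSize));
    }
}

For a 1000x700 viewport with an 8x8 tile this gives w = 125, h = 88 and a scale of (1.0, 700/704), i.e. less than 1% of the buffer is off screen, consistent with the note above about the error typically being under 1%.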

{
if (preset == VolumetricLightingPreset.Off) return;
HomogeneousFog globalFogComponent = HomogeneousFog.GetGlobalFogComponent();
HomogeneousDensityVolume globalVolume = HomogeneousDensityVolume.GetGlobalHomogeneousDensityVolume();
VolumeProperties globalFogProperties = (globalFogComponent != null) ? globalFogComponent.volumeParameters.GetProperties()
VolumeProperties globalVolumeProperties = (globalVolume != null) ? globalVolume.volumeParameters.GetProperties()
float asymmetry = globalFogComponent != null ? globalFogComponent.volumeParameters.asymmetry : 0;
cmd.SetGlobalVector(HDShaderIDs._GlobalFog_Scattering, globalFogProperties.scattering);
cmd.SetGlobalFloat( HDShaderIDs._GlobalFog_Extinction, globalFogProperties.extinction);
cmd.SetGlobalFloat( HDShaderIDs._GlobalFog_Asymmetry, asymmetry);
float asymmetry = globalVolume != null ? globalVolume.volumeParameters.asymmetry : 0;
cmd.SetGlobalVector(HDShaderIDs._GlobalScattering, globalVolumeProperties.scattering);
cmd.SetGlobalFloat( HDShaderIDs._GlobalExtinction, globalVolumeProperties.extinction);
cmd.SetGlobalFloat( HDShaderIDs._GlobalAsymmetry, asymmetry);
Vector2 scale = ComputeVBufferResolutionAndScale(preset, (int)camera.screenSize.x, (int)camera.screenSize.y, ref w, ref h, ref d);
ComputeVBufferResolutionAndScale(preset, (int)camera.screenSize.x, (int)camera.screenSize.y, ref w, ref h, ref d);
VBuffer vBuffer = FindVBuffer(camera.GetViewID());
Debug.Assert(vBuffer != null);

cmd.SetGlobalVector( HDShaderIDs._VBufferScaleAndSliceCount, new Vector4(scale.x, scale.y, d, 1.0f / d));
cmd.SetGlobalVector( HDShaderIDs._VBufferSliceCount, new Vector4(d, 1.0f / d));
public void VoxelizeDensityVolumes(HDCamera camera, CommandBuffer cmd)
{
if (preset == VolumetricLightingPreset.Off) return;
Vector3 camPosition = camera.camera.transform.position;
Vector3 camOffset = Vector3.zero; // World-origin-relative
if (ShaderConfig.s_CameraRelativeRendering != 0)
{
camOffset = -camPosition; // Camera-relative
}
m_VisibleVolumes.Clear();
m_VisibleVolumeProperties.Clear();
// Collect all the visible volume data, and upload it to the GPU.
HomogeneousDensityVolume[] volumes = Object.FindObjectsOfType(typeof(HomogeneousDensityVolume)) as HomogeneousDensityVolume[];
foreach (HomogeneousDensityVolume volume in volumes)
{
// Only test active finite volumes.
if (volume.enabled && volume.volumeParameters.IsLocalVolume())
{
// TODO: cache these?
var obb = OrientedBBox.Create(volume.transform);
// Frustum cull on the CPU for now. TODO: do it on the GPU.
if (GeometryUtils.Overlap(obb, camOffset, camera.frustum, 6, 8))
{
// TODO: cache these?
var properties = volume.volumeParameters.GetProperties();
m_VisibleVolumes.Add(obb);
m_VisibleVolumeProperties.Add(properties);
}
}
}
s_VisibleVolumesBuffer.SetData(m_VisibleVolumes);
s_VisibleVolumePropertiesBuffer.SetData(m_VisibleVolumeProperties);
}
// Ref: https://en.wikipedia.org/wiki/Close-packing_of_equal_spheres
// The returned {x, y} coordinates (and all spheres) are all within the (-0.5, 0.5)^2 range.
// The pattern has been rotated by 15 degrees to maximize the resolution along X and Y:

VBuffer vBuffer = FindVBuffer(camera.GetViewID());
Debug.Assert(vBuffer != null);
HomogeneousFog globalFogComponent = HomogeneousFog.GetGlobalFogComponent();
float asymmetry = globalFogComponent != null ? globalFogComponent.volumeParameters.asymmetry : 0;
HomogeneousDensityVolume globalVolume = HomogeneousDensityVolume.GetGlobalHomogeneousDensityVolume();
float asymmetry = globalVolume != null ? globalVolume.volumeParameters.asymmetry : 0;
if (globalFogComponent == null)
if (globalVolume == null)
{
// Clear the render target instead of running the shader.
// CoreUtils.SetRenderTarget(cmd, GetVBufferLightingIntegral(viewOffset), ClearFlag.Color, CoreUtils.clearColorAllBlack);

}
int w = 0, h = 0, d = 0;
Vector2 scale = ComputeVBufferResolutionAndScale(preset, (int)camera.screenSize.x, (int)camera.screenSize.y, ref w, ref h, ref d);
float vFoV = camera.camera.fieldOfView * Mathf.Deg2Rad;
ComputeVBufferResolutionAndScale(preset, (int)camera.screenSize.x, (int)camera.screenSize.y, ref w, ref h, ref d);
// Compute it using the scaled resolution to account for the visible area of the VBuffer.
Vector4 scaledRes = new Vector4(w * scale.x, h * scale.y, 1.0f / (w * scale.x), 1.0f / (h * scale.y));
Matrix4x4 transform = HDUtils.ComputePixelCoordToWorldSpaceViewDirectionMatrix(vFoV, scaledRes, camera.viewMatrix, false);
float vFoV = camera.camera.fieldOfView * Mathf.Deg2Rad;
Vector4 resolution = new Vector4(w, h, 1.0f / w, 1.0f / h);
Matrix4x4 transform = HDUtils.ComputePixelCoordToWorldSpaceViewDirectionMatrix(vFoV, resolution, camera.viewMatrix, false);
camera.SetupComputeShader(m_VolumetricLightingCS, cmd);

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/DiffusionProfile/DiffusionProfileSettings.cs (18 lines changed)


[GenerateHLSL]
public class DiffusionProfileConstants
{
public const int DIFFUSION_PROFILE_COUNT = 16; // Max. number of profiles, including the slot taken by the neutral profile
public const int DIFFUSION_PROFILE_NEUTRAL_ID = 0; // Does not result in blurring
public const int SSS_N_SAMPLES_NEAR_FIELD = 55; // Used for extreme close ups; must be a Fibonacci number
public const int SSS_N_SAMPLES_FAR_FIELD = 21; // Used at a regular distance; must be a Fibonacci number
public const int SSS_LOD_THRESHOLD = 4; // The LoD threshold of the near-field kernel (in pixels)
public const int DIFFUSION_PROFILE_COUNT = 16; // Max. number of profiles, including the slot taken by the neutral profile
public const int DIFFUSION_PROFILE_NEUTRAL_ID = 0; // Does not result in blurring
public const int SSS_N_SAMPLES_NEAR_FIELD = 55; // Used for extreme close ups; must be a Fibonacci number
public const int SSS_N_SAMPLES_FAR_FIELD = 21; // Used at a regular distance; must be a Fibonacci number
public const int SSS_LOD_THRESHOLD = 4; // The LoD threshold of the near-field kernel (in pixels)
public const int SSS_BASIC_N_SAMPLES = 11; // Must be an odd number
public const int SSS_BASIC_DISTANCE_SCALE = 3; // SSS distance units per centimeter
public const int SSS_BASIC_N_SAMPLES = 11; // Must be an odd number
public const int SSS_BASIC_DISTANCE_SCALE = 3; // SSS distance units per centimeter
// <<< Old SSS Model
}

{
public DiffusionProfile[] profiles;
[NonSerialized] public uint texturingModeFlags; // 1 bit/profile; 0 = PreAndPostScatter, 1 = PostScatter
[NonSerialized] public uint transmissionFlags; // 2 bit/profile; 0 = inf. thick, 1 = thin, 2 = regular
[NonSerialized] public uint texturingModeFlags; // 1 bit/profile: 0 = PreAndPostScatter, 1 = PostScatter
[NonSerialized] public uint transmissionFlags; // 1 bit/profile: 0 = regular, 1 = thin
[NonSerialized] public Vector4[] thicknessRemaps; // Remap: 0 = start, 1 = end - start
[NonSerialized] public Vector4[] worldScales; // X = meters per world unit; Y = world units per meter
[NonSerialized] public Vector4[] shapeParams; // RGB = S = 1 / D, A = filter radius
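Both flag fields now pack one bit per diffusion profile, indexed by the profile ID, which matches how the shader reads them later in this diff (IsBitSet(asuint(_TransmissionFlags), diffusionProfile) in Lit.hlsl). A small C# sketch of the packing and the corresponding test; SetBit and IsBitSet here are illustrative helpers, not the engine's:

static class ProfileFlagsSketch
{
    // Set or clear bit 'profileID', e.g. when the profile uses post-scatter texturing or thin-object transmission.
    public static uint SetBit(uint flags, int profileID, bool value)
    {
        uint mask = 1u << profileID;
        return value ? (flags | mask) : (flags & ~mask);
    }

    // Mirrors the shader-side per-profile bit test.
    public static bool IsBitSet(uint flags, int profileID)
    {
        return ((flags >> profileID) & 1u) != 0;
    }
}

With DIFFUSION_PROFILE_COUNT = 16, both fields fit comfortably in a single uint.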

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/Lit/Lit.hlsl (44 lines changed)


/* 26 */ LIGHT_FEATURE_MASK_FLAGS_OPAQUE | MATERIAL_FEATURE_MASK_FLAGS, // Catch all case with MATERIAL_FEATURE_MASK_FLAGS is needed in case we disable material classification
};
// Additional bits set in 'bsdfData.materialFeatures' to save registers and simplify feature tracking.
#define MATERIAL_FEATURE_FLAGS_SSS_OUTPUT_SPLIT_LIGHTING ((MATERIAL_FEATURE_MASK_FLAGS + 1) << 0)
#define MATERIAL_FEATURE_FLAGS_SSS_TEXTURING_MODE_OFFSET FastLog2((MATERIAL_FEATURE_MASK_FLAGS + 1) << 1) // 2 bits
#define MATERIAL_FEATURE_FLAGS_TRANSMISSION_MODE_THIN ((MATERIAL_FEATURE_MASK_FLAGS + 1) << 3)
uint FeatureFlagsToTileVariant(uint featureFlags)
{
for (int i = 0; i < NUM_FEATURE_VARIANTS; i++)

bsdfData.diffusionProfile = diffusionProfile;
bsdfData.fresnel0 = _TransmissionTintsAndFresnel0[diffusionProfile].a;
bsdfData.subsurfaceMask = subsurfaceMask;
// Note: ApplySubsurfaceScatteringTexturingMode also tests the diffusionProfile when updating diffuseColor based on SSS
bsdfData.materialFeatures |= MATERIAL_FEATURE_FLAGS_SSS_OUTPUT_SPLIT_LIGHTING;
bsdfData.materialFeatures |= GetSubsurfaceScatteringTexturingMode(bsdfData.diffusionProfile) << MATERIAL_FEATURE_FLAGS_SSS_TEXTURING_MODE_OFFSET;
}
// Assume that bsdfData.diffusionProfile is init

bsdfData.thickness);
#endif
bsdfData.useThickObjectMode = !IsBitSet(asuint(_TransmissionFlags), diffusionProfile);
bool useThinObjectMode = IsBitSet(asuint(_TransmissionFlags), diffusionProfile);
bsdfData.materialFeatures |= useThinObjectMode ? MATERIAL_FEATURE_FLAGS_TRANSMISSION_MODE_THIN : 0;
if (bsdfData.useThickObjectMode)
if (useThinObjectMode)
{
// Apply no displacement.
bsdfData.thickness = 0;
}
else
{
// Compute the thickness in world units along the normal.
float thicknessInMeters = bsdfData.thickness * METERS_PER_MILLIMETER;

}
else
{
// Apply no displacement.
bsdfData.thickness = 0;
}
}

{
if (HasFeatureFlag(bsdfData.materialFeatures, MATERIALFEATUREFLAGS_LIT_SUBSURFACE_SCATTERING))
{
bsdfData.diffuseColor = ApplySubsurfaceScatteringTexturingMode(bsdfData.diffuseColor, bsdfData.diffusionProfile);
uint texturingMode = (bsdfData.materialFeatures >> MATERIAL_FEATURE_FLAGS_SSS_TEXTURING_MODE_OFFSET) & 3;
bsdfData.diffuseColor = ApplySubsurfaceScatteringTexturingMode(texturingMode, bsdfData.diffuseColor);
}
#ifdef DEBUG_DISPLAY

// Subsurface Scattering functions
//-----------------------------------------------------------------------------
bool PixelHasSubsurfaceScattering(BSDFData bsdfData)
bool ShouldOutputSplitLighting(BSDFData bsdfData)
// bsdfData.subsurfaceMask != 0 allows SSS to be enabled per pixel (i.e. as a per-pixel feature), since in the deferred case MATERIALFEATUREFLAGS_LIT_SUBSURFACE_SCATTERING alone is not sufficient,
// but keep testing MATERIALFEATUREFLAGS_LIT_SUBSURFACE_SCATTERING for the forward case
return bsdfData.diffusionProfile != DIFFUSION_PROFILE_NEUTRAL_ID && bsdfData.subsurfaceMask != 0 && HasFeatureFlag(bsdfData.materialFeatures, MATERIALFEATUREFLAGS_LIT_SUBSURFACE_SCATTERING);
return HasFeatureFlag(bsdfData.materialFeatures, MATERIAL_FEATURE_FLAGS_SSS_OUTPUT_SPLIT_LIGHTING);
}
//-----------------------------------------------------------------------------

float negatedNdotL = -NdotL;
// Apply wrapped lighting to better handle thin objects (cards) at grazing angles.
float backNdotL = bsdfData.useThickObjectMode ? negatedNdotL : wrappedNdotL;
bool useThinObjectMode = HasFeatureFlag(bsdfData.materialFeatures, MATERIAL_FEATURE_FLAGS_TRANSMISSION_MODE_THIN);
float backNdotL = useThinObjectMode ? wrappedNdotL : negatedNdotL;
// Apply BSDF-specific diffuse transmission to attenuation. See also: [SSS-NOTE-TRSM]
// We don't multiply by 'bsdfData.diffuseColor' here. It's done only once in PostEvaluateBSDF().

lerp(_AmbientOcclusionParam.rgb, float3(1.0, 1.0, 1.0), directAmbientOcclusion);
#endif
float3 modifiedDiffuseColor;
// bsdfData.subsurfaceMask != 0 allows SSS to be enabled per pixel (i.e. as a per-pixel feature), since in the deferred case MATERIALFEATUREFLAGS_LIT_SUBSURFACE_SCATTERING alone is not sufficient,
// but keep testing MATERIALFEATUREFLAGS_LIT_SUBSURFACE_SCATTERING for the forward case
if (HasFeatureFlag(bsdfData.materialFeatures, MATERIALFEATUREFLAGS_LIT_SUBSURFACE_SCATTERING) && bsdfData.subsurfaceMask != 0)
modifiedDiffuseColor = ApplySubsurfaceScatteringTexturingMode(bsdfData.diffuseColor, bsdfData.diffusionProfile);
else
modifiedDiffuseColor = bsdfData.diffuseColor;
uint texturingMode = (bsdfData.materialFeatures >> MATERIAL_FEATURE_FLAGS_SSS_TEXTURING_MODE_OFFSET) & 3;
float3 modifiedDiffuseColor = ApplySubsurfaceScatteringTexturingMode(texturingMode, bsdfData.diffuseColor);
// Apply the albedo to the direct diffuse lighting (only once). The indirect (baked)
// diffuse lighting has already had the albedo applied in GetBakedDiffuseLigthing().
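The added flags occupy the bits directly above MATERIAL_FEATURE_MASK_FLAGS: one bit for split-lighting output, two bits for the SSS texturing mode, and one bit for thin-object transmission, which is why the mode is recovered with a shift followed by "& 3". The C# sketch below illustrates the layout; the 0xFF value stands in for MATERIAL_FEATURE_MASK_FLAGS, whose real value is defined elsewhere in Lit.hlsl and is not shown here.

static class MaterialFeatureBitsSketch
{
    const uint MaterialFeatureMaskFlags = 0xFF;                              // placeholder for MATERIAL_FEATURE_MASK_FLAGS
    const uint SssOutputSplitLighting   = (MaterialFeatureMaskFlags + 1) << 0; // 1 bit (bit 8 for a 0xFF mask)
    const int  SssTexturingModeOffset   = 9;                                 // FastLog2((mask + 1) << 1); 2 bits (bits 9-10)
    const uint TransmissionModeThin     = (MaterialFeatureMaskFlags + 1) << 3; // 1 bit (bit 11)

    public static uint Pack(uint materialFeatures, bool splitLighting, uint texturingMode, bool thin)
    {
        if (splitLighting) materialFeatures |= SssOutputSplitLighting;
        materialFeatures |= (texturingMode & 3u) << SssTexturingModeOffset;
        if (thin)          materialFeatures |= TransmissionModeThin;
        return materialFeatures;
    }

    public static uint UnpackTexturingMode(uint materialFeatures)
    {
        return (materialFeatures >> SssTexturingModeOffset) & 3u; // same shift-and-mask as the shader code above
    }
}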

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/SubsurfaceScattering/SubsurfaceScattering.compute (3 lines changed)


// divergence of execution across the warp.
float maxDistInPixels = maxDistance * max(pixelsPerMm.x, pixelsPerMm.y);
float3 albedo = ApplySubsurfaceScatteringTexturingMode(sssData.diffuseColor, profileID);
uint texturingMode = GetSubsurfaceScatteringTexturingMode(profileID);
float3 albedo = ApplySubsurfaceScatteringTexturingMode(texturingMode, sssData.diffuseColor);
[branch] if (distScale == 0 || maxDistInPixels < 1)
{

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/SubsurfaceScattering/SubsurfaceScattering.hlsl (35 lines changed)


// helper functions
// ----------------------------------------------------------------------------
// Returns the modified albedo (diffuse color) for materials with subsurface scattering.
// Ref: Advanced Techniques for Realistic Real-Time Skin Rendering.
float3 ApplySubsurfaceScatteringTexturingMode(float3 color, int diffusionProfile)
// 0: [ albedo = albedo ]
// 1: [ albedo = 1 ]
// 2: [ albedo = sqrt(albedo) ]
uint GetSubsurfaceScatteringTexturingMode(int diffusionProfile)
uint texturingMode = 0;
#if defined(SHADERPASS) && (SHADERPASS == SHADERPASS_SUBSURFACE_SCATTERING)
// If the SSS pass is executed, we know we have SSS enabled.
bool enableSss = true;

// We can enter this function even if SSS is not enabled, in the case of per-tile material classification
// (if there is SSS inside the tile, we need to enable the feature for the whole tile with a neutral value),
// which is why we test != DIFFUSION_PROFILE_NEUTRAL_ID here, to make sure the neutral profile doesn't affect the scene
if (enableSss && diffusionProfile != DIFFUSION_PROFILE_NEUTRAL_ID)
if (enableSss)
{
bool performPostScatterTexturing = IsBitSet(asuint(_TexturingModeFlags), diffusionProfile);

#if !defined(SHADERPASS) || (SHADERPASS != SHADERPASS_SUBSURFACE_SCATTERING)
color = float3(1, 1, 1);
#if defined(SHADERPASS) && (SHADERPASS != SHADERPASS_SUBSURFACE_SCATTERING)
texturingMode = 1;
color = sqrt(color);
texturingMode = 2;
}
return texturingMode;
}
// Returns the modified albedo (diffuse color) for materials with subsurface scattering.
// See GetSubsurfaceScatteringTexturingMode() above for more details.
// Ref: Advanced Techniques for Realistic Real-Time Skin Rendering.
float3 ApplySubsurfaceScatteringTexturingMode(uint texturingMode, float3 color)
{
switch (texturingMode)
{
case 2: color = sqrt(color); break;
case 1: color = 1; break;
default: color = color; break;
}
return color;

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/SubsurfaceScattering/SubsurfaceScattering.shader (3 lines changed)


float halfRcpVariance = _HalfRcpWeightedVariances[profileID].a;
#endif
float3 albedo = ApplySubsurfaceScatteringTexturingMode(sssData.diffuseColor, profileID);
uint texturingMode = GetSubsurfaceScatteringTexturingMode(profileID);
float3 albedo = ApplySubsurfaceScatteringTexturingMode(texturingMode, sssData.diffuseColor);
#ifndef SSS_FILTER_HORIZONTAL_AND_COMBINE
albedo = float3(1, 1, 1);

ScriptableRenderPipeline/HDRenderPipeline/HDRP/ShaderPass/ShaderPassForward.hlsl (2 lines changed)


LightLoop(V, posInput, preLightData, bsdfData, bakeLightingData, featureFlags, diffuseLighting, specularLighting);
#ifdef OUTPUT_SPLIT_LIGHTING
if (_EnableSubsurfaceScattering != 0 && PixelHasSubsurfaceScattering(bsdfData))
if (_EnableSubsurfaceScattering != 0 && ShouldOutputSplitLighting(bsdfData))
{
outColor = float4(specularLighting, 1.0);
outDiffuseLighting = float4(TagLightingForSSS(diffuseLighting), 1.0);

ScriptableRenderPipeline/HDRenderPipeline/HDRP/ShaderVariables.hlsl (8 lines changed)


uint _TaaFrameIndex; // [0, 7]
// Volumetric lighting.
float4 _AmbientProbeCoeffs[7]; // 3 bands of SH, packed, rescaled and convolved with the phase function
float _GlobalFog_Asymmetry;
float3 _GlobalFog_Scattering;
float _GlobalFog_Extinction;
float _GlobalAsymmetry;
float3 _GlobalScattering;
float _GlobalExtinction;
float4 _VBufferScaleAndSliceCount; // { fracVisW, fracVisH, count, 1/count }
float4 _VBufferSliceCount; // { count, 1/count, 0, 0 }
float4 _VBufferDepthEncodingParams; // See the call site for description
float4 _VBufferDepthDecodingParams; // See the call site for description
CBUFFER_END

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Sky/AtmosphericScattering/AtmosphericScattering.hlsl (2 lines changed)


float4 volFog = SampleInScatteredRadianceAndTransmittance(TEXTURE3D_PARAM(_VBufferLighting, s_trilinear_clamp_sampler),
posInput.positionNDC, posInput.linearDepth,
_VBufferResolution,
_VBufferScaleAndSliceCount,
_VBufferSliceCount.xy,
_VBufferDepthEncodingParams,
_VBufferDepthDecodingParams);
fogColor = volFog.rgb;

ScriptableRenderPipeline/Core/CoreRP/GeometryUtils.cs.hlsl (48 lines changed)


//
// This file was automatically generated. Please don't edit by hand.
//
#ifndef GEOMETRYUTILS_CS_HLSL
#define GEOMETRYUTILS_CS_HLSL
// Generated from UnityEngine.Experimental.Rendering.OrientedBBox
// PackingRules = Exact
struct OrientedBBox
{
float3 center;
float extentX;
float3 right;
float extentY;
float3 up;
float extentZ;
};
//
// Accessors for UnityEngine.Experimental.Rendering.OrientedBBox
//
float3 GetCenter(OrientedBBox value)
{
return value.center;
}
float GetExtentX(OrientedBBox value)
{
return value.extentX;
}
float3 GetRight(OrientedBBox value)
{
return value.right;
}
float GetExtentY(OrientedBBox value)
{
return value.extentY;
}
float3 GetUp(OrientedBBox value)
{
return value.up;
}
float GetExtentZ(OrientedBBox value)
{
return value.extentZ;
}
#endif

ScriptableRenderPipeline/Core/CoreRP/GeometryUtils.cs.hlsl.meta (9 lines changed)


fileFormatVersion: 2
guid: c168c5bb5d2fae84499b8ee8f2a3cdbc
ShaderImporter:
externalObjects: {}
defaultTextures: []
nonModifiableTextures: []
userData:
assetBundleName:
assetBundleVariant:

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/HomogeneousDensityVolume.cs (59 lines changed)


namespace UnityEngine.Experimental.Rendering.HDPipeline
{
[ExecuteInEditMode]
[AddComponentMenu("Rendering/Homogeneous Density Volume", 1100)]
public class HomogeneousDensityVolume : MonoBehaviour
{
public VolumeParameters volumeParameters = new VolumeParameters();
private void Awake()
{
}
private void OnEnable()
{
}
private void OnDisable()
{
}
private void Update()
{
}
private void OnValidate()
{
volumeParameters.Constrain();
}
void OnDrawGizmos()
{
if (volumeParameters.IsLocalVolume())
{
Gizmos.color = volumeParameters.albedo;
Gizmos.matrix = transform.localToWorldMatrix;
Gizmos.DrawWireCube(Vector3.zero, Vector3.one);
}
}
// Returns null if a global density volume does not exist, or is not enabled.
public static HomogeneousDensityVolume GetGlobalHomogeneousDensityVolume()
{
HomogeneousDensityVolume globalVolume = null;
HomogeneousDensityVolume[] volumes = FindObjectsOfType(typeof(HomogeneousDensityVolume)) as HomogeneousDensityVolume[];
foreach (HomogeneousDensityVolume volume in volumes)
{
if (volume.enabled && !volume.volumeParameters.IsLocalVolume())
{
globalVolume = volume;
break;
}
}
return globalVolume;
}
}
} // UnityEngine.Experimental.Rendering.HDPipeline
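A global volume is simply a HomogeneousDensityVolume whose parameters are not marked as local, which is what GetGlobalHomogeneousDensityVolume searches for. A small usage sketch follows, with field names taken from VolumeParameters above and arbitrary example values:

using UnityEngine;
using UnityEngine.Experimental.Rendering.HDPipeline;

public class CreateGlobalVolumeExample : MonoBehaviour
{
    void Start()
    {
        var go = new GameObject("Global Homogeneous Volume");
        var volume = go.AddComponent<HomogeneousDensityVolume>();

        volume.volumeParameters.isLocal      = false;                     // global: not voxelized, treated as world fog
        volume.volumeParameters.albedo       = new Color(0.5f, 0.6f, 0.7f);
        volume.volumeParameters.meanFreePath = 20.0f;                     // meters
        volume.volumeParameters.asymmetry    = 0.0f;                      // isotropic phase function
    }
}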

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/HomogeneousDensityVolume.cs.meta (11 lines changed)


fileFormatVersion: 2
guid: 1c273c50d71d46a4f98a1d23256a8c63
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/HomogeneousFog.cs.meta (13 lines changed)


fileFormatVersion: 2
guid: 8f608e240d5376341bcef2478d231457
timeCreated: 1503411233
licenseType: Pro
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/HomogeneousFog.cs (57 lines changed)


namespace UnityEngine.Experimental.Rendering.HDPipeline
{
[ExecuteInEditMode]
[AddComponentMenu("Rendering/Homogenous Fog", 1100)]
public class HomogeneousFog : MonoBehaviour
{
public VolumeParameters volumeParameters = new VolumeParameters();
private void Awake()
{
}
private void OnEnable()
{
}
private void OnDisable()
{
}
private void Update()
{
}
private void OnValidate()
{
volumeParameters.Constrain();
}
void OnDrawGizmos()
{
if (volumeParameters != null && !volumeParameters.IsVolumeUnbounded())
{
Gizmos.DrawWireCube(volumeParameters.bounds.center, volumeParameters.bounds.size);
}
}
// Returns NULL if a global fog component does not exist, or is not enabled.
public static HomogeneousFog GetGlobalFogComponent()
{
HomogeneousFog globalFogComponent = null;
HomogeneousFog[] fogComponents = FindObjectsOfType(typeof(HomogeneousFog)) as HomogeneousFog[];
foreach (HomogeneousFog fogComponent in fogComponents)
{
if (fogComponent.enabled && fogComponent.volumeParameters.IsVolumeUnbounded())
{
globalFogComponent = fogComponent;
break;
}
}
return globalFogComponent;
}
}
} // UnityEngine.Experimental.Rendering.HDPipeline