
Merge branch 'master' into feature/SSR

# Conflicts:
#	ScriptableRenderPipeline/HDRenderPipeline/HDRP/HDRenderPipeline.cs
Frédéric Vauchelles, 7 years ago
Current commit: 6a14305e
21 files changed, 463 insertions(+), 182 deletions(-)
  1.   6  ScriptableRenderPipeline/Core/CoreRP/ShaderLibrary/Common.hlsl
  2.  19  ScriptableRenderPipeline/Core/CoreRP/Utilities/CoreUtils.cs
  3.   3  ScriptableRenderPipeline/HDRenderPipeline/CHANGELOG.md
  4.  16  ScriptableRenderPipeline/HDRenderPipeline/HDRP/Camera/HDCamera.cs
  5.   3  ScriptableRenderPipeline/HDRenderPipeline/HDRP/Decal/DecalSystem.cs
  6.  24  ScriptableRenderPipeline/HDRenderPipeline/HDRP/HDRenderPipeline.cs
  7.  44  ScriptableRenderPipeline/HDRenderPipeline/HDRP/HDStringConstants.cs
  8.   2  ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/HomogeneousDensityVolume.cs
  9.   8  ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/VolumetricLighting.compute
 10. 342  ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/VolumetricLighting.cs
 11.   8  ScriptableRenderPipeline/HDRenderPipeline/HDRP/ShaderVariables.hlsl
 12.   8  ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/BuildPlayer.meta
 13.  45  ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/DensityVolumeManager.cs
 14.  11  ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/DensityVolumeManager.cs.meta
 15.  52  ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/VolumetricLightingController.cs
 16.  11  ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/VolumetricLightingController.cs.meta
 17.  32  ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/BuildPlayer/HDRPCustomBuildProcessor.cs
 18.  11  ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/BuildPlayer/HDRPCustomBuildProcessor.cs.meta
 19.   0  /ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/BuildPlayer/HDRPVariantStripper.cs
 20.   0  /ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/BuildPlayer/HDRPVariantStripper.cs.meta

6  ScriptableRenderPipeline/Core/CoreRP/ShaderLibrary/Common.hlsl


// saturate(d) to clamp the output of the function to the [n, f] range.
// z = 1/c * (pow(c * (f - n) + 1, d) - 1) + n
// = 1/c * pow(c * (f - n) + 1, d) + n - 1/c
// = L * pow(M, d) + N
// = 1/c * exp2(d * log2(c * (f - n) + 1)) + (n - 1/c)
// = L * exp2(d * M) + N
// Use abs() to avoid the compiler warning.
return decodingParams.x * pow(abs(decodingParams.y), d) + decodingParams.z;
return decodingParams.x * exp2(d * decodingParams.y) + decodingParams.z;
}
// 'z' is the view-space Z position (linear depth).
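The decoding above is just the inverse of the logarithmic depth encoding, folded into one multiply-add around exp2. A minimal C# sketch of the round trip, written only from the formulas quoted here and from the ComputeLogarithmicDepthEncodingParams/DecodingParams code further down in VolumetricLighting.cs (the helper names are hypothetical):

using UnityEngine;

static class LogDepthSketch
{
    // encode: d = log2(c * (z - n) + 1) / log2(c * (f - n) + 1), with d in [0, 1]
    public static float Encode(float z, float n, float f, float c)
    {
        return Mathf.Log(c * (z - n) + 1, 2) / Mathf.Log(c * (f - n) + 1, 2);
    }

    // decode: z = (1/c) * exp2(d * log2(c * (f - n) + 1)) + (n - 1/c)
    //           = decodingParams.x * exp2(d * decodingParams.y) + decodingParams.z
    public static float Decode(float d, float n, float f, float c)
    {
        float x    = 1.0f / c;                      // decodingParams.x = L
        float y    = Mathf.Log(c * (f - n) + 1, 2); // decodingParams.y = M
        float zOff = n - 1.0f / c;                  // decodingParams.z = N
        return x * Mathf.Pow(2.0f, d * y) + zOff;
    }

    // Sanity check: Decode(0) == n and Decode(1) == f, so Decode(Encode(z)) == z on [n, f].
}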

19  ScriptableRenderPipeline/Core/CoreRP/Utilities/CoreUtils.cs


}
}
static Texture3D m_BlackVolumeTexture;
public static Texture3D blackVolumeTexture
{
get
{
if (m_BlackVolumeTexture == null)
{
Color[] colors = { Color.black };
m_BlackVolumeTexture = new Texture3D(1, 1, 1, TextureFormat.ARGB32, false);
m_BlackVolumeTexture.SetPixels(colors, 0);
m_BlackVolumeTexture.Apply();
}
return m_BlackVolumeTexture;
}
}
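Later in this same commit, PushGlobalParams falls back to this texture when a camera has no V-Buffer, so shaders that sample _VBufferLighting read zero in-scattering instead of an unbound resource. A minimal usage sketch (the wrapper class and method are hypothetical; the SetGlobalTexture call mirrors the one in VolumetricLighting.cs):

using UnityEngine.Rendering;
using UnityEngine.Experimental.Rendering;
using UnityEngine.Experimental.Rendering.HDPipeline;

static class VBufferFallbackSketch
{
    // Bind the neutral 1x1x1 black volume as the volumetric lighting input.
    public static void BindNeutralVBuffer(CommandBuffer cmd)
    {
        cmd.SetGlobalTexture(HDShaderIDs._VBufferLighting, CoreUtils.blackVolumeTexture);
    }
}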
public static void ClearRenderTarget(CommandBuffer cmd, ClearFlag clearFlag, Color clearColor)
{
if (clearFlag != ClearFlag.None)

bool fogEnable = true;
#if UNITY_EDITOR
fogEnable = Application.isPlaying;
if (camera.cameraType == CameraType.SceneView)
{
fogEnable = false;

3  ScriptableRenderPipeline/HDRenderPipeline/CHANGELOG.md


### Improvements
- Configure the volumetric lighting code path to be on by default
- Trigger a build exception when trying to build an unsupported platform
- Introduce the VolumetricLightingController component, which can (and should) be placed on the camera. It controls the near and far planes of the V-Buffer (the volumetric "froxel" buffer) as well as the depth slice distribution (from logarithmic to linear)
### Changed, Removals and deprecations
- Remove Resource folder of PreIntegratedFGD and add the resource to RenderPipeline Asset

- Fix alpha blending of volumetric lighting with transparent objects.
- Fix the near plane of the V-Buffer causing out-of-bounds look-ups in the clustered data structure.
- Depth and color pyramid are properly computed and sampled when the camera renders inside a viewport of a RTHandle.
- Fix decal atlas debug view to work correctly when shadow atlas view is also enabled
## [2018.1.0b13]

16  ScriptableRenderPipeline/HDRenderPipeline/HDRP/Camera/HDCamera.cs


// Warning: different views can use the same camera!
public long GetViewID()
{
if (camera.cameraType == CameraType.Game)
{
long viewID = camera.GetInstanceID();
// Make it positive.
viewID += (-(long)int.MinValue) + 1;
Debug.Assert(viewID > 0);
return viewID;
}
else
{
return 0;
}
long viewID = camera.GetInstanceID();
// Make it positive.
viewID += (-(long)int.MinValue) + 1;
return viewID;
}
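The offset below exists because GetInstanceID() can be negative; shifting by -(long)int.MinValue + 1 yields a strictly positive 64-bit ID, leaving 0 free to mean "not a Game view". A quick worked check (illustrative instance IDs only):

// viewID = instanceID + (-(long)int.MinValue) + 1 = instanceID + 2147483648 + 1
//   instanceID = int.MinValue (-2147483648)  =>  viewID = 1
//   instanceID = 0                           =>  viewID = 2147483649
//   instanceID = int.MaxValue (2147483647)   =>  viewID = 4294967296
// Every result is > 0, matching the Debug.Assert(viewID > 0) above.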
public void Reset()

3  ScriptableRenderPipeline/HDRenderPipeline/HDRP/Decal/DecalSystem.cs


{
using (new ProfilingSample(cmd, "Display Decal Atlas", CustomSamplerId.DisplayDebugDecalsAtlas.GetSampler()))
{
HDUtils.BlitQuad(cmd, Atlas.AtlasTexture, new Vector4(1,1,0,0), new Vector4(width / hdCamera.actualWidth, overlaySize / hdCamera.actualHeight, x / hdCamera.actualWidth, y / hdCamera.actualHeight), (int)debugDisplaySettings.decalsDebugSettings.m_MipLevel, true);
cmd.SetViewport(new Rect(x, y, overlaySize, overlaySize));
HDUtils.BlitQuad(cmd, Atlas.AtlasTexture, new Vector4(1, 1, 0 ,0), new Vector4(1, 1, 0, 0), (int)debugDisplaySettings.decalsDebugSettings.m_MipLevel, true);
HDUtils.NextOverlayCoord(ref x, ref y, overlaySize, overlaySize, hdCamera.actualWidth);
}
}

24  ScriptableRenderPipeline/HDRenderPipeline/HDRP/HDRenderPipeline.cs


int m_CurrentHeight;
// Use to detect frame changes
int m_FrameCount;
uint m_FrameCount;
float m_LastTime, m_Time;
public int GetCurrentShadowCount() { return m_LightLoop.GetCurrentShadowCount(); }

bool IsSupportedPlatform()
{
// Note: if you add a new platform to this function, remember to also add support for it when building the player, in HDRPCustomBuildProcessor.cs
if (!SystemInfo.supportsComputeShaders)
return false;

}
// Warning: (resolutionChanged == false) if you open a new Editor tab of the same size!
m_VolumetricLightingSystem.ResizeVBuffer(hdCamera, hdCamera.actualWidth, hdCamera.actualHeight);
m_VolumetricLightingSystem.ResizeVBufferAndUpdateProperties(hdCamera, m_FrameCount);
// update recorded window resolution
m_CurrentWidth = hdCamera.actualWidth;

m_DbufferManager.PushGlobalParams(cmd, m_FrameSettings);
m_VolumetricLightingSystem.PushGlobalParams(hdCamera, cmd);
m_VolumetricLightingSystem.PushGlobalParams(hdCamera, cmd, m_FrameCount);
var ssRefraction = VolumeManager.instance.stack.GetComponent<ScreenSpaceRefraction>()
?? ScreenSpaceRefraction.@default;

cmd.SetGlobalTexture(HDShaderIDs._ColorPyramidTexture, Texture2D.blackTexture);
cmd.SetGlobalVector(HDShaderIDs._ColorPyramidSize, Vector4.one);
cmd.SetGlobalVector(HDShaderIDs._ColorPyramidScale, Vector4.one);
}
}
}
bool IsConsolePlatform()

// Therefore, outside of the Play Mode we update the time at 60 fps,
// and in the Play Mode we rely on 'Time.frameCount'.
float t = Time.realtimeSinceStartup;
int c = Time.frameCount;
uint c = (uint)Time.frameCount;
bool newFrame;

}
}
{
// Set fog parameters for volumetric lighting.
var visualEnv = VolumeManager.instance.stack.GetComponent<VisualEnvironment>();
visualEnv.PushFogShaderParameters(cmd, m_FrameSettings);
}
m_VolumetricLightingSystem.VolumeVoxelizationPass(densityVolumes, hdCamera, cmd, m_FrameSettings);
m_VolumetricLightingSystem.VolumeVoxelizationPass(densityVolumes, hdCamera, cmd, m_FrameSettings, m_FrameCount);
m_VolumetricLightingSystem.VolumetricLightingPass(hdCamera, cmd, m_FrameSettings);
m_VolumetricLightingSystem.VolumetricLightingPass(hdCamera, cmd, m_FrameSettings, m_FrameCount);
RenderDeferredLighting(hdCamera, cmd);

void RenderSky(HDCamera hdCamera, CommandBuffer cmd)
{
// Rendering the sky is the first time in the frame where we need fog parameters so we push them here for the whole frame.
visualEnv.PushFogShaderParameters(cmd, m_FrameSettings);
m_SkyManager.RenderSky(hdCamera, m_LightLoop.GetCurrentSunLight(), m_CameraColorBuffer, m_CameraDepthStencilBuffer, m_CurrentDebugDisplaySettings, cmd);

44  ScriptableRenderPipeline/HDRenderPipeline/HDRP/HDStringConstants.cs


public static readonly int _Source4 = Shader.PropertyToID("_Source4");
public static readonly int _Result1 = Shader.PropertyToID("_Result1");
public static readonly int _AtmosphericScatteringType = Shader.PropertyToID("_AtmosphericScatteringType");
public static readonly int _AmbientProbeCoeffs = Shader.PropertyToID("_AmbientProbeCoeffs");
public static readonly int _GlobalExtinction = Shader.PropertyToID("_GlobalExtinction");
public static readonly int _GlobalScattering = Shader.PropertyToID("_GlobalScattering");
public static readonly int _GlobalAsymmetry = Shader.PropertyToID("_GlobalAsymmetry");
public static readonly int _CornetteShanksConstant = Shader.PropertyToID("_CornetteShanksConstant");
public static readonly int _VBufferResolution = Shader.PropertyToID("_VBufferResolution");
public static readonly int _VBufferSliceCount = Shader.PropertyToID("_VBufferSliceCount");
public static readonly int _VBufferDepthEncodingParams = Shader.PropertyToID("_VBufferDepthEncodingParams");
public static readonly int _VBufferDepthDecodingParams = Shader.PropertyToID("_VBufferDepthDecodingParams");
public static readonly int _VBufferCoordToViewDirWS = Shader.PropertyToID("_VBufferCoordToViewDirWS");
public static readonly int _VBufferDensity = Shader.PropertyToID("_VBufferDensity");
public static readonly int _VBufferLighting = Shader.PropertyToID("_VBufferLighting");
public static readonly int _VBufferLightingIntegral = Shader.PropertyToID("_VBufferLightingIntegral");
public static readonly int _VBufferLightingHistory = Shader.PropertyToID("_VBufferLightingHistory");
public static readonly int _VBufferLightingFeedback = Shader.PropertyToID("_VBufferLightingFeedback");
public static readonly int _VBufferSampleOffset = Shader.PropertyToID("_VBufferSampleOffset");
public static readonly int _VolumeBounds = Shader.PropertyToID("_VolumeBounds");
public static readonly int _VolumeData = Shader.PropertyToID("_VolumeData");
public static readonly int _NumVisibleDensityVolumes = Shader.PropertyToID("_NumVisibleDensityVolumes");
public static readonly int _AtmosphericScatteringType = Shader.PropertyToID("_AtmosphericScatteringType");
public static readonly int _AmbientProbeCoeffs = Shader.PropertyToID("_AmbientProbeCoeffs");
public static readonly int _GlobalExtinction = Shader.PropertyToID("_GlobalExtinction");
public static readonly int _GlobalScattering = Shader.PropertyToID("_GlobalScattering");
public static readonly int _GlobalAsymmetry = Shader.PropertyToID("_GlobalAsymmetry");
public static readonly int _CornetteShanksConstant = Shader.PropertyToID("_CornetteShanksConstant");
public static readonly int _VBufferResolution = Shader.PropertyToID("_VBufferResolution");
public static readonly int _VBufferSliceCount = Shader.PropertyToID("_VBufferSliceCount");
public static readonly int _VBufferDepthEncodingParams = Shader.PropertyToID("_VBufferDepthEncodingParams");
public static readonly int _VBufferDepthDecodingParams = Shader.PropertyToID("_VBufferDepthDecodingParams");
public static readonly int _VBufferPrevResolution = Shader.PropertyToID("_VBufferPrevResolution");
public static readonly int _VBufferPrevSliceCount = Shader.PropertyToID("_VBufferPrevSliceCount");
public static readonly int _VBufferPrevDepthEncodingParams = Shader.PropertyToID("_VBufferPrevDepthEncodingParams");
public static readonly int _VBufferPrevDepthDecodingParams = Shader.PropertyToID("_VBufferPrevDepthDecodingParams");
public static readonly int _VBufferCoordToViewDirWS = Shader.PropertyToID("_VBufferCoordToViewDirWS");
public static readonly int _VBufferDensity = Shader.PropertyToID("_VBufferDensity");
public static readonly int _VBufferLighting = Shader.PropertyToID("_VBufferLighting");
public static readonly int _VBufferLightingIntegral = Shader.PropertyToID("_VBufferLightingIntegral");
public static readonly int _VBufferLightingHistory = Shader.PropertyToID("_VBufferLightingHistory");
public static readonly int _VBufferLightingFeedback = Shader.PropertyToID("_VBufferLightingFeedback");
public static readonly int _VBufferSampleOffset = Shader.PropertyToID("_VBufferSampleOffset");
public static readonly int _VolumeBounds = Shader.PropertyToID("_VolumeBounds");
public static readonly int _VolumeData = Shader.PropertyToID("_VolumeData");
public static readonly int _NumVisibleDensityVolumes = Shader.PropertyToID("_NumVisibleDensityVolumes");
}
}

2  ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/HomogeneousDensityVolume.cs


private void OnEnable()
{
DensityVolumeManager.manager.RegisterVolume(this);
DensityVolumeManager.manager.DeRegisterVolume(this);
}
private void Update()

8  ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/VolumetricLighting.compute


float4 reprojValue = SampleVBuffer(TEXTURE3D_PARAM(_VBufferLightingHistory, s_linear_clamp_sampler),
centerWS,
_PrevViewProjMatrix,
_VBufferResolution,
_VBufferSliceCount.xy,
_VBufferDepthEncodingParams,
_VBufferDepthDecodingParams,
_VBufferPrevResolution,
_VBufferPrevSliceCount.xy,
_VBufferPrevDepthEncodingParams,
_VBufferPrevDepthDecodingParams,
false, false, true);
// Compute the exponential moving average over 'n' frames:
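// (The averaging formula itself is truncated in this hunk. Generically, an exponential
//  moving average with blend weight 1/n is: result = lerp(reprojValue, currentValue, 1/n),
//  i.e. (1 - 1/n) * history + (1/n) * current; the weight actually used by the shader is
//  not shown here.)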

342  ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/VolumetricLighting.cs


[Serializable]
public struct DensityVolumeParameters
{
public Color albedo; // Single scattering albedo [0, 1]. Alpha is ignored
public float meanFreePath; // In meters [1, inf]. Should be chromatic - this is an optimization!
public float asymmetry; // Only used if (isLocal == false)
public Color albedo; // Single scattering albedo: [0, 1]. Alpha is ignored
public float meanFreePath; // In meters: [1, 1000000]. Should be chromatic - this is an optimization!
public float asymmetry; // Controls the phase function: [-1, 1]
public void Constrain()
{
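// The body of Constrain() is not part of this hunk. A plausible sketch, assuming it simply
// clamps each field to the ranges documented in the comments above (hypothetical, not the
// commit's actual code):
//
//     albedo.r     = Mathf.Clamp01(albedo.r);
//     albedo.g     = Mathf.Clamp01(albedo.g);
//     albedo.b     = Mathf.Clamp01(albedo.b);
//     meanFreePath = Mathf.Clamp(meanFreePath, 1.0f, 1000000.0f); // meters
//     asymmetry    = Mathf.Clamp(asymmetry, -1.0f, 1.0f);         // phase function shape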

Normal,
Ultra,
Count
}
class VBuffer
} // enum VolumetricLightingPreset
[Serializable]
public struct ControllerParameters
public float vBufferNearPlane; // Distance in meters
public float vBufferFarPlane; // Distance in meters
public float depthSliceDistributionUniformity; // Controls the exponential depth distribution: [0, 1]
} // struct ControllerParameters
public class VBuffer
{
public struct Parameters
{
public Vector4 resolution;
public Vector2 sliceCount;
public Vector4 depthEncodingParams;
public Vector4 depthDecodingParams;
public Parameters(int w, int h, int d, ControllerParameters controlParams)
{
resolution = new Vector4(w, h, 1.0f / w, 1.0f / h);
sliceCount = new Vector2(d, 1.0f / d);
depthEncodingParams = Vector4.zero; // C# doesn't allow function calls before all members have been initialized
depthDecodingParams = Vector4.zero; // C# doesn't allow function calls before all members have been initialized
Update(controlParams);
}
public void Update(ControllerParameters controlParams)
{
float n = controlParams.vBufferNearPlane;
float f = controlParams.vBufferFarPlane;
float c = 2 - 2 * controlParams.depthSliceDistributionUniformity; // remap [0, 1] -> [2, 0]
depthEncodingParams = ComputeLogarithmicDepthEncodingParams(n, f, c);
depthDecodingParams = ComputeLogarithmicDepthDecodingParams(n, f, c);
}
} // struct Parameters
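The remap of depthSliceDistributionUniformity to 'c' is what slides the slice distribution between logarithmic and nearly linear; a quick worked example based on the Update() code above:

// c = 2 - 2 * depthSliceDistributionUniformity   (remaps [0, 1] to [2, 0])
//   uniformity = 0.00  =>  c = 2.0   (strongly logarithmic: slices bunch up near the near plane)
//   uniformity = 0.75  =>  c = 0.5   (the VolumetricLightingController default)
//   uniformity = 1.00  =>  c = 0.0   (clamped to 0.001 by ComputeLogarithmicDepth*Params,
//                                     giving an almost uniform slice distribution)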
const int k_NumFrames = 2; // Double-buffer history and feedback
const int k_NumBuffers = 4; // See the list below
long m_ViewID = -1; // -1 is invalid; positive for Game Views, 0 otherwise
RenderTexture[] m_Textures = null;
RenderTargetIdentifier[] m_Identifiers = null;
long m_ViewID = -1; // (m_ViewID > 0) if valid
RenderTexture[] m_Textures = null;
RenderTargetIdentifier[] m_Identifiers = null;
Parameters[] m_Params = null; // For the current and the previous frame
public long GetViewID()
{
return m_ViewID;
}
public bool IsValid()
{
return m_ViewID > 0 && m_Textures != null && m_Textures[0] != null;
}
public Parameters GetParameters(uint frameIndex)
{
return m_Params[frameIndex & 1];
}
public void SetParameters(Parameters parameters, uint frameIndex)
{
m_Params[frameIndex & 1] = parameters;
}
Debug.Assert(m_ViewID >= 0);
Debug.Assert(IsValid());
Debug.Assert(m_ViewID >= 0);
Debug.Assert(IsValid());
public RenderTargetIdentifier GetLightingHistoryBuffer() // From the previous frame
public RenderTargetIdentifier GetLightingHistoryBuffer(uint frameIndex) // From the previous frame
Debug.Assert(m_ViewID > 0); // Game View only
return m_Identifiers[k_IndexHistory + (Time.renderedFrameCount & 1)];
Debug.Assert(IsValid());
return m_Identifiers[k_IndexHistory + (frameIndex & 1)];
public RenderTargetIdentifier GetLightingFeedbackBuffer() // For the next frame
public RenderTargetIdentifier GetLightingFeedbackBuffer(uint frameIndex) // For the next frame
Debug.Assert(m_ViewID > 0); // Game View only
return m_Identifiers[k_IndexFeedback - (Time.renderedFrameCount & 1)];
Debug.Assert(IsValid());
return m_Identifiers[k_IndexFeedback - (frameIndex & 1)];
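History and feedback ping-pong between the same two texture slots, so what is written as feedback on frame N is read back as history on frame N + 1. A worked example, assuming k_IndexHistory == 2 and k_IndexFeedback == 3 (the actual constants are not shown in this hunk):

//   frameIndex even:  history  = m_Identifiers[2 + 0] = slot 2
//                     feedback = m_Identifiers[3 - 0] = slot 3
//   frameIndex odd:   history  = m_Identifiers[2 + 1] = slot 3
//                     feedback = m_Identifiers[3 - 1] = slot 2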
public void Create(long viewID, int w, int h, int d)
public void Create(long viewID, int w, int h, int d, ControllerParameters controlParams)
Debug.Assert(viewID >= 0);
Debug.Assert(viewID > 0);
// Only Game Views need history and feedback buffers.
bool isGameView = viewID > 0;
int n = isGameView ? 4 : 2;
m_Textures = new RenderTexture[n];
m_Identifiers = new RenderTargetIdentifier[n];
m_Textures = new RenderTexture[k_NumBuffers];
m_Identifiers = new RenderTargetIdentifier[k_NumBuffers];
m_Params = new Parameters[k_NumFrames];
for (int i = 0; i < n; i++)
for (int i = 0; i < k_NumBuffers; i++)
m_Textures[i] = new RenderTexture(w, h, 0, RenderTextureFormat.ARGBHalf, RenderTextureReadWrite.Linear);
m_Textures[i] = new RenderTexture(w, h, 0, RenderTextureFormat.ARGBHalf, RenderTextureReadWrite.Linear);
m_Textures[i].hideFlags = HideFlags.HideAndDontSave;
m_Textures[i].filterMode = FilterMode.Trilinear; // Custom
m_Textures[i].dimension = TextureDimension.Tex3D; // TODO: request the thick 3D tiling layout

m_Identifiers[i] = new RenderTargetIdentifier(m_Textures[i]);
}
// Start with the same parameters for both frames. Then incrementally update them.
Parameters parameters = new Parameters(w, h, d, controlParams);
m_Params[0] = parameters;
m_Params[1] = parameters;
}
public void Destroy()

for (int i = 0, n = m_Textures.Length; i < n; i++)
for (int i = 0; i < k_NumBuffers; i++)
{
if (m_Textures[i] != null)
{

m_ViewID = -1;
m_Textures = null;
m_Identifiers = null;
}
public void GetResolution(ref int w, ref int h, ref int d)
{
Debug.Assert(m_Textures != null);
Debug.Assert(m_Textures[0] != null);
Debug.Assert(m_Identifiers != null);
w = m_Textures[0].width;
h = m_Textures[0].height;
d = m_Textures[0].volumeDepth;
m_Params = null;
public long GetViewID()
{
return m_ViewID;
}
public bool IsValid()
{
return m_ViewID >= 0 && m_Textures != null && m_Textures[0] != null;
}
ComputeShader m_VolumeVoxelizationCS = null;
ComputeShader m_VolumetricLightingCS = null;
static ComputeShader m_VolumeVoxelizationCS = null;
static ComputeShader m_VolumetricLightingCS = null;
List<VBuffer> m_VBuffers = null;
List<OrientedBBox> m_VisibleVolumeBounds = null;
List<DensityVolumeData> m_VisibleVolumeData = null;
public const int k_MaxVisibleVolumeCount = 512;
List<VBuffer> m_VBuffers = null;
List<OrientedBBox> m_VisibleVolumeBounds = null;
List<DensityVolumeData> m_VisibleVolumeData = null;
public const int k_MaxVisibleVolumeCount = 512;
static ComputeBuffer s_VisibleVolumeBoundsBuffer = null;
static ComputeBuffer s_VisibleVolumeDataBuffer = null;
float m_VBufferNearPlane = 0.5f; // Distance in meters; dynamic modifications not handled by reprojection
float m_VBufferFarPlane = 64.0f; // Distance in meters; dynamic modifications not handled by reprojection
const float k_LogScale = 0.5f; // Tweak constant, controls the logarithmic depth distribution
static ComputeBuffer s_VisibleVolumeBoundsBuffer = null;
static ComputeBuffer s_VisibleVolumeDataBuffer = null;
public void Build(HDRenderPipelineAsset asset)
{

CoreUtils.SafeRelease(s_VisibleVolumeDataBuffer);
}
public void ResizeVBuffer(HDCamera camera, int screenWidth, int screenHeight)
public void ResizeVBufferAndUpdateProperties(HDCamera camera, uint frameIndex)
long viewID = camera.GetViewID();
var controller = camera.camera.GetComponent<VolumetricLightingController>();
Debug.Assert(viewID >= 0);
if (camera.camera.cameraType == CameraType.SceneView)
{
// HACK: since it's not possible to add a component to a scene camera,
// we take one from the "main" camera (if present).
Camera mainCamera = Camera.main;
if (mainCamera != null)
{
controller = mainCamera.GetComponent<VolumetricLightingController>();
}
}
if (controller == null) return;
int screenWidth = (int)camera.screenSize.x;
int screenHeight = (int)camera.screenSize.y;
long viewID = camera.GetViewID();
Debug.Assert(viewID > 0);
int w = 0, h = 0, d = 0;
ComputeVBufferResolutionAndScale(preset, screenWidth, screenHeight, ref w, ref h, ref d);

if (vBuffer != null)
{
int width = 0, height = 0, depth = 0;
vBuffer.GetResolution(ref width, ref height, ref depth);
VBuffer.Parameters frameParams = vBuffer.GetParameters(frameIndex);
if (w == width && h == height && d == depth)
if (w == frameParams.resolution.x &&
h == frameParams.resolution.y &&
d == frameParams.sliceCount.x)
// Everything matches, nothing to do here.
// The resolution matches.
// Depth parameters may have changed, so update those.
frameParams.Update(controller.parameters);
vBuffer.SetParameters(frameParams, frameIndex);
return;
}
}

m_VBuffers.Add(vBuffer);
}
vBuffer.Create(viewID, w, h, d);
vBuffer.Create(viewID, w, h, d, controller.parameters);
Debug.Assert(viewID >= 0);
Debug.Assert(viewID > 0);
VBuffer vBuffer = null;

// Since a single voxel corresponds to a tile (e.g. 8x8) of pixels,
// the VBuffer can potentially extend past the boundaries of the viewport.
// The function returns the fraction of the {width, height} of the VBuffer visible on screen.
// Note: for performance reasons, scale is unused (implicitly 1). The error is typically under 1%.
// Note: for performance reasons, the scale is unused (implicitly 1). The error is typically under 1%.
static Vector2 ComputeVBufferResolutionAndScale(VolumetricLightingPreset preset,
int screenWidth, int screenHeight,
ref int w, ref int h, ref int d)
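Because each voxel covers a tile of pixels (8x8 in the comment above), the V-Buffer width and height are roughly the screen size divided by the tile size, rounded up. A worked example under that assumption (the per-preset tile size and slice count are not shown in this hunk):

//   screen 1920 x 1080  =>  vbuffer (1920 + 7) / 8 = 240  by  (1080 + 7) / 8 = 135 voxels
//   screen 1921 x 1081  =>  vbuffer 241 x 136, covering 1928 x 1088 px, i.e. extending 7 px
//                           past the viewport in each direction, which is exactly why the
//                           visible-fraction "scale" mentioned above exists
// The slice count 'd' comes from the preset (Normal / Ultra) and is independent of screen size.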

float n = nearPlane;
float f = farPlane;
depthParams.x = Mathf.Log(c, 2) * (1.0f / Mathf.Log(c * (f - n) + 1, 2));
c = Mathf.Max(c, 0.001f); // Avoid NaNs
depthParams.x = Mathf.Log(c, 2) * depthParams.y;
depthParams.z = n - 1.0f / c; // Same
depthParams.w = 0.0f;

float n = nearPlane;
float f = farPlane;
c = Mathf.Max(c, 0.001f); // Avoid NaNs
depthParams.y = c * (f - n) + 1;
depthParams.y = Mathf.Log(c * (f - n) + 1, 2);
depthParams.z = n - 1.0f / c; // Same
depthParams.w = 0.0f;

return (1.0f / (4.0f * Mathf.PI)) * 1.5f * (1.0f - g * g) / (2.0f + g * g);
}
public void PushGlobalParams(HDCamera camera, CommandBuffer cmd)
public void PushGlobalParams(HDCamera camera, CommandBuffer cmd, uint frameIndex)
if (visualEnvironment == null || visualEnvironment.fogType != FogType.Volumetric) return;
if (visualEnvironment.fogType != FogType.Volumetric) return;
// Modify the near plane.
// Warning: it can screw up the reprojection. However, we have to do it in order for clustered lighting to work correctly.
m_VBufferNearPlane = camera.camera.nearClipPlane;
// VisualEnvironment sets global fog parameters: _GlobalAsymmetry, _GlobalScattering, _GlobalExtinction.
Debug.Assert(vBuffer != null);
int w = 0, h = 0, d = 0;
vBuffer.GetResolution(ref w, ref h, ref d);
if (vBuffer == null)
{
// Set the neutral black texture.
cmd.SetGlobalTexture(HDShaderIDs._VBufferLighting, CoreUtils.blackVolumeTexture);
return;
}
// Get the interpolated asymmetry value.
var fog = VolumeManager.instance.stack.GetComponent<VolumetricFog>();

cmd.SetGlobalVector( HDShaderIDs._VBufferResolution, new Vector4(w, h, 1.0f / w, 1.0f / h));
cmd.SetGlobalVector( HDShaderIDs._VBufferSliceCount, new Vector4(d, 1.0f / d));
cmd.SetGlobalVector( HDShaderIDs._VBufferDepthEncodingParams, ComputeLogarithmicDepthEncodingParams(m_VBufferNearPlane, m_VBufferFarPlane, k_LogScale));
cmd.SetGlobalVector( HDShaderIDs._VBufferDepthDecodingParams, ComputeLogarithmicDepthDecodingParams(m_VBufferNearPlane, m_VBufferFarPlane, k_LogScale));
cmd.SetGlobalTexture(HDShaderIDs._VBufferLighting, vBuffer.GetLightingIntegralBuffer());
var currFrameParams = vBuffer.GetParameters(frameIndex);
var prevFrameParams = vBuffer.GetParameters(frameIndex - 1);
cmd.SetGlobalVector( HDShaderIDs._VBufferResolution, currFrameParams.resolution);
cmd.SetGlobalVector( HDShaderIDs._VBufferSliceCount, currFrameParams.sliceCount);
cmd.SetGlobalVector( HDShaderIDs._VBufferDepthEncodingParams, currFrameParams.depthEncodingParams);
cmd.SetGlobalVector( HDShaderIDs._VBufferDepthDecodingParams, currFrameParams.depthDecodingParams);
cmd.SetGlobalVector( HDShaderIDs._VBufferPrevResolution, prevFrameParams.resolution);
cmd.SetGlobalVector( HDShaderIDs._VBufferPrevSliceCount, prevFrameParams.sliceCount);
cmd.SetGlobalVector( HDShaderIDs._VBufferPrevDepthEncodingParams, prevFrameParams.depthEncodingParams);
cmd.SetGlobalVector( HDShaderIDs._VBufferPrevDepthDecodingParams, prevFrameParams.depthDecodingParams);
cmd.SetGlobalTexture(HDShaderIDs._VBufferLighting, vBuffer.GetLightingIntegralBuffer());
}
public DensityVolumeList PrepareVisibleDensityVolumeList(HDCamera camera, CommandBuffer cmd)

if (preset == VolumetricLightingPreset.Off) return densityVolumes;
if (visualEnvironment == null || visualEnvironment.fogType != FogType.Volumetric) return densityVolumes;
if (visualEnvironment.fogType != FogType.Volumetric) return densityVolumes;
VBuffer vBuffer = FindVBuffer(camera.GetViewID());
if (vBuffer == null) return densityVolumes;
using (new ProfilingSample(cmd, "Prepare Visible Density Volume List"))
{

m_VisibleVolumeData.Clear();
// Collect all visible finite volume data, and upload it to the GPU.
HomogeneousDensityVolume[] volumes = Object.FindObjectsOfType(typeof(HomogeneousDensityVolume)) as HomogeneousDensityVolume[];
HomogeneousDensityVolume[] volumes = DensityVolumeManager.manager.GetAllVolumes();
// Only test active finite volumes.
if (volume.enabled)
{
// TODO: cache these?
var obb = OrientedBBox.Create(volume.transform);
// TODO: cache these?
var obb = OrientedBBox.Create(volume.transform);
// Handle camera-relative rendering.
obb.center -= camOffset;
// Handle camera-relative rendering.
obb.center -= camOffset;
// Frustum cull on the CPU for now. TODO: do it on the GPU.
if (GeometryUtils.Overlap(obb, camera.frustum, 6, 8))
{
// TODO: cache these?
var data = volume.parameters.GetData();
// Frustum cull on the CPU for now. TODO: do it on the GPU.
if (GeometryUtils.Overlap(obb, camera.frustum, 6, 8))
{
// TODO: cache these?
var data = volume.parameters.GetData();
m_VisibleVolumeBounds.Add(obb);
m_VisibleVolumeData.Add(data);
}
m_VisibleVolumeBounds.Add(obb);
m_VisibleVolumeData.Add(data);
}
}

// Fill the struct with pointers in order to share the data with the light loop.
densityVolumes.bounds = m_VisibleVolumeBounds;
densityVolumes.bounds = m_VisibleVolumeBounds;
densityVolumes.density = m_VisibleVolumeData;
return densityVolumes;

public void VolumeVoxelizationPass(DensityVolumeList densityVolumes, HDCamera camera, CommandBuffer cmd, FrameSettings settings)
public void VolumeVoxelizationPass(DensityVolumeList densityVolumes, HDCamera camera, CommandBuffer cmd, FrameSettings settings, uint frameIndex)
if (visualEnvironment == null || visualEnvironment.fogType != FogType.Volumetric) return;
if (visualEnvironment.fogType != FogType.Volumetric) return;
VBuffer vBuffer = FindVBuffer(camera.GetViewID());
if (vBuffer == null) return;
using (new ProfilingSample(cmd, "Volume Voxelization"))
{

// Use the workaround by running the full shader with 0 density
}
VBuffer vBuffer = FindVBuffer(camera.GetViewID());
Debug.Assert(vBuffer != null);
int w = 0, h = 0, d = 0;
vBuffer.GetResolution(ref w, ref h, ref d);
float vFoV = camera.camera.fieldOfView * Mathf.Deg2Rad;
Vector4 resolution = new Vector4(w, h, 1.0f / w, 1.0f / h);
Matrix4x4 transform = HDUtils.ComputePixelCoordToWorldSpaceViewDirectionMatrix(vFoV, resolution, camera.viewMatrix, false);
var frameParams = vBuffer.GetParameters(frameIndex);
Vector4 resolution = frameParams.resolution;
float vFoV = camera.camera.fieldOfView * Mathf.Deg2Rad;
// Compose the matrix which allows us to compute the world space view direction.
Matrix4x4 transform = HDUtils.ComputePixelCoordToWorldSpaceViewDirectionMatrix(vFoV, resolution, camera.viewMatrix, false);
cmd.SetComputeTextureParam(m_VolumeVoxelizationCS, kernel, HDShaderIDs._VBufferDensity, vBuffer.GetDensityBuffer());
cmd.SetComputeBufferParam( m_VolumeVoxelizationCS, kernel, HDShaderIDs._VolumeBounds, s_VisibleVolumeBoundsBuffer);

cmd.SetComputeMatrixParam( m_VolumeVoxelizationCS, HDShaderIDs._VBufferCoordToViewDirWS, transform);
cmd.SetComputeIntParam( m_VolumeVoxelizationCS, HDShaderIDs._NumVisibleDensityVolumes, numVisibleVolumes);
int w = (int)resolution.x;
int h = (int)resolution.y;
// The shader defines GROUP_SIZE_1D = 8.
cmd.DispatchCompute(m_VolumeVoxelizationCS, kernel, (w + 7) / 8, (h + 7) / 8, 1);
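The group count uses integer ceiling division so that a partially filled 8x8 tile still gets its own thread group; for example:

//   groupsX = (w + 7) / 8   (integer division, i.e. ceil(w / 8))
//     w = 240  =>  (240 + 7) / 8 = 30 groups (covers 240 px exactly)
//     w = 241  =>  (241 + 7) / 8 = 31 groups (the last group handles the 1 px remainder)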

return coords;
}
public void VolumetricLightingPass(HDCamera camera, CommandBuffer cmd, FrameSettings settings)
public void VolumetricLightingPass(HDCamera camera, CommandBuffer cmd, FrameSettings settings, uint frameIndex)
if (visualEnvironment == null || visualEnvironment.fogType != FogType.Volumetric) return;
if (visualEnvironment.fogType != FogType.Volumetric) return;
VBuffer vBuffer = FindVBuffer(camera.GetViewID());
if (vBuffer == null) return;
VBuffer vBuffer = FindVBuffer(camera.GetViewID());
Debug.Assert(vBuffer != null);
// Only available in the Play Mode because all the frame counters in the Edit Mode are broken.
bool enableClustered = settings.lightLoopSettings.enableTileAndCluster;
bool enableReprojection = Application.isPlaying && camera.camera.cameraType == CameraType.Game;

: "VolumetricLightingBruteforce");
}
int w = 0, h = 0, d = 0;
vBuffer.GetResolution(ref w, ref h, ref d);
var frameParams = vBuffer.GetParameters(frameIndex);
Vector4 resolution = frameParams.resolution;
float vFoV = camera.camera.fieldOfView * Mathf.Deg2Rad;
float vFoV = camera.camera.fieldOfView * Mathf.Deg2Rad;
Vector4 resolution = new Vector4(w, h, 1.0f / w, 1.0f / h);
Matrix4x4 transform = HDUtils.ComputePixelCoordToWorldSpaceViewDirectionMatrix(vFoV, resolution, camera.viewMatrix, false);
Matrix4x4 transform = HDUtils.ComputePixelCoordToWorldSpaceViewDirectionMatrix(vFoV, resolution, camera.viewMatrix, false);
Vector2[] xySeq = GetHexagonalClosePackedSpheres7();

// | x | x | x | x | x | x | x |
float[] zSeq = {7.0f/14.0f, 3.0f/14.0f, 11.0f/14.0f, 5.0f/14.0f, 9.0f/14.0f, 1.0f/14.0f, 13.0f/14.0f};
int rfc = Time.renderedFrameCount;
int sampleIndex = rfc % 7;
int sampleIndex = (int)frameIndex % 7;
Vector4 offset = new Vector4(xySeq[sampleIndex].x, xySeq[sampleIndex].y, zSeq[sampleIndex], rfc);
Vector4 offset = new Vector4(xySeq[sampleIndex].x, xySeq[sampleIndex].y, zSeq[sampleIndex], frameIndex);
// Get the interpolated asymmetry value.
var fog = VolumeManager.instance.stack.GetComponent<VolumetricFog>();

cmd.SetComputeTextureParam(m_VolumetricLightingCS, kernel, HDShaderIDs._VBufferLightingIntegral, vBuffer.GetLightingIntegralBuffer()); // Write
if (enableReprojection)
{
cmd.SetComputeTextureParam(m_VolumetricLightingCS, kernel, HDShaderIDs._VBufferLightingFeedback, vBuffer.GetLightingFeedbackBuffer()); // Write
cmd.SetComputeTextureParam(m_VolumetricLightingCS, kernel, HDShaderIDs._VBufferLightingHistory, vBuffer.GetLightingHistoryBuffer()); // Read
cmd.SetComputeTextureParam(m_VolumetricLightingCS, kernel, HDShaderIDs._VBufferLightingFeedback, vBuffer.GetLightingFeedbackBuffer(frameIndex)); // Write
cmd.SetComputeTextureParam(m_VolumetricLightingCS, kernel, HDShaderIDs._VBufferLightingHistory, vBuffer.GetLightingHistoryBuffer(frameIndex)); // Read
int w = (int)resolution.x;
int h = (int)resolution.y;
// The shader defines GROUP_SIZE_1D = 8.
cmd.DispatchCompute(m_VolumetricLightingCS, kernel, (w + 7) / 8, (h + 7) / 8, 1);

8  ScriptableRenderPipeline/HDRenderPipeline/HDRP/ShaderVariables.hlsl


float4 _VBufferSliceCount; // { count, 1/count, 0, 0 }
float4 _VBufferDepthEncodingParams; // See the call site for description
float4 _VBufferDepthDecodingParams; // See the call site for description
// TODO: these are only used for reprojection.
// Once reprojection is performed in a separate pass, we should probably
// move these to a dedicated CBuffer to avoid polluting the global one.
float4 _VBufferPrevResolution;
float4 _VBufferPrevSliceCount;
float4 _VBufferPrevDepthEncodingParams;
float4 _VBufferPrevDepthDecodingParams;
CBUFFER_END
CBUFFER_START(UnityLightingParameters)

8  ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/BuildPlayer.meta


fileFormatVersion: 2
guid: fe988c96224a85949ae5a292c81d3d50
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

45  ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/DensityVolumeManager.cs


using System.Collections.Generic;
namespace UnityEngine.Experimental.Rendering.HDPipeline
{
public class DensityVolumeManager
{
static private DensityVolumeManager _instance = null;
private DensityVolumeManager()
{
volumes = new List<HomogeneousDensityVolume>();
}
public static DensityVolumeManager manager
{
get
{
if (_instance == null)
{
_instance = new DensityVolumeManager();
}
return _instance;
}
}
private List<HomogeneousDensityVolume> volumes = null;
public void RegisterVolume(HomogeneousDensityVolume volume)
{
volumes.Add(volume);
}
public void DeRegisterVolume(HomogeneousDensityVolume volume)
{
if (volumes.Contains(volume))
{
volumes.Remove(volume);
}
}
public HomogeneousDensityVolume[] GetAllVolumes()
{
return volumes.ToArray();
}
}
}
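As the HomogeneousDensityVolume hunk earlier in this commit shows, volume components register themselves with this singleton in OnEnable and deregister in OnDisable, and the render pipeline then queries the full set each frame. A minimal consumer sketch (the wrapper class is hypothetical; GetAllVolumes() mirrors the call used by PrepareVisibleDensityVolumeList):

using UnityEngine.Experimental.Rendering.HDPipeline;

static class DensityVolumeQuerySketch
{
    // Ask the singleton for every currently registered density volume.
    public static HomogeneousDensityVolume[] CollectVolumes()
    {
        return DensityVolumeManager.manager.GetAllVolumes();
    }
}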

11  ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/DensityVolumeManager.cs.meta


fileFormatVersion: 2
guid: e6e40fb2a8972a44eb511a71c483fb8b
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

52  ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/VolumetricLightingController.cs


namespace UnityEngine.Experimental.Rendering.HDPipeline
{
[ExecuteInEditMode]
[AddComponentMenu("Rendering/Volumetric Lighting Controller", 1101)]
public class VolumetricLightingController : MonoBehaviour
{
public VolumetricLightingSystem.ControllerParameters parameters;
public VolumetricLightingController()
{
parameters.vBufferNearPlane = 0.5f;
parameters.vBufferFarPlane = 64.0f;
parameters.depthSliceDistributionUniformity = 0.75f;
}
private void Awake()
{
}
private void OnEnable()
{
}
private void OnDisable()
{
}
private void Update()
{
}
private void OnValidate()
{
var camera = GetComponent<Camera>();
if (camera != null)
{
// We must not allow the V-Buffer to extend past the camera's frustum.
float n = camera.nearClipPlane;
float f = camera.farClipPlane;
parameters.vBufferFarPlane = Mathf.Clamp(parameters.vBufferFarPlane, n, f);
parameters.vBufferNearPlane = Mathf.Clamp(parameters.vBufferNearPlane, n, parameters.vBufferFarPlane);
parameters.depthSliceDistributionUniformity = Mathf.Clamp01(parameters.depthSliceDistributionUniformity);
}
else
{
Debug.Log("Volumetric Lighting Controller must be attached to a camera!");
}
}
}
} // UnityEngine.Experimental.Rendering.HDPipeline
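A hedged setup sketch showing how this controller would be attached to a camera and configured, in line with the changelog note above; the property values simply repeat the constructor defaults:

using UnityEngine;
using UnityEngine.Experimental.Rendering.HDPipeline;

public class AttachVolumetricControllerSketch : MonoBehaviour
{
    void Start()
    {
        // The controller is meant to live on the camera object.
        var controller = gameObject.AddComponent<VolumetricLightingController>();
        controller.parameters.vBufferNearPlane                  = 0.5f;  // meters
        controller.parameters.vBufferFarPlane                   = 64.0f; // meters
        controller.parameters.depthSliceDistributionUniformity  = 0.75f; // 0 = logarithmic, 1 = linear
    }
}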

11  ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/VolumetricLightingController.cs.meta


fileFormatVersion: 2
guid: 4910d90e50201484ba448b791a192696
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

32  ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/BuildPlayer/HDRPCustomBuildProcessor.cs


using UnityEditor;
using UnityEditor.Build;
using UnityEditor.Build.Reporting;
using UnityEngine;
class HDRPCustomBuildProcessor : IPreprocessBuildWithReport
{
public int callbackOrder { get { return 0; } }
public void OnPreprocessBuild(BuildReport report)
{
// Note: if you add a new platform to this function, remember to also add support for it in the IsSupportedPlatform() function in HDRenderPipeline.cs
// If the platform is supported, there is nothing to do
if (report.summary.platform == BuildTarget.StandaloneWindows ||
report.summary.platform == BuildTarget.StandaloneWindows64 ||
report.summary.platform == BuildTarget.StandaloneLinux64 ||
report.summary.platform == BuildTarget.StandaloneLinuxUniversal ||
report.summary.platform == BuildTarget.StandaloneOSX ||
report.summary.platform == BuildTarget.XboxOne ||
report.summary.platform == BuildTarget.PS4 /* ||
report.summary.platform == BuildTarget.Switch */)
{
return;
}
string msg = "The platform " + report.summary.platform.ToString() + " is not supported by the High Definition Render Pipeline";
// Throw an exception to stop the build
throw new BuildFailedException(msg);
}
}

11  ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/BuildPlayer/HDRPCustomBuildProcessor.cs.meta


fileFormatVersion: 2
guid: 2ae263b0b434c7e4d9229fde8f6096c5
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

/ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/HDRPVariantStripper.cs → /ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/BuildPlayer/HDRPVariantStripper.cs

/ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/HDRPVariantStripper.cs.meta → /ScriptableRenderPipeline/HDRenderPipeline/HDRP/Editor/BuildPlayer/HDRPVariantStripper.cs.meta
