
Add FrameSettings to hdCamera

Branch: main
sebastienlagarde, 7 years ago
Current commit 01cd6f98
11 files changed, 186 insertions and 178 deletions
  1. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Camera/HDCamera.cs (19 changed lines)
  2. ScriptableRenderPipeline/HDRenderPipeline/HDRP/HDRenderPipeline.cs (223 changed lines)
  3. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Reflection/ReflectionSystemInternal.cs (6 changed lines)
  4. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/VolumetricLighting.cs (72 changed lines)
  5. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/Decal/DBufferManager.cs (4 changed lines)
  6. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/SubsurfaceScattering/SubsurfaceScatteringManager.cs (10 changed lines)
  7. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Sky/AtmosphericScattering/AtmosphericScattering.cs (6 changed lines)
  8. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Sky/AtmosphericScattering/ExponentialFog.cs (6 changed lines)
  9. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Sky/AtmosphericScattering/LinearFog.cs (6 changed lines)
  10. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Sky/AtmosphericScattering/VolumetricFog.cs (2 changed lines)
  11. ScriptableRenderPipeline/HDRenderPipeline/HDRP/Sky/VisualEnvironment.cs (10 changed lines)
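In each hunk below, the removed line appears directly above its replacement. Taken together, the eleven files apply a single refactor: the FrameSettings resolved for the frame is no longer held by HDRenderPipeline as m_FrameSettings and threaded through every call; it is cached on the HDCamera during HDCamera.Update and read back through a frameSettings property. A minimal sketch of the pattern, assembled from the hunks below and simplified (everything not shown in the diff is elided):

class HDCamera
{
    FrameSettings m_frameSettings;
    public FrameSettings frameSettings { get { return m_frameSettings; } }

    // Pass all the systems that may want to update per-camera data here.
    public void Update(FrameSettings currentFrameSettings, PostProcessLayer postProcessLayer, VolumetricLightingSystem vlSys)
    {
        // Cache the settings resolved for this camera this frame; every dependent
        // system now reads hdCamera.frameSettings instead of taking a FrameSettings argument.
        m_frameSettings = currentFrameSettings;
        // ... rest of the per-camera update (resolution, matrices, history buffers) ...
    }
}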

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Camera/HDCamera.cs (19 changed lines)


Vector2 m_ViewportScalePreviousFrame;
// Current MSAA sample count
MSAASamples m_msaaSamples;
FrameSettings m_frameSettings;
public int actualWidth { get { return m_ActualWidth; } }
public int actualHeight { get { return m_ActualHeight; } }

public FrameSettings frameSettings { get { return m_frameSettings; } }
public Matrix4x4 viewProjMatrix
{

// Pass all the systems that may want to update per-camera data here.
// That way you will never update an HDCamera and forget to update the dependent system.
public void Update(PostProcessLayer postProcessLayer, FrameSettings frameSettings, VolumetricLightingSystem vlSys)
public void Update(FrameSettings currentFrameSettings, PostProcessLayer postProcessLayer, VolumetricLightingSystem vlSys)
m_frameSettings = currentFrameSettings;
frameSettings.enablePostprocess;
m_frameSettings.enablePostprocess;
var nonJitteredCameraProj = camera.projectionMatrix;
var cameraProj = taaEnabled

var screenWidth = m_ActualWidth;
var screenHeight = m_ActualHeight;
#if !UNITY_SWITCH
if (frameSettings.enableStereo)
if (m_frameSettings.enableStereo)
{
screenWidth = XRSettings.eyeTextureWidth;
screenHeight = XRSettings.eyeTextureHeight;

// Unfortunately, sometimes (like in the HDCameraEditor) HDUtils.hdrpSettings can be null because of scripts that change the current pipeline...
m_msaaSamples = HDUtils.hdrpSettings != null ? HDUtils.hdrpSettings.msaaSampleCount : MSAASamples.None;
RTHandles.SetReferenceSize(m_ActualWidth, m_ActualHeight, frameSettings.enableMSAA, m_msaaSamples);
m_HistoryRTSystem.SetReferenceSize(m_ActualWidth, m_ActualHeight, frameSettings.enableMSAA, m_msaaSamples);
RTHandles.SetReferenceSize(m_ActualWidth, m_ActualHeight, m_frameSettings.enableMSAA, m_msaaSamples);
m_HistoryRTSystem.SetReferenceSize(m_ActualWidth, m_ActualHeight, m_frameSettings.enableMSAA, m_msaaSamples);
m_HistoryRTSystem.Swap();
int maxWidth = RTHandles.maxWidth;

}
// Stopgap method used to extract stereo combined matrix state.
public void UpdateStereoDependentState(FrameSettings frameSettings, ref ScriptableCullingParameters cullingParams)
public void UpdateStereoDependentState(ref ScriptableCullingParameters cullingParams)
if (!frameSettings.enableStereo)
if (!m_frameSettings.enableStereo)
return;
// What constants in UnityPerPass need updating for stereo considerations?
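For reference, the caller side (shown in full in HDRenderPipeline.cs below) now resolves the settings into a local variable and hands it to the camera before anything else reads it. A condensed sketch of the new call order, with unrelated steps elided:

// Condensed from the HDRenderPipeline.Render changes in this commit (not the full method).
FrameSettings currentFrameSettings = new FrameSettings();
FrameSettings.InitializeFrameSettings(camera, m_Asset.GetRenderPipelineSettings(), srcFrameSettings, ref currentFrameSettings);

hdCamera.Update(currentFrameSettings, postProcessLayer, m_VolumetricLightingSystem);
// From this point, only frame settings from the camera are used.
UpdateShadowSettings(hdCamera);
hdCamera.UpdateStereoDependentState(ref cullingParams);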

ScriptableRenderPipeline/HDRenderPipeline/HDRP/HDRenderPipeline.cs (223 changed lines)


public Material GetBlitMaterial() { return m_Blit; }
FrameSettings m_FrameSettings; // Init every frame
ComputeBuffer m_DebugScreenSpaceTracingData = null;
ScreenSpaceTracingDebug[] m_DebugScreenSpaceTracingDataArray = new ScreenSpaceTracingDebug[1];

if (m_CurrentWidth > 0 && m_CurrentHeight > 0)
m_LightLoop.ReleaseResolutionDependentBuffers();
m_LightLoop.AllocResolutionDependentBuffers((int)hdCamera.screenSize.x, (int)hdCamera.screenSize.y, m_FrameSettings.enableStereo);
m_LightLoop.AllocResolutionDependentBuffers((int)hdCamera.screenSize.x, (int)hdCamera.screenSize.y, hdCamera.frameSettings.enableStereo);
}
// update recorded window resolution

using (new ProfilingSample(cmd, "Push Global Parameters", CustomSamplerId.PushGlobalParameters.GetSampler()))
{
// Set up UnityPerFrame CBuffer.
m_SSSBufferManager.PushGlobalParams(cmd, sssParameters, m_FrameSettings);
m_SSSBufferManager.PushGlobalParams(hdCamera, cmd, sssParameters);
m_DbufferManager.PushGlobalParams(cmd, m_FrameSettings);
m_DbufferManager.PushGlobalParams(hdCamera, cmd);
m_VolumetricLightingSystem.PushGlobalParams(hdCamera, cmd, m_FrameCount);

// Set up UnityPerView CBuffer.
hdCamera.SetupGlobalParams(cmd, m_Time, m_LastTime);
if (m_FrameSettings.enableStereo) hdCamera.SetupGlobalStereoParams(cmd);
if (hdCamera.frameSettings.enableStereo)
hdCamera.SetupGlobalStereoParams(cmd);
var previousDepthPyramidRT = hdCamera.GetPreviousFrameRT((int)HDCameraFrameHistoryType.DepthPyramid);
if (previousDepthPyramidRT != null)

}
}
public void UpdateShadowSettings()
public void UpdateShadowSettings(HDCamera hdCamera)
{
var shadowSettings = VolumeManager.instance.stack.GetComponent<HDShadowSettings>();

m_ShadowSettings.enabled = m_FrameSettings.enableShadow;
m_ShadowSettings.enabled = hdCamera.frameSettings.enableShadow;
}
public void ConfigureForShadowMask(bool enableBakeShadowMask, CommandBuffer cmd)

{
srcFrameSettings = m_Asset.GetFrameSettings();
}
FrameSettings currentFrameSettings = new FrameSettings();
FrameSettings.InitializeFrameSettings(camera, m_Asset.GetRenderPipelineSettings(), srcFrameSettings, ref m_FrameSettings);
FrameSettings.InitializeFrameSettings(camera, m_Asset.GetRenderPipelineSettings(), srcFrameSettings, ref currentFrameSettings);
// This is the main command buffer used for the frame.
var cmd = CommandBufferPool.Get("");

using (new ProfilingSample(cmd, "HDRenderPipeline::Render", CustomSamplerId.HDRenderPipelineRender.GetSampler()))
{
// Do anything we need to do upon a new frame.
m_LightLoop.NewFrame(m_FrameSettings);
m_LightLoop.NewFrame(currentFrameSettings);
// If we render a reflection view or a preview we should not display any debug information
// This needs to be called before ApplyDebugDisplaySettings()

// Disable post process if we enable debug mode or if the post process layer is disabled
if (m_CurrentDebugDisplaySettings.IsDebugDisplayRemovePostprocess() || !CoreUtils.IsPostProcessingActive(postProcessLayer))
{
m_FrameSettings.enablePostprocess = false;
currentFrameSettings.enablePostprocess = false;
}
var hdCamera = HDCamera.Get(camera);

hdCamera = HDCamera.Create(camera, m_VolumetricLightingSystem);
}
hdCamera.Update(postProcessLayer, m_FrameSettings, m_VolumetricLightingSystem);
// From this point, we should only use frame settings from the camera
hdCamera.Update(currentFrameSettings, postProcessLayer, m_VolumetricLightingSystem);
UpdateShadowSettings();
UpdateShadowSettings(hdCamera);
if (!CullResults.GetCullingParameters(camera, m_FrameSettings.enableStereo, out cullingParams))
if (!CullResults.GetCullingParameters(camera, hdCamera.frameSettings.enableStereo, out cullingParams))
{
renderContext.Submit();
continue;

hdCamera.UpdateStereoDependentState(m_FrameSettings, ref cullingParams);
hdCamera.UpdateStereoDependentState(ref cullingParams);
#if UNITY_EDITOR
// emit scene view UI

}
#endif
if (m_FrameSettings.enableDBuffer)
if (hdCamera.frameSettings.enableDBuffer)
{
// The decal system needs to be updated with the current camera; it uses it to set up culling and light list generation parameters
DecalSystem.instance.CurrentCamera = camera;

m_DbufferManager.vsibleDecalCount = 0;
using (new ProfilingSample(cmd, "DBufferPrepareDrawData", CustomSamplerId.DBufferPrepareDrawData.GetSampler()))
{
if (m_FrameSettings.enableDBuffer)
if (hdCamera.frameSettings.enableDBuffer)
{
DecalSystem.instance.EndCull();
m_DbufferManager.vsibleDecalCount = DecalSystem.m_DecalsVisibleThisFrame;

}
}
renderContext.SetupCameraProperties(camera, m_FrameSettings.enableStereo);
renderContext.SetupCameraProperties(camera, hdCamera.frameSettings.enableStereo);
PushGlobalParams(hdCamera, cmd, diffusionProfileSettings);

bool enableBakeShadowMask;
using (new ProfilingSample(cmd, "TP_PrepareLightsForGPU", CustomSamplerId.TPPrepareLightsForGPU.GetSampler()))
{
enableBakeShadowMask = m_LightLoop.PrepareLightsForGPU(cmd, camera, m_ShadowSettings, m_CullResults, m_ReflectionProbeCullResults, densityVolumes) && m_FrameSettings.enableShadowMask;
enableBakeShadowMask = m_LightLoop.PrepareLightsForGPU(cmd, camera, m_ShadowSettings, m_CullResults, m_ReflectionProbeCullResults, densityVolumes) && hdCamera.frameSettings.enableShadowMask;
StartStereoRendering(renderContext, hdCamera.camera);
StartStereoRendering(renderContext, hdCamera);
ClearBuffers(hdCamera, cmd);

RenderDepthPyramid(hdCamera, cmd, renderContext, FullScreenDebugMode.DepthPyramid);
StopStereoRendering(renderContext, hdCamera.camera);
StopStereoRendering(renderContext, hdCamera);
if (m_CurrentDebugDisplaySettings.IsDebugMaterialDisplayEnabled())
{

}
else
{
StartStereoRendering(renderContext, hdCamera.camera);
StartStereoRendering(renderContext, hdCamera);
using (new ProfilingSample(cmd, "Render SSAO", CustomSamplerId.RenderSSAO.GetSampler()))
{

}
}
StopStereoRendering(renderContext, hdCamera.camera);
StopStereoRendering(renderContext, hdCamera);
if (m_FrameSettings.enableAsyncCompute)
if (hdCamera.frameSettings.enableAsyncCompute)
{
GPUFence startFence = cmd.CreateGPUFence();
renderContext.ExecuteCommandBuffer(cmd);

m_LightLoop.RenderShadows(renderContext, cmd, m_CullResults);
// Overwrite camera properties set during the shadow pass with the original camera properties.
renderContext.SetupCameraProperties(camera, m_FrameSettings.enableStereo);
renderContext.SetupCameraProperties(camera, hdCamera.frameSettings.enableStereo);
if (m_FrameSettings.enableStereo) hdCamera.SetupGlobalStereoParams(cmd);
if (hdCamera.frameSettings.enableStereo)
hdCamera.SetupGlobalStereoParams(cmd);
}
using (new ProfilingSample(cmd, "Deferred directional shadows", CustomSamplerId.RenderDeferredDirectionalShadow.GetSampler()))

}
m_LightLoop.RenderDeferredDirectionalShadow(hdCamera, m_DeferredShadowBuffer, GetDepthTexture(), cmd);
PushFullScreenDebugTexture(cmd, m_DeferredShadowBuffer, hdCamera, FullScreenDebugMode.DeferredShadows);
PushFullScreenDebugTexture(hdCamera, cmd, m_DeferredShadowBuffer, FullScreenDebugMode.DeferredShadows);
if (m_FrameSettings.enableAsyncCompute)
if (hdCamera.frameSettings.enableAsyncCompute)
{
m_LightLoop.BuildGPULightListAsyncEnd(hdCamera, cmd, buildGPULightListsCompleteFence);
}

{
// Set fog parameters for volumetric lighting.
var visualEnv = VolumeManager.instance.stack.GetComponent<VisualEnvironment>();
visualEnv.PushFogShaderParameters(cmd, m_FrameSettings);
visualEnv.PushFogShaderParameters(hdCamera, cmd);
m_VolumetricLightingSystem.VolumeVoxelizationPass(densityVolumes, hdCamera, cmd, m_FrameSettings, m_FrameCount);
m_VolumetricLightingSystem.VolumeVoxelizationPass(hdCamera, cmd, m_FrameCount, densityVolumes);
m_VolumetricLightingSystem.VolumetricLightingPass(hdCamera, cmd, m_FrameSettings, m_FrameCount);
m_VolumetricLightingSystem.VolumetricLightingPass(hdCamera, cmd, m_FrameCount);
StartStereoRendering(renderContext, hdCamera.camera);
StartStereoRendering(renderContext, hdCamera);
m_SSSBufferManager.SubsurfaceScatteringPass(hdCamera, cmd, diffusionProfileSettings, m_FrameSettings,
m_SSSBufferManager.SubsurfaceScatteringPass(hdCamera, cmd, diffusionProfileSettings,
m_CameraColorBuffer, m_CameraSssDiffuseLightingBuffer, m_CameraDepthStencilBuffer, GetDepthTexture());
RenderSky(hdCamera, cmd);

RenderColorPyramid(hdCamera, cmd, renderContext, false);
AccumulateDistortion(m_CullResults, hdCamera, renderContext, cmd);
RenderDistortion(cmd, m_Asset.renderPipelineResources, hdCamera);
RenderDistortion(hdCamera, cmd, m_Asset.renderPipelineResources);
StopStereoRendering(renderContext, hdCamera.camera);
StopStereoRendering(renderContext, hdCamera);
PushFullScreenDebugTexture(cmd, m_CameraColorBuffer, hdCamera, FullScreenDebugMode.NanTracker);
PushColorPickerDebugTexture(cmd, m_CameraColorBuffer, hdCamera);
PushFullScreenDebugTexture(hdCamera, cmd, m_CameraColorBuffer, FullScreenDebugMode.NanTracker);
PushColorPickerDebugTexture(hdCamera, cmd, m_CameraColorBuffer);
StartStereoRendering(renderContext, hdCamera.camera);
StartStereoRendering(renderContext, hdCamera);
if (m_FrameSettings.enablePostprocess)
if (hdCamera.frameSettings.enablePostprocess)
{
RenderPostProcess(hdCamera, cmd, postProcessLayer);
}

}
}
StopStereoRendering(renderContext, hdCamera.camera);
StopStereoRendering(renderContext, hdCamera);
if (m_FrameSettings.enableStereo)
if (hdCamera.frameSettings.enableStereo)
renderContext.StereoEndRender(hdCamera.camera);
}

}
}
#endif
PushFullScreenDebugTexture(cmd, m_CameraColorBuffer, hdCamera, FullScreenDebugMode.ScreenSpaceTracing);
PushFullScreenDebugTexture(hdCamera, cmd, m_CameraColorBuffer, FullScreenDebugMode.ScreenSpaceTracing);
// Caution: RenderDebug needs to take into account that we have flipped the screen (so anything captured before the flip will be flipped)
RenderDebug(hdCamera, cmd);

}
void RenderOpaqueRenderList(CullResults cull,
Camera camera,
HDCamera hdCamera,
ScriptableRenderContext renderContext,
CommandBuffer cmd,
ShaderPassName passName,

Material overrideMaterial = null)
{
m_SinglePassName[0] = passName;
RenderOpaqueRenderList(cull, camera, renderContext, cmd, m_SinglePassName, rendererConfiguration, inRenderQueueRange, stateBlock, overrideMaterial);
RenderOpaqueRenderList(cull, hdCamera, renderContext, cmd, m_SinglePassName, rendererConfiguration, inRenderQueueRange, stateBlock, overrideMaterial);
Camera camera,
HDCamera hdCamera,
ScriptableRenderContext renderContext,
CommandBuffer cmd,
ShaderPassName[] passNames,

Material overrideMaterial = null)
{
if (!m_FrameSettings.enableOpaqueObjects)
if (!hdCamera.frameSettings.enableOpaqueObjects)
return;
// This is done here because the DrawRenderers API lives outside command buffers, so we need to call this before doing any DrawRenderers

var drawSettings = new DrawRendererSettings(camera, HDShaderPassNames.s_EmptyName)
var drawSettings = new DrawRendererSettings(hdCamera.camera, HDShaderPassNames.s_EmptyName)
{
rendererConfiguration = rendererConfiguration,
sorting = { flags = SortFlags.CommonOpaque }

}
void RenderTransparentRenderList(CullResults cull,
Camera camera,
HDCamera hdCamera,
ScriptableRenderContext renderContext,
CommandBuffer cmd,
ShaderPassName passName,

Material overrideMaterial = null)
{
m_SinglePassName[0] = passName;
RenderTransparentRenderList(cull, camera, renderContext, cmd, m_SinglePassName,
RenderTransparentRenderList(cull, hdCamera, renderContext, cmd, m_SinglePassName,
Camera camera,
HDCamera hdCamera,
ScriptableRenderContext renderContext,
CommandBuffer cmd,
ShaderPassName[] passNames,

Material overrideMaterial = null
)
{
if (!m_FrameSettings.enableTransparentObjects)
if (!hdCamera.frameSettings.enableTransparentObjects)
return;
// This is done here because the DrawRenderers API lives outside command buffers, so we need to call this before doing any DrawRenderers

var drawSettings = new DrawRendererSettings(camera, HDShaderPassNames.s_EmptyName)
var drawSettings = new DrawRendererSettings(hdCamera.camera, HDShaderPassNames.s_EmptyName)
{
rendererConfiguration = rendererConfiguration,
sorting = { flags = SortFlags.CommonTransparent }

void AccumulateDistortion(CullResults cullResults, HDCamera hdCamera, ScriptableRenderContext renderContext, CommandBuffer cmd)
{
if (!m_FrameSettings.enableDistortion)
if (!hdCamera.frameSettings.enableDistortion)
return;
using (new ProfilingSample(cmd, "Distortion", CustomSamplerId.Distortion.GetSampler()))

// Only transparent objects can render distortion vectors
RenderTransparentRenderList(cullResults, hdCamera.camera, renderContext, cmd, HDShaderPassNames.s_DistortionVectorsName);
RenderTransparentRenderList(cullResults, hdCamera, renderContext, cmd, HDShaderPassNames.s_DistortionVectorsName);
void RenderDistortion(CommandBuffer cmd, RenderPipelineResources resources, HDCamera hdCamera)
void RenderDistortion(HDCamera hdCamera, CommandBuffer cmd, RenderPipelineResources resources)
if (!m_FrameSettings.enableDistortion)
if (!hdCamera.frameSettings.enableDistortion)
return;
using (new ProfilingSample(cmd, "ApplyDistortion", CustomSamplerId.ApplyDistortion.GetSampler()))

// by using the pass "ForwardOnly". In this case the .shader should not have "Forward" but only a "ForwardOnly" pass.
// It must also have a "DepthForwardOnly" and no "DepthOnly" pass, as forward materials (either deferred or forward-only rendering) always have a depth pass.
// If a forward material has no depth prepass, then lighting can be incorrect (deferred shadowing, SSAO); this may be acceptable depending on usage
bool addFullDepthPrepass = forcePrepass || m_FrameSettings.enableForwardRenderingOnly || m_FrameSettings.enableDepthPrepassWithDeferredRendering;
bool addFullDepthPrepass = forcePrepass || hdCamera.frameSettings.enableForwardRenderingOnly || hdCamera.frameSettings.enableDepthPrepassWithDeferredRendering;
var camera = hdCamera.camera;

{
// We render the opaque objects first, as opaque alpha-tested objects are more costly to render and could be rejected by early-z (but not Hi-Z, as it is disabled by the clip instruction)
// This is handled automatically with the RenderQueue value (OpaqueAlphaTested have a different value and thus are sorted after Opaque)
RenderOpaqueRenderList(cull, camera, renderContext, cmd, m_DepthOnlyAndDepthForwardOnlyPassNames, 0, HDRenderQueue.k_RenderQueue_AllOpaque);
RenderOpaqueRenderList(cull, hdCamera, renderContext, cmd, m_DepthOnlyAndDepthForwardOnlyPassNames, 0, HDRenderQueue.k_RenderQueue_AllOpaque);
RenderOpaqueRenderList(cull, camera, renderContext, cmd, m_DepthForwardOnlyPassNames, 0, HDRenderQueue.k_RenderQueue_AllOpaque);
RenderOpaqueRenderList(cull, hdCamera, renderContext, cmd, m_DepthForwardOnlyPassNames, 0, HDRenderQueue.k_RenderQueue_AllOpaque);
RenderOpaqueRenderList(cull, camera, renderContext, cmd, m_DepthOnlyPassNames, 0, renderQueueRange);
RenderOpaqueRenderList(cull, hdCamera, renderContext, cmd, m_DepthOnlyPassNames, 0, renderQueueRange);
if (m_FrameSettings.enableTransparentPrepass)
if (hdCamera.frameSettings.enableTransparentPrepass)
RenderTransparentRenderList(cull, camera, renderContext, cmd, m_TransparentDepthPrepassNames);
RenderTransparentRenderList(cull, hdCamera, renderContext, cmd, m_TransparentDepthPrepassNames);
}
}
}

void RenderGBuffer(CullResults cull, HDCamera hdCamera, bool enableShadowMask, ScriptableRenderContext renderContext, CommandBuffer cmd)
{
if (m_FrameSettings.enableForwardRenderingOnly)
if (hdCamera.frameSettings.enableForwardRenderingOnly)
return;
var camera = hdCamera.camera;

// setup GBuffer for rendering
HDUtils.SetRenderTarget(cmd, hdCamera, m_GbufferManager.GetBuffersRTI(enableShadowMask), m_CameraDepthStencilBuffer);
RenderOpaqueRenderList(cull, camera, renderContext, cmd, HDShaderPassNames.s_GBufferName, m_currentRendererConfigurationBakedLighting, HDRenderQueue.k_RenderQueue_AllOpaque);
RenderOpaqueRenderList(cull, hdCamera, renderContext, cmd, HDShaderPassNames.s_GBufferName, m_currentRendererConfigurationBakedLighting, HDRenderQueue.k_RenderQueue_AllOpaque);
void RenderDBuffer(HDCamera camera, CommandBuffer cmd)
void RenderDBuffer(HDCamera hdCamera, CommandBuffer cmd)
if (!m_FrameSettings.enableDBuffer)
if (!hdCamera.frameSettings.enableDBuffer)
return;
using (new ProfilingSample(cmd, "DBufferRender", CustomSamplerId.DBufferRender.GetSampler()))

// Depth texture is now ready, bind it.
cmd.SetGlobalTexture(HDShaderIDs._CameraDepthTexture, GetDepthTexture());
m_DbufferManager.ClearTargets(cmd, camera);
HDUtils.SetRenderTarget(cmd, camera, m_DbufferManager.GetBuffersRTI(), m_CameraDepthStencilBuffer); // do not clear anymore
m_DbufferManager.ClearTargets(cmd, hdCamera);
HDUtils.SetRenderTarget(cmd, hdCamera, m_DbufferManager.GetBuffersRTI(), m_CameraDepthStencilBuffer); // do not clear anymore
m_DbufferManager.SetHTile(m_DbufferManager.bufferCount, cmd);
DecalSystem.instance.RenderIntoDBuffer(cmd);
m_DbufferManager.UnSetHTile(cmd);

{
using (new ProfilingSample(cmd, "DisplayDebug ViewMaterial", CustomSamplerId.DisplayDebugViewMaterial.GetSampler()))
{
if (m_CurrentDebugDisplaySettings.materialDebugSettings.IsDebugGBufferEnabled() && !m_FrameSettings.enableForwardRenderingOnly)
if (m_CurrentDebugDisplaySettings.materialDebugSettings.IsDebugGBufferEnabled() && !hdCamera.frameSettings.enableForwardRenderingOnly)
{
using (new ProfilingSample(cmd, "DebugViewMaterialGBuffer", CustomSamplerId.DebugViewMaterialGBuffer.GetSampler()))
{

HDUtils.SetRenderTarget(cmd, hdCamera, m_CameraColorBuffer, m_CameraDepthStencilBuffer, ClearFlag.All, CoreUtils.clearColorAllBlack);
// Render Opaque forward
RenderOpaqueRenderList(cull, hdCamera.camera, renderContext, cmd, m_AllForwardOpaquePassNames, m_currentRendererConfigurationBakedLighting, stateBlock: m_DepthStateOpaque);
RenderOpaqueRenderList(cull, hdCamera, renderContext, cmd, m_AllForwardOpaquePassNames, m_currentRendererConfigurationBakedLighting, stateBlock: m_DepthStateOpaque);
RenderTransparentRenderList(cull, hdCamera.camera, renderContext, cmd, m_AllTransparentPassNames, m_currentRendererConfigurationBakedLighting, stateBlock: m_DepthStateOpaque);
RenderTransparentRenderList(cull, hdCamera, renderContext, cmd, m_AllTransparentPassNames, m_currentRendererConfigurationBakedLighting, stateBlock: m_DepthStateOpaque);
}
}

var camera = hdCamera.camera;
// Apply SSAO from PostProcessLayer
if (m_FrameSettings.enableSSAO && postProcessLayer != null && postProcessLayer.enabled)
if (hdCamera.frameSettings.enableSSAO && postProcessLayer != null && postProcessLayer.enabled)
{
var settings = postProcessLayer.GetSettings<AmbientOcclusion>();

cmd.SetGlobalTexture(HDShaderIDs._AmbientOcclusionTexture, m_AmbientOcclusionBuffer);
cmd.SetGlobalVector(HDShaderIDs._AmbientOcclusionParam, new Vector4(settings.color.value.r, settings.color.value.g, settings.color.value.b, settings.directLightingStrength.value));
PushFullScreenDebugTexture(cmd, m_AmbientOcclusionBuffer, hdCamera, FullScreenDebugMode.SSAO);
PushFullScreenDebugTexture(hdCamera, cmd, m_AmbientOcclusionBuffer, FullScreenDebugMode.SSAO);
return;
}
}

void RenderDeferredLighting(HDCamera hdCamera, CommandBuffer cmd)
{
if (m_FrameSettings.enableForwardRenderingOnly)
if (hdCamera.frameSettings.enableForwardRenderingOnly)
return;
m_MRTCache2[0] = m_CameraColorBuffer;

var options = new LightLoop.LightingPassOptions();
if (m_FrameSettings.enableSubsurfaceScattering)
if (hdCamera.frameSettings.enableSubsurfaceScattering)
{
// Output split lighting for materials asking for it (masked in the stencil buffer)
options.outputSplitLighting = true;

if (pass == ForwardPass.Opaque)
{
// In case of forward SSS we will bind all the required targets. It is up to the shader to write into them or not.
if (m_FrameSettings.enableSubsurfaceScattering)
if (hdCamera.frameSettings.enableSubsurfaceScattering)
{
RenderTargetIdentifier[] m_MRTWithSSS =
new RenderTargetIdentifier[2 + m_SSSBufferManager.sssBufferCount];

HDShaderPassNames.s_ForwardOnlyName;
m_ForwardAndForwardOnlyPassNames[1] = HDShaderPassNames.s_ForwardName;
var passNames = m_FrameSettings.enableForwardRenderingOnly
var passNames = hdCamera.frameSettings.enableForwardRenderingOnly
? m_ForwardAndForwardOnlyPassNames
: m_ForwardOnlyPassNames;
var debugSSTThisPass = debugScreenSpaceTracing && (m_CurrentDebugDisplaySettings.lightingDebugSettings.debugLightingMode == DebugLightingMode.ScreenSpaceTracingReflection);

cmd.SetRandomWriteTarget(7, m_DebugScreenSpaceTracingData);
}
RenderOpaqueRenderList(cullResults, camera, renderContext, cmd, passNames, m_currentRendererConfigurationBakedLighting);
RenderOpaqueRenderList(cullResults, hdCamera, renderContext, cmd, passNames, m_currentRendererConfigurationBakedLighting);
if (debugSSTThisPass)
cmd.ClearRandomWriteTargets();
}

if ((m_FrameSettings.enableDBuffer) && (DecalSystem.m_DecalsVisibleThisFrame > 0)) // enable d-buffer flag value is being interpreted more like enable decals in general now that we have clustered
if ((hdCamera.frameSettings.enableDBuffer) && (DecalSystem.m_DecalsVisibleThisFrame > 0)) // enable d-buffer flag value is being interpreted more like enable decals in general now that we have clustered
{
DecalSystem.instance.SetAtlas(cmd); // for clustered decals
}

cmd.SetGlobalBuffer(HDShaderIDs._DebugScreenSpaceTracingData, m_DebugScreenSpaceTracingData);
cmd.SetRandomWriteTarget(7, m_DebugScreenSpaceTracingData);
}
RenderTransparentRenderList(cullResults, camera, renderContext, cmd, m_AllTransparentPassNames, m_currentRendererConfigurationBakedLighting, pass == ForwardPass.PreRefraction ? HDRenderQueue.k_RenderQueue_PreRefraction : HDRenderQueue.k_RenderQueue_Transparent);
RenderTransparentRenderList(cullResults, hdCamera, renderContext, cmd, m_AllTransparentPassNames, m_currentRendererConfigurationBakedLighting, pass == ForwardPass.PreRefraction ? HDRenderQueue.k_RenderQueue_PreRefraction : HDRenderQueue.k_RenderQueue_Transparent);
if (debugSSTThisPass)
cmd.ClearRandomWriteTargets();
}

if (pass == ForwardPass.Opaque)
{
RenderOpaqueRenderList(cullResults, hdCamera.camera, renderContext, cmd, m_ForwardErrorPassNames, 0, null, null, m_ErrorMaterial);
RenderOpaqueRenderList(cullResults, hdCamera, renderContext, cmd, m_ForwardErrorPassNames, 0, null, null, m_ErrorMaterial);
RenderTransparentRenderList(cullResults, hdCamera.camera, renderContext, cmd, m_ForwardErrorPassNames, 0, pass == ForwardPass.PreRefraction ? HDRenderQueue.k_RenderQueue_PreRefraction : HDRenderQueue.k_RenderQueue_Transparent, null, m_ErrorMaterial);
RenderTransparentRenderList(cullResults, hdCamera, renderContext, cmd, m_ForwardErrorPassNames, 0, pass == ForwardPass.PreRefraction ? HDRenderQueue.k_RenderQueue_PreRefraction : HDRenderQueue.k_RenderQueue_Transparent, null, m_ErrorMaterial);
}
}
}

if (!m_FrameSettings.enableTransparentPostpass)
if (!hdCamera.frameSettings.enableTransparentPostpass)
RenderTransparentRenderList(cullResults, hdCamera.camera, renderContext, cmd, m_TransparentDepthPostpassNames);
RenderTransparentRenderList(cullResults, hdCamera, renderContext, cmd, m_TransparentDepthPostpassNames);
void RenderObjectsVelocity(CullResults cullResults, HDCamera hdcamera, ScriptableRenderContext renderContext, CommandBuffer cmd)
void RenderObjectsVelocity(CullResults cullResults, HDCamera hdCamera, ScriptableRenderContext renderContext, CommandBuffer cmd)
if (!m_FrameSettings.enableMotionVectors || !m_FrameSettings.enableObjectMotionVectors)
if (!hdCamera.frameSettings.enableMotionVectors || !hdCamera.frameSettings.enableObjectMotionVectors)
return;
using (new ProfilingSample(cmd, "Objects Velocity", CustomSamplerId.ObjectsVelocity.GetSampler()))

hdcamera.camera.depthTextureMode |= DepthTextureMode.MotionVectors | DepthTextureMode.Depth;
hdCamera.camera.depthTextureMode |= DepthTextureMode.MotionVectors | DepthTextureMode.Depth;
HDUtils.SetRenderTarget(cmd, hdcamera, m_VelocityBuffer, m_CameraDepthStencilBuffer);
RenderOpaqueRenderList(cullResults, hdcamera.camera, renderContext, cmd, HDShaderPassNames.s_MotionVectorsName, RendererConfiguration.PerObjectMotionVectors);
HDUtils.SetRenderTarget(cmd, hdCamera, m_VelocityBuffer, m_CameraDepthStencilBuffer);
RenderOpaqueRenderList(cullResults, hdCamera, renderContext, cmd, HDShaderPassNames.s_MotionVectorsName, RendererConfiguration.PerObjectMotionVectors);
void RenderCameraVelocity(CullResults cullResults, HDCamera hdcamera, ScriptableRenderContext renderContext, CommandBuffer cmd)
void RenderCameraVelocity(CullResults cullResults, HDCamera hdCamera, ScriptableRenderContext renderContext, CommandBuffer cmd)
if (!m_FrameSettings.enableMotionVectors)
if (!hdCamera.frameSettings.enableMotionVectors)
return;
using (new ProfilingSample(cmd, "Camera Velocity", CustomSamplerId.CameraVelocity.GetSampler()))

hdcamera.camera.depthTextureMode |= DepthTextureMode.MotionVectors | DepthTextureMode.Depth;
hdCamera.camera.depthTextureMode |= DepthTextureMode.MotionVectors | DepthTextureMode.Depth;
HDUtils.DrawFullScreen(cmd, hdcamera, m_CameraMotionVectorsMaterial, m_VelocityBuffer, m_CameraDepthStencilBuffer, null, 0);
PushFullScreenDebugTexture(cmd, m_VelocityBuffer, hdcamera, FullScreenDebugMode.MotionVectors);
HDUtils.DrawFullScreen(cmd, hdCamera, m_CameraMotionVectorsMaterial, m_VelocityBuffer, m_CameraDepthStencilBuffer, null, 0);
PushFullScreenDebugTexture(hdCamera, cmd, m_VelocityBuffer, FullScreenDebugMode.MotionVectors);
}
}

{
if (!m_FrameSettings.enableRoughRefraction)
if (!hdCamera.frameSettings.enableRoughRefraction)
if (!m_FrameSettings.enableDistortion && !m_FrameSettings.enablePostprocess && !m_FrameSettings.enableSSR)
if (!hdCamera.frameSettings.enableDistortion && !hdCamera.frameSettings.enablePostprocess && !hdCamera.frameSettings.enableSSR)
return;
}

m_BufferPyramid.RenderColorPyramid(hdCamera, cmd, renderContext, m_CameraColorBuffer, cameraRT);
Vector2 pyramidScale = m_BufferPyramid.GetPyramidToScreenScale(hdCamera, cameraRT);
PushFullScreenDebugTextureMip(cmd, cameraRT, m_BufferPyramid.GetPyramidLodCount(new Vector2Int(hdCamera.actualWidth, hdCamera.actualHeight)), new Vector4(pyramidScale.x, pyramidScale.y, 0.0f, 0.0f), hdCamera, isPreRefraction ? FullScreenDebugMode.PreRefractionColorPyramid : FullScreenDebugMode.FinalColorPyramid);
PushFullScreenDebugTextureMip(hdCamera, cmd, cameraRT, m_BufferPyramid.GetPyramidLodCount(new Vector2Int(hdCamera.actualWidth, hdCamera.actualHeight)), new Vector4(pyramidScale.x, pyramidScale.y, 0.0f, 0.0f), isPreRefraction ? FullScreenDebugMode.PreRefractionColorPyramid : FullScreenDebugMode.FinalColorPyramid);
if (!m_FrameSettings.enableRoughRefraction)
if (!hdCamera.frameSettings.enableRoughRefraction)
return;
var cameraRT = hdCamera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.DepthPyramid)

m_BufferPyramid.RenderDepthPyramid(hdCamera, cmd, renderContext, GetDepthTexture(), cameraRT);
Vector2 pyramidScale = m_BufferPyramid.GetPyramidToScreenScale(hdCamera, cameraRT);
PushFullScreenDebugTextureMip(cmd, cameraRT, m_BufferPyramid.GetPyramidLodCount(new Vector2Int(hdCamera.actualWidth, hdCamera.actualHeight)), new Vector4(pyramidScale.x, pyramidScale.y, 0.0f, 0.0f), hdCamera, debugMode);
PushFullScreenDebugTextureMip(hdCamera, cmd, cameraRT, m_BufferPyramid.GetPyramidLodCount(new Vector2Int(hdCamera.actualWidth, hdCamera.actualHeight)), new Vector4(pyramidScale.x, pyramidScale.y, 0.0f, 0.0f), debugMode);
}
void RenderPostProcess(HDCamera hdcamera, CommandBuffer cmd, PostProcessLayer layer)

}
// TODO TEMP: Not sure I want to keep this special case. Gotta see how to get rid of it (not sure it will work correctly for non-full viewports).
public void PushColorPickerDebugTexture(CommandBuffer cmd, RenderTargetIdentifier textureID, HDCamera hdCamera)
public void PushColorPickerDebugTexture(HDCamera hdCamera, CommandBuffer cmd, RenderTargetIdentifier textureID)
{
if (m_CurrentDebugDisplaySettings.colorPickerDebugSettings.colorPickerMode != ColorPickerDebugMode.None)
{

}
}
public void PushFullScreenDebugTexture(CommandBuffer cmd, RTHandleSystem.RTHandle textureID, HDCamera hdCamera, FullScreenDebugMode debugMode)
public void PushFullScreenDebugTexture(HDCamera hdCamera, CommandBuffer cmd, RTHandleSystem.RTHandle textureID, FullScreenDebugMode debugMode)
{
if (debugMode == m_CurrentDebugDisplaySettings.fullScreenDebugMode)
{

}
void PushFullScreenDebugTextureMip(CommandBuffer cmd, RTHandleSystem.RTHandle texture, int lodCount, Vector4 scaleBias, HDCamera hdCamera, FullScreenDebugMode debugMode)
void PushFullScreenDebugTextureMip(HDCamera hdCamera, CommandBuffer cmd, RTHandleSystem.RTHandle texture, int lodCount, Vector4 scaleBias, FullScreenDebugMode debugMode)
{
if (debugMode == m_CurrentDebugDisplaySettings.fullScreenDebugMode)
{

m_DebugFullScreen.SetTexture(HDShaderIDs._DepthPyramidTexture, hdCamera.GetPreviousFrameRT((int)HDCameraFrameHistoryType.DepthPyramid));
HDUtils.DrawFullScreen(cmd, hdCamera, m_DebugFullScreen, (RenderTargetIdentifier)BuiltinRenderTextureType.CameraTarget);
PushColorPickerDebugTexture(cmd, (RenderTargetIdentifier)BuiltinRenderTextureType.CameraTarget, hdCamera);
PushColorPickerDebugTexture(hdCamera, cmd, (RenderTargetIdentifier)BuiltinRenderTextureType.CameraTarget);
}
// Then overlays

// TODO: As we are in development and do not have all the setup passes yet, we still clear the color in the emissive buffer and gbuffer, but this will be removed later.
// Clear GBuffers
if (!m_FrameSettings.enableForwardRenderingOnly)
if (!hdCamera.frameSettings.enableForwardRenderingOnly)
{
using (new ProfilingSample(cmd, "Clear GBuffer", CustomSamplerId.ClearGBuffer.GetSampler()))
{

}
}
void StartStereoRendering(ScriptableRenderContext renderContext, Camera cam)
void StartStereoRendering(ScriptableRenderContext renderContext, HDCamera hdCamera)
if (m_FrameSettings.enableStereo)
renderContext.StartMultiEye(cam);
if (hdCamera.frameSettings.enableStereo)
renderContext.StartMultiEye(hdCamera.camera);
void StopStereoRendering(ScriptableRenderContext renderContext, Camera cam)
void StopStereoRendering(ScriptableRenderContext renderContext, HDCamera hdCamera)
if (m_FrameSettings.enableStereo)
renderContext.StopMultiEye(cam);
if (hdCamera.frameSettings.enableStereo)
renderContext.StopMultiEye(hdCamera.camera);
}
}
}
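Besides swapping m_FrameSettings for hdCamera.frameSettings, this file also normalizes helper signatures so that the HDCamera comes first, followed by the CommandBuffer. A condensed sketch of one such helper and a matching call site, taken from the hunks above (the body is reduced to its guard):

// New argument order: hdCamera first, then cmd, then the payload.
public void PushFullScreenDebugTexture(HDCamera hdCamera, CommandBuffer cmd, RTHandleSystem.RTHandle textureID, FullScreenDebugMode debugMode)
{
    if (debugMode == m_CurrentDebugDisplaySettings.fullScreenDebugMode)
    {
        // ... capture textureID into the full-screen debug buffer for this camera ...
    }
}

// Call sites are reordered to match:
PushFullScreenDebugTexture(hdCamera, cmd, m_CameraColorBuffer, FullScreenDebugMode.NanTracker);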

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Reflection/ReflectionSystemInternal.cs (6 changed lines)


hdCamera = HDCamera.Create(renderCamera, null);
}
hdCamera.Update(null, probe.frameSettings, null);
hdCamera.Update(probe.frameSettings, null, null);
if (!IsRealtimeTextureValid(probe.realtimeTexture, hdCamera))
{

hdCamera = HDCamera.Create(camera, null);
}
hdCamera.Update(null, probe.frameSettings, null);
hdCamera.Update(probe.frameSettings, null, null);
if (!IsRealtimeTextureValid(probe.realtimeTexture, hdCamera))
{

hdCamera = HDCamera.Create(camera, null);
}
hdCamera.Update(null, probe.frameSettings, null);
hdCamera.Update(probe.frameSettings, null, null);
return hdCamera;
}
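The probe path follows the same reordering: the probe's own FrameSettings becomes the first argument to HDCamera.Update, and the post-process layer and volumetric system are passed as null because probe rendering uses neither. A minimal sketch, assuming the surrounding null check that the hunk elides:

// Schematic probe update after this change (the null check is assumed, not shown in the hunk).
if (hdCamera == null)
    hdCamera = HDCamera.Create(renderCamera, null); // no volumetric lighting system for probes

hdCamera.Update(probe.frameSettings, null, null); // settings first; no post-process layer, no volumetrics
if (!IsRealtimeTextureValid(probe.realtimeTexture, hdCamera))
{
    // ... re-render the probe ...
}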

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Lighting/Volumetrics/VolumetricLighting.cs (72 changed lines)


}
// For the initial allocation, no suballocation happens (the texture is full size).
VBufferParameters ComputeVBufferParameters(HDCamera camera, bool isInitialAllocation)
VBufferParameters ComputeVBufferParameters(HDCamera hdCamera, bool isInitialAllocation)
Vector3Int viewportResolution = ComputeVBufferResolution(preset, camera.camera.pixelWidth, camera.camera.pixelHeight);
Vector3Int viewportResolution = ComputeVBufferResolution(preset, hdCamera.camera.pixelWidth, hdCamera.camera.pixelHeight);
Vector3Int bufferResolution; // Could be higher due to sub-allocation (resource aliasing) in the RTHandle system
if (isInitialAllocation)

var controller = VolumeManager.instance.stack.GetComponent<VolumetricLightingController>();
// We must not allow the V-Buffer to extend outside of the camera's frustum.
float n = camera.camera.nearClipPlane;
float f = camera.camera.farClipPlane;
float n = hdCamera.camera.nearClipPlane;
float f = hdCamera.camera.farClipPlane;
Vector2 vBufferDepthRange = controller.depthRange.value;
vBufferDepthRange.y = Mathf.Clamp(vBufferDepthRange.y, n, f); // far

return new VBufferParameters(viewportResolution, bufferResolution, vBufferDepthRange, vBufferDepthDistributionUniformity);
}
public void InitializePerCameraData(HDCamera camera)
public void InitializePerCameraData(HDCamera hdCamera)
var parameters = ComputeVBufferParameters(camera, true);
camera.vBufferParams = new VBufferParameters[2];
camera.vBufferParams[0] = parameters;
camera.vBufferParams[1] = parameters;
var parameters = ComputeVBufferParameters(hdCamera, true);
hdCamera.vBufferParams = new VBufferParameters[2];
hdCamera.vBufferParams[0] = parameters;
hdCamera.vBufferParams[1] = parameters;
if (camera.camera.cameraType == CameraType.Game ||
camera.camera.cameraType == CameraType.SceneView)
if (hdCamera.camera.cameraType == CameraType.Game ||
hdCamera.camera.cameraType == CameraType.SceneView)
// We don't need reprojection for other view types, such as reflection and preview.
camera.AllocHistoryFrameRT((int)HDCameraFrameHistoryType.VolumetricLighting, HistoryBufferAllocatorFunction);
// We don't need reprojection for other view types, such as reflection and preview.
hdCamera.AllocHistoryFrameRT((int)HDCameraFrameHistoryType.VolumetricLighting, HistoryBufferAllocatorFunction);
public void UpdatePerCameraData(HDCamera camera)
public void UpdatePerCameraData(HDCamera hdCamera)
var parameters = ComputeVBufferParameters(camera, false);
var parameters = ComputeVBufferParameters(hdCamera, false);
camera.vBufferParams[1] = camera.vBufferParams[0];
camera.vBufferParams[0] = parameters;
hdCamera.vBufferParams[1] = hdCamera.vBufferParams[0];
hdCamera.vBufferParams[0] = parameters;
// Note: resizing of history buffer is automatic (handled by the BufferedRTHandleSystem).
}

return (1.0f / (4.0f * Mathf.PI)) * 1.5f * (1.0f - g * g) / (2.0f + g * g);
}
public void PushGlobalParams(HDCamera camera, CommandBuffer cmd, uint frameIndex)
public void PushGlobalParams(HDCamera hdCamera, CommandBuffer cmd, uint frameIndex)
{
if (preset == VolumetricLightingPreset.Off) return;

SetPreconvolvedAmbientLightProbe(cmd, fog.anisotropy);
var currFrameParams = camera.vBufferParams[0];
var prevFrameParams = camera.vBufferParams[1];
var currFrameParams = hdCamera.vBufferParams[0];
var prevFrameParams = hdCamera.vBufferParams[1];
cmd.SetGlobalVector( HDShaderIDs._VBufferResolution, currFrameParams.resolution);
cmd.SetGlobalVector( HDShaderIDs._VBufferSliceCount, currFrameParams.sliceCount);

cmd.SetGlobalTexture(HDShaderIDs._VBufferLighting, m_LightingBufferHandle);
}
public DensityVolumeList PrepareVisibleDensityVolumeList(HDCamera camera, CommandBuffer cmd)
public DensityVolumeList PrepareVisibleDensityVolumeList(HDCamera hdCamera, CommandBuffer cmd)
{
DensityVolumeList densityVolumes = new DensityVolumeList();

using (new ProfilingSample(cmd, "Prepare Visible Density Volume List"))
{
Vector3 camPosition = camera.camera.transform.position;
Vector3 camPosition = hdCamera.camera.transform.position;
Vector3 camOffset = Vector3.zero; // World-origin-relative
if (ShaderConfig.s_CameraRelativeRendering != 0)

// Frustum cull on the CPU for now. TODO: do it on the GPU.
// TODO: account for custom near and far planes of the V-Buffer's frustum.
// It's typically much shorter (along the Z axis) than the camera's frustum.
if (GeometryUtils.Overlap(obb, camera.frustum, 6, 8))
if (GeometryUtils.Overlap(obb, hdCamera.frustum, 6, 8))
{
// TODO: cache these?
var data = volume.parameters.GetData();

}
}
public void VolumeVoxelizationPass(DensityVolumeList densityVolumes, HDCamera camera, CommandBuffer cmd, FrameSettings settings, uint frameIndex)
public void VolumeVoxelizationPass(HDCamera hdCamera, CommandBuffer cmd, uint frameIndex, DensityVolumeList densityVolumes)
{
if (preset == VolumetricLightingPreset.Off) return;

// Use the workaround by running the full shader with 0 density
}
bool enableClustered = settings.lightLoopSettings.enableTileAndCluster;
bool enableClustered = hdCamera.frameSettings.lightLoopSettings.enableTileAndCluster;
var frameParams = camera.vBufferParams[0];
var frameParams = hdCamera.vBufferParams[0];
float vFoV = camera.camera.fieldOfView * Mathf.Deg2Rad;
float vFoV = hdCamera.camera.fieldOfView * Mathf.Deg2Rad;
Matrix4x4 transform = HDUtils.ComputePixelCoordToWorldSpaceViewDirectionMatrix(vFoV, resolution, camera.viewMatrix, false);
Matrix4x4 transform = HDUtils.ComputePixelCoordToWorldSpaceViewDirectionMatrix(vFoV, resolution, hdCamera.viewMatrix, false);
Texture3D volumeAtlas = DensityVolumeManager.manager.volumeAtlas.volumeAtlas;
Vector2 volumeAtlasDimensions = new Vector2(0.0f, 0.0f);

return coords;
}
public void VolumetricLightingPass(HDCamera camera, CommandBuffer cmd, FrameSettings settings, uint frameIndex)
public void VolumetricLightingPass(HDCamera hdCamera, CommandBuffer cmd, uint frameIndex)
{
if (preset == VolumetricLightingPreset.Off) return;

using (new ProfilingSample(cmd, "Volumetric Lighting"))
{
// Only available in the Play Mode because all the frame counters in the Edit Mode are broken.
bool enableClustered = settings.lightLoopSettings.enableTileAndCluster;
bool enableReprojection = Application.isPlaying && camera.camera.cameraType == CameraType.Game;
bool enableClustered = hdCamera.frameSettings.lightLoopSettings.enableTileAndCluster;
bool enableReprojection = Application.isPlaying && hdCamera.camera.cameraType == CameraType.Game;
int kernel;

: "VolumetricLightingBruteforce");
}
var frameParams = camera.vBufferParams[0];
var frameParams = hdCamera.vBufferParams[0];
float vFoV = camera.camera.fieldOfView * Mathf.Deg2Rad;
float vFoV = hdCamera.camera.fieldOfView * Mathf.Deg2Rad;
Matrix4x4 transform = HDUtils.ComputePixelCoordToWorldSpaceViewDirectionMatrix(vFoV, resolution, camera.viewMatrix, false);
Matrix4x4 transform = HDUtils.ComputePixelCoordToWorldSpaceViewDirectionMatrix(vFoV, resolution, hdCamera.viewMatrix, false);
Vector2[] xySeq = GetHexagonalClosePackedSpheres7();

cmd.SetComputeTextureParam(m_VolumetricLightingCS, kernel, HDShaderIDs._VBufferLightingIntegral, m_LightingBufferHandle); // Write
if (enableReprojection)
{
cmd.SetComputeTextureParam(m_VolumetricLightingCS, kernel, HDShaderIDs._VBufferLightingHistory, camera.GetPreviousFrameRT((int)HDCameraFrameHistoryType.VolumetricLighting)); // Read
cmd.SetComputeTextureParam(m_VolumetricLightingCS, kernel, HDShaderIDs._VBufferLightingFeedback, camera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.VolumetricLighting)); // Write
cmd.SetComputeTextureParam(m_VolumetricLightingCS, kernel, HDShaderIDs._VBufferLightingHistory, hdCamera.GetPreviousFrameRT((int)HDCameraFrameHistoryType.VolumetricLighting)); // Read
cmd.SetComputeTextureParam(m_VolumetricLightingCS, kernel, HDShaderIDs._VBufferLightingFeedback, hdCamera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.VolumetricLighting)); // Write
}
int w = (int)resolution.x;
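Both volumetric passes drop their FrameSettings parameter entirely; the clustered-lighting toggle is read from the camera instead. A condensed sketch of the new entry points, with the bodies reduced to the settings lookups shown above:

public void VolumeVoxelizationPass(HDCamera hdCamera, CommandBuffer cmd, uint frameIndex, DensityVolumeList densityVolumes)
{
    if (preset == VolumetricLightingPreset.Off) return;

    bool enableClustered = hdCamera.frameSettings.lightLoopSettings.enableTileAndCluster;
    var frameParams = hdCamera.vBufferParams[0];
    // ... voxelize the density volumes into the V-Buffer ...
}

public void VolumetricLightingPass(HDCamera hdCamera, CommandBuffer cmd, uint frameIndex)
{
    if (preset == VolumetricLightingPreset.Off) return;

    bool enableClustered = hdCamera.frameSettings.lightLoopSettings.enableTileAndCluster;
    bool enableReprojection = Application.isPlaying && hdCamera.camera.cameraType == CameraType.Game;
    // ... perform the volumetric lighting integration ...
}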

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/Decal/DBufferManager.cs (4 changed lines)


cmd.SetGlobalTexture(HDShaderIDs._DecalHTileTexture, m_HTile);
}
public void PushGlobalParams(CommandBuffer cmd, FrameSettings frameSettings)
public void PushGlobalParams(HDCamera hdCamera, CommandBuffer cmd)
if (frameSettings.enableDBuffer)
if (hdCamera.frameSettings.enableDBuffer)
{
cmd.SetGlobalInt(HDShaderIDs._EnableDBuffer, vsibleDecalCount > 0 ? 1 : 0);
cmd.SetGlobalVector(HDShaderIDs._DecalAtlasResolution, new Vector2(HDUtils.hdrpSettings.decalSettings.atlasWidth, HDUtils.hdrpSettings.decalSettings.atlasHeight));

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Material/SubsurfaceScattering/SubsurfaceScatteringManager.cs (10 changed lines)


RTHandles.Release(m_HTile);
}
public void PushGlobalParams(CommandBuffer cmd, DiffusionProfileSettings sssParameters, FrameSettings frameSettings)
public void PushGlobalParams(HDCamera hdCamera, CommandBuffer cmd, DiffusionProfileSettings sssParameters)
cmd.SetGlobalInt(HDShaderIDs._EnableSubsurfaceScattering, frameSettings.enableSubsurfaceScattering ? 1 : 0);
cmd.SetGlobalInt(HDShaderIDs._EnableSubsurfaceScattering, hdCamera.frameSettings.enableSubsurfaceScattering ? 1 : 0);
unsafe
{
// Warning: Unity is not able to losslessly transfer integers larger than 2^24 to the shader system.

cmd.SetGlobalVectorArray(HDShaderIDs._ShapeParams, sssParameters.shapeParams);
cmd.SetGlobalVectorArray(HDShaderIDs._HalfRcpVariancesAndWeights, sssParameters.halfRcpVariancesAndWeights);
// To disable transmission, we simply nullify the transmissionTint
cmd.SetGlobalVectorArray(HDShaderIDs._TransmissionTintsAndFresnel0, frameSettings.enableTransmission ? sssParameters.transmissionTintsAndFresnel0 : sssParameters.disabledTransmissionTintsAndFresnel0);
cmd.SetGlobalVectorArray(HDShaderIDs._TransmissionTintsAndFresnel0, hdCamera.frameSettings.enableTransmission ? sssParameters.transmissionTintsAndFresnel0 : sssParameters.disabledTransmissionTintsAndFresnel0);
cmd.SetGlobalVectorArray(HDShaderIDs._WorldScales, sssParameters.worldScales);
}

}
// Combines specular lighting and diffuse lighting with subsurface scattering.
public void SubsurfaceScatteringPass(HDCamera hdCamera, CommandBuffer cmd, DiffusionProfileSettings sssParameters, FrameSettings frameSettings,
public void SubsurfaceScatteringPass(HDCamera hdCamera, CommandBuffer cmd, DiffusionProfileSettings sssParameters,
if (sssParameters == null || !frameSettings.enableSubsurfaceScattering)
if (sssParameters == null || !hdCamera.frameSettings.enableSubsurfaceScattering)
return;
// TODO: For MSAA, at least initially, we can only support Jimenez, because we can't
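The material managers follow the same convention: PushGlobalParams now receives the HDCamera and reads the relevant toggles from its frameSettings. A condensed sketch of both managers, reduced to the lines shown above:

// DBufferManager (condensed)
public void PushGlobalParams(HDCamera hdCamera, CommandBuffer cmd)
{
    if (hdCamera.frameSettings.enableDBuffer)
    {
        cmd.SetGlobalInt(HDShaderIDs._EnableDBuffer, vsibleDecalCount > 0 ? 1 : 0);
        // ...
    }
}

// SubsurfaceScatteringManager (condensed)
public void PushGlobalParams(HDCamera hdCamera, CommandBuffer cmd, DiffusionProfileSettings sssParameters)
{
    cmd.SetGlobalInt(HDShaderIDs._EnableSubsurfaceScattering, hdCamera.frameSettings.enableSubsurfaceScattering ? 1 : 0);
    // ... shape params, transmission tints, world scales ...
}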

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Sky/AtmosphericScattering/AtmosphericScattering.cs (6 changed lines)


[Tooltip("Distance at which maximum mip of blurred sky texture is used as fog color.")]
public MinFloatParameter mipFogFar = new MinFloatParameter(1000.0f, 0.0f);
public abstract void PushShaderParameters(CommandBuffer cmd, FrameSettings frameSettings);
public abstract void PushShaderParameters(HDCamera hdCamera, CommandBuffer cmd);
public static void PushNeutralShaderParameters(CommandBuffer cmd)
{

}
// Not used by the volumetric fog.
public void PushShaderParametersCommon(CommandBuffer cmd, FogType type, FrameSettings frameSettings)
public void PushShaderParametersCommon(HDCamera hdCamera, CommandBuffer cmd, FogType type)
Debug.Assert(frameSettings.enableAtmosphericScattering);
Debug.Assert(hdCamera.frameSettings.enableAtmosphericScattering);
cmd.SetGlobalInt(HDShaderIDs._AtmosphericScatteringType, (int)type);

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Sky/AtmosphericScattering/ExponentialFog.cs (6 changed lines)


public FloatParameter fogBaseHeight = new FloatParameter(0.0f);
public ClampedFloatParameter fogHeightAttenuation = new ClampedFloatParameter(0.2f, 0.0f, 1.0f);
public override void PushShaderParameters(CommandBuffer cmd, FrameSettings frameSettings)
public override void PushShaderParameters(HDCamera hdCamera, CommandBuffer cmd)
PushShaderParametersCommon(cmd, FogType.Exponential, frameSettings);
PushShaderParametersCommon(hdCamera, cmd, FogType.Exponential);
}
}
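The atmospheric-scattering hierarchy changes in lockstep: the abstract PushShaderParameters and the shared PushShaderParametersCommon both take the HDCamera, and each fog type simply forwards it. A condensed sketch using ExponentialFog as the example (LinearFog and VolumetricFog below follow the same shape):

// AtmosphericScattering base class (condensed)
public abstract void PushShaderParameters(HDCamera hdCamera, CommandBuffer cmd);

public void PushShaderParametersCommon(HDCamera hdCamera, CommandBuffer cmd, FogType type)
{
    Debug.Assert(hdCamera.frameSettings.enableAtmosphericScattering);
    cmd.SetGlobalInt(HDShaderIDs._AtmosphericScatteringType, (int)type);
    // ...
}

// ExponentialFog override (condensed)
public override void PushShaderParameters(HDCamera hdCamera, CommandBuffer cmd)
{
    PushShaderParametersCommon(hdCamera, cmd, FogType.Exponential);
}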

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Sky/AtmosphericScattering/LinearFog.cs (6 changed lines)


public FloatParameter fogHeightStart = new FloatParameter(0.0f);
public FloatParameter fogHeightEnd = new FloatParameter(10.0f);
public override void PushShaderParameters(CommandBuffer cmd, FrameSettings frameSettings)
public override void PushShaderParameters(HDCamera hdCamera, CommandBuffer cmd)
PushShaderParametersCommon(cmd, FogType.Linear, frameSettings);
PushShaderParametersCommon(hdCamera, cmd, FogType.Linear);
}
}

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Sky/AtmosphericScattering/VolumetricFog.cs (2 changed lines)


}
}
public override void PushShaderParameters(CommandBuffer cmd, FrameSettings frameSettings)
public override void PushShaderParameters(HDCamera hdCamera, CommandBuffer cmd)
{
DensityVolumeParameters param = new DensityVolumeParameters(albedo, meanFreePath, anisotropy);

ScriptableRenderPipeline/HDRenderPipeline/HDRP/Sky/VisualEnvironment.cs (10 changed lines)


public IntParameter skyType = new IntParameter(0);
public FogTypeParameter fogType = new FogTypeParameter(FogType.None);
public void PushFogShaderParameters(CommandBuffer cmd, FrameSettings frameSettings)
public void PushFogShaderParameters(HDCamera hdCamera, CommandBuffer cmd)
if (!frameSettings.enableAtmosphericScattering)
if (!hdCamera.frameSettings.enableAtmosphericScattering)
{
AtmosphericScattering.PushNeutralShaderParameters(cmd);
return;

case FogType.Linear:
{
var fogSettings = VolumeManager.instance.stack.GetComponent<LinearFog>();
fogSettings.PushShaderParameters(cmd, frameSettings);
fogSettings.PushShaderParameters(hdCamera, cmd);
fogSettings.PushShaderParameters(cmd, frameSettings);
fogSettings.PushShaderParameters(hdCamera, cmd);
fogSettings.PushShaderParameters(cmd, frameSettings);
fogSettings.PushShaderParameters(hdCamera, cmd);
break;
}
}
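Finally, VisualEnvironment forwards the camera to whichever fog settings are active, and falls back to the neutral parameters when atmospheric scattering is disabled for this camera. A condensed sketch; the switch statement and the fogType.value accessor are assumed from the case labels shown above, and the Exponential and Volumetric branches mirror the Linear one:

public void PushFogShaderParameters(HDCamera hdCamera, CommandBuffer cmd)
{
    if (!hdCamera.frameSettings.enableAtmosphericScattering)
    {
        AtmosphericScattering.PushNeutralShaderParameters(cmd);
        return;
    }

    switch (fogType.value) // assumed dispatch; only the case labels appear in the hunk
    {
        case FogType.Linear:
        {
            var fogSettings = VolumeManager.instance.stack.GetComponent<LinearFog>();
            fogSettings.PushShaderParameters(hdCamera, cmd);
            break;
        }
        // Exponential and Volumetric cases follow the same pattern (see the hunk above).
    }
}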
